load-random-topic-fix
All checks were successful
deploy / deploy (push) Successful in 1m30s

This commit is contained in:
Untone 2023-12-25 01:06:27 +03:00
parent c236768c07
commit 0ea9f45854
4 changed files with 16 additions and 18 deletions

View File

@@ -125,14 +125,14 @@ def count_author_rating(session, author_id) -> int:
     shouts_likes = (
         session.query(Reaction, Shout)
         .join(Shout, Shout.id == Reaction.shout)
-        .filter(and_(Shout.authors.any(author_id), Reaction.kind == ReactionKind.LIKE.value))
+        .filter(and_(Shout.authors.any(id=author_id), Reaction.kind == ReactionKind.LIKE.value))
         .count()
         or 0
     )
     shouts_dislikes = (
         session.query(Reaction, Shout)
         .join(Shout, Shout.id == Reaction.shout)
-        .filter(and_(Shout.authors.any(author_id), Reaction.kind == ReactionKind.DISLIKE.value))
+        .filter(and_(Shout.authors.any(id=author_id), Reaction.kind == ReactionKind.DISLIKE.value))
         .count()
         or 0
     )

View File

@@ -91,7 +91,7 @@ def is_published_author(session, author_id):
     """checks if author has at least one publication"""
     return (
         session.query(Shout)
-        .where(Shout.authors.contains(author_id))
+        .where(Shout.authors.any(author_id))
        .filter(and_(Shout.published_at != "", Shout.deleted_at.is_(None)))
         .count()
         > 0

View File

@@ -417,21 +417,19 @@ async def load_shouts_random_top(_, _info, params):
 @query.field("load_shouts_random_topic")
 async def load_shouts_random_topic(_, info, limit: int = 10):
     topic = get_random_topic()
-    shouts = []
-    if topic:
-        q = (
-            select(Shout)
-            .options(
-                joinedload(Shout.authors),
-                joinedload(Shout.topics),
-            )
-            .join(ShoutTopic, and_(Shout.id == ShoutTopic.shout, ShoutTopic.topic == topic.id))
-            .filter(and_(Shout.deleted_at.is_(None), Shout.visibility == "public"))
-        )
-        q = add_stat_columns(q)
-        q = q.group_by(Shout.id).order_by(desc(Shout.created_at)).limit(limit)
-        shouts = get_shouts_from_query(q)
+    q = (
+        select(Shout)
+        .options(
+            joinedload(Shout.authors),
+            joinedload(Shout.topics),
+        )
+        .filter(and_(Shout.deleted_at.is_(None), Shout.visibility == "public", Shout.topics.any(slug=topic.slug)))
+    )
+    q = add_stat_columns(q)
+    q = q.group_by(Shout.id).order_by(desc(Shout.createdAt)).limit(limit)
+    shouts = get_shouts_from_query(q)

View File

@@ -22,13 +22,13 @@ logger.setLevel(logging.DEBUG)
 @event.listens_for(Engine, "before_cursor_execute")
 def before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
     conn.info.setdefault("query_start_time", []).append(time.time())
-    logger.debug(f" {statement}")
+    # logger.debug(f" {statement}")

 @event.listens_for(Engine, "after_cursor_execute")
 def after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
     total = time.time() - conn.info["query_start_time"].pop(-1)
-    logger.debug(f" Finished in {math.floor(total*10000)/10} ms ")
+    print(f" ----------------- Finished in {math.floor(total*10000)/10} ms ")

 engine = create_engine(DB_URL, echo=False, pool_size=10, max_overflow=20)