fix
Some checks failed
Deploy to core / deploy (push) Failing after 1m10s

This commit is contained in:
Untone 2024-02-21 20:38:12 +03:00
parent 1f0d5ae8e8
commit 9b2d1c96ba
2 changed files with 20 additions and 42 deletions

View File

@@ -2,7 +2,7 @@ import json
import time
from typing import List
from sqlalchemy import and_, desc, distinct, func, select, or_
from sqlalchemy import and_, desc, select, or_
from sqlalchemy.orm import aliased
from orm.author import Author, AuthorFollower, AuthorRating
@@ -10,50 +10,14 @@ from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from resolvers.follower import get_follows_by_user_id
from resolvers.stat import get_authors_from_query, add_author_stat_columns
from services.auth import login_required
from services.db import local_session
from services.rediscache import redis
from services.schema import mutation, query
from services.viewed import ViewedStorage
from services.logger import root_logger as logger
def add_author_stat_columns(q):
    """Attach aggregate stat columns to an Author query.

    Adds three labeled columns via outer joins:
      - 'shouts_stat':     count of distinct shouts authored
      - 'followers_stat':  count of distinct followers of the author
      - 'followings_stat': count of distinct authors this author follows

    :param q: a SQLAlchemy select() over Author
    :return: the query with the three stat columns and GROUP BY Author.id
    """
    shout_author_aliased = aliased(ShoutAuthor)
    q = q.outerjoin(shout_author_aliased).add_columns(
        func.count(distinct(shout_author_aliased.shout)).label('shouts_stat')
    )

    followers_table = aliased(AuthorFollower)
    q = q.outerjoin(followers_table, followers_table.author == Author.id).add_columns(
        func.count(distinct(followers_table.follower)).label('followers_stat')
    )

    followings_table = aliased(AuthorFollower)
    q = q.outerjoin(
        followings_table, followings_table.follower == Author.id
    ).add_columns(
        # FIX: was followers_table.author — counted the followers alias again,
        # making 'followings_stat' wrong; must count the followings alias.
        func.count(distinct(followings_table.author)).label('followings_stat')
    )

    q = q.group_by(Author.id)
    return q
async def get_authors_from_query(q):
    """Run an author query produced by add_author_stat_columns and hydrate results.

    Each row is expected to carry (Author, shouts_stat, followers_stat,
    followings_stat); the stats — plus the view counter from ViewedStorage —
    are attached to the Author object as a 'stat' dict.

    :param q: SQLAlchemy query yielding author rows with stat columns
    :return: list of Author objects with .stat populated
    """
    result = []
    with local_session() as session:
        for row in session.execute(q):
            author, shouts_stat, followers_stat, followings_stat = row
            author.stat = {
                'shouts': shouts_stat,
                'viewed': await ViewedStorage.get_author(author.slug),
                'followers': followers_stat,
                'followings': followings_stat,
            }
            result.append(author)
    return result
@mutation.field('update_author')
@login_required
async def update_author(_, info, profile):
@@ -213,6 +177,7 @@ async def get_author_by_user_id(user_id: str):
'name': author.name,
'slug': author.slug,
'pic': author.pic,
'bio': author.bio,
}
),
)

View File

@@ -1,4 +1,5 @@
import asyncio
from sqlalchemy import select, event
import json
@@ -122,7 +123,9 @@ async def handle_topic_follower_change(connection, topic_id, follower_id, is_ins
q = select(Topic).filter(Topic.id == topic_id)
q = add_topic_stat_columns(q)
async with connection.begin() as conn:
[topic, shouts_stat, authors_stat, followers_stat] = await conn.execute(q).first()
[topic, shouts_stat, authors_stat, followers_stat] = await conn.execute(
q
).first()
topic.stat = {
'shouts': shouts_stat,
'authors': authors_stat,
@@ -160,13 +163,16 @@ class FollowsCached:
q = select(Author)
q = add_author_stat_columns(q)
authors = session.execute(q)
redis_updates = [] # Store Redis update tasks
while True:
batch = authors.fetchmany(BATCH_SIZE)
if not batch:
break
else:
for [author, shouts_stat, followers_stat, followings_stat] in batch:
await redis.execute('SET', f'user:{author.user}:author', json.dumps({
redis_key = f'user:{author.user}:author'
redis_data = {
'id': author.id,
'name': author.name,
'slug': author.slug,
@@ -175,9 +181,16 @@ class FollowsCached:
'stat': {
'followings': followings_stat,
'shouts': shouts_stat,
'followers': followers_stat
'followers': followers_stat,
},
}))
}
# Add Redis update task to the list
redis_updates.append(
redis.execute('SET', redis_key, json.dumps(redis_data))
)
# Execute Redis update tasks concurrently
await asyncio.gather(*redis_updates)
@staticmethod
async def update_author_cache(author: Author):