core/services/follows.py

216 lines
7.0 KiB
Python
Raw Normal View History

2024-02-21 13:06:24 +00:00
import asyncio
from sqlalchemy import select, event
2024-02-21 14:37:58 +00:00
import json
2024-02-21 13:06:24 +00:00
from orm.author import Author, AuthorFollower
from orm.topic import Topic, TopicFollower
from resolvers.author import add_author_stat_columns, get_author_follows
from resolvers.topic import add_topic_stat_columns
2024-02-21 14:37:58 +00:00
from services.logger import root_logger as logger
2024-02-21 13:06:24 +00:00
from services.db import local_session
from services.rediscache import redis
from services.viewed import ViewedStorage
2024-02-21 16:14:58 +00:00
@event.listens_for(Author, 'after_insert')
@event.listens_for(Author, 'after_update')
def after_author_update(mapper, connection, author: Author):
    """Refresh the cached author profile in Redis whenever an Author row
    is inserted or updated.

    NOTE(review): assumes an asyncio event loop is running in the thread
    that fires this ORM event; otherwise ``asyncio.create_task`` raises
    RuntimeError — confirm against the runtime setup.
    """
    redis_key = f'user:{author.user}:author'
    payload = {
        'id': author.id,
        'name': author.name,
        'slug': author.slug,
        'pic': author.pic,
    }
    # Fire-and-forget write; the listener itself must stay synchronous.
    asyncio.create_task(redis.execute('set', redis_key, json.dumps(payload)))
2024-02-21 13:06:24 +00:00
2024-02-21 16:14:58 +00:00
@event.listens_for(TopicFollower, 'after_insert')
def after_topic_follower_insert(mapper, connection, target: TopicFollower):
    """Schedule cache maintenance after a topic follow is created."""
    coro = handle_topic_follower_change(connection, target.topic, target.follower, True)
    asyncio.create_task(coro)
2024-02-21 16:14:58 +00:00
@event.listens_for(TopicFollower, 'after_delete')
def after_topic_follower_delete(mapper, connection, target: TopicFollower):
    """Schedule cache maintenance after a topic follow is removed."""
    coro = handle_topic_follower_change(connection, target.topic, target.follower, False)
    asyncio.create_task(coro)
2024-02-21 16:14:58 +00:00
@event.listens_for(AuthorFollower, 'after_insert')
def after_author_follower_insert(mapper, connection, target: AuthorFollower):
    """Schedule cache maintenance after an author follow is created."""
    coro = handle_author_follower_change(connection, target.author, target.follower, True)
    asyncio.create_task(coro)
2024-02-21 16:14:58 +00:00
@event.listens_for(AuthorFollower, 'after_delete')
def after_author_follower_delete(mapper, connection, target: AuthorFollower):
    """Schedule cache maintenance after an author follow is removed."""
    coro = handle_author_follower_change(connection, target.author, target.follower, False)
    asyncio.create_task(coro)
2024-02-21 16:11:49 +00:00
async def update_follows_for_user(
    connection, user_id, entity_type, entity: dict, is_insert
):
    """Add or remove *entity* in the cached follows list for *user_id*.

    The cache lives in Redis under ``user:{user_id}:follows`` as a JSON
    object with ``topics``/``authors``/``communities`` lists.

    :param connection: DB connection (unused here; kept so the signature
        matches the event-handler call sites).
    :param entity_type: singular entity kind, e.g. ``'topic'`` or ``'author'``.
    :param entity: serialized entity dict; must contain an ``'id'`` key.
    :param is_insert: True to add the entity, False to remove it.
    """
    redis_key = f'user:{user_id}:follows'
    follows_str = await redis.get(redis_key)
    if follows_str:
        follows = json.loads(follows_str)
    else:
        # Seed an empty follows structure; every user implicitly follows
        # the default 'discours' community.
        follows = {
            'topics': [],
            'authors': [],
            'communities': [
                {'slug': 'discours', 'name': 'Дискурс', 'id': 1, 'desc': ''}
            ],
        }
    key = f'{entity_type}s'
    # Always drop any existing entry with this id first: on insert this
    # de-duplicates (repeated events no longer grow the list and stale
    # data is replaced); on delete it is the removal itself.
    follows[key] = [e for e in follows[key] if e['id'] != entity['id']]
    if is_insert:
        follows[key].append(entity)
    await redis.execute('set', redis_key, json.dumps(follows))
2024-02-21 13:06:24 +00:00
async def handle_author_follower_change(connection, author_id, follower_id, is_insert):
    """Recompute an author's stats and sync the follower's cached follows list.

    Called from the AuthorFollower after_insert/after_delete listeners.

    NOTE(review): ``async with connection.begin()`` assumes *connection* is
    an async SQLAlchemy connection; mapper events normally receive a sync
    Connection — confirm against the engine setup.
    """
    q = select(Author).filter(Author.id == author_id)
    q = add_author_stat_columns(q)
    async with connection.begin() as conn:
        # BUGFIX: the original `await conn.execute(q).first()` called
        # .first() on the *coroutine* returned by execute(); await the
        # execute first, then read the row from the Result.
        [author, shouts_stat, followers_stat, followings_stat] = (
            await conn.execute(q)
        ).first()
        author.stat = {
            'shouts': shouts_stat,
            'viewed': await ViewedStorage.get_author(author.slug),
            'followers': followers_stat,
            'followings': followings_stat,
        }
        # Same precedence fix for the follower lookup.
        follower = (
            await conn.execute(select(Author).filter(Author.id == follower_id))
        ).first()
        if follower and author:
            await update_follows_for_user(
                connection,
                follower.user,
                'author',
                {
                    'id': author.id,
                    'name': author.name,
                    'slug': author.slug,
                    'pic': author.pic,
                    'bio': author.bio,
                    'stat': author.stat,
                },
                is_insert,
            )
async def handle_topic_follower_change(connection, topic_id, follower_id, is_insert):
    """Recompute a topic's stats and sync the follower's cached follows list.

    Called from the TopicFollower after_insert/after_delete listeners.

    NOTE(review): ``async with connection.begin()`` assumes *connection* is
    an async SQLAlchemy connection; mapper events normally receive a sync
    Connection — confirm against the engine setup.
    """
    q = select(Topic).filter(Topic.id == topic_id)
    q = add_topic_stat_columns(q)
    async with connection.begin() as conn:
        # BUGFIX: the original `await conn.execute(q).first()` called
        # .first() on the *coroutine* returned by execute(); await the
        # execute first, then read the row from the Result.
        [topic, shouts_stat, authors_stat, followers_stat] = (
            await conn.execute(q)
        ).first()
        topic.stat = {
            'shouts': shouts_stat,
            'authors': authors_stat,
            'followers': followers_stat,
            'viewed': await ViewedStorage.get_topic(topic.slug),
        }
        # CONSISTENCY: the original ran this lookup synchronously on
        # `connection` while the author handler awaited it on `conn`;
        # use the awaited form on the transaction connection here too.
        follower = (
            await conn.execute(select(Author).filter(Author.id == follower_id))
        ).first()
        if follower and topic:
            await update_follows_for_user(
                connection,
                follower.user,
                'topic',
                {
                    'id': topic.id,
                    'title': topic.title,
                    'slug': topic.slug,
                    'body': topic.body,
                    'stat': topic.stat,
                },
                is_insert,
            )
2024-02-21 14:37:58 +00:00
2024-02-21 16:48:33 +00:00
2024-02-21 16:14:58 +00:00
BATCH_SIZE = 33
2024-02-21 14:37:58 +00:00
2024-02-21 16:48:33 +00:00
2024-02-21 14:37:58 +00:00
class FollowsCached:
    """Periodic background refresher for per-user author/follows Redis caches."""

    # NOTE(review): declared but never acquired anywhere in this class.
    lock = asyncio.Lock()

    @staticmethod
    async def update_cache():
        """Refresh the cache for every author, BATCH_SIZE at a time."""
        with local_session() as session:
            q = select(Author)
            q = add_author_stat_columns(q)
            # BUGFIX: session.execute() returns a Result, which has no
            # len() and cannot be sliced; materialize the rows first so
            # the batching loop below works.
            authors = session.execute(q).all()
            for i in range(0, len(authors), BATCH_SIZE):
                batch_authors = authors[i : i + BATCH_SIZE]
                await asyncio.gather(
                    *[
                        FollowsCached.update_author_cache(author)
                        for author in batch_authors
                    ]
                )

    @staticmethod
    async def update_author_cache(author: Author):
        """Write one author's profile and follows lists into Redis.

        NOTE(review): with the stat columns added in update_cache, each
        element passed here is a Row, not an Author, so the isinstance
        guard below skips the profile write — confirm whether rows should
        be unpacked in update_cache instead.
        """
        redis_key = f'user:{author.user}:author'
        if isinstance(author, Author):
            await redis.execute(
                'set',
                redis_key,
                json.dumps(
                    {
                        'id': author.id,
                        'name': author.name,
                        'slug': author.slug,
                        'pic': author.pic,
                        'bio': author.bio,
                        'stat': author.stat,
                    }
                ),
            )
        follows = await get_author_follows(None, None, user=author.user)
        if isinstance(follows, dict):
            redis_key = f'user:{author.user}:follows'
            await redis.execute('set', redis_key, json.dumps(follows))

    @staticmethod
    async def worker():
        """Асинхронная задача обновления"""
        # Refresh all caches, then sleep 10 hours; repeat until cancelled.
        while True:
            try:
                await FollowsCached.update_cache()
                await asyncio.sleep(10 * 60 * 60)
            except asyncio.CancelledError:
                # Handle cancellation due to SIGTERM
                logger.info('Cancellation requested. Cleaning up...')
                # Perform any necessary cleanup before exiting the loop
                break
            except Exception as exc:
                logger.error(exc)
2024-02-21 15:26:18 +00:00
2024-02-21 15:38:15 +00:00
2024-02-21 15:26:18 +00:00
async def start_cached_follows():
    """Launch the FollowsCached background worker and wait for it to finish."""
    task = FollowsCached.worker()
    await task