2024-02-21 13:06:24 +00:00
|
|
|
import asyncio
|
|
|
|
from sqlalchemy import select, event
|
2024-02-21 14:37:58 +00:00
|
|
|
import json
|
2024-02-21 13:06:24 +00:00
|
|
|
|
|
|
|
from orm.author import Author, AuthorFollower
|
|
|
|
from orm.topic import Topic, TopicFollower
|
|
|
|
from resolvers.author import add_author_stat_columns, get_author_follows
|
|
|
|
from resolvers.topic import add_topic_stat_columns
|
2024-02-21 14:37:58 +00:00
|
|
|
from services.logger import root_logger as logger
|
2024-02-21 13:06:24 +00:00
|
|
|
from services.db import local_session
|
|
|
|
from services.rediscache import redis
|
|
|
|
from services.viewed import ViewedStorage
|
|
|
|
|
|
|
|
|
|
|
|
# Strong references to in-flight cache tasks: the event loop keeps only a
# weak reference to tasks, so a fire-and-forget task with no other reference
# can be garbage-collected before it runs (see asyncio.create_task docs).
_author_cache_tasks = set()


@event.listens_for(Author, "after_insert")
@event.listens_for(Author, "after_update")
def after_author_update(mapper, connection, author: Author):
    """Refresh the cached author profile in Redis.

    Fired by SQLAlchemy after an ``Author`` row is inserted or updated.
    Serializes the author's public fields and schedules a Redis ``set``
    on the running event loop (the listener must fire from coroutine
    context for ``asyncio.create_task`` to succeed).
    """
    redis_key = f"user:{author.user}:author"
    # Serialize eagerly so the payload reflects the row state at event time.
    payload = json.dumps(
        {
            "id": author.id,
            "name": author.name,
            "slug": author.slug,
            "pic": author.pic,
        }
    )
    task = asyncio.create_task(redis.execute("set", redis_key, payload))
    # Keep the task alive until it completes, then drop the reference.
    _author_cache_tasks.add(task)
    task.add_done_callback(_author_cache_tasks.discard)
|
2024-02-21 13:06:24 +00:00
|
|
|
|
|
|
|
|
|
|
|
@event.listens_for(TopicFollower, "after_insert")
def after_topic_follower_insert(mapper, connection, target: TopicFollower):
    """Schedule a cache refresh when a topic gains a follower."""
    change = handle_topic_follower_change(
        connection, target.topic, target.follower, True
    )
    asyncio.create_task(change)
|
|
|
|
|
|
|
|
|
|
|
|
@event.listens_for(TopicFollower, "after_delete")
def after_topic_follower_delete(mapper, connection, target: TopicFollower):
    """Schedule a cache refresh when a topic loses a follower."""
    change = handle_topic_follower_change(
        connection, target.topic, target.follower, False
    )
    asyncio.create_task(change)
|
|
|
|
|
|
|
|
|
|
|
|
@event.listens_for(AuthorFollower, "after_insert")
def after_author_follower_insert(mapper, connection, target: AuthorFollower):
    """Schedule a cache refresh when an author gains a follower."""
    change = handle_author_follower_change(
        connection, target.author, target.follower, True
    )
    asyncio.create_task(change)
|
|
|
|
|
|
|
|
|
|
|
|
@event.listens_for(AuthorFollower, "after_delete")
def after_author_follower_delete(mapper, connection, target: AuthorFollower):
    """Schedule a cache refresh when an author loses a follower."""
    change = handle_author_follower_change(
        connection, target.author, target.follower, False
    )
    asyncio.create_task(change)
|
|
|
|
|
|
|
|
|
2024-02-21 16:11:49 +00:00
|
|
|
async def update_follows_for_user(
    connection, user_id, entity_type, entity: dict, is_insert
):
    """Add or remove *entity* in the cached follows list of *user_id*.

    Loads the follows dict from Redis (seeding a default skeleton when no
    cache entry exists yet), then on insert appends the entity — skipping
    it if an item with the same ``id`` is already present — or on delete
    filters it out by ``id``, and writes the result back.
    """
    redis_key = f"user:{user_id}:follows"
    follows_str = await redis.get(redis_key)
    if follows_str:
        follows = json.loads(follows_str)
    else:
        # No cached entry yet: start from the default community membership.
        follows = {
            "topics": [],
            "authors": [],
            "communities": [
                {"slug": "discours", "name": "Дискурс", "id": 1, "desc": ""}
            ],
        }
    entity_key = f"{entity_type}s"
    if is_insert:
        # Dedupe by id: repeated follow events for the same entity must not
        # grow the cached list with duplicates.
        if not any(e["id"] == entity["id"] for e in follows[entity_key]):
            follows[entity_key].append(entity)
    else:
        # Remove the entity from follows
        follows[entity_key] = [
            e for e in follows[entity_key] if e["id"] != entity["id"]
        ]
    await redis.execute("set", redis_key, json.dumps(follows))
|
2024-02-21 13:06:24 +00:00
|
|
|
|
|
|
|
|
|
|
|
async def handle_author_follower_change(connection, author_id, follower_id, is_insert):
    """Recompute an author's stats and update the follower's cached follows.

    Runs inside a transaction on *connection*: loads the author together
    with its stat columns, attaches a ``stat`` dict, then looks up the
    follower and delegates the Redis cache update to
    ``update_follows_for_user``.
    """
    q = select(Author).filter(Author.id == author_id)
    q = add_author_stat_columns(q)
    async with connection.begin() as conn:
        # Await execute() first, then call .first() on the Result — the
        # previous code chained .first() onto the un-awaited coroutine.
        result = await conn.execute(q)
        row = result.first()
        if row is None:
            # Author vanished between the event and this task; nothing to cache.
            return
        [author, shouts_stat, followers_stat, followings_stat] = row
        author.stat = {
            "shouts": shouts_stat,
            "viewed": await ViewedStorage.get_author(author.slug),
            "followers": followers_stat,
            "followings": followings_stat,
        }
        follower_result = await conn.execute(
            select(Author).filter(Author.id == follower_id)
        )
        follower = follower_result.first()
        if follower and author:
            await update_follows_for_user(
                connection, follower.user, "author", author.dict(), is_insert
            )
|
|
|
|
|
|
|
|
|
|
|
|
async def handle_topic_follower_change(connection, topic_id, follower_id, is_insert):
    """Recompute a topic's stats and update the follower's cached follows.

    Runs inside a transaction on *connection*: loads the topic together
    with its stat columns, attaches a ``stat`` dict, then looks up the
    follower and delegates the Redis cache update to
    ``update_follows_for_user``.
    """
    q = select(Topic).filter(Topic.id == topic_id)
    q = add_topic_stat_columns(q)
    async with connection.begin() as conn:
        # Await execute() first, then call .first() on the Result — the
        # previous code chained .first() onto the un-awaited coroutine.
        result = await conn.execute(q)
        row = result.first()
        if row is None:
            # Topic vanished between the event and this task; nothing to cache.
            return
        [topic, shouts_stat, authors_stat, followers_stat] = row
        topic.stat = {
            "shouts": shouts_stat,
            "authors": authors_stat,
            "followers": followers_stat,
            "viewed": await ViewedStorage.get_topic(topic.slug),
        }
        # Use the async transaction connection here as well — the previous
        # code called sync connection.execute() inside the async block.
        follower_result = await conn.execute(
            select(Author).filter(Author.id == follower_id)
        )
        follower = follower_result.first()
        if follower and topic:
            await update_follows_for_user(
                connection, follower.user, "topic", topic.dict(), is_insert
            )
|
2024-02-21 14:37:58 +00:00
|
|
|
|
|
|
|
|
|
|
|
class FollowsCached:
    """Periodic refresher of per-user author and follows caches in Redis."""

    lock = asyncio.Lock()

    @staticmethod
    async def update_cache():
        """Re-cache every author's profile, processed in fixed-size batches."""
        BATCH_SIZE = 30  # Adjust batch size as needed
        with local_session() as session:
            authors = session.query(Author).all()
            total_authors = len(authors)
            for offset in range(0, total_authors, BATCH_SIZE):
                chunk = authors[offset : offset + BATCH_SIZE]
                await asyncio.gather(
                    *(FollowsCached.update_author_cache(a) for a in chunk)
                )

    @staticmethod
    async def update_author_cache(author: Author):
        """Write one author's profile and follows snapshot to Redis."""
        profile_key = f"user:{author.user}:author"
        if isinstance(author, Author):
            profile = {
                "id": author.id,
                "name": author.name,
                "slug": author.slug,
                "pic": author.pic,
            }
            await redis.execute("set", profile_key, json.dumps(profile))
        follows = await get_author_follows(None, None, user=author.user)
        if isinstance(follows, dict):
            follows_key = f"user:{author.user}:follows"
            await redis.execute("set", follows_key, json.dumps(follows))

    @staticmethod
    async def worker():
        """Background refresh loop; exits cleanly when cancelled."""
        while True:
            try:
                await FollowsCached.update_cache()
                # Sleep ten hours between full refresh passes.
                await asyncio.sleep(10 * 60 * 60)
            except asyncio.CancelledError:
                # Handle cancellation due to SIGTERM
                logger.info("Cancellation requested. Cleaning up...")
                # Perform any necessary cleanup before exiting the loop
                break
            except Exception as exc:
                logger.error(exc)
|
2024-02-21 15:26:18 +00:00
|
|
|
|
2024-02-21 15:38:15 +00:00
|
|
|
|
2024-02-21 15:26:18 +00:00
|
|
|
async def start_cached_follows():
    """Entry point: run the follows-cache refresh worker until cancelled."""
    await FollowsCached.worker()
|