refactored-author-on-login-required
All checks were successful
Deploy on push / deploy (push) Successful in 23s

2024-04-19 18:22:07 +03:00
parent 0ca6676474
commit b7d82d9cc5
18 changed files with 316 additions and 346 deletions


@@ -1,12 +1,24 @@
import json
from functools import wraps
import httpx
from starlette.exceptions import HTTPException
from services.logger import root_logger as logger
from services.rediscache import redis
from settings import ADMIN_SECRET, AUTH_URL
async def get_author_by_user(user: str):
author = None
redis_key = f"user:{user}"
result = await redis.execute("GET", redis_key)
if isinstance(result, str):
author = json.loads(result)
return author
async def request_data(gql, headers=None):
if headers is None:
headers = {"Content-Type": "application/json"}
@@ -78,32 +90,13 @@ def login_required(f):
async def decorated_function(*args, **kwargs):
info = args[1]
req = info.context.get("request")
authorized = await check_auth(req)
if authorized:
logger.info(authorized)
user_id, user_roles = authorized
if user_id and user_roles:
logger.info(f" got {user_id} roles: {user_roles}")
info.context["user_id"] = user_id.strip()
info.context["roles"] = user_roles
user_id, user_roles = await check_auth(req)
if user_id and user_roles:
logger.info(f" got {user_id} roles: {user_roles}")
info.context["user_id"] = user_id.strip()
info.context["roles"] = user_roles
author = await get_author_by_user(user_id)
info.context["author"] = author
return await f(*args, **kwargs)
return decorated_function
def auth_request(f):
@wraps(f)
async def decorated_function(*args, **kwargs):
req = args[0]
authorized = await check_auth(req)
if authorized:
user_id, user_roles = authorized
if user_id and user_roles:
logger.info(f" got {user_id} roles: {user_roles}")
req["user_id"] = user_id.strip()
req["roles"] = user_roles
return await f(*args, **kwargs)
else:
raise HTTPException(status_code=401, detail="Unauthorized")
return decorated_function
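For reference, a minimal sketch of how the refactored decorator might be used from a GraphQL resolver. The resolver name and return shape are hypothetical; only the decorator's contract (info as the second positional argument, and user_id / roles / author placed into info.context) comes from the hunk above.

# Hypothetical resolver; only the login_required contract is taken from the diff.
@login_required
async def my_profile(_, info):
    user_id = info.context.get("user_id")       # set by the decorator after check_auth
    roles = info.context.get("roles", [])
    author = info.context.get("author")         # dict loaded via get_author_by_user, may be None
    return {"user_id": user_id, "roles": roles, "author": author}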


@@ -1,8 +1,5 @@
import json
from orm.author import Author
from orm.topic import Topic
from services.encoders import CustomJSONEncoder
from services.rediscache import redis
@@ -67,55 +64,57 @@ async def cache_author(author: dict):
followed_author_followers.append(author)
async def cache_follows(follower: Author, entity_type: str, entity, is_insert=True):
async def cache_follows(follower: dict, entity_type: str, entity: dict, is_insert=True):
# prepare
follows = []
redis_key = f"author:{follower.id}:follows-{entity_type}s"
follows_str = await redis.execute("GET", redis_key)
if isinstance(follows_str, str):
follows = json.loads(follows_str)
if is_insert:
follows.append(entity)
else:
entity_id = entity.get("id")
if not entity_id:
raise Exception("wrong entity")
# Remove the entity from follows
follows = [e for e in follows if e["id"] != entity_id]
follower_id = follower.get("id")
if follower_id:
redis_key = f"author:{follower_id}:follows-{entity_type}s"
follows_str = await redis.execute("GET", redis_key)
if isinstance(follows_str, str):
follows = json.loads(follows_str)
if is_insert:
follows.append(entity)
else:
entity_id = entity.get("id")
if not entity_id:
raise Exception("wrong entity")
# Remove the entity from follows
follows = [e for e in follows if e["id"] != entity_id]
# update follows cache
updated_data = [t.dict() if isinstance(t, Topic) else t for t in follows]
payload = json.dumps(updated_data, cls=CustomJSONEncoder)
await redis.execute("SET", redis_key, payload)
# update follows cache
payload = json.dumps(follows, cls=CustomJSONEncoder)
await redis.execute("SET", redis_key, payload)
# update follower's stats everywhere
author_str = await redis.execute("GET", f"author:{follower.id}")
if isinstance(author_str, str):
author = json.loads(author_str)
author["stat"][f"{entity_type}s"] = len(updated_data)
await cache_author(author)
# update follower's stats everywhere
author_str = await redis.execute("GET", f"author:{follower_id}")
if isinstance(author_str, str):
author = json.loads(author_str)
author["stat"][f"{entity_type}s"] = len(follows)
await cache_author(author)
return follows
async def cache_follower(follower: Author, author: Author, is_insert=True):
redis_key = f"author:{author.id}:followers"
followers_str = await redis.execute("GET", redis_key)
async def cache_follower(follower: dict, author: dict, is_insert=True):
author_id = author.get("id")
follower_id = follower.get("id")
followers = []
if isinstance(followers_str, str):
followers = json.loads(followers_str)
if is_insert:
# Remove the entity from followers
followers = [e for e in followers if e["id"] != author.id]
else:
followers.append(follower)
updated_followers = [
f.dict() if isinstance(f, Author) else f for f in followers
]
payload = json.dumps(updated_followers, cls=CustomJSONEncoder)
await redis.execute("SET", redis_key, payload)
author_str = await redis.execute("GET", f"author:{follower.id}")
if isinstance(author_str, str):
author = json.loads(author_str)
author["stat"]["followers"] = len(updated_followers)
await cache_author(author)
if author_id and follower_id:
redis_key = f"author:{author_id}:followers"
followers_str = await redis.execute("GET", redis_key)
followers = []
if isinstance(followers_str, str):
followers = json.loads(followers_str)
if is_insert:
# Remove the entity from followers
followers = [e for e in followers if e["id"] != author_id]
else:
followers.append(follower)
payload = json.dumps(followers, cls=CustomJSONEncoder)
await redis.execute("SET", redis_key, payload)
author_str = await redis.execute("GET", f"author:{follower_id}")
if isinstance(author_str, str):
author = json.loads(author_str)
author["stat"]["followers"] = len(followers)
await cache_author(author)
return followers
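A rough usage sketch of the dict-based helper after this refactor. The payload dicts are made up, and it assumes a reachable Redis behind services.rediscache; the signature, the is_insert flag, and the import path are as shown in this commit.

import asyncio
from services.cache import cache_follows  # same import the follower module uses below

async def demo():
    # Illustrative payloads; only the signature and insert/remove semantics come from the diff.
    follower = {"id": 1, "slug": "anna"}
    topic = {"id": 10, "slug": "culture"}
    await cache_follows(follower, "topic", topic, is_insert=True)   # appended to author:1:follows-topics
    await cache_follows(follower, "topic", topic, is_insert=False)  # filtered back out by id

asyncio.run(demo())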


@@ -5,7 +5,8 @@ import traceback
import warnings
from typing import Any, Callable, Dict, TypeVar
from sqlalchemy import JSON, Column, Engine, Integer, create_engine, event, exc, inspect
from sqlalchemy import (JSON, Column, Engine, Integer, create_engine, event,
exc, inspect)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, configure_mappers
from sqlalchemy.sql.schema import Table


@@ -2,8 +2,8 @@ import json
from orm.notification import Notification
from services.db import local_session
from services.rediscache import redis
from services.logger import root_logger as logger
from services.rediscache import redis
def save_notification(action: str, entity: str, payload):


@@ -6,12 +6,12 @@ from sqlalchemy import event, select
from orm.author import Author, AuthorFollower
from orm.reaction import Reaction
from orm.shout import Shout, ShoutAuthor
from orm.topic import TopicFollower, Topic
from orm.topic import Topic, TopicFollower
from resolvers.stat import get_with_stat
from services.cache import cache_author, cache_follower, cache_follows
from services.encoders import CustomJSONEncoder
from services.rediscache import redis
from services.logger import root_logger as logger
from services.cache import cache_author, cache_follows, cache_follower
from services.rediscache import redis
DEFAULT_FOLLOWS = {
"topics": [],
@@ -31,8 +31,8 @@ async def handle_author_follower_change(
if follower and author:
await cache_author(author.dict())
await cache_author(follower.dict())
await cache_follows(follower, "author", author.dict(), is_insert)
await cache_follower(follower, author, is_insert)
await cache_follows(follower.dict(), "author", author.dict(), is_insert)
await cache_follower(follower.dict(), author.dict(), is_insert)
async def handle_topic_follower_change(
@@ -48,7 +48,7 @@ async def handle_topic_follower_change(
await redis.execute(
"SET", f"topic:{topic.id}", json.dumps(topic.dict(), cls=CustomJSONEncoder)
)
await cache_follows(follower, "topic", topic.dict(), is_insert)
await cache_follows(follower.dict(), "topic", topic.dict(), is_insert)
# handle_author_follow and handle_topic_follow -> cache_author, cache_follows, cache_followers
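The handlers above only write through the cache helpers; below is a small sketch of reading the resulting Redis entries back. The key names are the ones used in the cache hunks earlier in this commit, while the function itself and the author_id value are illustrative.

import json
from services.rediscache import redis

async def inspect_author_cache(author_id: int):
    # Keys written by cache_author / cache_follows / cache_follower above.
    author_raw = await redis.execute("GET", f"author:{author_id}")
    follows_raw = await redis.execute("GET", f"author:{author_id}:follows-topics")
    followers_raw = await redis.execute("GET", f"author:{author_id}:followers")
    return {
        "author": json.loads(author_raw) if isinstance(author_raw, str) else None,
        "follows_topics": json.loads(follows_raw) if isinstance(follows_raw, str) else [],
        "followers": json.loads(followers_raw) if isinstance(followers_raw, str) else [],
    }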


@@ -7,12 +7,8 @@ from typing import Dict
# ga
from google.analytics.data_v1beta import BetaAnalyticsDataClient
from google.analytics.data_v1beta.types import (
DateRange,
Dimension,
Metric,
RunReportRequest,
)
from google.analytics.data_v1beta.types import (DateRange, Dimension, Metric,
RunReportRequest)
from orm.author import Author
from orm.shout import Shout, ShoutAuthor, ShoutTopic