add_author_stat-fix+fmt

Untone 2024-03-06 12:25:55 +03:00
parent 70589a35da
commit 9f881c0641
15 changed files with 241 additions and 138 deletions

View File

@ -46,7 +46,7 @@ from resolvers.notifier import (
load_notifications,
notification_mark_seen,
notifications_seen_after,
notifications_seen_thread
notifications_seen_thread,
)
__all__ = [
@ -93,10 +93,9 @@ __all__ = [
'update_reaction',
'delete_reaction',
'load_reactions_by',
# notifier
'load_notifications',
'notifications_seen_thread',
'notifications_seen_after',
'notification_mark_seen'
'notification_mark_seen',
]

View File

@ -66,6 +66,7 @@ async def get_author(_, _info, slug='', author_id=None):
return author_dict
except Exception as exc:
import traceback
logger.error(exc)
exc = traceback.format_exc()
logger.error(exc)
@ -94,6 +95,7 @@ async def get_author_by_user_id(user_id: str):
await set_author_cache(author.dict())
except Exception as exc:
import traceback
traceback.print_exc()
logger.error(exc)
return author
@ -154,17 +156,19 @@ async def get_author_follows(_, _info, slug='', user=None, author_id=None):
logger.debug(f'getting {author_id} follows authors')
cached = await redis.execute('GET', rkey)
# logger.debug(f'AUTHOR CACHED {cached}')
authors = json.loads(cached) if cached else author_follows_authors(author_id)
authors = (
json.loads(cached) if cached else author_follows_authors(author_id)
)
if not cached:
prepared = [author.dict() for author in authors]
await redis.execute('SETEX', rkey, 24*60*60, json.dumps(prepared))
await redis.execute('SETEX', rkey, 24 * 60 * 60, json.dumps(prepared))
rkey = f'author:{author_id}:follows-topics'
cached = await redis.execute('GET', rkey)
topics = json.loads(cached) if cached else author_follows_topics(author_id)
if not cached:
prepared = [topic.dict() for topic in topics]
await redis.execute('SETEX', rkey, 24*60*60, json.dumps(prepared))
await redis.execute('SETEX', rkey, 24 * 60 * 60, json.dumps(prepared))
return {
'topics': topics,
'authors': authors,
@ -193,7 +197,7 @@ async def get_author_follows_topics(_, _info, slug='', user=None, author_id=None
topics = json.loads(cached) if cached else author_follows_topics(author_id)
if not cached:
prepared = [topic.dict() for topic in topics]
await redis.execute('SETEX', rkey, 24*60*60, json.dumps(prepared))
await redis.execute('SETEX', rkey, 24 * 60 * 60, json.dumps(prepared))
return topics
else:
raise ValueError('Author not found')
@ -213,10 +217,12 @@ async def get_author_follows_authors(_, _info, slug='', user=None, author_id=Non
logger.debug(f'getting {author_id} follows authors')
rkey = f'author:{author_id}:follows-authors'
cached = await redis.execute('GET', rkey)
authors = json.loads(cached) if cached else author_follows_authors(author_id)
authors = (
json.loads(cached) if cached else author_follows_authors(author_id)
)
if not cached:
prepared = [author.dict() for author in authors]
await redis.execute('SETEX', rkey, 24*60*60, json.dumps(prepared))
await redis.execute('SETEX', rkey, 24 * 60 * 60, json.dumps(prepared))
return authors
else:
raise ValueError('Author not found')
@ -245,7 +251,11 @@ async def get_author_followers(_, _info, slug: str):
try:
with local_session() as session:
author_alias = aliased(Author)
author_id = session.query(author_alias.id).filter(author_alias.slug == slug).scalar()
author_id = (
session.query(author_alias.id)
.filter(author_alias.slug == slug)
.scalar()
)
if author_id:
cached = await redis.execute('GET', f'author:{author_id}:followers')
results = []
@ -256,10 +266,14 @@ async def get_author_followers(_, _info, slug: str):
and_(
author_follower_alias.author == author_id,
author_follower_alias.follower == Author.id,
)
),
)
results = get_with_stat(q)
_ = asyncio.create_task(update_author_followers_cache(author_id, [x.dict() for x in results]))
_ = asyncio.create_task(
update_author_followers_cache(
author_id, [x.dict() for x in results]
)
)
logger.debug(f'@{slug} cache updated with {len(results)} followers')
return results
else:
@ -267,6 +281,7 @@ async def get_author_followers(_, _info, slug: str):
return json.loads(cached)
except Exception as exc:
import traceback
logger.error(exc)
logger.error(traceback.format_exc())
return []
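The author resolvers above all follow the same read-through pattern: try Redis, fall back to the database, then populate the cache with a 24-hour TTL. A minimal sketch of that pattern, with a hypothetical cached_list helper and fetcher standing in for helpers like author_follows_authors (names assumed, not part of the commit):

import json

# Read-through cache sketch; `redis` and `fetch_from_db` are stand-ins for
# the project's services, and the key/TTL mirror the values in the diff.
async def cached_list(redis, rkey: str, ttl: int, fetch_from_db):
    cached = await redis.execute('GET', rkey)
    if cached:
        return json.loads(cached)
    rows = fetch_from_db()
    prepared = [row.dict() for row in rows]
    await redis.execute('SETEX', rkey, ttl, json.dumps(prepared))
    return rows

# e.g. await cached_list(redis, f'author:{author_id}:follows-authors',
#                        24 * 60 * 60, lambda: author_follows_authors(author_id))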

View File

@ -35,11 +35,13 @@ async def get_my_shout(_, info, shout_id: int):
if not user_id:
error = 'unauthorized'
else:
if 'editor' in roles or filter(lambda x: x.id == author.id, [x for x in shout.authors]):
return {"error": error, "shout": shout}
if 'editor' in roles or filter(
lambda x: x.id == author.id, [x for x in shout.authors]
):
return {'error': error, 'shout': shout}
else:
error = 'forbidden'
return {"error": error, "shout": shout}
return {'error': error, 'shout': shout}
@query.field('get_shouts_drafts')
@ -205,7 +207,7 @@ async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
shout_id = shout_id or shout_input.get('id', shout_id)
slug = shout_input.get('slug')
if not user_id:
return {"error": "unauthorized"}
return {'error': 'unauthorized'}
try:
with local_session() as session:
author = session.query(Author).filter(Author.user == user_id).first()
@ -226,12 +228,19 @@ async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
slug += f'-{c}'
same_slug_shout = (
session.query(Shout)
.filter(Shout.slug == slug) # Use the updated slug value here
.filter(
Shout.slug == slug
) # Use the updated slug value here
.first()
)
shout_input['slug'] = slug
if filter(lambda x: x.id == author.id, [x for x in shout_by_id.authors]) or 'editor' in roles:
if (
filter(
lambda x: x.id == author.id, [x for x in shout_by_id.authors]
)
or 'editor' in roles
):
# topics patch
topics_input = shout_input.get('topics')
if topics_input:
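A note on the authorship checks above: filter() returns a lazy filter object, which is truthy even when it matches nothing, so these conditions pass regardless of authorship. A sketch of an explicit membership test (an editorial illustration, not part of the commit):

# filter(...) is always truthy; any(...) actually evaluates the predicate.
is_author = any(x.id == author.id for x in shout_by_id.authors)
if is_author or 'editor' in roles:
    ...  # proceed with the update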

View File

@ -16,7 +16,11 @@ from resolvers.topic import topic_unfollow
from resolvers.stat import get_with_stat, author_follows_topics, author_follows_authors
from services.auth import login_required
from services.db import local_session
from services.cache import DEFAULT_FOLLOWS, update_follows_for_author, update_followers_for_author
from services.cache import (
DEFAULT_FOLLOWS,
update_follows_for_author,
update_followers_for_author,
)
from services.notify import notify_follower
from services.schema import mutation, query
from services.logger import root_logger as logger
@ -29,22 +33,30 @@ async def follow(_, info, what, slug):
follows = None
try:
user_id = info.context['user_id']
follower_query = select(Author).select_from(Author).filter(Author.user == user_id)
follower_query = (
select(Author).select_from(Author).filter(Author.user == user_id)
)
[follower] = get_with_stat(follower_query)
if follower:
if what == 'AUTHOR':
if author_unfollow(follower.id, slug):
author_query = select(Author).select_from(Author).where(Author.slug == slug)
author_query = (
select(Author).select_from(Author).where(Author.slug == slug)
)
[author] = get_with_stat(author_query)
if author:
follows = await update_follows_for_author(follower, 'author', author, True)
follows = await update_follows_for_author(
follower, 'author', author, True
)
await update_followers_for_author(follower, author, True)
await notify_follower(follower.dict(), author.id, 'unfollow')
elif what == 'TOPIC':
topic_query = select(Topic).where(Topic.slug == slug)
[topic] = get_with_stat(topic_query)
if topic:
follows = await update_follows_for_author(follower, 'topic', topic, True)
follows = await update_follows_for_author(
follower, 'topic', topic, True
)
topic_unfollow(follower.id, slug)
elif what == 'COMMUNITY':
community_follow(follower.id, slug)
@ -72,14 +84,20 @@ async def unfollow(_, info, what, slug):
author_query = select(Author).where(Author.slug == slug)
[author] = get_with_stat(author_query)
if author:
await update_follows_for_author(follower, 'author', author, False)
follows = await update_followers_for_author(follower, author, False)
await update_follows_for_author(
follower, 'author', author, False
)
follows = await update_followers_for_author(
follower, author, False
)
await notify_follower(follower.dict(), author.id, 'unfollow')
elif what == 'TOPIC':
topic_query = select(Topic).where(Topic.slug == slug)
[topic] = get_with_stat(topic_query)
if topic:
follows = await update_follows_for_author(follower, 'topic', topic, False)
follows = await update_follows_for_author(
follower, 'topic', topic, False
)
topic_unfollow(follower.id, slug)
elif what == 'COMMUNITY':
community_unfollow(follower.id, slug)

View File

@ -22,17 +22,16 @@ from services.db import local_session
from services.logger import root_logger as logger
def query_notifications(author_id: int, after: int = 0) -> Tuple[int, int, List[Tuple[Notification, bool]]]:
def query_notifications(
author_id: int, after: int = 0
) -> Tuple[int, int, List[Tuple[Notification, bool]]]:
notification_seen_alias = aliased(NotificationSeen)
q = (
select(Notification, notification_seen_alias.viewer.label("seen"))
.outerjoin(
NotificationSeen,
and_(
NotificationSeen.viewer == author_id,
NotificationSeen.notification == Notification.id,
),
)
q = select(Notification, notification_seen_alias.viewer.label('seen')).outerjoin(
NotificationSeen,
and_(
NotificationSeen.viewer == author_id,
NotificationSeen.notification == Notification.id,
),
)
if after:
q = q.filter(Notification.created_at > after)
@ -70,21 +69,25 @@ def query_notifications(author_id: int, after: int = 0) -> Tuple[int, int, List[
return total, unread, notifications
def group_notification(thread, authors=None, shout=None, reactions=None, entity="follower", action="follow"):
def group_notification(
thread, authors=None, shout=None, reactions=None, entity='follower', action='follow'
):
reactions = reactions or []
authors = authors or []
return {
"thread": thread,
"authors": authors,
"updated_at": int(time.time()),
"shout": shout,
"reactions": reactions,
"entity": entity,
"action": action
'thread': thread,
'authors': authors,
'updated_at': int(time.time()),
'shout': shout,
'reactions': reactions,
'entity': entity,
'action': action,
}
def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, offset: int = 0):
def get_notifications_grouped(
author_id: int, after: int = 0, limit: int = 10, offset: int = 0
):
"""
Retrieves notifications for a given author.
@ -132,11 +135,13 @@ def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, o
if author and shout:
author = author.dict()
shout = shout.dict()
group = group_notification(thread_id,
shout=shout,
authors=[author],
action=notification.action,
entity=notification.entity)
group = group_notification(
thread_id,
shout=shout,
authors=[author],
action=notification.action,
entity=notification.entity,
)
groups_by_thread[thread_id] = group
groups_amount += 1
@ -162,17 +167,19 @@ def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, o
existing_group['reactions'].append(reaction)
groups_by_thread[thread_id] = existing_group
else:
group = group_notification(thread_id,
authors=[author],
shout=shout,
reactions=[reaction],
entity=notification.entity,
action=notification.action)
group = group_notification(
thread_id,
authors=[author],
shout=shout,
reactions=[reaction],
entity=notification.entity,
action=notification.action,
)
if group:
groups_by_thread[thread_id] = group
groups_amount += 1
elif notification.entity == "follower":
elif notification.entity == 'follower':
thread_id = 'followers'
follower = json.loads(payload)
group = groups_by_thread.get(thread_id)
@ -186,10 +193,12 @@ def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, o
group['authors'].remove(author)
break
else:
group = group_notification(thread_id,
authors=[follower],
entity=notification.entity,
action=notification.action)
group = group_notification(
thread_id,
authors=[follower],
entity=notification.entity,
action=notification.action,
)
groups_amount += 1
groups_by_thread[thread_id] = group
return groups_by_thread, unread, total
@ -198,7 +207,7 @@ def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, o
@query.field('load_notifications')
@login_required
async def load_notifications(_, info, after: int, limit: int = 50, offset=0):
author_id = info.context.get("author_id")
author_id = info.context.get('author_id')
error = None
total = 0
unread = 0
@ -206,11 +215,18 @@ async def load_notifications(_, info, after: int, limit: int = 50, offset=0):
try:
if author_id:
groups, unread, total = get_notifications_grouped(author_id, after, limit)
notifications = sorted(groups.values(), key=lambda group: group.updated_at, reverse=True)
notifications = sorted(
groups.values(), key=lambda group: group.updated_at, reverse=True
)
except Exception as e:
error = e
logger.error(e)
return {"notifications": notifications, "total": total, "unread": unread, "error": error}
return {
'notifications': notifications,
'total': total,
'unread': unread,
'error': error,
}
@mutation.field('notification_mark_seen')
@ -226,8 +242,8 @@ async def notification_mark_seen(_, info, notification_id: int):
except SQLAlchemyError as e:
session.rollback()
logger.error(f'seen mutation failed: {e}')
return {"error": 'cant mark as read'}
return {"error": None}
return {'error': 'cant mark as read'}
return {'error': None}
@mutation.field('notifications_seen_after')
@ -239,7 +255,11 @@ async def notifications_seen_after(_, info, after: int):
author_id = info.context.get('author_id')
if author_id:
with local_session() as session:
nnn = session.query(Notification).filter(and_(Notification.created_at > after)).all()
nnn = (
session.query(Notification)
.filter(and_(Notification.created_at > after))
.all()
)
for n in nnn:
try:
ns = NotificationSeen(notification=n.id, viewer=author_id)
@ -250,7 +270,7 @@ async def notifications_seen_after(_, info, after: int):
except Exception as e:
print(e)
error = 'cant mark as read'
return {"error": error}
return {'error': error}
@mutation.field('notifications_seen_thread')
@ -268,7 +288,7 @@ async def notifications_seen_thread(_, info, thread: str, after: int):
Notification.action == 'create',
Notification.entity == 'reaction',
Notification.created_at > after,
)
)
.all()
)
removed_reaction_notifications = (
@ -277,7 +297,7 @@ async def notifications_seen_thread(_, info, thread: str, after: int):
Notification.action == 'delete',
Notification.entity == 'reaction',
Notification.created_at > after,
)
)
.all()
)
exclude = set()
@ -289,9 +309,9 @@ async def notifications_seen_thread(_, info, thread: str, after: int):
reaction = json.loads(n.payload)
reaction_id = reaction.get('id')
if (
reaction_id not in exclude
and reaction.get('shout') == shout_id
and reaction.get('reply_to') == reply_to_id
reaction_id not in exclude
and reaction.get('shout') == shout_id
and reaction.get('reply_to') == reply_to_id
):
try:
ns = NotificationSeen(notification=n.id, viewer=author_id)
@ -302,4 +322,4 @@ async def notifications_seen_thread(_, info, thread: str, after: int):
session.rollback()
else:
error = 'You are not logged in'
return {"error": error}
return {'error': error}
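One detail worth noting in load_notifications above: group_notification() returns plain dicts, so the sort key would need a subscript rather than attribute access. A sketch of the expected call (editorial illustration, not the commit's code):

# 'updated_at' is a dict key in the grouped payload, not an attribute.
notifications = sorted(
    groups.values(), key=lambda group: group['updated_at'], reverse=True
)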

View File

@ -208,7 +208,9 @@ async def create_reaction(_, info, reaction):
return {'error': 'cannot create reaction without a kind'}
if kind in RATING_REACTIONS:
error_result = prepare_new_rating(reaction, shout_id, session, author)
error_result = prepare_new_rating(
reaction, shout_id, session, author
)
if error_result:
return error_result

View File

@ -68,12 +68,15 @@ async def get_shout(_, info, slug: str):
}
for author_caption in (
session.query(ShoutAuthor).join(Shout).where(
session.query(ShoutAuthor)
.join(Shout)
.where(
and_(
Shout.slug == slug,
Shout.published_at.is_not(None),
Shout.deleted_at.is_(None)
))
Shout.deleted_at.is_(None),
)
)
):
for author in shout.authors:
if author.id == author_caption.author:
@ -121,12 +124,7 @@ async def load_shouts_by(_, _info, options):
q = (
select(Shout)
.options(joinedload(Shout.authors), joinedload(Shout.topics))
.where(
and_(
Shout.deleted_at.is_(None),
Shout.published_at.is_not(None)
)
)
.where(and_(Shout.deleted_at.is_(None), Shout.published_at.is_not(None)))
)
# stats
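The collapsed .where(and_(...)) above behaves identically to chaining two .where() calls, since SQLAlchemy ANDs successive where clauses; an equivalent sketch:

# Equivalent query construction without and_():
q = (
    select(Shout)
    .options(joinedload(Shout.authors), joinedload(Shout.topics))
    .where(Shout.deleted_at.is_(None))
    .where(Shout.published_at.is_not(None))
)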

View File

@ -31,7 +31,6 @@ def add_topic_stat_columns(q):
'followers_stat'
)
)
# TODO: topic.stat.comments
# .outerjoin(aliased_reaction)
# .add_columns(
@ -53,48 +52,58 @@ def add_topic_stat_columns(q):
def add_author_stat_columns(q):
aliased_shout_author = aliased(ShoutAuthor)
aliased_author_authors = aliased(AuthorFollower)
aliased_author_followers = aliased(AuthorFollower)
# aliased_reaction = aliased(Reaction)
aliased_authors = aliased(AuthorFollower)
aliased_followers = aliased(AuthorFollower)
q = q.outerjoin(aliased_shout_author, aliased_shout_author.author == Author.id)
q = q.add_columns(func.count(distinct(aliased_shout_author.shout)).label('shouts_stat'))
q = q.add_columns(
func.count(distinct(aliased_shout_author.shout)).label('shouts_stat')
)
q = q.outerjoin(aliased_author_authors, aliased_author_authors.follower == Author.id)
q = q.add_columns(func.count(distinct(aliased_author_authors.author)).label('authors_stat'))
q = q.outerjoin(aliased_authors, aliased_authors.follower == Author.id)
q = q.add_columns(
func.count(distinct(aliased_authors.author)).label('authors_stat')
)
q = q.outerjoin(aliased_author_followers, aliased_author_followers.author == Author.id)
q = q.add_columns(func.count(distinct(aliased_author_followers.follower)).label('followers_stat'))
q = q.outerjoin(aliased_followers, aliased_followers.author == Author.id)
q = q.add_columns(
func.count(distinct(aliased_followers.follower)).label('followers_stat')
)
# Create a subquery for comments count
subquery_comments = (
select(Reaction.created_by, func.count(Reaction.id).label('comments_stat'))
.filter(
sub_comments = (
select(Author.id, func.count(Reaction.id).label('comments_stat'))
.join(
Reaction,
and_(
Reaction.created_by == Author.id,
Reaction.kind == ReactionKind.COMMENT.value,
Reaction.deleted_at.is_(None),
)
),
)
.group_by(Reaction.created_by)
.group_by(Author.id)
.subquery()
)
q = q.outerjoin(subquery_comments, subquery_comments.c.created_by == Author.id)
q = q.add_columns(subquery_comments.c.comments_stat)
q = q.outerjoin(sub_comments, Author.id == sub_comments.c.id)
q = q.add_columns(sub_comments.c.comments_stat)
# Create a subquery for topics
subquery_topics = (select(ShoutTopic.topic, func.count(ShoutTopic.shout).label('topics_stat'))
sub_topics = (
select(
ShoutAuthor.author,
func.count(distinct(ShoutTopic.topic)).label('topics_stat'),
)
.join(Shout, ShoutTopic.shout == Shout.id)
.join(ShoutAuthor, Shout.id == ShoutAuthor.shout)
.filter(ShoutAuthor.author == Author.id)
.group_by(ShoutTopic.topic)
.group_by(ShoutAuthor.author)
.subquery()
)
q = q.outerjoin(subquery_topics, subquery_topics.c.topic == Author.id)
q = q.add_columns(subquery_topics.c.topics_stat)
q = q.outerjoin(sub_topics, Author.id == sub_topics.c.author)
q = q.add_columns(sub_topics.c.topics_stat)
q = q.group_by(Author.id, subquery_comments.c.comments_stat, subquery_topics.c.topics_stat)
q = q.group_by(Author.id, sub_comments.c.comments_stat, sub_topics.c.topics_stat)
return q
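The rewritten add_author_stat_columns() is the substance of this commit: comments_stat and topics_stat now come from per-author subqueries joined back on Author.id instead of raw joins, and the outer query groups by Author.id plus both subquery columns. A minimal usage sketch, assuming the select(Author) pattern the resolvers in this commit pass to get_with_stat (slug value illustrative):

# Sketch only; the slug value is a placeholder.
q = add_author_stat_columns(select(Author).filter(Author.slug == 'some-slug'))
# Each returned row now carries shouts_stat, authors_stat, followers_stat,
# comments_stat and topics_stat alongside the Author entity.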

View File

@ -45,8 +45,8 @@ async def check_auth(req):
gql = {
'query': f'query {operation}($params: ValidateJWTTokenInput!) {{'
+ f'{query_name}(params: $params) {{ is_valid claims }} '
+ '}',
+ f'{query_name}(params: $params) {{ is_valid claims }} '
+ '}',
'variables': variables,
'operationName': operation,
}
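For illustration, with assumed values for operation and query_name (neither appears in this hunk), the concatenation assembles a document like this:

operation, query_name = 'ValidateJWTToken', 'validate_jwt_token'  # assumed names
doc = (
    f'query {operation}($params: ValidateJWTTokenInput!) {{'
    + f'{query_name}(params: $params) {{ is_valid claims }} '
    + '}'
)
# -> query ValidateJWTToken($params: ValidateJWTTokenInput!) {validate_jwt_token(params: $params) { is_valid claims } }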

View File

@ -45,7 +45,9 @@ async def set_follows_topics_cache(follows, author_id: int, ttl=25 * 60 * 60):
async def set_follows_authors_cache(follows, author_id: int, ttl=25 * 60 * 60):
try:
payload = json.dumps(follows)
await redis.execute('SETEX', f'author:{author_id}:follows-authors', ttl, payload)
await redis.execute(
'SETEX', f'author:{author_id}:follows-authors', ttl, payload
)
except Exception:
import traceback
@ -53,7 +55,9 @@ async def set_follows_authors_cache(follows, author_id: int, ttl=25 * 60 * 60):
logger.error(exc)
async def update_follows_for_author(follower: Author, entity_type: str, entity: dict, is_insert: bool):
async def update_follows_for_author(
follower: Author, entity_type: str, entity: dict, is_insert: bool
):
redis_key = f'author:{follower.id}:follows-{entity_type}s'
follows_str = await redis.get(redis_key)
follows = json.loads(follows_str) if follows_str else []
@ -69,7 +73,9 @@ async def update_follows_for_author(follower: Author, entity_type: str, entity:
return follows
async def update_followers_for_author(follower: Author, author: Author, is_insert: bool):
async def update_followers_for_author(
follower: Author, author: Author, is_insert: bool
):
redis_key = f'author:{author.id}:followers'
followers_str = await redis.get(redis_key)
followers = json.loads(followers_str) if followers_str else []
@ -107,22 +113,27 @@ def after_reaction_insert(mapper, connection, reaction: Reaction):
.where(Reaction.id == reaction.reply_to)
)
author_query = select(
author_subquery.subquery().c.id,
author_subquery.subquery().c.slug,
author_subquery.subquery().c.created_at,
author_subquery.subquery().c.name,
).select_from(author_subquery.subquery()).union(
author_query = (
select(
replied_author_subquery.subquery().c.id,
author_subquery.subquery().c.id,
author_subquery.subquery().c.slug,
author_subquery.subquery().c.created_at,
author_subquery.subquery().c.name,
)
.select_from(author_subquery.subquery())
.union(
select(replied_author_subquery.subquery().c.id).select_from(
replied_author_subquery.subquery()
)
)
.select_from(replied_author_subquery.subquery())
)
for author_with_stat in get_with_stat(author_query):
asyncio.create_task(set_author_cache(author_with_stat.dict()))
shout = connection.execute(select(Shout).select_from(Shout).where(Shout.id == reaction.shout)).first()
shout = connection.execute(
select(Shout).select_from(Shout).where(Shout.id == reaction.shout)
).first()
if shout:
after_shouts_update(mapper, connection, shout)
except Exception as exc:
@ -176,7 +187,9 @@ async def handle_author_follower_change(
follower = get_with_stat(follower_query)
if follower and author:
_ = asyncio.create_task(set_author_cache(author.dict()))
follows_authors = await redis.execute('GET', f'author:{follower_id}:follows-authors')
follows_authors = await redis.execute(
'GET', f'author:{follower_id}:follows-authors'
)
if follows_authors:
follows_authors = json.loads(follows_authors)
if not any(x.get('id') == author.id for x in follows_authors):
@ -209,7 +222,9 @@ async def handle_topic_follower_change(
follower = get_with_stat(follower_query)
if follower and topic:
_ = asyncio.create_task(set_author_cache(follower.dict()))
follows_topics = await redis.execute('GET', f'author:{follower_id}:follows-topics')
follows_topics = await redis.execute(
'GET', f'author:{follower_id}:follows-topics'
)
if follows_topics:
follows_topics = json.loads(follows_topics)
if not any(x.get('id') == topic.id for x in follows_topics):
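For reference, a sketch of how the reshaped helpers are driven, mirroring the calls in resolvers/follower.py shown earlier in this commit:

# Record a follow and refresh both directions of the relation.
follows = await update_follows_for_author(follower, 'author', author, True)
await update_followers_for_author(follower, author, True)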

View File

@ -21,10 +21,7 @@ inspector = inspect(engine)
configure_mappers()
T = TypeVar('T')
REGISTRY: Dict[str, type] = {}
FILTERED_FIELDS = [
'_sa_instance_state',
'search_vector'
]
FILTERED_FIELDS = ['_sa_instance_state', 'search_vector']
# noinspection PyUnusedLocal
@ -47,7 +44,9 @@ class Base(declarative_base()):
REGISTRY[cls.__name__] = cls
def dict(self) -> Dict[str, Any]:
column_names = filter(lambda x: x not in FILTERED_FIELDS, self.__table__.columns.keys())
column_names = filter(
lambda x: x not in FILTERED_FIELDS, self.__table__.columns.keys()
)
try:
data = {}
for c in column_names:
@ -76,7 +75,9 @@ Base.metadata.create_all(bind=engine)
# Function that renders a full traceback when a warning is emitted
def warning_with_traceback(message: Warning | str, category, filename: str, lineno: int, file=None, line=None):
def warning_with_traceback(
message: Warning | str, category, filename: str, lineno: int, file=None, line=None
):
tb = traceback.format_stack()
tb_str = ''.join(tb)
return f'{message} ({filename}, {lineno}): {category.__name__}\n{tb_str}'
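warning_with_traceback() has the keyword shape CPython expects from a replaced warnings formatter, so wiring it up is a single assignment; a sketch, assuming that is its intended use (the assignment itself is outside this hunk):

import warnings

# Route warning formatting through the traceback-appending function above.
warnings.formatwarning = warning_with_traceback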

View File

@ -47,6 +47,7 @@ class MultilineColoredFormatter(colorlog.ColoredFormatter):
# If not multiline or no message, use the default formatting
return super().format(record)
# Create a MultilineColoredFormatter object for colorized logging
formatter = MultilineColoredFormatter(fmt_string, **fmt_config)
@ -54,6 +55,7 @@ formatter = MultilineColoredFormatter(fmt_string, **fmt_config)
stream = logging.StreamHandler()
stream.setFormatter(formatter)
def get_colorful_logger(name='main'):
# Create and configure the logger
logger = logging.getLogger(name)
@ -62,6 +64,7 @@ def get_colorful_logger(name='main'):
return logger
# Set up the root logger with the same formatting
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
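Modules then obtain a configured logger with one import, as the resolvers in this commit already do:

from services.logger import root_logger as logger

logger.debug('colorized, multiline-aware output')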

View File

@ -113,7 +113,9 @@ class SearchService:
mapping = result.get('mapping')
if mapping and mapping != expected_mapping:
logger.debug(f' found index mapping: {mapping}')
logger.warn(' a different index mapping is required, reindexing')
logger.warn(
' a different index mapping is required, reindexing'
)
await self.recreate_index()
async def recreate_index(self):
@ -125,7 +127,9 @@ class SearchService:
if self.client:
id_ = str(shout.id)
logger.debug(f' Indexing post {id_}')
asyncio.create_task(self.client.index(index=self.index_name, id=id_, body=shout.dict()))
asyncio.create_task(
self.client.index(index=self.index_name, id=id_, body=shout.dict())
)
async def search(self, text, limit, offset):
logger.debug(f' Searching: {text}')
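A driving sketch for the search path; the constructor and the indexing method's enclosing signature are not shown in this hunk, so the setup is assumed:

# Hypothetical setup; only search(text, limit, offset) is visible above.
service = SearchService()
results = await service.search('test query', limit=10, offset=0)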

View File

@ -73,7 +73,9 @@ class ViewedStorage:
if now_date == self.start_date:
logger.info(' * Data is up to date!')
else:
logger.info(f' * Views file {VIEWS_FILEPATH} created: {self.start_date}')
logger.info(
f' * Views file {VIEWS_FILEPATH} created: {self.start_date}'
)
with open(VIEWS_FILEPATH, 'r') as file:
precounted_views = json.load(file)

View File

@ -15,14 +15,18 @@ class WebhookEndpoint(HTTPEndpoint):
try:
data = await request.json()
if not data:
raise HTTPException(status_code=400, detail="Request body is empty")
raise HTTPException(status_code=400, detail='Request body is empty')
auth = request.headers.get('Authorization')
if not auth or auth != os.environ.get('WEBHOOK_SECRET'):
raise HTTPException(status_code=401, detail="Invalid Authorization header")
raise HTTPException(
status_code=401, detail='Invalid Authorization header'
)
# logger.debug(data)
user = data.get('user')
if not isinstance(user, dict):
raise HTTPException(status_code=400, detail="User data is not a dictionary")
raise HTTPException(
status_code=400, detail='User data is not a dictionary'
)
user_id: str = user.get('id')
name: str = user.get('given_name', user.get('slug'))
email: str = user.get('email', '')
@ -32,20 +36,24 @@ class WebhookEndpoint(HTTPEndpoint):
author = session.query(Author).filter(Author.user == user_id).first()
if not author:
# If the author does not exist, create a new one
slug: str = email.split('@')[0].replace(".", "-").lower()
slug: str = email.split('@')[0].replace('.', '-').lower()
slug: str = re.sub('[^0-9a-z]+', '-', slug)
while True:
author = session.query(Author).filter(Author.slug == slug).first()
author = (
session.query(Author).filter(Author.slug == slug).first()
)
if not author:
break
slug = f"{slug}-{len(session.query(Author).filter(Author.email == email).all()) + 1}"
slug = f'{slug}-{len(session.query(Author).filter(Author.email == email).all()) + 1}'
author = Author(user=user_id, slug=slug, name=name, pic=pic)
session.add(author)
session.commit()
return JSONResponse({'status': 'success'})
except HTTPException as e:
return JSONResponse({'status': 'error', 'message': str(e.detail)}, status_code=e.status_code)
return JSONResponse(
{'status': 'error', 'message': str(e.detail)}, status_code=e.status_code
)
except Exception as e:
import traceback