add_author_stat-fix+fmt

2024-03-06 12:25:55 +03:00
parent 70589a35da
commit 9f881c0641
15 changed files with 241 additions and 138 deletions

View File

@@ -45,8 +45,8 @@ async def check_auth(req):
     gql = {
         'query': f'query {operation}($params: ValidateJWTTokenInput!) {{'
-            + f'{query_name}(params: $params) {{ is_valid claims }} '
-            + '}',
+        + f'{query_name}(params: $params) {{ is_valid claims }} '
+        + '}',
         'variables': variables,
         'operationName': operation,
     }
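
For reference, the concatenated f-strings above assemble into a single GraphQL document. A minimal sketch, assuming illustrative values for `operation` and `query_name` (neither is shown in this hunk):

# Hypothetical values; the diff does not show how these are derived.
operation = 'ValidateJWTToken'
query_name = 'validate_jwt_token'

query = (
    f'query {operation}($params: ValidateJWTTokenInput!) {{'
    + f'{query_name}(params: $params) {{ is_valid claims }} '
    + '}'
)
print(query)
# query ValidateJWTToken($params: ValidateJWTTokenInput!) {validate_jwt_token(params: $params) { is_valid claims } }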

View File

@@ -45,7 +45,9 @@ async def set_follows_topics_cache(follows, author_id: int, ttl=25 * 60 * 60):
 async def set_follows_authors_cache(follows, author_id: int, ttl=25 * 60 * 60):
     try:
         payload = json.dumps(follows)
-        await redis.execute('SETEX', f'author:{author_id}:follows-authors', ttl, payload)
+        await redis.execute(
+            'SETEX', f'author:{author_id}:follows-authors', ttl, payload
+        )
     except Exception:
         import traceback
@@ -53,7 +55,9 @@ async def set_follows_authors_cache(follows, author_id: int, ttl=25 * 60 * 60):
         logger.error(exc)
-async def update_follows_for_author(follower: Author, entity_type: str, entity: dict, is_insert: bool):
+async def update_follows_for_author(
+    follower: Author, entity_type: str, entity: dict, is_insert: bool
+):
     redis_key = f'author:{follower.id}:follows-{entity_type}s'
     follows_str = await redis.get(redis_key)
     follows = json.loads(follows_str) if follows_str else []
@@ -69,7 +73,9 @@ async def update_follows_for_author(follower: Author, entity_type: str, entity:
     return follows
-async def update_followers_for_author(follower: Author, author: Author, is_insert: bool):
+async def update_followers_for_author(
+    follower: Author, author: Author, is_insert: bool
+):
     redis_key = f'author:{author.id}:followers'
     followers_str = await redis.get(redis_key)
     followers = json.loads(followers_str) if followers_str else []
@@ -107,22 +113,27 @@ def after_reaction_insert(mapper, connection, reaction: Reaction):
             .where(Reaction.id == reaction.reply_to)
         )
-        author_query = select(
-            author_subquery.subquery().c.id,
-            author_subquery.subquery().c.slug,
-            author_subquery.subquery().c.created_at,
-            author_subquery.subquery().c.name,
-        ).select_from(author_subquery.subquery()).union(
-            select(replied_author_subquery.subquery().c.id)
-            .select_from(replied_author_subquery.subquery())
-        )
+        author_query = (
+            select(
+                author_subquery.subquery().c.id,
+                author_subquery.subquery().c.slug,
+                author_subquery.subquery().c.created_at,
+                author_subquery.subquery().c.name,
+            )
+            .select_from(author_subquery.subquery())
+            .union(
+                select(replied_author_subquery.subquery().c.id).select_from(
+                    replied_author_subquery.subquery()
+                )
+            )
+        )
         for author_with_stat in get_with_stat(author_query):
             asyncio.create_task(set_author_cache(author_with_stat.dict()))
-        shout = connection.execute(select(Shout).select_from(Shout).where(Shout.id == reaction.shout)).first()
+        shout = connection.execute(
+            select(Shout).select_from(Shout).where(Shout.id == reaction.shout)
+        ).first()
         if shout:
             after_shouts_update(mapper, connection, shout)
     except Exception as exc:
@@ -176,7 +187,9 @@ async def handle_author_follower_change(
     follower = get_with_stat(follower_query)
     if follower and author:
         _ = asyncio.create_task(set_author_cache(author.dict()))
-        follows_authors = await redis.execute('GET', f'author:{follower_id}:follows-authors')
+        follows_authors = await redis.execute(
+            'GET', f'author:{follower_id}:follows-authors'
+        )
         if follows_authors:
             follows_authors = json.loads(follows_authors)
             if not any(x.get('id') == author.id for x in follows_authors):
@@ -209,7 +222,9 @@ async def handle_topic_follower_change(
     follower = get_with_stat(follower_query)
     if follower and topic:
         _ = asyncio.create_task(set_author_cache(follower.dict()))
-        follows_topics = await redis.execute('GET', f'author:{follower_id}:follows-topics')
+        follows_topics = await redis.execute(
+            'GET', f'author:{follower_id}:follows-topics'
+        )
         if follows_topics:
             follows_topics = json.loads(follows_topics)
             if not any(x.get('id') == topic.id for x in follows_topics):
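
The SETEX/GET round trip these helpers rely on, reduced to a standalone sketch. It uses redis.asyncio directly; the project's `redis` wrapper with its `execute('SETEX', ...)` call style is assumed to behave the same way:

import asyncio
import json

from redis.asyncio import Redis


async def demo():
    r = Redis()
    follows = [{'id': 1, 'slug': 'example-author'}]  # illustrative payload
    ttl = 25 * 60 * 60
    # store with an expiry, as set_follows_authors_cache does
    await r.setex('author:1:follows-authors', ttl, json.dumps(follows))
    # read back and decode, as update_follows_for_author does
    raw = await r.get('author:1:follows-authors')
    assert (json.loads(raw) if raw else []) == follows


asyncio.run(demo())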

View File

@@ -21,10 +21,7 @@ inspector = inspect(engine)
 configure_mappers()
 T = TypeVar('T')
 REGISTRY: Dict[str, type] = {}
-FILTERED_FIELDS = [
-    '_sa_instance_state',
-    'search_vector'
-]
+FILTERED_FIELDS = ['_sa_instance_state', 'search_vector']
 # noinspection PyUnusedLocal
@@ -47,7 +44,9 @@ class Base(declarative_base()):
         REGISTRY[cls.__name__] = cls
     def dict(self) -> Dict[str, Any]:
-        column_names = filter(lambda x: x not in FILTERED_FIELDS, self.__table__.columns.keys())
+        column_names = filter(
+            lambda x: x not in FILTERED_FIELDS, self.__table__.columns.keys()
+        )
         try:
             data = {}
             for c in column_names:
@@ -76,7 +75,9 @@ Base.metadata.create_all(bind=engine)
 # Prints the full traceback when a warning is raised
-def warning_with_traceback(message: Warning | str, category, filename: str, lineno: int, file=None, line=None):
+def warning_with_traceback(
+    message: Warning | str, category, filename: str, lineno: int, file=None, line=None
+):
     tb = traceback.format_stack()
     tb_str = ''.join(tb)
     return f'{message} ({filename}, {lineno}): {category.__name__}\n{tb_str}'
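
The parameter list of `warning_with_traceback` mirrors `warnings.showwarning`, while returning a string the way `warnings.formatwarning` does. One plausible hook-up, sketched below; the assignment itself is an assumption, since the hunk shows only the function body:

import traceback
import warnings


def warning_with_traceback(message, category, filename, lineno, file=None, line=None):
    tb_str = ''.join(traceback.format_stack())
    return f'{message} ({filename}, {lineno}): {category.__name__}\n{tb_str}'


# Assumed wiring: route every warning through the custom formatter.
warnings.formatwarning = warning_with_traceback
warnings.warn('something looks off')  # now printed with a full stack trace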

View File

@@ -47,6 +47,7 @@ class MultilineColoredFormatter(colorlog.ColoredFormatter):
         # If not multiline or no message, use the default formatting
         return super().format(record)
+
 # Create a MultilineColoredFormatter object for colorized logging
 formatter = MultilineColoredFormatter(fmt_string, **fmt_config)
@@ -54,6 +55,7 @@ formatter = MultilineColoredFormatter(fmt_string, **fmt_config)
 stream = logging.StreamHandler()
 stream.setFormatter(formatter)
+
 def get_colorful_logger(name='main'):
     # Create and configure the logger
     logger = logging.getLogger(name)
@@ -62,6 +64,7 @@ def get_colorful_logger(name='main'):
     return logger
+
 # Set up the root logger with the same formatting
 root_logger = logging.getLogger()
 root_logger.setLevel(logging.DEBUG)
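
A minimal, self-contained sketch of the colorlog wiring this module builds up; `fmt_string` here is an illustrative stand-in for the format string defined earlier in the file:

import logging

import colorlog

fmt_string = '%(log_color)s[%(levelname)s] %(message)s'  # illustrative
handler = logging.StreamHandler()
handler.setFormatter(colorlog.ColoredFormatter(fmt_string))

logger = logging.getLogger('main')
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
logger.info('colorized output')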

View File

@@ -113,7 +113,9 @@ class SearchService:
         mapping = result.get('mapping')
         if mapping and mapping != expected_mapping:
             logger.debug(f' найдена структура индексации: {mapping}')
-            logger.warn(' требуется другая структура индексации, переиндексация')
+            logger.warn(
+                ' требуется другая структура индексации, переиндексация'
+            )
             await self.recreate_index()
     async def recreate_index(self):
@@ -125,7 +127,9 @@ class SearchService:
         if self.client:
             id_ = str(shout.id)
             logger.debug(f' Индексируем пост {id_}')
-            asyncio.create_task(self.client.index(index=self.index_name, id=id_, body=shout.dict()))
+            asyncio.create_task(
+                self.client.index(index=self.index_name, id=id_, body=shout.dict())
+            )
     async def search(self, text, limit, offset):
         logger.debug(f' Ищем: {text}')
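
The `asyncio.create_task(...)` call above schedules the index request without awaiting it. A sketch of the same fire-and-forget pattern, assuming an async OpenSearch client (the service's actual client class is not shown in this diff); note that a reference to the task should be kept, since the event loop holds tasks only weakly:

import asyncio

from opensearchpy import AsyncOpenSearch

background_tasks = set()  # keep strong references so tasks are not collected


def index_shout(client: AsyncOpenSearch, index_name: str, shout_dict: dict):
    # must be called from a running event loop
    task = asyncio.create_task(
        client.index(index=index_name, id=str(shout_dict['id']), body=shout_dict)
    )
    background_tasks.add(task)
    task.add_done_callback(background_tasks.discard)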

View File

@@ -73,7 +73,9 @@ class ViewedStorage:
             if now_date == self.start_date:
                 logger.info(' * Данные актуализованы!')
             else:
-                logger.info(f' * Файл просмотров {VIEWS_FILEPATH} создан: {self.start_date}')
+                logger.info(
+                    f' * Файл просмотров {VIEWS_FILEPATH} создан: {self.start_date}'
+                )
             with open(VIEWS_FILEPATH, 'r') as file:
                 precounted_views = json.load(file)

View File

@@ -15,14 +15,18 @@ class WebhookEndpoint(HTTPEndpoint):
         try:
             data = await request.json()
             if not data:
-                raise HTTPException(status_code=400, detail="Request body is empty")
+                raise HTTPException(status_code=400, detail='Request body is empty')
             auth = request.headers.get('Authorization')
             if not auth or auth != os.environ.get('WEBHOOK_SECRET'):
-                raise HTTPException(status_code=401, detail="Invalid Authorization header")
+                raise HTTPException(
+                    status_code=401, detail='Invalid Authorization header'
+                )
             # logger.debug(data)
             user = data.get('user')
             if not isinstance(user, dict):
-                raise HTTPException(status_code=400, detail="User data is not a dictionary")
+                raise HTTPException(
+                    status_code=400, detail='User data is not a dictionary'
+                )
             user_id: str = user.get('id')
             name: str = user.get('given_name', user.get('slug'))
             email: str = user.get('email', '')
@@ -32,20 +36,24 @@ class WebhookEndpoint(HTTPEndpoint):
                 author = session.query(Author).filter(Author.user == user_id).first()
                 if not author:
                     # If the author does not exist, create a new one
-                    slug: str = email.split('@')[0].replace(".", "-").lower()
+                    slug: str = email.split('@')[0].replace('.', '-').lower()
                     slug: str = re.sub('[^0-9a-z]+', '-', slug)
                     while True:
-                        author = session.query(Author).filter(Author.slug == slug).first()
+                        author = (
+                            session.query(Author).filter(Author.slug == slug).first()
+                        )
                         if not author:
                             break
-                        slug = f"{slug}-{len(session.query(Author).filter(Author.email == email).all()) + 1}"
+                        slug = f'{slug}-{len(session.query(Author).filter(Author.email == email).all()) + 1}'
                     author = Author(user=user_id, slug=slug, name=name, pic=pic)
                     session.add(author)
                     session.commit()
             return JSONResponse({'status': 'success'})
         except HTTPException as e:
-            return JSONResponse({'status': 'error', 'message': str(e.detail)}, status_code=e.status_code)
+            return JSONResponse(
+                {'status': 'error', 'message': str(e.detail)}, status_code=e.status_code
+            )
         except Exception as e:
             import traceback
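
The slug derivation above, reduced to a pure function for clarity. The collision check is abstracted into an `exists` callback so the sketch runs without a database session; the suffix logic is a simplified variant of the loop in the diff:

import re


def make_slug(email: str, exists) -> str:
    # normalise the mailbox name: dots to dashes, lower-case, ASCII-only
    base = re.sub('[^0-9a-z]+', '-', email.split('@')[0].replace('.', '-').lower())
    slug, n = base, 1
    while exists(slug):  # stand-in for the session.query(...).first() probe
        n += 1
        slug = f'{base}-{n}'
    return slug


assert make_slug('Jane.Doe@example.com', lambda s: False) == 'jane-doe'
assert make_slug('Jane.Doe@example.com', {'jane-doe'}.__contains__) == 'jane-doe-2'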