async+fmt-fix
All checks were successful
Deploy on push / deploy (push) Successful in 1m12s

Untone 2024-10-14 12:19:30 +03:00
parent 4e7fb953ba
commit 3188a67661
5 changed files with 73 additions and 51 deletions

main.py (56 changed lines)
View File

@@ -6,11 +6,20 @@ from os.path import exists
from ariadne import load_schema_from_path, make_executable_schema
from ariadne.asgi import GraphQL
from starlette.applications import Starlette
from starlette.routing import Route
from starlette.responses import JSONResponse
from starlette.routing import Route
from cache.precache import precache_data
from cache.revalidator import revalidation_manager
from orm import ( # , collection, invite
author,
community,
notification,
reaction,
shout,
topic,
)
from services.db import create_table_if_not_exists, engine
from services.exception import ExceptionHandlerMiddleware
from services.redis import redis
from services.schema import resolvers
@@ -18,8 +27,6 @@ from services.search import search_service
from services.viewed import ViewedStorage
from services.webhook import WebhookEndpoint
from settings import DEV_SERVER_PID_FILE_NAME, MODE
from services.db import engine, create_table_if_not_exists
from orm import author, notification, shout, topic, reaction, community #, collection, invite
import_module("resolvers")
schema = make_executable_schema(load_schema_from_path("schema/"), resolvers)
@@ -34,10 +41,37 @@ async def start():
print(f"[main] process started in {MODE} mode")
def create_all_tables():
for model in [
author.Author,
author.AuthorRating,
author.AuthorFollower,
notification.Notification,
notification.NotificationSeen,
shout.Shout,
shout.ShoutAuthor,
shout.ShoutTopic,
shout.ShoutCommunity,
topic.Topic,
topic.TopicFollower,
reaction.Reaction,
community.Community,
community.CommunityFollower,
# collection.Collection, collection.ShoutCollection,
# invite.Invite
]:
create_table_if_not_exists(engine, model)
async def create_all_tables_async():
# Wrap the synchronous function in an async one
await asyncio.to_thread(create_all_tables)
async def lifespan(app):
# Start all services on application startup
await asyncio.gather(
create_all_tables(),
create_all_tables_async(),
redis.connect(),
precache_data(),
ViewedStorage.init(),
@@ -50,22 +84,10 @@ async def lifespan(app):
await redis.disconnect()
def create_all_tables():
for model in [author.Author, author.AuthorRating, author.AuthorFollower,
notification.Notification, notification.NotificationSeen,
shout.Shout, shout.ShoutAuthor, shout.ShoutTopic, shout.ShoutCommunity,
topic.Topic, topic.TopicFollower,
reaction.Reaction,
community.Community, community.CommunityFollower,
# collection.Collection, collection.ShoutCollection,
# invite.Invite
]:
create_table_if_not_exists(engine, model)
# Create the GraphQL instance
graphql_app = GraphQL(schema, debug=True)
# Wrap the GraphQL handler for better error handling
async def graphql_handler(request):
try:
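Note on the main.py changes: the startup gather now calls create_all_tables_async() instead of the plain create_all_tables(), pushing the blocking DDL into a worker thread via asyncio.to_thread (gather only accepts awaitables, which is presumably what this fix addresses). A minimal, self-contained sketch of that pattern; the service names below are placeholders rather than the project's real initializers, and asyncio.to_thread assumes Python 3.9+:

import asyncio
import time

def create_all_tables():
    # stand-in for blocking, synchronous DDL work
    time.sleep(0.5)

async def create_all_tables_async():
    # run the sync function in a worker thread so the event loop stays free
    await asyncio.to_thread(create_all_tables)

async def connect_cache():
    # stand-in for an async initializer such as redis.connect()
    await asyncio.sleep(0.1)

async def startup():
    # start everything concurrently; each argument must be an awaitable,
    # which is why the sync function needs the async wrapper
    await asyncio.gather(create_all_tables_async(), connect_cache())

asyncio.run(startup())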

View File

@@ -40,6 +40,3 @@ class Notification(Base):
payload = Column(JSON, nullable=True)
seen = relationship(lambda: Author, secondary="notification_seen")
# Call the table creation function
create_table_if_not_exists(engine, Notification)
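This hunk drops the module-level table creation; with the new create_all_tables() in main.py, every model is created in one place at startup. For reference, a helper like create_table_if_not_exists is typically just an inspector check plus a conditional create. This is a hypothetical sketch, not the project's actual services/db.py implementation:

from sqlalchemy import inspect

def create_table_if_not_exists(engine, model):
    # create the model's table only if it does not already exist
    inspector = inspect(engine)
    if not inspector.has_table(model.__tablename__):
        model.__table__.create(bind=engine)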

View File

@@ -84,15 +84,17 @@ def query_shouts(slug=None):
# Subqueries for each counter
comments_subquery = (
select(func.count(distinct(aliased_reaction.id)).label("comments_count"))
.where(and_(
aliased_reaction.shout == Shout.id,
aliased_reaction.kind == ReactionKind.COMMENT.value,
aliased_reaction.deleted_at.is_(None)
))
.where(
and_(
aliased_reaction.shout == Shout.id,
aliased_reaction.kind == ReactionKind.COMMENT.value,
aliased_reaction.deleted_at.is_(None),
)
)
.scalar_subquery()
.correlate(Shout)
)
# followers_subquery = (
# select(func.count(distinct(ShoutReactionsFollower.follower)).label("followers_count"))
# .where(ShoutReactionsFollower.shout == Shout.id)
@@ -100,22 +102,26 @@ def query_shouts(slug=None):
# )
rating_subquery = (
select(func.sum(
case(
(aliased_reaction.kind == ReactionKind.LIKE.value, 1),
(aliased_reaction.kind == ReactionKind.DISLIKE.value, -1),
else_=0
select(
func.sum(
case(
(aliased_reaction.kind == ReactionKind.LIKE.value, 1),
(aliased_reaction.kind == ReactionKind.DISLIKE.value, -1),
else_=0,
)
).label("rating")
)
.where(
and_(
aliased_reaction.shout == Shout.id,
aliased_reaction.reply_to.is_(None),
aliased_reaction.deleted_at.is_(None),
)
).label("rating"))
.where(and_(
aliased_reaction.shout == Shout.id,
aliased_reaction.reply_to.is_(None),
aliased_reaction.deleted_at.is_(None)
))
)
.scalar_subquery()
.correlate(Shout)
)
# Main query using the subqueries
q = (
select(
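Note on the query_shouts changes: both counters use the same SQLAlchemy idiom, a scalar subquery explicitly correlated to the outer Shout row, with case() mapping reaction kinds to +1/-1 for the rating; the diff only reformats it. A runnable sketch of that idiom on toy tables; Post and Vote are illustrative names, assuming SQLAlchemy 2.x:

from sqlalchemy import Column, ForeignKey, Integer, String, case, create_engine, func, select
from sqlalchemy.orm import DeclarativeBase, Session

class Base(DeclarativeBase):
    pass

class Post(Base):
    __tablename__ = "post"
    id = Column(Integer, primary_key=True)
    title = Column(String)

class Vote(Base):
    __tablename__ = "vote"
    id = Column(Integer, primary_key=True)
    post = Column(ForeignKey("post.id"))
    kind = Column(String)  # "LIKE" or "DISLIKE"

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

# scalar subquery: sum of +1/-1 per vote, correlated to the outer Post row
rating_subquery = (
    select(
        func.sum(
            case(
                (Vote.kind == "LIKE", 1),
                (Vote.kind == "DISLIKE", -1),
                else_=0,
            )
        ).label("rating")
    )
    .where(Vote.post == Post.id)
    .scalar_subquery()
    .correlate(Post)
)

with Session(engine) as session:
    for post, rating in session.execute(select(Post, rating_subquery)):
        print(post.title, rating)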

View File

@@ -1,16 +1,17 @@
import json
import math
import time
import traceback
import warnings
import math
from typing import Any, Callable, Dict, TypeVar
from sqlalchemy import JSON, Column, Engine, Integer, create_engine, event, exc, inspect
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, configure_mappers
from sqlalchemy.sql.schema import Table
from utils.logger import root_logger as logger
from settings import DB_URL
from settings import DB_URL
from utils.logger import root_logger as logger
if DB_URL.startswith("postgres"):
engine = create_engine(
@@ -23,11 +24,7 @@ if DB_URL.startswith("postgres"):
connect_args={"sslmode": "disable"},
)
else:
engine = create_engine(
DB_URL,
echo=False,
connect_args={"check_same_thread": False}
)
engine = create_engine(DB_URL, echo=False, connect_args={"check_same_thread": False})
inspector = inspect(engine)
configure_mappers()
@@ -110,7 +107,7 @@ warnings.simplefilter("always", exc.SAWarning)
# Function for extracting the SQL query from the context
def get_statement_from_context(context):
query = ''
query = ""
compiled = context.compiled
if compiled:
compiled_statement = compiled.string
@@ -148,6 +145,6 @@ def after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
query = query.split(query_end)[0] + query_end
logger.debug(query)
elapsed_n = math.floor(elapsed)
logger.debug('*' * (elapsed_n))
logger.debug("*" * (elapsed_n))
logger.debug(f"{elapsed:.3f} s")
del conn.cursor_id # Remove the cursor id after execution
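Note on the services/db.py changes: the after_cursor_execute hook above is the standard SQLAlchemy way to time queries through engine events. A small self-contained sketch of the same idea; it tracks start times in the conn.info dictionary as in the SQLAlchemy docs, rather than the conn.cursor_id bookkeeping the project uses:

import math
import time

from sqlalchemy import create_engine, event, text

engine = create_engine("sqlite://")

@event.listens_for(engine, "before_cursor_execute")
def before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
    # remember when the statement started
    conn.info.setdefault("query_start_time", []).append(time.time())

@event.listens_for(engine, "after_cursor_execute")
def after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
    elapsed = time.time() - conn.info["query_start_time"].pop(-1)
    # one '*' per whole elapsed second, like the bar logged above
    print("*" * math.floor(elapsed))
    print(f"{elapsed:.3f} s", statement)

with engine.connect() as connection:
    connection.execute(text("SELECT 1"))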

View File

@@ -1,8 +1,9 @@
import logging
import sentry_sdk
from sentry_sdk.integrations.ariadne import AriadneIntegration
from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
from sentry_sdk.integrations.starlette import StarletteIntegration
import logging
from settings import GLITCHTIP_DSN
@@ -12,6 +13,7 @@ sentry_logging_handler = sentry_sdk.integrations.logging.SentryHandler(level=log
logger.addHandler(sentry_logging_handler)
logger.setLevel(logging.DEBUG) # More verbose logging
def start_sentry():
try:
logger.info("[services.sentry] Sentry init started...")
@@ -26,5 +28,3 @@ def start_sentry():
logger.info("[services.sentry] Sentry initialized successfully.")
except Exception as e:
logger.warning("[services.sentry] Failed to initialize Sentry", exc_info=True)