commit 7c86d95f5e
parent 5c40ab3d00
.gitignore (vendored) | 3
@@ -153,3 +153,6 @@ poetry.lock
 .zed
+dokku_config
+*.db
+*.sqlite3
main.py | 16
@@ -16,6 +16,8 @@ from services.search import search_service
 from services.viewed import ViewedStorage
 from services.webhook import WebhookEndpoint
 from settings import DEV_SERVER_PID_FILE_NAME, MODE
+from services.db import engine, create_table_if_not_exists
+from orm import author, notification, shout, topic, reaction, community  #, collection, invite
 
 import_module("resolvers")
 schema = make_executable_schema(load_schema_from_path("schema/"), resolvers)
@@ -30,6 +32,19 @@ async def start():
     print(f"[main] process started in {MODE} mode")
 
 
+def create_all_tables():
+    for model in [author.Author, author.AuthorRating, author.AuthorFollower,
+                  notification.Notification, notification.NotificationSeen,
+                  shout.Shout, shout.ShoutAuthor, shout.ShoutTopic, shout.ShoutCommunity,
+                  topic.Topic, topic.TopicFollower,
+                  reaction.Reaction,
+                  community.Community, community.CommunityFollower,
+                  # collection.Collection, collection.ShoutCollection,
+                  # invite.Invite
+                  ]:
+        create_table_if_not_exists(engine, model)
+
+
 # main starlette app object with ariadne mounted in root
 app = Starlette(
     routes=[
@@ -37,6 +52,7 @@ app = Starlette(
         Route("/new-author", WebhookEndpoint),
     ],
     on_startup=[
+        create_all_tables,
         redis.connect,
         precache_data,
         ViewedStorage.init,
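
For context, Starlette calls every entry in `on_startup` once when the server boots, so registering the synchronous `create_all_tables` here ensures the tables exist before the first request. A minimal, self-contained sketch of the same wiring (the route and handler below are placeholders, not part of this commit):

    # Placeholder app showing the on_startup hook pattern used above.
    from starlette.applications import Starlette
    from starlette.responses import PlainTextResponse
    from starlette.routing import Route


    def create_all_tables():
        # In the real app this loops over the ORM models and calls
        # create_table_if_not_exists(engine, model) for each one.
        print("[main] ensuring tables exist")


    async def healthcheck(request):
        return PlainTextResponse("ok")


    app = Starlette(
        routes=[Route("/health", healthcheck)],
        on_startup=[create_all_tables],  # runs once at server start
    )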
orm/notification.py
@@ -2,11 +2,10 @@ import time
 from enum import Enum as Enumeration
 
 from sqlalchemy import JSON, Column, ForeignKey, Integer, String
-from sqlalchemy.exc import ProgrammingError
 from sqlalchemy.orm import relationship
 
 from orm.author import Author
-from services.db import Base, engine
+from services.db import Base, create_table_if_not_exists, engine
 from utils.logger import root_logger as logger
 
 
@@ -42,10 +41,5 @@ class Notification(Base):
 
     seen = relationship(lambda: Author, secondary="notification_seen")
 
-
-try:
-    Notification.__table__.create(engine)
-    logger.info("Table `notification` was created.")
-except ProgrammingError:
-    # Handle the exception here, for example by printing a message
-    logger.info("Table `notification` already exists.")
+# Call the table creation helper
+create_table_if_not_exists(engine, Notification)
services/db.py
@@ -12,16 +12,22 @@ from utils.logger import root_logger as logger
 from settings import DB_URL
 
 
 # SQLAlchemy database connection
-engine = create_engine(
-    DB_URL,
-    echo=False,
-    pool_size=10,
-    max_overflow=20,
-    pool_timeout=30,  # wait time for a free connection
-    pool_recycle=1800,  # connection lifetime
-    connect_args={"sslmode": "disable"},
-)
+if DB_URL.startswith("postgres"):
+    engine = create_engine(
+        DB_URL,
+        echo=False,
+        pool_size=10,
+        max_overflow=20,
+        pool_timeout=30,  # wait time for a free connection
+        pool_recycle=1800,  # connection lifetime
+        connect_args={"sslmode": "disable"},
+    )
+else:
+    engine = create_engine(
+        DB_URL,
+        echo=False,
+        connect_args={"check_same_thread": False}
+    )
 inspector = inspect(engine)
 configure_mappers()
 T = TypeVar("T")
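
The two branches differ because the connect arguments are driver-specific: `sslmode` is a libpq/Postgres parameter that SQLite would reject, while `check_same_thread=False` is SQLite-only and lets pooled connections be used outside the thread that created them. A small sketch of the same scheme-based dispatch (the in-memory URL is a stand-in for the real `DB_URL`):

    # Dispatch on the URL scheme, as in services/db.py; the URL is a stand-in.
    from sqlalchemy import create_engine, text

    DB_URL = "sqlite://"  # in-memory stand-in; could also be a postgresql:// URL

    if DB_URL.startswith("postgres"):
        # Pool sizing and sslmode only apply to the Postgres driver.
        engine = create_engine(DB_URL, pool_size=10, max_overflow=20,
                               connect_args={"sslmode": "disable"})
    else:
        # SQLite: allow connections to cross threads.
        engine = create_engine(DB_URL, connect_args={"check_same_thread": False})

    with engine.connect() as conn:
        print(conn.execute(text("select 1")).scalar())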
@@ -29,6 +35,15 @@ REGISTRY: Dict[str, type] = {}
 FILTERED_FIELDS = ["_sa_instance_state", "search_vector"]
 
 
+def create_table_if_not_exists(engine, table):
+    inspector = inspect(engine)
+    if not inspector.has_table(table.__tablename__):
+        table.__table__.create(engine)
+        logger.info(f"Table '{table.__tablename__}' created.")
+    else:
+        logger.info(f"Table '{table.__tablename__}' already exists.")
+
+
 # noinspection PyUnusedLocal
 def local_session(src=""):
     return Session(bind=engine, expire_on_commit=False)
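
A hedged usage sketch of the helper above, run against an in-memory SQLite engine with a throwaway model (`Example` is hypothetical, not part of this repository); the second call is a no-op because the inspector already reports the table:

    # Standalone demo of the create_table_if_not_exists pattern.
    import logging

    from sqlalchemy import Column, Integer, String, create_engine, inspect
    from sqlalchemy.orm import declarative_base

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    Base = declarative_base()
    engine = create_engine("sqlite://")  # in-memory


    class Example(Base):  # hypothetical model for the demo
        __tablename__ = "example"
        id = Column(Integer, primary_key=True)
        title = Column(String)


    def create_table_if_not_exists(engine, table):
        inspector = inspect(engine)
        if not inspector.has_table(table.__tablename__):
            table.__table__.create(engine)
            logger.info(f"Table '{table.__tablename__}' created.")
        else:
            logger.info(f"Table '{table.__tablename__}' already exists.")


    create_table_if_not_exists(engine, Example)  # creates the table
    create_table_if_not_exists(engine, Example)  # logs "already exists"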
@@ -77,7 +92,7 @@ class Base(declarative_base()):
 
 
 # make_searchable(Base.metadata)
-Base.metadata.create_all(bind=engine)
+# Base.metadata.create_all(bind=engine)
 
 
 # Function that prints a full traceback on warnings
@@ -94,21 +109,12 @@ warnings.simplefilter("always", exc.SAWarning)
 
 # Extract the SQL statement from the execution context
 def get_statement_from_context(context):
-    query = None
-    compiled_statement = context.compiled.string
-    compiled_parameters = context.compiled.params
-    if compiled_statement:
-        if compiled_parameters:
-            try:
-                # Safe parameter formatting
-                query = compiled_statement % compiled_parameters
-            except Exception as e:
-                logger.error(f"Error formatting query: {e}")
-        else:
-            query = compiled_statement
-    if query:
-        query = query.replace("\n", " ").replace("  ", " ").replace("  ", " ").strip()
-    return query
+    if context is None or not hasattr(context, 'compiled'):
+        return None
+    compiled = context.compiled
+    if compiled is None or not hasattr(compiled, 'string'):
+        return None
+    return compiled.string
 
 
 # Event handler that runs before a query is executed
@@ -121,15 +127,22 @@ def before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
 # Event handler that runs after a query is executed
 @event.listens_for(Engine, "after_cursor_execute")
 def after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
-    if hasattr(conn, "cursor_id") and conn.cursor_id == id(cursor):
-        query = get_statement_from_context(context)
-        if query:
-            elapsed = time.time() - conn.query_start_time
-            if elapsed > 1:
-                query_end = query[-16:]
-                query = query.split(query_end)[0] + query_end
-                logger.debug(query)
-                elapsed_n = math.floor(elapsed)
-                logger.debug('*' * (elapsed_n))
-                logger.debug(f"{elapsed:.3f} s")
-        del conn.cursor_id  # drop the cursor id after execution
+    query = get_statement_from_context(context)
+    if query is not None:
+        logger.debug(query)
+
+
+def create_tables_if_not_exist(engine, base):
+    inspector = inspect(engine)
+    for table_name, table in base.metadata.tables.items():
+        try:
+            if not inspector.has_table(table_name):
+                table.create(engine)
+                logger.info(f"Table '{table_name}' created.")
+            else:
+                logger.info(f"Table '{table_name}' already exists.")
+        except Exception as e:
+            logger.error(f"Error while creating table '{table_name}': {str(e)}")
+
+# Replaces Base.metadata.create_all(bind=engine) with:
+create_tables_if_not_exist(engine, Base)
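
The rewritten handler drops the old per-cursor timing and only logs the compiled statement. If query timing is still wanted, the usual SQLAlchemy recipe pairs the same two engine events and stores the start time on `Connection.info`; a self-contained sketch (not this repository's code):

    # Timing queries with SQLAlchemy engine events (illustrative only).
    import time

    from sqlalchemy import create_engine, event, text
    from sqlalchemy.engine import Engine

    engine = create_engine("sqlite://")


    @event.listens_for(Engine, "before_cursor_execute")
    def _start_timer(conn, cursor, statement, parameters, context, executemany):
        # Stash the start time on the connection's info dict.
        conn.info.setdefault("query_start_time", []).append(time.time())


    @event.listens_for(Engine, "after_cursor_execute")
    def _log_elapsed(conn, cursor, statement, parameters, context, executemany):
        elapsed = time.time() - conn.info["query_start_time"].pop()
        print(f"{elapsed:.3f} s  {statement}")


    with engine.connect() as conn:
        conn.execute(text("select 1"))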
settings.py
@@ -5,7 +5,7 @@ PORT = 8000
 DB_URL = (
     environ.get("DATABASE_URL", "").replace("postgres://", "postgresql://")
     or environ.get("DB_URL", "").replace("postgres://", "postgresql://")
-    or "postgresql://postgres@localhost:5432/discoursio"
+    or "sqlite:///discoursio-db.sqlite3"
 )
 REDIS_URL = environ.get("REDIS_URL") or "redis://127.0.0.1"
 API_BASE = environ.get("API_BASE") or ""
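
Assuming the Postgres literal is the line dropped here, the change also removes a dead branch: in an `or` chain the first truthy operand wins, so a trailing fallback after a hard-coded URL could never be reached. With only the SQLite literal left, it becomes the effective default when neither environment variable is set:

    # Fallback chain as in settings.py: the first truthy value wins.
    from os import environ

    DB_URL = (
        environ.get("DATABASE_URL", "").replace("postgres://", "postgresql://")
        or environ.get("DB_URL", "").replace("postgres://", "postgresql://")
        or "sqlite:///discoursio-db.sqlite3"  # reached only if both vars are empty
    )
    print(DB_URL)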
utils/logger.py
@@ -66,8 +66,3 @@ root_logger = logging.getLogger()
 if not root_logger.hasHandlers():
     root_logger.setLevel(logging.DEBUG)
     root_logger.addHandler(stream)
-
-ignore_logs = ["_trace", "httpx", "_client", "_trace.atrace", "aiohttp", "_client", "base"]
-for lgr in ignore_logs:
-    loggr = logging.getLogger(lgr)
-    loggr.setLevel(logging.INFO)
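
With the per-logger overrides removed, third-party loggers inherit the root DEBUG level again. Should any of them become too noisy, the standard remedy is the same pattern that was dropped, applied selectively:

    # Quiet a chatty third-party logger by name (example logger name).
    import logging

    logging.basicConfig(level=logging.DEBUG)
    logging.getLogger("httpx").setLevel(logging.INFO)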