linted+fmt

Untone 2025-05-29 12:37:39 +03:00
parent d4c16658bd
commit 4070f4fcde
49 changed files with 835 additions and 983 deletions


@ -2,19 +2,19 @@ from starlette.requests import Request
from starlette.responses import JSONResponse, RedirectResponse
from starlette.routing import Route
from auth.sessions import SessionManager
from auth.internal import verify_internal_auth
from auth.orm import Author
from auth.sessions import SessionManager
from services.db import local_session
from utils.logger import root_logger as logger
from settings import (
SESSION_COOKIE_NAME,
SESSION_COOKIE_HTTPONLY,
SESSION_COOKIE_SECURE,
SESSION_COOKIE_SAMESITE,
SESSION_COOKIE_MAX_AGE,
SESSION_COOKIE_NAME,
SESSION_COOKIE_SAMESITE,
SESSION_COOKIE_SECURE,
SESSION_TOKEN_HEADER,
)
from utils.logger import root_logger as logger
async def logout(request: Request):
@ -74,7 +74,7 @@ async def logout(request: Request):
key=SESSION_COOKIE_NAME,
secure=SESSION_COOKIE_SECURE,
httponly=SESSION_COOKIE_HTTPONLY,
samesite=SESSION_COOKIE_SAMESITE
samesite=SESSION_COOKIE_SAMESITE,
)
logger.info("[auth] logout: Cookie успешно удалена")
@ -147,9 +147,7 @@ async def refresh_token(request: Request):
if not new_token:
logger.error(f"[auth] refresh_token: Не удалось обновить токен для пользователя {user_id}")
return JSONResponse(
{"success": False, "error": "Не удалось обновить токен"}, status_code=500
)
return JSONResponse({"success": False, "error": "Не удалось обновить токен"}, status_code=500)
# Создаем ответ
response = JSONResponse(
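
The cookie handling reflowed in these hunks is the stock Starlette API; a minimal sketch using the same settings constants (the handler body is illustrative, not the project's exact code):

from starlette.responses import JSONResponse
from settings import (
    SESSION_COOKIE_HTTPONLY,
    SESSION_COOKIE_MAX_AGE,
    SESSION_COOKIE_NAME,
    SESSION_COOKIE_SAMESITE,
    SESSION_COOKIE_SECURE,
)

def attach_session_cookie(token: str) -> JSONResponse:
    # Same keyword style as the set_cookie/delete_cookie calls above
    response = JSONResponse({"success": True})
    response.set_cookie(
        key=SESSION_COOKIE_NAME,
        value=token,
        httponly=SESSION_COOKIE_HTTPONLY,
        secure=SESSION_COOKIE_SECURE,
        samesite=SESSION_COOKIE_SAMESITE,
        max_age=SESSION_COOKIE_MAX_AGE,
    )
    return response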


@ -1,4 +1,4 @@
from typing import Dict, List, Optional, Set, Any
from typing import Any, Dict, List, Optional, Set
from pydantic import BaseModel, Field


@ -1,19 +1,21 @@
from functools import wraps
from typing import Callable, Any, Dict, Optional
from typing import Any, Callable, Dict, Optional
from graphql import GraphQLError, GraphQLResolveInfo
from sqlalchemy import exc
from auth.credentials import AuthCredentials
from services.db import local_session
from auth.orm import Author
from auth.exceptions import OperationNotAllowed
from utils.logger import root_logger as logger
from settings import ADMIN_EMAILS as ADMIN_EMAILS_LIST, SESSION_TOKEN_HEADER, SESSION_COOKIE_NAME
from auth.sessions import SessionManager
from auth.jwtcodec import JWTCodec, InvalidToken, ExpiredToken
from auth.tokenstorage import TokenStorage
from services.redis import redis
from auth.internal import authenticate
from auth.jwtcodec import ExpiredToken, InvalidToken, JWTCodec
from auth.orm import Author
from auth.sessions import SessionManager
from auth.tokenstorage import TokenStorage
from services.db import local_session
from services.redis import redis
from settings import ADMIN_EMAILS as ADMIN_EMAILS_LIST
from settings import SESSION_COOKIE_NAME, SESSION_TOKEN_HEADER
from utils.logger import root_logger as logger
ADMIN_EMAILS = ADMIN_EMAILS_LIST.split(",")
@ -34,10 +36,7 @@ def get_safe_headers(request: Any) -> Dict[str, str]:
if hasattr(request, "scope") and isinstance(request.scope, dict):
scope_headers = request.scope.get("headers", [])
if scope_headers:
headers.update({
k.decode("utf-8").lower(): v.decode("utf-8")
for k, v in scope_headers
})
headers.update({k.decode("utf-8").lower(): v.decode("utf-8") for k, v in scope_headers})
logger.debug(f"[decorators] Получены заголовки из request.scope: {len(headers)}")
# Второй приоритет: метод headers() или атрибут headers
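
Stand-alone, the collapsed comprehension decodes ASGI's byte-pair headers into a lowercase str dict (sample data illustrative):

scope_headers = [(b"Content-Type", b"application/json"), (b"X-Real-IP", b"127.0.0.1")]
headers = {k.decode("utf-8").lower(): v.decode("utf-8") for k, v in scope_headers}
# -> {"content-type": "application/json", "x-real-ip": "127.0.0.1"}
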
@ -177,7 +176,7 @@ async def validate_graphql_context(info: Any) -> None:
# Если токен не найден, возвращаем ошибку авторизации
client_info = {
"ip": getattr(request.client, "host", "unknown") if hasattr(request, "client") else "unknown",
"headers": get_safe_headers(request)
"headers": get_safe_headers(request),
}
logger.warning(f"[decorators] Токен авторизации не найден: {client_info}")
raise GraphQLError("Unauthorized - please login")
@ -199,11 +198,7 @@ async def validate_graphql_context(info: Any) -> None:
# Создаем объект авторизации
auth_cred = AuthCredentials(
author_id=author.id,
scopes=scopes,
logged_in=True,
email=author.email,
token=auth_state.token
author_id=author.id, scopes=scopes, logged_in=True, email=author.email, token=auth_state.token
)
# Устанавливаем auth в request
@ -235,6 +230,7 @@ def admin_auth_required(resolver: Callable) -> Callable:
... async def admin_resolver(root, info, **kwargs):
... return "Admin data"
"""
@wraps(resolver)
async def wrapper(root: Any = None, info: Any = None, **kwargs):
try:
@ -264,11 +260,13 @@ def admin_auth_required(resolver: Callable) -> Callable:
return await resolver(root, info, **kwargs)
# Проверяем роли пользователя
admin_roles = ['admin', 'super']
admin_roles = ["admin", "super"]
user_roles = [role.id for role in author.roles] if author.roles else []
if any(role in admin_roles for role in user_roles):
logger.info(f"Admin access granted for {author.email} (ID: {author.id}) with role: {user_roles}")
logger.info(
f"Admin access granted for {author.email} (ID: {author.id}) with role: {user_roles}"
)
return await resolver(root, info, **kwargs)
logger.warning(f"Admin access denied for {author.email} (ID: {author.id}). Roles: {user_roles}")
@ -326,19 +324,25 @@ def permission_required(resource: str, operation: str, func):
return await func(parent, info, *args, **kwargs)
# Проверяем роли пользователя
admin_roles = ['admin', 'super']
admin_roles = ["admin", "super"]
user_roles = [role.id for role in author.roles] if author.roles else []
if any(role in admin_roles for role in user_roles):
logger.debug(f"[permission_required] Пользователь с ролью администратора {author.email} имеет все разрешения")
logger.debug(
f"[permission_required] Пользователь с ролью администратора {author.email} имеет все разрешения"
)
return await func(parent, info, *args, **kwargs)
# Проверяем разрешение
if not author.has_permission(resource, operation):
logger.warning(f"[permission_required] У пользователя {author.email} нет разрешения {operation} на {resource}")
logger.warning(
f"[permission_required] У пользователя {author.email} нет разрешения {operation} на {resource}"
)
raise OperationNotAllowed(f"No permission for {operation} on {resource}")
logger.debug(f"[permission_required] Пользователь {author.email} имеет разрешение {operation} на {resource}")
logger.debug(
f"[permission_required] Пользователь {author.email} имеет разрешение {operation} на {resource}"
)
return await func(parent, info, *args, **kwargs)
except exc.NoResultFound:
logger.error(f"[permission_required] Пользователь с ID {auth.author_id} не найден в базе данных")
@ -357,6 +361,7 @@ def login_accepted(func):
Args:
func: Декорируемая функция
"""
@wraps(func)
async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
try:


@ -1,9 +1,11 @@
from ariadne.asgi.handlers import GraphQLHTTPHandler
from starlette.requests import Request
from starlette.responses import Response, JSONResponse
from starlette.responses import JSONResponse, Response
from auth.middleware import auth_middleware
from utils.logger import root_logger as logger
class EnhancedGraphQLHTTPHandler(GraphQLHTTPHandler):
"""
Улучшенный GraphQL HTTP обработчик с поддержкой cookie и авторизации.


@ -1,13 +1,12 @@
from binascii import hexlify
from hashlib import sha256
from typing import Any, Dict, TypeVar, TYPE_CHECKING
from typing import TYPE_CHECKING, Any, Dict, TypeVar
from passlib.hash import bcrypt
from auth.exceptions import ExpiredToken, InvalidToken, InvalidPassword
from auth.exceptions import ExpiredToken, InvalidPassword, InvalidToken
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
from services.db import local_session
# Для типизации
@ -86,9 +85,7 @@ class Identity:
# Проверим исходный пароль в orm_author
if not orm_author.password:
logger.warning(
f"[auth.identity] Пароль в исходном объекте автора пуст: email={orm_author.email}"
)
logger.warning(f"[auth.identity] Пароль в исходном объекте автора пуст: email={orm_author.email}")
raise InvalidPassword("Пароль не установлен для данного пользователя")
# Проверяем пароль напрямую, не используя dict()


@ -1,22 +1,22 @@
from typing import Optional, Tuple
import time
from typing import Any
from typing import Any, Optional, Tuple
from sqlalchemy.orm import exc
from starlette.authentication import AuthenticationBackend, BaseUser, UnauthenticatedUser
from starlette.requests import HTTPConnection
from auth.credentials import AuthCredentials
from auth.exceptions import ExpiredToken, InvalidToken
from auth.jwtcodec import JWTCodec
from auth.orm import Author
from auth.sessions import SessionManager
from services.db import local_session
from settings import SESSION_TOKEN_HEADER, SESSION_COOKIE_NAME, ADMIN_EMAILS as ADMIN_EMAILS_LIST
from utils.logger import root_logger as logger
from auth.jwtcodec import JWTCodec
from auth.exceptions import ExpiredToken, InvalidToken
from auth.state import AuthState
from auth.tokenstorage import TokenStorage
from services.db import local_session
from services.redis import redis
from settings import ADMIN_EMAILS as ADMIN_EMAILS_LIST
from settings import SESSION_COOKIE_NAME, SESSION_TOKEN_HEADER
from utils.logger import root_logger as logger
ADMIN_EMAILS = ADMIN_EMAILS_LIST.split(",")
@ -24,12 +24,8 @@ ADMIN_EMAILS = ADMIN_EMAILS_LIST.split(",")
class AuthenticatedUser(BaseUser):
"""Аутентифицированный пользователь для Starlette"""
def __init__(self,
user_id: str,
username: str = "",
roles: list = None,
permissions: dict = None,
token: str = None
def __init__(
self, user_id: str, username: str = "", roles: list = None, permissions: dict = None, token: str = None
):
self.user_id = user_id
self.username = username
@ -112,9 +108,7 @@ class InternalAuthentication(AuthenticationBackend):
if author.is_locked():
logger.debug(f"[auth.authenticate] Аккаунт заблокирован: {author.id}")
return AuthCredentials(
scopes={}, error_message="Account is locked"
), UnauthenticatedUser()
return AuthCredentials(scopes={}, error_message="Account is locked"), UnauthenticatedUser()
# Получаем разрешения из ролей
scopes = author.get_permissions()
@ -128,11 +122,7 @@ class InternalAuthentication(AuthenticationBackend):
# Создаем объекты авторизации с сохранением токена
credentials = AuthCredentials(
author_id=author.id,
scopes=scopes,
logged_in=True,
email=author.email,
token=token
author_id=author.id, scopes=scopes, logged_in=True, email=author.email, token=token
)
user = AuthenticatedUser(
@ -140,7 +130,7 @@ class InternalAuthentication(AuthenticationBackend):
username=author.slug or author.email or "",
roles=roles,
permissions=scopes,
token=token
token=token,
)
logger.debug(f"[auth.authenticate] Успешная аутентификация: {author.email}")
@ -190,8 +180,10 @@ async def verify_internal_auth(token: str) -> Tuple[str, list, bool]:
logger.debug(f"[verify_internal_auth] Роли пользователя: {roles}")
# Определяем, является ли пользователь администратором
is_admin = any(role in ['admin', 'super'] for role in roles) or author.email in ADMIN_EMAILS
logger.debug(f"[verify_internal_auth] Пользователь {author.id} {'является' if is_admin else 'не является'} администратором")
is_admin = any(role in ["admin", "super"] for role in roles) or author.email in ADMIN_EMAILS
logger.debug(
f"[verify_internal_auth] Пользователь {author.id} {'является' if is_admin else 'не является'} администратором"
)
return str(author.id), roles, is_admin
except exc.NoResultFound:
@ -304,16 +296,14 @@ async def authenticate(request: Any) -> AuthState:
# Создаем объект авторизации
auth_cred = AuthCredentials(
author_id=author.id,
scopes=scopes,
logged_in=True,
email=author.email,
token=token
author_id=author.id, scopes=scopes, logged_in=True, email=author.email, token=token
)
# Устанавливаем auth в request
setattr(request, "auth", auth_cred)
logger.debug(f"[auth.authenticate] Авторизационные данные установлены в request.auth для {payload.user_id}")
logger.debug(
f"[auth.authenticate] Авторизационные данные установлены в request.auth для {payload.user_id}"
)
except Exception as e:
logger.error(f"[auth.authenticate] Ошибка при установке auth в request: {e}")


@ -1,12 +1,13 @@
from datetime import datetime, timezone, timedelta
from datetime import datetime, timedelta, timezone
from typing import Optional
import jwt
from pydantic import BaseModel
from typing import Optional
from utils.logger import root_logger as logger
from auth.exceptions import ExpiredToken, InvalidToken
from settings import JWT_ALGORITHM, JWT_SECRET_KEY
from utils.logger import root_logger as logger
class TokenPayload(BaseModel):
user_id: str
@ -92,7 +93,9 @@ class JWTCodec:
try:
r = TokenPayload(**payload)
logger.debug(f"[JWTCodec.decode] Создан объект TokenPayload: user_id={r.user_id}, username={r.username}")
logger.debug(
f"[JWTCodec.decode] Создан объект TokenPayload: user_id={r.user_id}, username={r.username}"
)
return r
except Exception as e:
logger.error(f"[JWTCodec.decode] Ошибка при создании TokenPayload: {e}")


@ -1,13 +1,23 @@
"""
Middleware для обработки авторизации в GraphQL запросах
"""
from typing import Any, Dict
from starlette.datastructures import Headers
from starlette.requests import Request
from starlette.responses import JSONResponse, Response
from starlette.datastructures import Headers
from starlette.types import ASGIApp, Scope, Receive, Send
from starlette.types import ASGIApp, Receive, Scope, Send
from settings import (
SESSION_COOKIE_HTTPONLY,
SESSION_COOKIE_MAX_AGE,
SESSION_COOKIE_NAME,
SESSION_COOKIE_SAMESITE,
SESSION_COOKIE_SECURE,
SESSION_TOKEN_HEADER,
)
from utils.logger import root_logger as logger
from settings import SESSION_COOKIE_HTTPONLY, SESSION_COOKIE_MAX_AGE, SESSION_COOKIE_SAMESITE, SESSION_COOKIE_SECURE, SESSION_TOKEN_HEADER, SESSION_COOKIE_NAME
class AuthMiddleware:
@ -93,11 +103,7 @@ class AuthMiddleware:
scope["headers"] = new_headers
# Также добавляем информацию о типе аутентификации для дальнейшего использования
scope["auth"] = {
"type": "bearer",
"token": token,
"source": token_source
}
scope["auth"] = {"type": "bearer", "token": token, "source": token_source}
logger.debug(f"[middleware] Токен добавлен в scope для аутентификации из источника: {token_source}")
else:
logger.debug(f"[middleware] Токен не найден ни в заголовке, ни в cookie")
@ -190,10 +196,13 @@ class AuthMiddleware:
# Проверяем наличие response в контексте
if "response" not in context or not context["response"]:
from starlette.responses import JSONResponse
context["response"] = JSONResponse({})
logger.debug("[middleware] Создан новый response объект в контексте GraphQL")
logger.debug(f"[middleware] GraphQL resolve: контекст подготовлен, добавлены расширения для работы с cookie")
logger.debug(
f"[middleware] GraphQL resolve: контекст подготовлен, добавлены расширения для работы с cookie"
)
return await next(root, info, *args, **kwargs)
except Exception as e:
@ -220,7 +229,8 @@ class AuthMiddleware:
if isinstance(result, JSONResponse):
try:
import json
result_data = json.loads(result.body.decode('utf-8'))
result_data = json.loads(result.body.decode("utf-8"))
except Exception as e:
logger.error(f"[process_result] Не удалось извлечь данные из JSONResponse: {str(e)}")
else:
@ -254,7 +264,9 @@ class AuthMiddleware:
samesite=SESSION_COOKIE_SAMESITE,
max_age=SESSION_COOKIE_MAX_AGE,
)
logger.debug(f"[graphql_handler] Установлена cookie {SESSION_COOKIE_NAME} для операции {op_name}")
logger.debug(
f"[graphql_handler] Установлена cookie {SESSION_COOKIE_NAME} для операции {op_name}"
)
# Если это операция logout, удаляем cookie
elif op_name == "logout":
@ -262,7 +274,7 @@ class AuthMiddleware:
key=SESSION_COOKIE_NAME,
secure=SESSION_COOKIE_SECURE,
httponly=SESSION_COOKIE_HTTPONLY,
samesite=SESSION_COOKIE_SAMESITE
samesite=SESSION_COOKIE_SAMESITE,
)
logger.debug(f"[graphql_handler] Удалена cookie {SESSION_COOKIE_NAME} для операции {op_name}")
except Exception as e:
@ -270,5 +282,6 @@ class AuthMiddleware:
return response
# Создаем единый экземпляр AuthMiddleware для использования с GraphQL
auth_middleware = AuthMiddleware(lambda scope, receive, send: None)
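
Condensed to request level, the token lookup this middleware performs looks like the following; header-first priority and the source labels are assumptions read off the log lines above:

from typing import Optional, Tuple
from starlette.requests import Request
from settings import SESSION_COOKIE_NAME, SESSION_TOKEN_HEADER

def extract_token(request: Request) -> Tuple[Optional[str], str]:
    # Check the auth header, then fall back to the session cookie
    header_value = request.headers.get(SESSION_TOKEN_HEADER, "")
    if header_value:
        return header_value.removeprefix("Bearer ").strip(), "header"
    cookie_value = request.cookies.get(SESSION_COOKIE_NAME)
    if cookie_value:
        return cookie_value, "cookie"
    return None, "none"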


@ -1,11 +1,12 @@
import time
from secrets import token_urlsafe
from authlib.integrations.starlette_client import OAuth
from authlib.oauth2.rfc7636 import create_s256_code_challenge
from starlette.responses import RedirectResponse, JSONResponse
from secrets import token_urlsafe
import time
from starlette.responses import JSONResponse, RedirectResponse
from auth.tokenstorage import TokenStorage
from auth.orm import Author
from auth.tokenstorage import TokenStorage
from services.db import local_session
from settings import FRONTEND_URL, OAUTH_CLIENTS
@ -129,9 +130,7 @@ async def oauth_callback(request):
return JSONResponse({"error": "Provider not configured"}, status_code=400)
# Получаем токен с PKCE verifier
token = await client.authorize_access_token(
request, code_verifier=request.session.get("code_verifier")
)
token = await client.authorize_access_token(request, code_verifier=request.session.get("code_verifier"))
# Получаем профиль пользователя
profile = await get_user_profile(provider, client, token)
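
For context on the PKCE imports above: the verifier is a random secret kept in the session, and create_s256_code_challenge derives the challenge sent with the authorize redirect. A minimal sketch:

from secrets import token_urlsafe
from authlib.oauth2.rfc7636 import create_s256_code_challenge

code_verifier = token_urlsafe(48)  # stored in request.session["code_verifier"]
code_challenge = create_s256_code_challenge(code_verifier)  # sent in the authorize request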


@ -1,5 +1,6 @@
import time
from typing import Dict, Set
from sqlalchemy import JSON, Boolean, Column, ForeignKey, Index, Integer, String
from sqlalchemy.orm import relationship
@ -180,7 +181,7 @@ class Author(Base):
# )
# Список защищенных полей, которые видны только владельцу и администраторам
_protected_fields = ['email', 'password', 'provider_access_token', 'provider_refresh_token']
_protected_fields = ["email", "password", "provider_access_token", "provider_refresh_token"]
@property
def is_authenticated(self) -> bool:
@ -252,11 +253,11 @@ class Author(Base):
result = {c.name: getattr(self, c.name) for c in self.__table__.columns}
# Добавляем роли как список идентификаторов и названий
if hasattr(self, 'roles'):
result['roles'] = []
if hasattr(self, "roles"):
result["roles"] = []
for role in self.roles:
if isinstance(role, dict):
result['roles'].append(role.get('id'))
result["roles"].append(role.get("id"))
# скрываем защищенные поля
if not access:


@ -9,9 +9,9 @@ from typing import List, Union
from sqlalchemy.orm import Session
from auth.orm import Author, Role, RolePermission, Permission
from settings import ADMIN_EMAILS as ADMIN_EMAILS_LIST
from auth.orm import Author, Permission, Role, RolePermission
from orm.community import Community, CommunityFollower, CommunityRole
from settings import ADMIN_EMAILS as ADMIN_EMAILS_LIST
ADMIN_EMAILS = ADMIN_EMAILS_LIST.split(",")
@ -110,9 +110,7 @@ class ContextualPermissionCheck:
return has_permission
@staticmethod
def get_user_community_roles(
session: Session, author_id: int, community_slug: str
) -> List[CommunityRole]:
def get_user_community_roles(session: Session, author_id: int, community_slug: str) -> List[CommunityRole]:
"""
Получает список ролей пользователя в сообществе.


@ -1,9 +1,10 @@
from datetime import datetime, timedelta, timezone
from typing import Optional, Dict, Any, List
from typing import Any, Dict, List, Optional
from pydantic import BaseModel
from services.redis import redis
from auth.jwtcodec import JWTCodec, TokenPayload
from services.redis import redis
from settings import SESSION_TOKEN_LIFE_SPAN
from utils.logger import root_logger as logger
@ -181,7 +182,9 @@ class SessionManager:
"user_id": user_id,
"username": payload.username,
"created_at": datetime.now(tz=timezone.utc).isoformat(),
"expires_at": payload.exp.isoformat() if isinstance(payload.exp, datetime) else datetime.fromtimestamp(payload.exp, tz=timezone.utc).isoformat(),
"expires_at": payload.exp.isoformat()
if isinstance(payload.exp, datetime)
else datetime.fromtimestamp(payload.exp, tz=timezone.utc).isoformat(),
}
# Сохраняем сессию в Redis
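
The reflowed ternary normalizes payload.exp, which may arrive as either a datetime or a unix timestamp; the same logic as a standalone helper:

from datetime import datetime, timezone

def exp_to_iso(exp) -> str:
    # Accept a datetime or a numeric unix timestamp
    if isinstance(exp, datetime):
        return exp.isoformat()
    return datetime.fromtimestamp(exp, tz=timezone.utc).isoformat()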


@ -2,6 +2,7 @@
Классы состояния авторизации
"""
class AuthState:
"""
Класс для хранения информации о состоянии авторизации пользователя.


@ -1,7 +1,7 @@
from datetime import datetime, timedelta, timezone
import json
import time
from typing import Dict, Any, Optional, Tuple, List
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Optional, Tuple
from auth.jwtcodec import JWTCodec
from auth.validations import AuthInput
@ -91,7 +91,7 @@ class TokenStorage:
"user_id": user_id,
"username": username,
"created_at": time.time(),
"expires_at": time.time() + 30 * 24 * 60 * 60 # 30 дней
"expires_at": time.time() + 30 * 24 * 60 * 60, # 30 дней
}
if device_info:

cache/precache.py vendored

@ -3,8 +3,8 @@ import json
from sqlalchemy import and_, join, select
from cache.cache import cache_author, cache_topic
from auth.orm import Author, AuthorFollower
from cache.cache import cache_author, cache_topic
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower, ShoutTopic
from orm.topic import Topic, TopicFollower
from resolvers.stat import get_with_stat
@ -29,9 +29,7 @@ async def precache_authors_followers(author_id, session):
async def precache_authors_follows(author_id, session):
follows_topics_query = select(TopicFollower.topic).where(TopicFollower.follower == author_id)
follows_authors_query = select(AuthorFollower.author).where(AuthorFollower.follower == author_id)
follows_shouts_query = select(ShoutReactionsFollower.shout).where(
ShoutReactionsFollower.follower == author_id
)
follows_shouts_query = select(ShoutReactionsFollower.shout).where(ShoutReactionsFollower.follower == author_id)
follows_topics = {row[0] for row in session.execute(follows_topics_query) if row[0]}
follows_authors = {row[0] for row in session.execute(follows_authors_query) if row[0]}

cache/triggers.py vendored

@ -1,7 +1,7 @@
from sqlalchemy import event
from cache.revalidator import revalidation_manager
from auth.orm import Author, AuthorFollower
from cache.revalidator import revalidation_manager
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutReactionsFollower
from orm.topic import Topic, TopicFollower

dev.py

@ -1,9 +1,11 @@
import os
import subprocess
from pathlib import Path
from utils.logger import root_logger as logger
from granian import Granian
from utils.logger import root_logger as logger
def check_mkcert_installed():
"""
@ -21,6 +23,7 @@ def check_mkcert_installed():
except FileNotFoundError:
return False
def generate_certificates(domain="localhost", cert_file="localhost.pem", key_file="localhost-key.pem"):
"""
Генерирует сертификаты с использованием mkcert
@ -57,7 +60,7 @@ def generate_certificates(domain="localhost", cert_file="localhost.pem", key_fil
["mkcert", "-cert-file", cert_file, "-key-file", key_file, domain],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True
text=True,
)
if result.returncode != 0:
@ -70,6 +73,7 @@ def generate_certificates(domain="localhost", cert_file="localhost.pem", key_fil
logger.error(f"Не удалось создать сертификаты: {str(e)}")
return None, None
def run_server(host="0.0.0.0", port=8000, workers=1):
"""
Запускает сервер Granian с поддержкой HTTPS при необходимости
@ -113,5 +117,6 @@ def run_server(host="0.0.0.0", port=8000, workers=1):
# В случае проблем с Granian, пробуем запустить через Uvicorn
logger.error(f"Ошибка при запуске Granian: {str(e)}")
if __name__ == "__main__":
run_server()
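
check_mkcert_installed reduces to a subprocess probe with a FileNotFoundError fallback, per the hunk above; a sketch (the -version flag is an assumption):

import subprocess

def check_mkcert_installed() -> bool:
    try:
        result = subprocess.run(["mkcert", "-version"], capture_output=True, text=True)
        return result.returncode == 0
    except FileNotFoundError:
        # mkcert binary is not on PATH
        return False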

main.py

@ -5,19 +5,18 @@ from os.path import exists, join
from ariadne import load_schema_from_path, make_executable_schema
from ariadne.asgi import GraphQL
from starlette.applications import Starlette
from starlette.middleware import Middleware
from starlette.middleware.authentication import AuthenticationMiddleware
from starlette.middleware.cors import CORSMiddleware
from starlette.requests import Request
from starlette.responses import JSONResponse, Response
from starlette.routing import Mount, Route
from starlette.staticfiles import StaticFiles
from auth.handler import EnhancedGraphQLHTTPHandler
from auth.internal import InternalAuthentication
from auth.middleware import auth_middleware, AuthMiddleware
from starlette.applications import Starlette
from starlette.middleware.cors import CORSMiddleware
from starlette.middleware.authentication import AuthenticationMiddleware
from starlette.middleware import Middleware
from starlette.requests import Request
from starlette.responses import JSONResponse, Response
from starlette.routing import Route, Mount
from starlette.staticfiles import StaticFiles
from auth.middleware import AuthMiddleware, auth_middleware
from cache.precache import precache_data
from cache.revalidator import revalidation_manager
from services.exception import ExceptionHandlerMiddleware
@ -25,8 +24,8 @@ from services.redis import redis
from services.schema import create_all_tables, resolvers
from services.search import check_search_service, initialize_search_index_background, search_service
from services.viewed import ViewedStorage
from utils.logger import root_logger as logger
from settings import DEV_SERVER_PID_FILE_NAME
from utils.logger import root_logger as logger
DEVMODE = os.getenv("DOKKU_APP_TYPE", "false").lower() == "false"
DIST_DIR = join(os.path.dirname(__file__), "dist") # Директория для собранных файлов
@ -51,7 +50,7 @@ middleware = [
"https://discours.io",
"https://new.discours.io",
"https://discours.ru",
"https://new.discours.ru"
"https://new.discours.ru",
],
allow_methods=["GET", "POST", "OPTIONS"], # Явно указываем OPTIONS
allow_headers=["*"],
@ -65,11 +64,7 @@ middleware = [
# Создаем экземпляр GraphQL с улучшенным обработчиком
graphql_app = GraphQL(
schema,
debug=DEVMODE,
http_handler=EnhancedGraphQLHTTPHandler()
)
graphql_app = GraphQL(schema, debug=DEVMODE, http_handler=EnhancedGraphQLHTTPHandler())
# Оборачиваем GraphQL-обработчик для лучшей обработки ошибок
@ -111,6 +106,7 @@ async def graphql_handler(request: Request):
logger.error(f"GraphQL error: {str(e)}")
# Логируем более подробную информацию для отладки
import traceback
logger.debug(f"GraphQL error traceback: {traceback.format_exc()}")
return JSONResponse({"error": str(e)}, status_code=500)
@ -127,6 +123,7 @@ async def shutdown():
# Удаляем PID-файл, если он существует
from settings import DEV_SERVER_PID_FILE_NAME
if exists(DEV_SERVER_PID_FILE_NAME):
os.unlink(DEV_SERVER_PID_FILE_NAME)
@ -151,6 +148,7 @@ async def dev_start():
old_pid = int(f.read().strip())
# Проверяем, существует ли процесс с таким PID
import signal
try:
os.kill(old_pid, 0) # Сигнал 0 только проверяет существование процесса
print(f"[warning] DEV server already running with PID {old_pid}")
@ -213,11 +211,12 @@ async def lifespan(_app):
await asyncio.gather(*tasks, return_exceptions=True)
print("[lifespan] Shutdown complete")
# Обновляем маршрут в Starlette
app = Starlette(
routes=[
Route("/graphql", graphql_handler, methods=["GET", "POST", "OPTIONS"]),
Mount("/", app=StaticFiles(directory=DIST_DIR, html=True))
Mount("/", app=StaticFiles(directory=DIST_DIR, html=True)),
],
lifespan=lifespan,
middleware=middleware, # Явно указываем список middleware
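
The lifespan argument Starlette receives above must be an async context manager factory; a minimal sketch of the shape (startup/shutdown bodies illustrative):

from contextlib import asynccontextmanager
from starlette.applications import Starlette

@asynccontextmanager
async def lifespan(app):
    # startup: connect Redis, create tables, spawn background tasks
    yield
    # shutdown: cancel tasks, close connections

app = Starlette(routes=[], lifespan=lifespan)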


@ -66,11 +66,7 @@ class CommunityStats:
def shouts(self):
from orm.shout import Shout
return (
self.community.session.query(func.count(Shout.id))
.filter(Shout.community == self.community.id)
.scalar()
)
return self.community.session.query(func.count(Shout.id)).filter(Shout.community == self.community.id).scalar()
@property
def followers(self):


@ -1,4 +1,15 @@
from cache.triggers import events_register
from resolvers.admin import (
admin_get_roles,
admin_get_users,
)
from resolvers.auth import (
confirm_email,
get_current_user,
login,
register_by_email,
send_link,
)
from resolvers.author import ( # search_authors,
get_author,
get_author_followers,
@ -16,8 +27,8 @@ from resolvers.draft import (
delete_draft,
load_drafts,
publish_draft,
update_draft,
unpublish_draft,
update_draft,
)
from resolvers.editor import (
unpublish_shout,
@ -62,19 +73,6 @@ from resolvers.topic import (
get_topics_by_community,
)
from resolvers.auth import (
get_current_user,
confirm_email,
register_by_email,
send_link,
login,
)
from resolvers.admin import (
admin_get_users,
admin_get_roles,
)
events_register()
__all__ = [
@ -84,11 +82,9 @@ __all__ = [
"register_by_email",
"send_link",
"login",
# admin
"admin_get_users",
"admin_get_roles",
# author
"get_author",
"get_author_followers",
@ -100,11 +96,9 @@ __all__ = [
"load_authors_search",
"update_author",
# "search_authors",
# community
"get_community",
"get_communities_all",
# topic
"get_topic",
"get_topics_all",
@ -112,14 +106,12 @@ __all__ = [
"get_topics_by_author",
"get_topic_followers",
"get_topic_authors",
# reader
"get_shout",
"load_shouts_by",
"load_shouts_random_top",
"load_shouts_search",
"load_shouts_unrated",
# feed
"load_shouts_feed",
"load_shouts_coauthored",
@ -127,12 +119,10 @@ __all__ = [
"load_shouts_with_topic",
"load_shouts_followed_by",
"load_shouts_authored_by",
# follower
"follow",
"unfollow",
"get_shout_followers",
# reaction
"create_reaction",
"update_reaction",
@ -142,18 +132,15 @@ __all__ = [
"load_shout_ratings",
"load_comment_ratings",
"load_comments_branch",
# notifier
"load_notifications",
"notifications_seen_thread",
"notifications_seen_after",
"notification_mark_seen",
# rating
"rate_author",
"get_my_rates_comments",
"get_my_rates_shouts",
# draft
"load_drafts",
"create_draft",


@ -1,12 +1,13 @@
from math import ceil
from sqlalchemy import or_, cast, String
from graphql.error import GraphQLError
from sqlalchemy import String, cast, or_
from auth.decorators import admin_auth_required
from auth.orm import Author, AuthorRole, Role
from services.db import local_session
from services.schema import query, mutation
from auth.orm import Author, Role, AuthorRole
from services.env import EnvManager, EnvVariable
from services.schema import mutation, query
from utils.logger import root_logger as logger
@ -64,11 +65,9 @@ async def admin_get_users(_, info, limit=10, offset=0, search=None):
"email": user.email,
"name": user.name,
"slug": user.slug,
"roles": [role.id for role in user.roles]
if hasattr(user, "roles") and user.roles
else [],
"roles": [role.id for role in user.roles] if hasattr(user, "roles") and user.roles else [],
"created_at": user.created_at,
"last_seen": user.last_seen
"last_seen": user.last_seen,
}
for user in users
],
@ -81,6 +80,7 @@ async def admin_get_users(_, info, limit=10, offset=0, search=None):
return result
except Exception as e:
import traceback
logger.error(f"Ошибка при получении списка пользователей: {str(e)}")
logger.error(traceback.format_exc())
raise GraphQLError(f"Не удалось получить список пользователей: {str(e)}")
@ -154,7 +154,7 @@ async def get_env_variables(_, info):
"isSecret": var.is_secret,
}
for var in section.variables
]
],
}
for section in sections
]
@ -216,11 +216,7 @@ async def update_env_variables(_, info, variables):
# Преобразуем входные данные в формат для менеджера
env_variables = [
EnvVariable(
key=var.get("key", ""),
value=var.get("value", ""),
type=var.get("type", "string")
)
EnvVariable(key=var.get("key", ""), value=var.get("value", ""), type=var.get("type", "string"))
for var in variables
]
@ -265,10 +261,7 @@ async def admin_update_user(_, info, user):
if not author:
error_msg = f"Пользователь с ID {user_id} не найден"
logger.error(error_msg)
return {
"success": False,
"error": error_msg
}
return {"success": False, "error": error_msg}
# Получаем ID сообщества по умолчанию
default_community_id = 1 # Используем значение по умолчанию из модели AuthorRole
@ -292,41 +285,32 @@ async def admin_update_user(_, info, user):
# Создаем новые записи в таблице author_role с указанием community
for role in role_objects:
# Используем ORM для создания новых записей
author_role = AuthorRole(
community=default_community_id,
author=user_id,
role=role.id
)
author_role = AuthorRole(community=default_community_id, author=user_id, role=role.id)
session.add(author_role)
# Сохраняем изменения в базе данных
session.commit()
# Проверяем, добавлена ли пользователю роль reader
has_reader = 'reader' in [role.id for role in role_objects]
has_reader = "reader" in [role.id for role in role_objects]
if not has_reader:
logger.warning(f"Пользователю {author.email or author.id} не назначена роль 'reader'. Доступ в систему будет ограничен.")
logger.warning(
f"Пользователю {author.email or author.id} не назначена роль 'reader'. Доступ в систему будет ограничен."
)
logger.info(f"Роли пользователя {author.email or author.id} обновлены: {', '.join(found_role_ids)}")
return {
"success": True
}
return {"success": True}
except Exception as e:
# Обработка вложенных исключений
session.rollback()
error_msg = f"Ошибка при изменении ролей: {str(e)}"
logger.error(error_msg)
return {
"success": False,
"error": error_msg
}
return {"success": False, "error": error_msg}
except Exception as e:
import traceback
error_msg = f"Ошибка при обновлении ролей пользователя: {str(e)}"
logger.error(error_msg)
logger.error(traceback.format_exc())
return {
"success": False,
"error": error_msg
}
return {"success": False, "error": error_msg}


@ -1,33 +1,35 @@
# -*- coding: utf-8 -*-
import time
import traceback
from utils.logger import root_logger as logger
from graphql.type import GraphQLResolveInfo
# import asyncio # Убираем, так как резолвер будет синхронным
from services.auth import login_required
from auth.credentials import AuthCredentials
from auth.email import send_auth_email
from auth.exceptions import InvalidToken, ObjectNotExist
from auth.identity import Identity, Password
from auth.internal import verify_internal_auth
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
from auth.orm import Author, Role
from auth.sessions import SessionManager
from auth.tokenstorage import TokenStorage
# import asyncio # Убираем, так как резолвер будет синхронным
from services.auth import login_required
from services.db import local_session
from services.schema import mutation, query
from settings import (
ADMIN_EMAILS,
SESSION_TOKEN_HEADER,
SESSION_COOKIE_NAME,
SESSION_COOKIE_SECURE,
SESSION_COOKIE_SAMESITE,
SESSION_COOKIE_MAX_AGE,
SESSION_COOKIE_HTTPONLY,
SESSION_COOKIE_MAX_AGE,
SESSION_COOKIE_NAME,
SESSION_COOKIE_SAMESITE,
SESSION_COOKIE_SECURE,
SESSION_TOKEN_HEADER,
)
from utils.generate_slug import generate_unique_slug
from auth.sessions import SessionManager
from auth.internal import verify_internal_auth
from utils.logger import root_logger as logger
@mutation.field("getSession")
@login_required
@ -49,6 +51,7 @@ async def get_current_user(_, info):
if not author_id:
logger.error("[getSession] Пользователь не авторизован")
from graphql.error import GraphQLError
raise GraphQLError("Требуется авторизация")
# Получаем токен из заголовка
@ -62,14 +65,15 @@ async def get_current_user(_, info):
# Если автор не найден в контексте, пробуем получить из БД с добавлением статистики
if not author:
logger.debug(f"[getSession] Автор не найден в контексте для пользователя {user_id}, получаем из БД")
logger.debug(f"[getSession] Автор не найден в контексте для пользователя {author_id}, получаем из БД")
try:
# Используем функцию get_with_stat для получения автора со статистикой
from sqlalchemy import select
from resolvers.stat import get_with_stat
q = select(Author).where(Author.id == user_id)
q = select(Author).where(Author.id == author_id)
authors_with_stat = get_with_stat(q)
if authors_with_stat and len(authors_with_stat) > 0:
@ -77,26 +81,29 @@ async def get_current_user(_, info):
# Обновляем last_seen отдельной транзакцией
with local_session() as session:
author_db = session.query(Author).filter(Author.id == user_id).first()
author_db = session.query(Author).filter(Author.id == author_id).first()
if author_db:
author_db.last_seen = int(time.time())
session.commit()
else:
logger.error(f"[getSession] Автор с ID {user_id} не найден в БД")
logger.error(f"[getSession] Автор с ID {author_id} не найден в БД")
from graphql.error import GraphQLError
raise GraphQLError("Пользователь не найден")
except Exception as e:
logger.error(f"[getSession] Ошибка при получении автора из БД: {e}", exc_info=True)
from graphql.error import GraphQLError
raise GraphQLError("Ошибка при получении данных пользователя")
else:
# Если автор уже есть в контексте, добавляем статистику
try:
from sqlalchemy import select
from resolvers.stat import get_with_stat
q = select(Author).where(Author.id == user_id)
q = select(Author).where(Author.id == author_id)
authors_with_stat = get_with_stat(q)
if authors_with_stat and len(authors_with_stat) > 0:
@ -106,8 +113,8 @@ async def get_current_user(_, info):
logger.warning(f"[getSession] Не удалось добавить статистику к автору: {e}")
# Возвращаем данные сессии
logger.info(f"[getSession] Успешно получена сессия для пользователя {user_id}")
return {"token": token or '', "author": author}
logger.info(f"[getSession] Успешно получена сессия для пользователя {author_id}")
return {"token": token or "", "author": author}
@mutation.field("confirmEmail")
@ -135,7 +142,7 @@ async def confirm_email(_, info, token):
session_token = await TokenStorage.create_session(
user_id=str(user_id),
username=user.username or user.email or user.slug or username,
device_info=device_info
device_info=device_info,
)
user.email_verified = True
@ -231,9 +238,7 @@ async def register_by_email(_, _info, email: str, password: str = "", name: str
try:
# Если auth_send_link асинхронный...
await send_link(_, _info, email)
logger.info(
f"[auth] registerUser: Пользователь {email} зарегистрирован, ссылка для подтверждения отправлена."
)
logger.info(f"[auth] registerUser: Пользователь {email} зарегистрирован, ссылка для подтверждения отправлена.")
# При регистрации возвращаем данные самому пользователю, поэтому не фильтруем
return {
"success": True,
@ -365,9 +370,7 @@ async def login(_, info, email: str, password: str):
or not hasattr(valid_author, "username")
and not hasattr(valid_author, "email")
):
logger.error(
f"[auth] login: Объект автора не содержит необходимых атрибутов: {valid_author}"
)
logger.error(f"[auth] login: Объект автора не содержит необходимых атрибутов: {valid_author}")
return {
"success": False,
"token": None,
@ -380,7 +383,7 @@ async def login(_, info, email: str, password: str):
token = await TokenStorage.create_session(
user_id=str(valid_author.id),
username=valid_author.username or valid_author.email or valid_author.slug or "",
device_info={"email": valid_author.email} if hasattr(valid_author, "email") else None
device_info={"email": valid_author.email} if hasattr(valid_author, "email") else None,
)
logger.info(f"[auth] login: токен успешно создан, длина: {len(token) if token else 0}")
@ -428,6 +431,7 @@ async def login(_, info, email: str, password: str):
if not cookie_set and hasattr(info.context, "request") and not hasattr(info.context, "response"):
try:
from starlette.responses import JSONResponse
response = JSONResponse({})
response.set_cookie(
key=SESSION_COOKIE_NAME,
@ -617,12 +621,7 @@ async def refresh_token_resolver(_, info: GraphQLResolveInfo):
logger.debug(traceback.format_exc())
logger.info(f"[auth] refresh_token_resolver: Токен успешно обновлен для пользователя {user_id}")
return {
"success": True,
"token": new_token,
"author": author,
"error": None
}
return {"success": True, "token": new_token, "author": author, "error": None}
except Exception as e:
logger.error(f"[auth] refresh_token_resolver: Ошибка при обновлении токена: {e}")


@ -1,9 +1,10 @@
import asyncio
import time
from typing import Optional, List, Dict, Any
from typing import Any, Dict, List, Optional
from sqlalchemy import select, text
from auth.orm import Author
from cache.cache import (
cache_author,
cached_query,
@ -13,7 +14,6 @@ from cache.cache import (
get_cached_follower_topics,
invalidate_cache_by_prefix,
)
from auth.orm import Author
from resolvers.stat import get_with_stat
from services.auth import login_required
from services.db import local_session
@ -74,9 +74,7 @@ async def get_authors_with_stats(limit=50, offset=0, by: Optional[str] = None, c
# Функция для получения авторов из БД
async def fetch_authors_with_stats():
logger.debug(
f"Выполняем запрос на получение авторов со статистикой: limit={limit}, offset={offset}, by={by}"
)
logger.debug(f"Выполняем запрос на получение авторов со статистикой: limit={limit}, offset={offset}, by={by}")
with local_session() as session:
# Базовый запрос для получения авторов
@ -93,8 +91,9 @@ async def get_authors_with_stats(limit=50, offset=0, by: Optional[str] = None, c
logger.debug(f"Processing dict-based sorting: {by}")
# Обработка словаря параметров сортировки
from sqlalchemy import asc, desc, func
from orm.shout import ShoutAuthor
from auth.orm import AuthorFollower
from orm.shout import ShoutAuthor
# Checking for order field in the dictionary
if "order" in by:
@ -135,50 +134,40 @@ async def get_authors_with_stats(limit=50, offset=0, by: Optional[str] = None, c
# If sorting by statistics, modify the query
if stats_sort_field == "shouts":
# Sorting by the number of shouts
from sqlalchemy import func, and_
from sqlalchemy import and_, func
from orm.shout import Shout, ShoutAuthor
subquery = (
select(
ShoutAuthor.author,
func.count(func.distinct(Shout.id)).label("shouts_count")
)
select(ShoutAuthor.author, func.count(func.distinct(Shout.id)).label("shouts_count"))
.select_from(ShoutAuthor)
.join(Shout, ShoutAuthor.shout == Shout.id)
.where(
and_(
Shout.deleted_at.is_(None),
Shout.published_at.is_not(None)
)
)
.where(and_(Shout.deleted_at.is_(None), Shout.published_at.is_not(None)))
.group_by(ShoutAuthor.author)
.subquery()
)
base_query = (
base_query
.outerjoin(subquery, Author.id == subquery.c.author)
.order_by(desc(func.coalesce(subquery.c.shouts_count, 0)))
base_query = base_query.outerjoin(subquery, Author.id == subquery.c.author).order_by(
desc(func.coalesce(subquery.c.shouts_count, 0))
)
elif stats_sort_field == "followers":
# Sorting by the number of followers
from sqlalchemy import func
from auth.orm import AuthorFollower
subquery = (
select(
AuthorFollower.author,
func.count(func.distinct(AuthorFollower.follower)).label("followers_count")
func.count(func.distinct(AuthorFollower.follower)).label("followers_count"),
)
.select_from(AuthorFollower)
.group_by(AuthorFollower.author)
.subquery()
)
base_query = (
base_query
.outerjoin(subquery, Author.id == subquery.c.author)
.order_by(desc(func.coalesce(subquery.c.followers_count, 0)))
base_query = base_query.outerjoin(subquery, Author.id == subquery.c.author).order_by(
desc(func.coalesce(subquery.c.followers_count, 0))
)
# Применяем лимит и смещение
@ -398,6 +387,7 @@ async def load_authors_by(_, info, by, limit, offset):
return await get_authors_with_stats(limit, offset, by, viewer_id, is_admin)
except Exception as exc:
import traceback
logger.error(f"{exc}:\n{traceback.format_exc()}")
return []
@ -522,7 +512,6 @@ async def get_author_follows_authors(_, info, slug="", user=None, author_id=None
if not author_id:
return []
# Получаем данные из кэша
followed_authors_raw = await get_cached_follower_authors(author_id)


@ -72,9 +72,7 @@ def toggle_bookmark_shout(_, info, slug: str) -> CommonResult:
if existing_bookmark:
db.execute(
delete(AuthorBookmark).where(
AuthorBookmark.author == author_id, AuthorBookmark.shout == shout.id
)
delete(AuthorBookmark).where(AuthorBookmark.author == author_id, AuthorBookmark.shout == shout.id)
)
result = False
else:


@ -74,9 +74,9 @@ async def update_community(_, info, community_data):
if slug:
with local_session() as session:
try:
session.query(Community).where(
Community.created_by == author_id, Community.slug == slug
).update(community_data)
session.query(Community).where(Community.created_by == author_id, Community.slug == slug).update(
community_data
)
session.commit()
except Exception as e:
return {"ok": False, "error": str(e)}
@ -90,9 +90,7 @@ async def delete_community(_, info, slug: str):
author_id = author_dict.get("id")
with local_session() as session:
try:
session.query(Community).where(
Community.slug == slug, Community.created_by == author_id
).delete()
session.query(Community).where(Community.slug == slug, Community.created_by == author_id).delete()
session.commit()
return {"ok": True}
except Exception as e:


@ -1,11 +1,12 @@
import time
from sqlalchemy.orm import joinedload
from auth.orm import Author
from cache.cache import (
invalidate_shout_related_cache,
invalidate_shouts_cache,
)
from auth.orm import Author
from orm.draft import Draft, DraftAuthor, DraftTopic
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from services.auth import login_required
@ -449,15 +450,15 @@ async def publish_draft(_, info, draft_id: int):
# Добавляем темы
for topic in draft.topics or []:
st = ShoutTopic(
topic=topic.id, shout=shout.id, main=topic.main if hasattr(topic, "main") else False
)
st = ShoutTopic(topic=topic.id, shout=shout.id, main=topic.main if hasattr(topic, "main") else False)
session.add(st)
session.commit()
# Инвалидируем кеш
cache_keys = [f"shouts:{shout.id}", ]
cache_keys = [
f"shouts:{shout.id}",
]
await invalidate_shouts_cache(cache_keys)
await invalidate_shout_related_cache(shout, author_id)
@ -500,11 +501,7 @@ async def unpublish_draft(_, info, draft_id: int):
# Загружаем черновик со связанной публикацией
draft = (
session.query(Draft)
.options(
joinedload(Draft.publication),
joinedload(Draft.authors),
joinedload(Draft.topics)
)
.options(joinedload(Draft.publication), joinedload(Draft.authors), joinedload(Draft.topics))
.filter(Draft.id == draft_id)
.first()
)
@ -533,11 +530,7 @@ async def unpublish_draft(_, info, draft_id: int):
# Формируем результат
draft_dict = draft.dict()
# Добавляем информацию о публикации
draft_dict["publication"] = {
"id": shout.id,
"slug": shout.slug,
"published_at": None
}
draft_dict["publication"] = {"id": shout.id, "slug": shout.slug, "published_at": None}
logger.info(f"Successfully unpublished shout #{shout.id} for draft #{draft_id}")


@ -5,13 +5,13 @@ from sqlalchemy import and_, desc, select
from sqlalchemy.orm import joinedload, selectinload
from sqlalchemy.sql.functions import coalesce
from auth.orm import Author
from cache.cache import (
cache_author,
cache_topic,
invalidate_shout_related_cache,
invalidate_shouts_cache,
)
from auth.orm import Author
from orm.draft import Draft
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
@ -179,9 +179,7 @@ async def create_shout(_, info, inp):
lead = inp.get("lead", "")
body_text = extract_text(body)
lead_text = extract_text(lead)
seo = inp.get(
"seo", lead_text.strip() or body_text.strip()[:300].split(". ")[:-1].join(". ")
)
seo = inp.get("seo", lead_text.strip() or body_text.strip()[:300].split(". ")[:-1].join(". "))
new_shout = Shout(
slug=slug,
body=body,
@ -278,9 +276,7 @@ def patch_main_topic(session, main_topic_slug, shout):
with session.begin():
# Получаем текущий главный топик
old_main = (
session.query(ShoutTopic)
.filter(and_(ShoutTopic.shout == shout.id, ShoutTopic.main.is_(True)))
.first()
session.query(ShoutTopic).filter(and_(ShoutTopic.shout == shout.id, ShoutTopic.main.is_(True))).first()
)
if old_main:
logger.info(f"Found current main topic: {old_main.topic.slug}")
@ -314,9 +310,7 @@ def patch_main_topic(session, main_topic_slug, shout):
session.flush()
logger.info(f"Main topic updated for shout#{shout.id}")
else:
logger.warning(
f"No changes needed for main topic (old={old_main is not None}, new={new_main is not None})"
)
logger.warning(f"No changes needed for main topic (old={old_main is not None}, new={new_main is not None})")
def patch_topics(session, shout, topics_input):
@ -410,9 +404,7 @@ async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
logger.info(f"Processing update for shout#{shout_id} by author #{author_id}")
shout_by_id = (
session.query(Shout)
.options(
joinedload(Shout.topics).joinedload(ShoutTopic.topic), joinedload(Shout.authors)
)
.options(joinedload(Shout.topics).joinedload(ShoutTopic.topic), joinedload(Shout.authors))
.filter(Shout.id == shout_id)
.first()
)
@ -441,10 +433,7 @@ async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
shout_input["slug"] = slug
logger.info(f"shout#{shout_id} slug patched")
if (
filter(lambda x: x.id == author_id, [x for x in shout_by_id.authors])
or "editor" in roles
):
if filter(lambda x: x.id == author_id, [x for x in shout_by_id.authors]) or "editor" in roles:
logger.info(f"Author #{author_id} has permission to edit shout#{shout_id}")
# topics patch
@ -558,9 +547,7 @@ async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
# Получаем полные данные шаута со связями
shout_with_relations = (
session.query(Shout)
.options(
joinedload(Shout.topics).joinedload(ShoutTopic.topic), joinedload(Shout.authors)
)
.options(joinedload(Shout.topics).joinedload(ShoutTopic.topic), joinedload(Shout.authors))
.filter(Shout.id == shout_id)
.first()
)


@ -71,9 +71,7 @@ def shouts_by_follower(info, follower_id: int, options):
q = query_with_stat(info)
reader_followed_authors = select(AuthorFollower.author).where(AuthorFollower.follower == follower_id)
reader_followed_topics = select(TopicFollower.topic).where(TopicFollower.follower == follower_id)
reader_followed_shouts = select(ShoutReactionsFollower.shout).where(
ShoutReactionsFollower.follower == follower_id
)
reader_followed_shouts = select(ShoutReactionsFollower.shout).where(ShoutReactionsFollower.follower == follower_id)
followed_subquery = (
select(Shout.id)
.join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
@ -142,9 +140,7 @@ async def load_shouts_authored_by(_, info, slug: str, options) -> List[Shout]:
q = (
query_with_stat(info)
if has_field(info, "stat")
else select(Shout).filter(
and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None))
)
else select(Shout).filter(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
)
q = q.filter(Shout.authors.any(id=author_id))
q, limit, offset = apply_options(q, options, author_id)
@ -173,9 +169,7 @@ async def load_shouts_with_topic(_, info, slug: str, options) -> List[Shout]:
q = (
query_with_stat(info)
if has_field(info, "stat")
else select(Shout).filter(
and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None))
)
else select(Shout).filter(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
)
q = q.filter(Shout.topics.any(id=topic_id))
q, limit, offset = apply_options(q, options)


@ -4,13 +4,13 @@ from graphql import GraphQLError
from sqlalchemy import select
from sqlalchemy.sql import and_
from auth.orm import Author, AuthorFollower
from cache.cache import (
cache_author,
cache_topic,
get_cached_follower_authors,
get_cached_follower_topics,
)
from auth.orm import Author, AuthorFollower
from orm.community import Community, CommunityFollower
from orm.reaction import Reaction
from orm.shout import Shout, ShoutReactionsFollower
@ -87,9 +87,7 @@ async def follow(_, info, what, slug="", entity_id=0):
.first()
)
if existing_sub:
logger.info(
f"Пользователь {follower_id} уже подписан на {what.lower()} с ID {entity_id}"
)
logger.info(f"Пользователь {follower_id} уже подписан на {what.lower()} с ID {entity_id}")
else:
logger.debug("Добавление новой записи в базу данных")
sub = follower_class(follower=follower_id, **{entity_type: entity_id})


@ -66,9 +66,7 @@ def query_notifications(author_id: int, after: int = 0) -> Tuple[int, int, List[
return total, unread, notifications
def group_notification(
thread, authors=None, shout=None, reactions=None, entity="follower", action="follow"
):
def group_notification(thread, authors=None, shout=None, reactions=None, entity="follower", action="follow"):
reactions = reactions or []
authors = authors or []
return {


@ -14,11 +14,7 @@ def handle_proposing(kind: ReactionKind, reply_to: int, shout_id: int):
session.query(Reaction).filter(Reaction.id == reply_to, Reaction.shout == shout_id).first()
)
if (
replied_reaction
and replied_reaction.kind is ReactionKind.PROPOSE.value
and replied_reaction.quote
):
if replied_reaction and replied_reaction.kind is ReactionKind.PROPOSE.value and replied_reaction.quote:
# patch all the proposals' quotes
proposals = (
session.query(Reaction)


@ -186,9 +186,7 @@ def count_author_shouts_rating(session, author_id) -> int:
def get_author_rating_old(session, author: Author):
likes_count = (
session.query(AuthorRating)
.filter(and_(AuthorRating.author == author.id, AuthorRating.plus.is_(True)))
.count()
session.query(AuthorRating).filter(and_(AuthorRating.author == author.id, AuthorRating.plus.is_(True))).count()
)
dislikes_count = (
session.query(AuthorRating)


@ -334,9 +334,7 @@ async def create_reaction(_, info, reaction):
with local_session() as session:
authors = session.query(ShoutAuthor.author).filter(ShoutAuthor.shout == shout_id).scalar()
is_author = (
bool(list(filter(lambda x: x == int(author_id), authors)))
if isinstance(authors, list)
else False
bool(list(filter(lambda x: x == int(author_id), authors))) if isinstance(authors, list) else False
)
reaction_input["created_by"] = author_id
kind = reaction_input.get("kind")


@ -138,9 +138,7 @@ def query_with_stat(info):
select(
ShoutTopic.shout,
json_array_builder(
json_builder(
"id", Topic.id, "title", Topic.title, "slug", Topic.slug, "is_main", ShoutTopic.main
)
json_builder("id", Topic.id, "title", Topic.title, "slug", Topic.slug, "is_main", ShoutTopic.main)
).label("topics"),
)
.outerjoin(Topic, ShoutTopic.topic == Topic.id)
@ -287,9 +285,7 @@ def get_shouts_with_links(info, q, limit=20, offset=0):
if hasattr(row, "main_topic"):
# logger.debug(f"Raw main_topic for shout#{shout_id}: {row.main_topic}")
main_topic = (
orjson.loads(row.main_topic)
if isinstance(row.main_topic, str)
else row.main_topic
orjson.loads(row.main_topic) if isinstance(row.main_topic, str) else row.main_topic
)
# logger.debug(f"Parsed main_topic for shout#{shout_id}: {main_topic}")
@ -325,9 +321,7 @@ def get_shouts_with_links(info, q, limit=20, offset=0):
media_data = orjson.loads(media_data)
except orjson.JSONDecodeError:
media_data = []
shout_dict["media"] = (
[media_data] if isinstance(media_data, dict) else media_data
)
shout_dict["media"] = [media_data] if isinstance(media_data, dict) else media_data
shouts.append(shout_dict)
@ -415,9 +409,7 @@ def apply_sorting(q, options):
"""
order_str = options.get("order_by")
if order_str in ["rating", "comments_count", "last_commented_at"]:
query_order_by = (
desc(text(order_str)) if options.get("order_by_desc", True) else asc(text(order_str))
)
query_order_by = desc(text(order_str)) if options.get("order_by_desc", True) else asc(text(order_str))
q = q.distinct(text(order_str), Shout.id).order_by( # DISTINCT ON включает поле сортировки
nulls_last(query_order_by), Shout.id
)
@ -513,15 +505,11 @@ async def load_shouts_unrated(_, info, options):
q = select(Shout).where(and_(Shout.published_at.is_not(None), Shout.deleted_at.is_(None)))
q = q.join(Author, Author.id == Shout.created_by)
q = q.add_columns(
json_builder("id", Author.id, "name", Author.name, "slug", Author.slug, "pic", Author.pic).label(
"main_author"
)
json_builder("id", Author.id, "name", Author.name, "slug", Author.slug, "pic", Author.pic).label("main_author")
)
q = q.join(ShoutTopic, and_(ShoutTopic.shout == Shout.id, ShoutTopic.main.is_(True)))
q = q.join(Topic, Topic.id == ShoutTopic.topic)
q = q.add_columns(
json_builder("id", Topic.id, "title", Topic.title, "slug", Topic.slug).label("main_topic")
)
q = q.add_columns(json_builder("id", Topic.id, "title", Topic.title, "slug", Topic.slug).label("main_topic"))
q = q.where(Shout.id.not_in(rated_shouts))
q = q.order_by(func.random())


@ -3,8 +3,8 @@ import asyncio
from sqlalchemy import and_, distinct, func, join, select
from sqlalchemy.orm import aliased
from cache.cache import cache_author
from auth.orm import Author, AuthorFollower
from cache.cache import cache_author
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
@ -177,9 +177,7 @@ def get_topic_comments_stat(topic_id: int) -> int:
.subquery()
)
# Запрос для суммирования количества комментариев по теме
q = select(func.coalesce(func.sum(sub_comments.c.comments_count), 0)).filter(
ShoutTopic.topic == topic_id
)
q = select(func.coalesce(func.sum(sub_comments.c.comments_count), 0)).filter(ShoutTopic.topic == topic_id)
q = q.outerjoin(sub_comments, ShoutTopic.shout == sub_comments.c.shout_id)
with local_session() as session:
result = session.execute(q).first()
@ -239,9 +237,7 @@ def get_author_followers_stat(author_id: int) -> int:
:return: Количество уникальных подписчиков автора.
"""
aliased_followers = aliased(AuthorFollower)
q = select(func.count(distinct(aliased_followers.follower))).filter(
aliased_followers.author == author_id
)
q = select(func.count(distinct(aliased_followers.follower))).filter(aliased_followers.author == author_id)
with local_session() as session:
result = session.execute(q).first()
return result[0] if result else 0
@ -293,9 +289,7 @@ def get_with_stat(q):
stat["shouts"] = cols[1] # Статистика по публикациям
stat["followers"] = cols[2] # Статистика по подписчикам
if is_author:
stat["authors"] = get_author_authors_stat(
entity.id
) # Статистика по подпискам на авторов
stat["authors"] = get_author_authors_stat(entity.id) # Статистика по подпискам на авторов
stat["comments"] = get_author_comments_stat(entity.id) # Статистика по комментариям
else:
stat["authors"] = get_topic_authors_stat(entity.id) # Статистика по авторам темы

View File

@ -1,5 +1,6 @@
from sqlalchemy import desc, select, text
from auth.orm import Author
from cache.cache import (
cache_topic,
cached_query,
@ -8,9 +9,8 @@ from cache.cache import (
get_cached_topic_followers,
invalidate_cache_by_prefix,
)
from auth.orm import Author
from orm.topic import Topic
from orm.reaction import ReactionKind
from orm.topic import Topic
from resolvers.stat import get_with_stat
from services.auth import login_required
from services.db import local_session

View File

@ -1,16 +1,16 @@
from functools import wraps
from typing import Tuple
from sqlalchemy import exc
from starlette.requests import Request
from auth.internal import verify_internal_auth
from auth.orm import Author, Role
from cache.cache import get_cached_author_by_id
from resolvers.stat import get_with_stat
from utils.logger import root_logger as logger
from auth.internal import verify_internal_auth
from sqlalchemy import exc
from services.db import local_session
from auth.orm import Author, Role
from settings import SESSION_TOKEN_HEADER
from utils.logger import root_logger as logger
# List of allowed headers
ALLOWED_HEADERS = ["Authorization", "Content-Type"]
@ -57,7 +57,9 @@ async def check_auth(req: Request) -> Tuple[str, list[str], bool]:
# Verify authorization via the internal mechanism
logger.debug("[check_auth] Calling verify_internal_auth...")
user_id, user_roles, is_admin = await verify_internal_auth(token)
logger.debug(f"[check_auth] verify_internal_auth result: user_id={user_id}, roles={user_roles}, is_admin={is_admin}")
logger.debug(
f"[check_auth] verify_internal_auth result: user_id={user_id}, roles={user_roles}, is_admin={is_admin}"
)
# If the roles lack admin but an ID is present, check the DB
if user_id and not is_admin:
@ -71,16 +73,19 @@ async def check_auth(req: Request) -> Tuple[str, list[str], bool]:
else:
# Check for admin rights via the DB
from auth.orm import AuthorRole
admin_role = session.query(AuthorRole).filter(
AuthorRole.author == user_id_int,
AuthorRole.role.in_(["admin", "super"])
).first()
admin_role = (
session.query(AuthorRole)
.filter(AuthorRole.author == user_id_int, AuthorRole.role.in_(["admin", "super"]))
.first()
)
is_admin = admin_role is not None
except Exception as e:
logger.error(f"Ошибка при проверке прав администратора: {e}")
return user_id, user_roles, is_admin
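# Illustrative wiring of check_auth into a plain Starlette endpoint; the route
# and response shape are assumptions, not part of this module:
from starlette.requests import Request
from starlette.responses import JSONResponse

async def whoami(request: Request) -> JSONResponse:
    user_id, roles, is_admin = await check_auth(request)
    if not user_id:
        return JSONResponse({"error": "unauthorized"}, status_code=401)
    return JSONResponse({"id": user_id, "roles": roles, "admin": is_admin})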
async def add_user_role(user_id: str, roles: list[str] | None = None):
"""
Add roles to a user in the local DB.
@ -142,7 +147,7 @@ def login_required(f):
raise GraphQLError("Требуется авторизация")
# Проверяем наличие роли reader
if 'reader' not in user_roles:
if "reader" not in user_roles:
logger.error(f"Пользователь {user_id} не имеет роли 'reader'")
raise GraphQLError("У вас нет необходимых прав для доступа")

View File

@ -200,9 +200,7 @@ class Base(declarative_base()):
data[column_name] = value
else:
# Skip the attribute if the object does not have it (it may be added after a migration)
logger.debug(
f"Skipping missing attribute '{column_name}' for {self.__class__.__name__}"
)
logger.debug(f"Skipping missing attribute '{column_name}' for {self.__class__.__name__}")
except AttributeError as e:
logger.warning(f"Attribute error for column '{column_name}': {e}")
# Add the synthetic .stat field if it exists
@ -223,9 +221,7 @@ class Base(declarative_base()):
# Function that prints a full traceback for warnings
def warning_with_traceback(
message: Warning | str, category, filename: str, lineno: int, file=None, line=None
):
def warning_with_traceback(message: Warning | str, category, filename: str, lineno: int, file=None, line=None):
tb = traceback.format_stack()
tb_str = "".join(tb)
return f"{message} ({filename}, {lineno}): {category.__name__}\n{tb_str}"
@ -302,6 +298,7 @@ json_builder, json_array_builder, json_cast = get_json_builder()
# Fetch all shouts, with authors preloaded
# This function is used for search indexing
async def fetch_all_shouts(session=None):
"""Fetch all published shouts for search indexing with authors preloaded"""
from orm.shout import Shout
@ -313,11 +310,10 @@ async def fetch_all_shouts(session=None):
try:
# Fetch only published and non-deleted shouts with authors preloaded
query = session.query(Shout).options(
joinedload(Shout.authors)
).filter(
Shout.published_at.is_not(None),
Shout.deleted_at.is_(None)
query = (
session.query(Shout)
.options(joinedload(Shout.authors))
.filter(Shout.published_at.is_not(None), Shout.deleted_at.is_(None))
)
shouts = query.all()
return shouts
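# A sketch of driving fetch_all_shouts from an indexing job; asyncio.run is
# illustrative (in the app this runs on the existing event loop) and the
# import path for initialize_search_index is an assumption:
import asyncio

async def reindex_everything():
    shouts = await fetch_all_shouts()  # published, non-deleted, authors preloaded
    from services.search import initialize_search_index
    await initialize_search_index(shouts)

# asyncio.run(reindex_everything())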

View File

@ -1,9 +1,11 @@
from typing import Dict, List, Optional, Set
from dataclasses import dataclass
import os
import re
from dataclasses import dataclass
from pathlib import Path
from typing import Dict, List, Optional, Set
from redis import Redis
from settings import REDIS_URL, ROOT_DIR
from utils.logger import root_logger as logger
@ -31,12 +33,37 @@ class EnvManager:
# Standard environment variables to exclude
EXCLUDED_ENV_VARS: Set[str] = {
"PATH", "SHELL", "USER", "HOME", "PWD", "TERM", "LANG",
"PYTHONPATH", "_", "TMPDIR", "TERM_PROGRAM", "TERM_SESSION_ID",
"XPC_SERVICE_NAME", "XPC_FLAGS", "SHLVL", "SECURITYSESSIONID",
"LOGNAME", "OLDPWD", "ZSH", "PAGER", "LESS", "LC_CTYPE", "LSCOLORS",
"SSH_AUTH_SOCK", "DISPLAY", "COLORTERM", "EDITOR", "VISUAL",
"PYTHONDONTWRITEBYTECODE", "VIRTUAL_ENV", "PYTHONUNBUFFERED"
"PATH",
"SHELL",
"USER",
"HOME",
"PWD",
"TERM",
"LANG",
"PYTHONPATH",
"_",
"TMPDIR",
"TERM_PROGRAM",
"TERM_SESSION_ID",
"XPC_SERVICE_NAME",
"XPC_FLAGS",
"SHLVL",
"SECURITYSESSIONID",
"LOGNAME",
"OLDPWD",
"ZSH",
"PAGER",
"LESS",
"LC_CTYPE",
"LSCOLORS",
"SSH_AUTH_SOCK",
"DISPLAY",
"COLORTERM",
"EDITOR",
"VISUAL",
"PYTHONDONTWRITEBYTECODE",
"VIRTUAL_ENV",
"PYTHONUNBUFFERED",
}
# Sections for grouping variables
@ -44,57 +71,67 @@ class EnvManager:
"AUTH": {
"pattern": r"^(JWT|AUTH|SESSION|OAUTH|GITHUB|GOOGLE|FACEBOOK)_",
"name": "Авторизация",
"description": "Настройки системы авторизации"
"description": "Настройки системы авторизации",
},
"DATABASE": {
"pattern": r"^(DB|DATABASE|POSTGRES|MYSQL|SQL)_",
"name": "База данных",
"description": "Настройки подключения к базам данных"
"description": "Настройки подключения к базам данных",
},
"CACHE": {
"pattern": r"^(REDIS|CACHE|MEMCACHED)_",
"name": "Кэширование",
"description": "Настройки систем кэширования"
"description": "Настройки систем кэширования",
},
"SEARCH": {
"pattern": r"^(ELASTIC|SEARCH|OPENSEARCH)_",
"name": "Поиск",
"description": "Настройки поисковых систем"
"description": "Настройки поисковых систем",
},
"APP": {
"pattern": r"^(APP|PORT|HOST|DEBUG|DOMAIN|ENVIRONMENT|ENV|FRONTEND)_",
"name": "Общие настройки",
"description": "Общие настройки приложения"
"description": "Общие настройки приложения",
},
"LOGGING": {
"pattern": r"^(LOG|LOGGING|SENTRY|GLITCH|GLITCHTIP)_",
"name": "Мониторинг",
"description": "Настройки логирования и мониторинга"
"description": "Настройки логирования и мониторинга",
},
"EMAIL": {
"pattern": r"^(MAIL|EMAIL|SMTP|IMAP|POP3|POST)_",
"name": "Электронная почта",
"description": "Настройки отправки электронной почты"
"description": "Настройки отправки электронной почты",
},
"ANALYTICS": {
"pattern": r"^(GA|GOOGLE_ANALYTICS|ANALYTICS)_",
"name": "Аналитика",
"description": "Настройки систем аналитики"
"description": "Настройки систем аналитики",
},
}
# Variables that should always be marked as secret
SECRET_VARS_PATTERNS = [
r".*TOKEN.*", r".*SECRET.*", r".*PASSWORD.*", r".*KEY.*",
r".*PWD.*", r".*PASS.*", r".*CRED.*", r".*_DSN.*",
r".*JWT.*", r".*SESSION.*", r".*OAUTH.*",
r".*GITHUB.*", r".*GOOGLE.*", r".*FACEBOOK.*"
r".*TOKEN.*",
r".*SECRET.*",
r".*PASSWORD.*",
r".*KEY.*",
r".*PWD.*",
r".*PASS.*",
r".*CRED.*",
r".*_DSN.*",
r".*JWT.*",
r".*SESSION.*",
r".*OAUTH.*",
r".*GITHUB.*",
r".*GOOGLE.*",
r".*FACEBOOK.*",
]
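# A plausible shape for the matching step (a guess — the real method may
# differ): any pattern hit marks the key as secret. Subset of patterns only.
import re

SECRET_PATTERNS = [r".*TOKEN.*", r".*SECRET.*", r".*PASSWORD.*", r".*KEY.*"]

def is_secret_variable(key: str) -> bool:
    return any(re.match(p, key, re.IGNORECASE) for p in SECRET_PATTERNS)

print(is_secret_variable("GITHUB_CLIENT_SECRET"))  # True
print(is_secret_variable("PORT"))                  # False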
def __init__(self):
self.redis = Redis.from_url(REDIS_URL)
self.prefix = "env:"
self.env_file_path = os.path.join(ROOT_DIR, '.env')
self.env_file_path = os.path.join(ROOT_DIR, ".env")
def get_all_variables(self) -> List[EnvSection]:
"""
@ -142,15 +179,15 @@ class EnvManager:
env_vars = {}
if os.path.exists(self.env_file_path):
try:
with open(self.env_file_path, 'r') as f:
with open(self.env_file_path, "r") as f:
for line in f:
line = line.strip()
# Skip empty lines and comments
if not line or line.startswith('#'):
if not line or line.startswith("#"):
continue
# Split the line into key and value
if '=' in line:
key, value = line.split('=', 1)
if "=" in line:
key, value = line.split("=", 1)
key = key.strip()
value = value.strip()
# Strip surrounding quotes, if any
@ -207,17 +244,17 @@ class EnvManager:
"""
Determines the variable type from its value
"""
if value.lower() in ('true', 'false'):
if value.lower() in ("true", "false"):
return "boolean"
if value.isdigit():
return "integer"
if re.match(r"^\d+\.\d+$", value):
return "float"
# Check for a JSON object or array
if (value.startswith('{') and value.endswith('}')) or (value.startswith('[') and value.endswith(']')):
if (value.startswith("{") and value.endswith("}")) or (value.startswith("[") and value.endswith("]")):
return "json"
# Check for a URL
if value.startswith(('http://', 'https://', 'redis://', 'postgresql://')):
if value.startswith(("http://", "https://", "redis://", "postgresql://")):
return "url"
return "string"
@ -234,12 +271,7 @@ class EnvManager:
is_secret = self._is_secret_variable(key)
var_type = self._determine_variable_type(value)
var = EnvVariable(
key=key,
value=value,
type=var_type,
is_secret=is_secret
)
var = EnvVariable(key=key, value=value, type=var_type, is_secret=is_secret)
# Determine the section for the variable
placed = False
@ -260,9 +292,7 @@ class EnvManager:
section_config = self.SECTIONS[section_id]
result.append(
EnvSection(
name=section_config["name"],
description=section_config["description"],
variables=variables
name=section_config["name"], description=section_config["description"], variables=variables
)
)
@ -272,7 +302,7 @@ class EnvManager:
EnvSection(
name="Прочие переменные",
description="Переменные, не вошедшие в основные категории",
variables=other_variables
variables=other_variables,
)
)
@ -305,7 +335,7 @@ class EnvManager:
try:
# If the .env file does not exist, create it
if not os.path.exists(self.env_file_path):
with open(self.env_file_path, 'w') as f:
with open(self.env_file_path, "w") as f:
f.write(f"{key}={value}\n")
return True
@ -313,12 +343,12 @@ class EnvManager:
lines = []
found = False
with open(self.env_file_path, 'r') as f:
with open(self.env_file_path, "r") as f:
for line in f:
if line.strip() and not line.strip().startswith('#'):
if line.strip() and not line.strip().startswith("#"):
if line.strip().startswith(f"{key}="):
# Quote the value if necessary
if ' ' in value or ',' in value or '"' in value or "'" in value:
if " " in value or "," in value or '"' in value or "'" in value:
escaped_value = f'"{value}"'
else:
escaped_value = value
@ -332,14 +362,14 @@ class EnvManager:
# If the variable was not in the file, append it
if not found:
# Quote the value if necessary
if ' ' in value or ',' in value or '"' in value or "'" in value:
if " " in value or "," in value or '"' in value or "'" in value:
escaped_value = f'"{value}"'
else:
escaped_value = value
lines.append(f"{key}={escaped_value}\n")
# Write the updated file back
with open(self.env_file_path, 'w') as f:
with open(self.env_file_path, "w") as f:
f.writelines(lines)
return True
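# Standalone restatement of the quoting rule above; note it wraps values in
# double quotes without escaping embedded quotes, so values containing '"'
# will not round-trip cleanly.
def escape_env_value(value: str) -> str:
    if " " in value or "," in value or '"' in value or "'" in value:
        return f'"{value}"'
    return value

print(escape_env_value("redis://localhost:6379"))  # redis://localhost:6379
print(escape_env_value("hello, world"))            # "hello, world"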

View File

@ -93,9 +93,7 @@ async def notify_draft(draft_data, action: str = "publish"):
# If related attributes were passed, add them
if hasattr(draft_data, "topics") and draft_data.topics is not None:
draft_payload["topics"] = [
{"id": t.id, "name": t.name, "slug": t.slug} for t in draft_data.topics
]
draft_payload["topics"] = [{"id": t.id, "name": t.name, "slug": t.slug} for t in draft_data.topics]
if hasattr(draft_data, "authors") and draft_data.authors is not None:
draft_payload["authors"] = [

View File

@ -242,8 +242,6 @@ class RedisService:
return await self._client.keys(pattern)
redis = RedisService()
__all__ = ["redis"]

View File

@ -12,7 +12,7 @@ resolvers = [query, mutation, type_draft]
def create_all_tables():
"""Create all database tables in the correct order."""
from auth.orm import Author, AuthorFollower, AuthorBookmark, AuthorRating
from auth.orm import Author, AuthorBookmark, AuthorFollower, AuthorRating
from orm import community, draft, notification, reaction, shout, topic
# Порядок важен - сначала таблицы без внешних ключей, затем зависимые таблицы

View File

@ -2,9 +2,11 @@ import asyncio
import json
import logging
import os
import httpx
import time
import random
import time
import httpx
from settings import TXTAI_SERVICE_URL
# Set up proper logging
@ -15,23 +17,15 @@ logging.getLogger("httpx").setLevel(logging.WARNING)
logging.getLogger("httpcore").setLevel(logging.WARNING)
# Configuration for search service
SEARCH_ENABLED = bool(
os.environ.get("SEARCH_ENABLED", "true").lower() in ["true", "1", "yes"]
)
SEARCH_ENABLED = bool(os.environ.get("SEARCH_ENABLED", "true").lower() in ["true", "1", "yes"])
MAX_BATCH_SIZE = int(os.environ.get("SEARCH_MAX_BATCH_SIZE", "25"))
# Search cache configuration
SEARCH_CACHE_ENABLED = bool(
os.environ.get("SEARCH_CACHE_ENABLED", "true").lower() in ["true", "1", "yes"]
)
SEARCH_CACHE_TTL_SECONDS = int(
os.environ.get("SEARCH_CACHE_TTL_SECONDS", "300")
) # Default: 5 minutes
SEARCH_CACHE_ENABLED = bool(os.environ.get("SEARCH_CACHE_ENABLED", "true").lower() in ["true", "1", "yes"])
SEARCH_CACHE_TTL_SECONDS = int(os.environ.get("SEARCH_CACHE_TTL_SECONDS", "300")) # Default: 5 minutes
SEARCH_PREFETCH_SIZE = int(os.environ.get("SEARCH_PREFETCH_SIZE", "200"))
SEARCH_USE_REDIS = bool(
os.environ.get("SEARCH_USE_REDIS", "true").lower() in ["true", "1", "yes"]
)
SEARCH_USE_REDIS = bool(os.environ.get("SEARCH_USE_REDIS", "true").lower() in ["true", "1", "yes"])
search_offset = 0
@ -68,9 +62,7 @@ class SearchCache:
serialized_results,
ex=self.ttl,
)
logger.info(
f"Stored {len(results)} search results for query '{query}' in Redis"
)
logger.info(f"Stored {len(results)} search results for query '{query}' in Redis")
return True
except Exception as e:
logger.error(f"Error storing search results in Redis: {e}")
@ -83,9 +75,7 @@ class SearchCache:
# Store results and update timestamp
self.cache[normalized_query] = results
self.last_accessed[normalized_query] = time.time()
logger.info(
f"Cached {len(results)} search results for query '{query}' in memory"
)
logger.info(f"Cached {len(results)} search results for query '{query}' in memory")
return True
async def get(self, query, limit=10, offset=0):
@ -117,14 +107,10 @@ class SearchCache:
# Return paginated subset
end_idx = min(offset + limit, len(all_results))
if offset >= len(all_results):
logger.warning(
f"Requested offset {offset} exceeds result count {len(all_results)}"
)
logger.warning(f"Requested offset {offset} exceeds result count {len(all_results)}")
return []
logger.info(
f"Cache hit for '{query}': serving {offset}:{end_idx} of {len(all_results)} results"
)
logger.info(f"Cache hit for '{query}': serving {offset}:{end_idx} of {len(all_results)} results")
return all_results[offset:end_idx]
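# Minimal model of the slice-serving logic above: one cached list per
# normalized query; a page is a plain slice, and an offset past the end of
# the cached results yields an empty page rather than an error.
def page(cached_results: list, limit: int = 10, offset: int = 0) -> list:
    if offset >= len(cached_results):
        return []
    end_idx = min(offset + limit, len(cached_results))
    return cached_results[offset:end_idx]

print(page(list(range(25)), limit=10, offset=20))  # [20, 21, 22, 23, 24]
print(page(list(range(25)), limit=10, offset=30))  # []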
async def has_query(self, query):
@ -174,11 +160,7 @@ class SearchCache:
"""Remove oldest entries if memory cache is full"""
now = time.time()
# First remove expired entries
expired_keys = [
key
for key, last_access in self.last_accessed.items()
if now - last_access > self.ttl
]
expired_keys = [key for key, last_access in self.last_accessed.items() if now - last_access > self.ttl]
for key in expired_keys:
if key in self.cache:
@ -217,9 +199,7 @@ class SearchService:
if SEARCH_CACHE_ENABLED:
cache_location = "Redis" if SEARCH_USE_REDIS else "Memory"
logger.info(
f"Search caching enabled using {cache_location} cache with TTL={SEARCH_CACHE_TTL_SECONDS}s"
)
logger.info(f"Search caching enabled using {cache_location} cache with TTL={SEARCH_CACHE_TTL_SECONDS}s")
async def info(self):
"""Return information about search service"""
@ -270,9 +250,7 @@ class SearchService:
logger.info(
f"Document verification complete: {bodies_missing_count} bodies missing, {titles_missing_count} titles missing"
)
logger.info(
f"Total unique missing documents: {total_missing_count} out of {len(doc_ids)} total"
)
logger.info(f"Total unique missing documents: {total_missing_count} out of {len(doc_ids)} total")
# Return in a backwards-compatible format plus the detailed breakdown
return {
@ -308,9 +286,7 @@ class SearchService:
# 1. Index title if available
if hasattr(shout, "title") and shout.title and isinstance(shout.title, str):
title_doc = {"id": str(shout.id), "title": shout.title.strip()}
indexing_tasks.append(
self.index_client.post("/index-title", json=title_doc)
)
indexing_tasks.append(self.index_client.post("/index-title", json=title_doc))
# 2. Index body content (subtitle, lead, body)
body_text_parts = []
@ -346,9 +322,7 @@ class SearchService:
body_text = body_text[:MAX_TEXT_LENGTH]
body_doc = {"id": str(shout.id), "body": body_text}
indexing_tasks.append(
self.index_client.post("/index-body", json=body_doc)
)
indexing_tasks.append(self.index_client.post("/index-body", json=body_doc))
# 3. Index authors
authors = getattr(shout, "authors", [])
@ -373,30 +347,22 @@ class SearchService:
if name:
author_doc = {"id": author_id, "name": name, "bio": combined_bio}
indexing_tasks.append(
self.index_client.post("/index-author", json=author_doc)
)
indexing_tasks.append(self.index_client.post("/index-author", json=author_doc))
# Run all indexing tasks in parallel
if indexing_tasks:
responses = await asyncio.gather(
*indexing_tasks, return_exceptions=True
)
responses = await asyncio.gather(*indexing_tasks, return_exceptions=True)
# Check for errors in responses
for i, response in enumerate(responses):
if isinstance(response, Exception):
logger.error(f"Error in indexing task {i}: {response}")
elif (
hasattr(response, "status_code") and response.status_code >= 400
):
elif hasattr(response, "status_code") and response.status_code >= 400:
logger.error(
f"Error response in indexing task {i}: {response.status_code}, {await response.text()}"
)
logger.info(
f"Document {shout.id} indexed across {len(indexing_tasks)} endpoints"
)
logger.info(f"Document {shout.id} indexed across {len(indexing_tasks)} endpoints")
else:
logger.warning(f"No content to index for shout {shout.id}")
@ -424,24 +390,14 @@ class SearchService:
for shout in shouts:
try:
# 1. Process title documents
if (
hasattr(shout, "title")
and shout.title
and isinstance(shout.title, str)
):
title_docs.append(
{"id": str(shout.id), "title": shout.title.strip()}
)
if hasattr(shout, "title") and shout.title and isinstance(shout.title, str):
title_docs.append({"id": str(shout.id), "title": shout.title.strip()})
# 2. Process body documents (subtitle, lead, body)
body_text_parts = []
for field_name in ["subtitle", "lead", "body"]:
field_value = getattr(shout, field_name, None)
if (
field_value
and isinstance(field_value, str)
and field_value.strip()
):
if field_value and isinstance(field_value, str) and field_value.strip():
body_text_parts.append(field_value.strip())
# Process media content if available
@ -507,9 +463,7 @@ class SearchService:
}
except Exception as e:
logger.error(
f"Error processing shout {getattr(shout, 'id', 'unknown')} for indexing: {e}"
)
logger.error(f"Error processing shout {getattr(shout, 'id', 'unknown')} for indexing: {e}")
total_skipped += 1
# Convert author dict to list
@ -543,9 +497,7 @@ class SearchService:
logger.info(f"Indexing {len(documents)} {doc_type} documents")
# Categorize documents by size
small_docs, medium_docs, large_docs = self._categorize_by_size(
documents, doc_type
)
small_docs, medium_docs, large_docs = self._categorize_by_size(documents, doc_type)
# Process each category with appropriate batch sizes
batch_sizes = {
@ -561,9 +513,7 @@ class SearchService:
]:
if docs:
batch_size = batch_sizes[category]
await self._process_batches(
docs, batch_size, endpoint, f"{doc_type}-{category}"
)
await self._process_batches(docs, batch_size, endpoint, f"{doc_type}-{category}")
def _categorize_by_size(self, documents, doc_type):
"""Categorize documents by size for optimized batch processing"""
@ -607,9 +557,7 @@ class SearchService:
while not success and retry_count < max_retries:
try:
response = await self.index_client.post(
endpoint, json=batch, timeout=90.0
)
response = await self.index_client.post(endpoint, json=batch, timeout=90.0)
if response.status_code == 422:
error_detail = response.json()
@ -649,9 +597,7 @@ class SearchService:
def _truncate_error_detail(self, error_detail):
"""Truncate error details for logging"""
truncated_detail = (
error_detail.copy() if isinstance(error_detail, dict) else error_detail
)
truncated_detail = error_detail.copy() if isinstance(error_detail, dict) else error_detail
if (
isinstance(truncated_detail, dict)
@ -660,30 +606,22 @@ class SearchService:
):
for i, item in enumerate(truncated_detail["detail"]):
if isinstance(item, dict) and "input" in item:
if isinstance(item["input"], dict) and any(
k in item["input"] for k in ["documents", "text"]
):
if "documents" in item["input"] and isinstance(
item["input"]["documents"], list
):
if isinstance(item["input"], dict) and any(k in item["input"] for k in ["documents", "text"]):
if "documents" in item["input"] and isinstance(item["input"]["documents"], list):
for j, doc in enumerate(item["input"]["documents"]):
if (
"text" in doc
and isinstance(doc["text"], str)
and len(doc["text"]) > 100
):
item["input"]["documents"][j][
"text"
] = f"{doc['text'][:100]}... [truncated, total {len(doc['text'])} chars]"
if "text" in doc and isinstance(doc["text"], str) and len(doc["text"]) > 100:
item["input"]["documents"][j]["text"] = (
f"{doc['text'][:100]}... [truncated, total {len(doc['text'])} chars]"
)
if (
"text" in item["input"]
and isinstance(item["input"]["text"], str)
and len(item["input"]["text"]) > 100
):
item["input"][
"text"
] = f"{item['input']['text'][:100]}... [truncated, total {len(item['input']['text'])} chars]"
item["input"]["text"] = (
f"{item['input']['text'][:100]}... [truncated, total {len(item['input']['text'])} chars]"
)
return truncated_detail
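# Self-contained restatement of the truncation formatting used above; the
# 100-char limit matches the checks in _truncate_error_detail.
def truncate_text(text: str, limit: int = 100) -> str:
    if len(text) <= limit:
        return text
    return f"{text[:limit]}... [truncated, total {len(text)} chars]"

print(truncate_text("x" * 250))  # 100 x's + "... [truncated, total 250 chars]"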
@ -767,9 +705,7 @@ class SearchService:
logger.info(
f"Searching authors for: '{text}' (limit={limit}, offset={offset}, search_limit={search_limit})"
)
response = await self.client.post(
"/search-author", json={"text": text, "limit": search_limit}
)
response = await self.client.post("/search-author", json={"text": text, "limit": search_limit})
response.raise_for_status()
result = response.json()
@ -802,9 +738,7 @@ class SearchService:
result = response.json()
if result.get("consistency", {}).get("status") != "ok":
null_count = result.get("consistency", {}).get(
"null_embeddings_count", 0
)
null_count = result.get("consistency", {}).get("null_embeddings_count", 0)
if null_count > 0:
logger.warning(f"Found {null_count} documents with NULL embeddings")
@ -877,14 +811,10 @@ async def initialize_search_index(shouts_data):
index_status = await search_service.check_index_status()
if index_status.get("status") == "inconsistent":
problem_ids = index_status.get("consistency", {}).get(
"null_embeddings_sample", []
)
problem_ids = index_status.get("consistency", {}).get("null_embeddings_sample", [])
if problem_ids:
problem_docs = [
shout for shout in shouts_data if str(shout.id) in problem_ids
]
problem_docs = [shout for shout in shouts_data if str(shout.id) in problem_ids]
if problem_docs:
await search_service.bulk_index(problem_docs)
@ -902,9 +832,7 @@ async def initialize_search_index(shouts_data):
if isinstance(media, str):
try:
media_json = json.loads(media)
if isinstance(media_json, dict) and (
media_json.get("title") or media_json.get("body")
):
if isinstance(media_json, dict) and (media_json.get("title") or media_json.get("body")):
return True
except Exception:
return True
@ -922,13 +850,9 @@ async def initialize_search_index(shouts_data):
if verification.get("status") == "error":
return
# Only reindex missing docs that actually have body content
missing_ids = [
mid for mid in verification.get("missing", []) if mid in body_ids
]
missing_ids = [mid for mid in verification.get("missing", []) if mid in body_ids]
if missing_ids:
missing_docs = [
shout for shout in shouts_with_body if str(shout.id) in missing_ids
]
missing_docs = [shout for shout in shouts_with_body if str(shout.id) in missing_ids]
await search_service.bulk_index(missing_docs)
else:
pass

View File

@ -83,7 +83,7 @@ class ViewedStorage:
# Decode byte strings, if any
if keys and isinstance(keys[0], bytes):
keys = [k.decode('utf-8') for k in keys]
keys = [k.decode("utf-8") for k in keys]
logger.info(f" * Decoded keys: {keys}")
if not keys:
@ -243,20 +243,12 @@ class ViewedStorage:
# Update topics and authors using the helper function
for [_st, topic] in (
session.query(ShoutTopic, Topic)
.join(Topic)
.join(Shout)
.where(Shout.slug == shout_slug)
.all()
session.query(ShoutTopic, Topic).join(Topic).join(Shout).where(Shout.slug == shout_slug).all()
):
update_groups(self.shouts_by_topic, topic.slug, shout_slug)
for [_st, author] in (
session.query(ShoutAuthor, Author)
.join(Author)
.join(Shout)
.where(Shout.slug == shout_slug)
.all()
session.query(ShoutAuthor, Author).join(Author).join(Shout).where(Shout.slug == shout_slug).all()
):
update_groups(self.shouts_by_author, author.slug, shout_slug)
@ -289,9 +281,7 @@ class ViewedStorage:
if failed == 0:
when = datetime.now(timezone.utc) + timedelta(seconds=self.period)
t = format(when.astimezone().isoformat())
logger.info(
" ⎩ next update: %s" % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0])
)
logger.info(" ⎩ next update: %s" % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0]))
await asyncio.sleep(self.period)
else:
await asyncio.sleep(10)

View File

@ -1,6 +1,7 @@
import pytest
from typing import Dict
import pytest
@pytest.fixture
def oauth_settings() -> Dict[str, Dict[str, str]]:

View File

@ -1,8 +1,9 @@
import pytest
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
from starlette.responses import JSONResponse, RedirectResponse
from auth.oauth import get_user_profile, oauth_login, oauth_callback
from auth.oauth import get_user_profile, oauth_callback, oauth_login
# Override settings for tests
with (

View File

@ -1,5 +1,7 @@
import asyncio
import pytest
from services.redis import redis
from tests.test_config import get_test_client