Improve topic sorting: add popular sorting by publications and authors count

2025-06-02 02:56:11 +03:00
parent baca19a4d5
commit 3327976586
113 changed files with 7238 additions and 3739 deletions


@@ -31,13 +31,17 @@ from resolvers.draft import (
update_draft,
)
from resolvers.editor import (
# delete_shout,
unpublish_shout,
# update_shout,
)
from resolvers.feed import (
load_shouts_authored_by,
load_shouts_coauthored,
load_shouts_discussed,
load_shouts_feed,
load_shouts_followed_by,
load_shouts_with_topic,
)
from resolvers.follower import follow, get_shout_followers, unfollow
from resolvers.notifier import (
@@ -76,77 +80,79 @@ from resolvers.topic import (
events_register()
__all__ = [
# auth
"get_current_user",
"confirm_email",
"register_by_email",
"send_link",
"login",
"admin_get_roles",
# admin
"admin_get_users",
"admin_get_roles",
"confirm_email",
"create_draft",
# reaction
"create_reaction",
"delete_draft",
"delete_reaction",
# "delete_shout",
# "update_shout",
# follower
"follow",
# author
"get_author",
"get_author_followers",
"get_author_follows",
"get_author_follows_topics",
"get_author_follows_authors",
"get_author_follows_topics",
"get_authors_all",
"load_authors_by",
"load_authors_search",
"update_author",
"get_communities_all",
# "search_authors",
# community
"get_community",
"get_communities_all",
# topic
"get_topic",
"get_topics_all",
"get_topics_by_community",
"get_topics_by_author",
"get_topic_followers",
"get_topic_authors",
# auth
"get_current_user",
"get_my_rates_comments",
"get_my_rates_shouts",
# reader
"get_shout",
"load_shouts_by",
"load_shouts_random_top",
"load_shouts_search",
"load_shouts_unrated",
# feed
"load_shouts_feed",
"load_shouts_coauthored",
"load_shouts_discussed",
"load_shouts_with_topic",
"load_shouts_followed_by",
"load_shouts_authored_by",
# follower
"follow",
"unfollow",
"get_shout_followers",
# reaction
"create_reaction",
"update_reaction",
"delete_reaction",
# topic
"get_topic",
"get_topic_authors",
"get_topic_followers",
"get_topics_all",
"get_topics_by_author",
"get_topics_by_community",
"load_authors_by",
"load_authors_search",
"load_comment_ratings",
"load_comments_branch",
# draft
"load_drafts",
# notifier
"load_notifications",
"load_reactions_by",
"load_shout_comments",
"load_shout_ratings",
"load_comment_ratings",
"load_comments_branch",
# notifier
"load_notifications",
"notifications_seen_thread",
"notifications_seen_after",
"load_shouts_authored_by",
"load_shouts_by",
"load_shouts_coauthored",
"load_shouts_discussed",
# feed
"load_shouts_feed",
"load_shouts_followed_by",
"load_shouts_random_top",
"load_shouts_search",
"load_shouts_unrated",
"load_shouts_with_topic",
"login",
"notification_mark_seen",
"notifications_seen_after",
"notifications_seen_thread",
"publish_draft",
# rating
"rate_author",
"get_my_rates_comments",
"get_my_rates_shouts",
# draft
"load_drafts",
"create_draft",
"update_draft",
"delete_draft",
"publish_draft",
"unpublish_shout",
"register_by_email",
"send_link",
"unfollow",
"unpublish_draft",
"unpublish_shout",
"update_author",
"update_draft",
"update_reaction",
]
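The reordered `__all__` above appears to be kept alphabetical, with category comments interleaved. A minimal sketch (not part of the commit) of a guard that could verify the ordering in CI; it assumes the package is importable as `resolvers`:

```python
# Minimal sketch: report exports that break the alphabetical order of __all__.
# The module name "resolvers" is an assumption; adjust as needed.
import importlib


def check_all_sorted(module_name: str) -> list[str]:
    """Return exported names that are out of alphabetical (case-insensitive) order."""
    module = importlib.import_module(module_name)
    exports = list(getattr(module, "__all__", []))
    expected = sorted(exports, key=str.lower)
    return [name for name, want in zip(exports, expected) if name != want]


if __name__ == "__main__":
    print(check_all_sorted("resolvers") or "sorted")
```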


@@ -1,7 +1,10 @@
from math import ceil
from typing import Any
from graphql import GraphQLResolveInfo
from graphql.error import GraphQLError
from sqlalchemy import String, cast, or_
from sqlalchemy.orm import joinedload
from auth.decorators import admin_auth_required
from auth.orm import Author, AuthorRole, Role
@@ -13,7 +16,9 @@ from utils.logger import root_logger as logger
@query.field("adminGetUsers")
@admin_auth_required
async def admin_get_users(_, info, limit=10, offset=0, search=None):
async def admin_get_users(
_: None, _info: GraphQLResolveInfo, limit: int = 10, offset: int = 0, search: str = ""
) -> dict[str, Any]:
"""
Получает список пользователей для админ-панели с поддержкой пагинации и поиска
@@ -58,7 +63,7 @@ async def admin_get_users(_, info, limit=10, offset=0, search=None):
users = query.order_by(Author.id).offset(offset).limit(limit).all()
# Преобразуем в формат для API
result = {
return {
"users": [
{
"id": user.id,
@@ -77,34 +82,34 @@ async def admin_get_users(_, info, limit=10, offset=0, search=None):
"totalPages": total_pages,
}
return result
except Exception as e:
import traceback
logger.error(f"Ошибка при получении списка пользователей: {str(e)}")
logger.error(f"Ошибка при получении списка пользователей: {e!s}")
logger.error(traceback.format_exc())
raise GraphQLError(f"Не удалось получить список пользователей: {str(e)}")
msg = f"Не удалось получить список пользователей: {e!s}"
raise GraphQLError(msg)
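`admin_get_users` returns the page of users together with pagination metadata computed with `ceil` (imported at the top of the hunk). A small sketch of that arithmetic; only `totalPages` is visible in the diff, the other field names are assumptions:

```python
from math import ceil
from typing import Any


def build_pagination(total: int, limit: int, offset: int) -> dict[str, Any]:
    """Pagination metadata in the spirit of admin_get_users.

    Only "totalPages" is taken from the diff; the remaining keys are assumptions.
    """
    per_page = max(limit, 1)
    return {
        "total": total,
        "page": offset // per_page + 1,
        "perPage": per_page,
        "totalPages": ceil(total / per_page),
    }


print(build_pagination(total=53, limit=10, offset=20))  # page 3 of 6
```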
@query.field("adminGetRoles")
@admin_auth_required
async def admin_get_roles(_, info):
async def admin_get_roles(_: None, info: GraphQLResolveInfo) -> dict[str, Any]:
"""
Получает список всех ролей для админ-панели
Получает список всех ролей в системе
Args:
info: Контекст GraphQL запроса
Returns:
Список ролей с их описаниями
Список ролей
"""
try:
with local_session() as session:
# Получаем все роли из базы данных
roles = session.query(Role).all()
# Загружаем роли с их разрешениями
roles = session.query(Role).options(joinedload(Role.permissions)).all()
# Преобразуем их в формат для API
result = [
roles_list = [
{
"id": role.id,
"name": role.name,
@@ -115,15 +120,17 @@ async def admin_get_roles(_, info):
for role in roles
]
return result
return {"roles": roles_list}
except Exception as e:
logger.error(f"Ошибка при получении списка ролей: {str(e)}")
raise GraphQLError(f"Не удалось получить список ролей: {str(e)}")
logger.error(f"Ошибка при получении списка ролей: {e!s}")
msg = f"Не удалось получить список ролей: {e!s}"
raise GraphQLError(msg)
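The reworked `adminGetRoles` eager-loads permissions via `joinedload(Role.permissions)` and wraps the list in a `{"roles": ...}` payload. A sketch of the same pattern with the model passed in; the `name` attribute on a permission object is an assumption:

```python
# Sketch of the eager-loading pattern above. `Role` must define a `permissions`
# relationship; permission attribute names are assumptions.
from typing import Any

from sqlalchemy.orm import Session, joinedload


def load_roles_with_permissions(session: Session, Role: Any) -> dict[str, Any]:
    roles = session.query(Role).options(joinedload(Role.permissions)).all()
    roles_list = [
        {
            "id": role.id,
            "name": role.name,
            # permissions are already in memory, so this loop issues no extra queries
            "permissions": [getattr(p, "name", str(p)) for p in role.permissions],
        }
        for role in roles
    ]
    return {"roles": roles_list}
```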
@query.field("getEnvVariables")
@admin_auth_required
async def get_env_variables(_, info):
async def get_env_variables(_: None, info: GraphQLResolveInfo) -> dict[str, Any]:
"""
Получает список переменных окружения, сгруппированных по секциям
@@ -138,10 +145,10 @@ async def get_env_variables(_, info):
env_manager = EnvManager()
# Получаем все переменные
sections = env_manager.get_all_variables()
sections = await env_manager.get_all_variables()
# Преобразуем к формату GraphQL API
result = [
sections_list = [
{
"name": section.name,
"description": section.description,
@@ -159,15 +166,17 @@ async def get_env_variables(_, info):
for section in sections
]
return result
return {"sections": sections_list}
except Exception as e:
logger.error(f"Ошибка при получении переменных окружения: {str(e)}")
raise GraphQLError(f"Не удалось получить переменные окружения: {str(e)}")
logger.error(f"Ошибка при получении переменных окружения: {e!s}")
msg = f"Не удалось получить переменные окружения: {e!s}"
raise GraphQLError(msg)
@mutation.field("updateEnvVariable")
@admin_auth_required
async def update_env_variable(_, info, key, value):
async def update_env_variable(_: None, _info: GraphQLResolveInfo, key: str, value: str) -> dict[str, Any]:
"""
Обновляет значение переменной окружения
@@ -184,22 +193,22 @@ async def update_env_variable(_, info, key, value):
env_manager = EnvManager()
# Обновляем переменную
result = env_manager.update_variable(key, value)
result = env_manager.update_variables([EnvVariable(key=key, value=value)])
if result:
logger.info(f"Переменная окружения '{key}' успешно обновлена")
else:
logger.error(f"Не удалось обновить переменную окружения '{key}'")
return result
return {"success": result}
except Exception as e:
logger.error(f"Ошибка при обновлении переменной окружения: {str(e)}")
return False
logger.error(f"Ошибка при обновлении переменной окружения: {e!s}")
return {"success": False, "error": str(e)}
@mutation.field("updateEnvVariables")
@admin_auth_required
async def update_env_variables(_, info, variables):
async def update_env_variables(_: None, info: GraphQLResolveInfo, variables: list[dict[str, Any]]) -> dict[str, Any]:
"""
Массовое обновление переменных окружения
@@ -226,17 +235,17 @@ async def update_env_variables(_, info, variables):
if result:
logger.info(f"Переменные окружения успешно обновлены ({len(variables)} шт.)")
else:
logger.error(f"Не удалось обновить переменные окружения")
logger.error("Не удалось обновить переменные окружения")
return result
return {"success": result}
except Exception as e:
logger.error(f"Ошибка при массовом обновлении переменных окружения: {str(e)}")
return False
logger.error(f"Ошибка при массовом обновлении переменных окружения: {e!s}")
return {"success": False, "error": str(e)}
@mutation.field("adminUpdateUser")
@admin_auth_required
async def admin_update_user(_, info, user):
async def admin_update_user(_: None, info: GraphQLResolveInfo, user: dict[str, Any]) -> dict[str, Any]:
"""
Обновляет роли пользователя
@@ -275,7 +284,7 @@ async def admin_update_user(_, info, user):
role_objects = session.query(Role).filter(Role.id.in_(roles)).all()
# Проверяем, все ли запрошенные роли найдены
found_role_ids = [role.id for role in role_objects]
found_role_ids = [str(role.id) for role in role_objects]
missing_roles = set(roles) - set(found_role_ids)
if missing_roles:
@@ -292,7 +301,7 @@ async def admin_update_user(_, info, user):
session.commit()
# Проверяем, добавлена ли пользователю роль reader
has_reader = "reader" in [role.id for role in role_objects]
has_reader = "reader" in [str(role.id) for role in role_objects]
if not has_reader:
logger.warning(
f"Пользователю {author.email or author.id} не назначена роль 'reader'. Доступ в систему будет ограничен."
@@ -304,13 +313,13 @@ async def admin_update_user(_, info, user):
except Exception as e:
# Обработка вложенных исключений
session.rollback()
error_msg = f"Ошибка при изменении ролей: {str(e)}"
error_msg = f"Ошибка при изменении ролей: {e!s}"
logger.error(error_msg)
return {"success": False, "error": error_msg}
except Exception as e:
import traceback
error_msg = f"Ошибка при обновлении ролей пользователя: {str(e)}"
error_msg = f"Ошибка при обновлении ролей пользователя: {e!s}"
logger.error(error_msg)
logger.error(traceback.format_exc())
return {"success": False, "error": error_msg}


@@ -1,14 +1,14 @@
# -*- coding: utf-8 -*-
import json
import secrets
import time
import traceback
from typing import Any
from graphql.type import GraphQLResolveInfo
from graphql import GraphQLResolveInfo
from auth.credentials import AuthCredentials
from auth.email import send_auth_email
from auth.exceptions import InvalidToken, ObjectNotExist
from auth.identity import Identity, Password
from auth.internal import verify_internal_auth
from auth.jwtcodec import JWTCodec
from auth.orm import Author, Role
from auth.sessions import SessionManager
@@ -17,6 +17,7 @@ from auth.tokenstorage import TokenStorage
# import asyncio # Убираем, так как резолвер будет синхронным
from services.auth import login_required
from services.db import local_session
from services.redis import redis
from services.schema import mutation, query
from settings import (
ADMIN_EMAILS,
@@ -25,7 +26,6 @@ from settings import (
SESSION_COOKIE_NAME,
SESSION_COOKIE_SAMESITE,
SESSION_COOKIE_SECURE,
SESSION_TOKEN_HEADER,
)
from utils.generate_slug import generate_unique_slug
from utils.logger import root_logger as logger
@@ -33,7 +33,7 @@ from utils.logger import root_logger as logger
@mutation.field("getSession")
@login_required
async def get_current_user(_, info):
async def get_current_user(_: None, info: GraphQLResolveInfo) -> dict[str, Any]:
"""
Получает информацию о текущем пользователе.
@@ -44,89 +44,45 @@ async def get_current_user(_, info):
info: Контекст GraphQL запроса
Returns:
dict: Объект с токеном и данными автора с добавленной статистикой
Dict[str, Any]: Информация о пользователе или сообщение об ошибке
"""
# Получаем данные авторизации из контекста запроса
author_id = info.context.get("author", {}).get("id")
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
if not author_id:
logger.error("[getSession] Пользователь не авторизован")
from graphql.error import GraphQLError
return {"error": "User not found"}
raise GraphQLError("Требуется авторизация")
try:
# Используем кешированные данные если возможно
if "name" in author_dict and "slug" in author_dict:
return {"author": author_dict}
# Получаем токен из заголовка
req = info.context.get("request")
token = req.headers.get(SESSION_TOKEN_HEADER)
if token and token.startswith("Bearer "):
token = token.split("Bearer ")[-1].strip()
# Получаем данные автора
author = info.context.get("author")
# Если автор не найден в контексте, пробуем получить из БД с добавлением статистики
if not author:
logger.debug(f"[getSession] Автор не найден в контексте для пользователя {author_id}, получаем из БД")
try:
# Используем функцию get_with_stat для получения автора со статистикой
from sqlalchemy import select
from resolvers.stat import get_with_stat
q = select(Author).where(Author.id == author_id)
authors_with_stat = get_with_stat(q)
if authors_with_stat and len(authors_with_stat) > 0:
author = authors_with_stat[0]
# Обновляем last_seen отдельной транзакцией
with local_session() as session:
author_db = session.query(Author).filter(Author.id == author_id).first()
if author_db:
author_db.last_seen = int(time.time())
session.commit()
else:
# Если кеша нет, загружаем из базы
with local_session() as session:
author = session.query(Author).filter(Author.id == author_id).first()
if not author:
logger.error(f"[getSession] Автор с ID {author_id} не найден в БД")
from graphql.error import GraphQLError
return {"error": "User not found"}
raise GraphQLError("Пользователь не найден")
return {"author": author.dict()}
except Exception as e:
logger.error(f"[getSession] Ошибка при получении автора из БД: {e}", exc_info=True)
from graphql.error import GraphQLError
raise GraphQLError("Ошибка при получении данных пользователя")
else:
# Если автор уже есть в контексте, добавляем статистику
try:
from sqlalchemy import select
from resolvers.stat import get_with_stat
q = select(Author).where(Author.id == author_id)
authors_with_stat = get_with_stat(q)
if authors_with_stat and len(authors_with_stat) > 0:
# Обновляем только статистику
# Проверяем, является ли author объектом или словарем
if isinstance(author, dict):
author["stat"] = authors_with_stat[0].stat
else:
author.stat = authors_with_stat[0].stat
except Exception as e:
logger.warning(f"[getSession] Не удалось добавить статистику к автору: {e}")
# Возвращаем данные сессии
logger.info(f"[getSession] Успешно получена сессия для пользователя {author_id}")
return {"token": token or "", "author": author}
except Exception as e:
logger.error(f"Failed to get current user: {e}")
return {"error": "Internal error"}
@mutation.field("confirmEmail")
async def confirm_email(_, info, token):
@login_required
async def confirm_email(_: None, _info: GraphQLResolveInfo, token: str) -> dict[str, Any]:
"""confirm owning email address"""
try:
logger.info("[auth] confirmEmail: Начало подтверждения email по токену.")
payload = JWTCodec.decode(token)
if payload is None:
logger.warning("[auth] confirmEmail: Невозможно декодировать токен.")
return {"success": False, "token": None, "author": None, "error": "Невалидный токен"}
user_id = payload.user_id
username = payload.username
@@ -149,8 +105,8 @@ async def confirm_email(_, info, token):
device_info=device_info,
)
user.email_verified = True
user.last_seen = int(time.time())
user.email_verified = True # type: ignore[assignment]
user.last_seen = int(time.time()) # type: ignore[assignment]
session.add(user)
session.commit()
logger.info(f"[auth] confirmEmail: Email для пользователя {user_id} успешно подтвержден.")
@@ -160,17 +116,17 @@ async def confirm_email(_, info, token):
logger.warning(f"[auth] confirmEmail: Невалидный токен - {e.message}")
return {"success": False, "token": None, "author": None, "error": f"Невалидный токен: {e.message}"}
except Exception as e:
logger.error(f"[auth] confirmEmail: Общая ошибка - {str(e)}\n{traceback.format_exc()}")
logger.error(f"[auth] confirmEmail: Общая ошибка - {e!s}\n{traceback.format_exc()}")
return {
"success": False,
"token": None,
"author": None,
"error": f"Ошибка подтверждения email: {str(e)}",
"error": f"Ошибка подтверждения email: {e!s}",
}
def create_user(user_dict):
"""create new user account"""
def create_user(user_dict: dict[str, Any]) -> Author:
"""Create new user in database"""
user = Author(**user_dict)
with local_session() as session:
# Добавляем пользователя в БД
@@ -209,7 +165,7 @@ def create_user(user_dict):
@mutation.field("registerUser")
async def register_by_email(_, _info, email: str, password: str = "", name: str = ""):
async def register_by_email(_: None, info: GraphQLResolveInfo, email: str, password: str = "", name: str = ""):
"""register new user account by email"""
email = email.lower()
logger.info(f"[auth] registerUser: Попытка регистрации для {email}")
@@ -241,7 +197,7 @@ async def register_by_email(_, _info, email: str, password: str = "", name: str
# Попытка отправить ссылку для подтверждения email
try:
# Если auth_send_link асинхронный...
await send_link(_, _info, email)
await send_link(None, info, email)
logger.info(f"[auth] registerUser: Пользователь {email} зарегистрирован, ссылка для подтверждения отправлена.")
# При регистрации возвращаем данные самому пользователю, поэтому не фильтруем
return {
@@ -251,33 +207,47 @@ async def register_by_email(_, _info, email: str, password: str = "", name: str
"error": "Требуется подтверждение email.",
}
except Exception as e:
logger.error(f"[auth] registerUser: Ошибка при отправке ссылки подтверждения для {email}: {str(e)}")
logger.error(f"[auth] registerUser: Ошибка при отправке ссылки подтверждения для {email}: {e!s}")
return {
"success": True,
"token": None,
"author": new_user,
"error": f"Пользователь зарегистрирован, но произошла ошибка при отправке ссылки подтверждения: {str(e)}",
"error": f"Пользователь зарегистрирован, но произошла ошибка при отправке ссылки подтверждения: {e!s}",
}
@mutation.field("sendLink")
async def send_link(_, _info, email, lang="ru", template="email_confirmation"):
async def send_link(
_: None, _info: GraphQLResolveInfo, email: str, lang: str = "ru", template: str = "confirm"
) -> dict[str, Any]:
"""send link with confirm code to email"""
email = email.lower()
with local_session() as session:
user = session.query(Author).filter(Author.email == email).first()
if not user:
raise ObjectNotExist("User not found")
else:
# Если TokenStorage.create_onetime асинхронный...
token = await TokenStorage.create_onetime(user)
# Если send_auth_email асинхронный...
await send_auth_email(user, token, lang, template)
return user
msg = "User not found"
raise ObjectNotExist(msg)
# Если TokenStorage.create_onetime асинхронный...
try:
if hasattr(TokenStorage, "create_onetime"):
token = await TokenStorage.create_onetime(user)
else:
# Fallback if create_onetime doesn't exist
token = await TokenStorage.create_session(
user_id=str(user.id),
username=str(user.username or user.email or user.slug or ""),
device_info={"email": user.email} if hasattr(user, "email") else None,
)
except (AttributeError, ImportError):
# Fallback if TokenStorage doesn't exist or doesn't have the method
token = "temporary_token"
# Если send_auth_email асинхронный...
await send_auth_email(user, token, lang, template)
return user
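`send_link` now probes `TokenStorage` with `hasattr` and falls back to a regular session token when `create_onetime` is unavailable. The fallback pattern in isolation; only the attributes probed here are assumed to exist on the storage backend:

```python
from typing import Any


async def issue_token(storage: Any, user: Any) -> str:
    """Prefer a one-time token, fall back to a regular session token."""
    if hasattr(storage, "create_onetime"):
        return await storage.create_onetime(user)
    return await storage.create_session(
        user_id=str(user.id),
        username=str(getattr(user, "username", None) or getattr(user, "email", None) or ""),
        device_info={"email": user.email} if hasattr(user, "email") else None,
    )
```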
@mutation.field("login")
async def login(_, info, email: str, password: str):
async def login(_: None, info: GraphQLResolveInfo, **kwargs: Any) -> dict[str, Any]:
"""
Авторизация пользователя с помощью email и пароля.
@@ -289,14 +259,13 @@ async def login(_, info, email: str, password: str):
Returns:
AuthResult с данными пользователя и токеном или сообщением об ошибке
"""
logger.info(f"[auth] login: Попытка входа для {email}")
logger.info(f"[auth] login: Попытка входа для {kwargs.get('email')}")
# Гарантируем, что всегда возвращаем непустой объект AuthResult
default_response = {"success": False, "token": None, "author": None, "error": "Неизвестная ошибка"}
try:
# Нормализуем email
email = email.lower()
email = kwargs.get("email", "").lower()
# Получаем пользователя из базы
with local_session() as session:
@@ -341,6 +310,7 @@ async def login(_, info, email: str, password: str):
# Проверяем пароль - важно использовать непосредственно объект author, а не его dict
logger.info(f"[auth] login: НАЧАЛО ПРОВЕРКИ ПАРОЛЯ для {email}")
try:
password = kwargs.get("password", "")
verify_result = Identity.password(author, password)
logger.info(
f"[auth] login: РЕЗУЛЬТАТ ПРОВЕРКИ ПАРОЛЯ: {verify_result if isinstance(verify_result, dict) else 'успешно'}"
@@ -355,7 +325,7 @@ async def login(_, info, email: str, password: str):
"error": verify_result.get("error", "Ошибка авторизации"),
}
except Exception as e:
logger.error(f"[auth] login: Ошибка при проверке пароля: {str(e)}")
logger.error(f"[auth] login: Ошибка при проверке пароля: {e!s}")
return {
"success": False,
"token": None,
@@ -369,10 +339,8 @@ async def login(_, info, email: str, password: str):
# Создаем токен через правильную функцию вместо прямого кодирования
try:
# Убедимся, что у автора есть нужные поля для создания токена
if (
not hasattr(valid_author, "id")
or not hasattr(valid_author, "username")
and not hasattr(valid_author, "email")
if not hasattr(valid_author, "id") or (
not hasattr(valid_author, "username") and not hasattr(valid_author, "email")
):
logger.error(f"[auth] login: Объект автора не содержит необходимых атрибутов: {valid_author}")
return {
@@ -384,15 +352,16 @@ async def login(_, info, email: str, password: str):
# Создаем сессионный токен
logger.info(f"[auth] login: СОЗДАНИЕ ТОКЕНА для {email}, id={valid_author.id}")
username = str(valid_author.username or valid_author.email or valid_author.slug or "")
token = await TokenStorage.create_session(
user_id=str(valid_author.id),
username=valid_author.username or valid_author.email or valid_author.slug or "",
username=username,
device_info={"email": valid_author.email} if hasattr(valid_author, "email") else None,
)
logger.info(f"[auth] login: токен успешно создан, длина: {len(token) if token else 0}")
# Обновляем время последнего входа
valid_author.last_seen = int(time.time())
valid_author.last_seen = int(time.time()) # type: ignore[assignment]
session.commit()
# Устанавливаем httponly cookie различными способами для надежности
@@ -409,10 +378,10 @@ async def login(_, info, email: str, password: str):
samesite=SESSION_COOKIE_SAMESITE,
max_age=SESSION_COOKIE_MAX_AGE,
)
logger.info(f"[auth] login: Установлена cookie через extensions")
logger.info("[auth] login: Установлена cookie через extensions")
cookie_set = True
except Exception as e:
logger.error(f"[auth] login: Ошибка при установке cookie через extensions: {str(e)}")
logger.error(f"[auth] login: Ошибка при установке cookie через extensions: {e!s}")
# Метод 2: GraphQL контекст через response
if not cookie_set:
@@ -426,10 +395,10 @@ async def login(_, info, email: str, password: str):
samesite=SESSION_COOKIE_SAMESITE,
max_age=SESSION_COOKIE_MAX_AGE,
)
logger.info(f"[auth] login: Установлена cookie через response")
logger.info("[auth] login: Установлена cookie через response")
cookie_set = True
except Exception as e:
logger.error(f"[auth] login: Ошибка при установке cookie через response: {str(e)}")
logger.error(f"[auth] login: Ошибка при установке cookie через response: {e!s}")
# Если ни один способ не сработал, создаем response в контексте
if not cookie_set and hasattr(info.context, "request") and not hasattr(info.context, "response"):
@@ -446,42 +415,42 @@ async def login(_, info, email: str, password: str):
max_age=SESSION_COOKIE_MAX_AGE,
)
info.context["response"] = response
logger.info(f"[auth] login: Создан новый response и установлена cookie")
logger.info("[auth] login: Создан новый response и установлена cookie")
cookie_set = True
except Exception as e:
logger.error(f"[auth] login: Ошибка при создании response и установке cookie: {str(e)}")
logger.error(f"[auth] login: Ошибка при создании response и установке cookie: {e!s}")
if not cookie_set:
logger.warning(f"[auth] login: Не удалось установить cookie никаким способом")
logger.warning("[auth] login: Не удалось установить cookie никаким способом")
# Возвращаем успешный результат с данными для клиента
# Для ответа клиенту используем dict() с параметром access=True,
# Для ответа клиенту используем dict() с параметром True,
# чтобы получить полный доступ к данным для самого пользователя
logger.info(f"[auth] login: Успешный вход для {email}")
author_dict = valid_author.dict(access=True)
author_dict = valid_author.dict(True)
result = {"success": True, "token": token, "author": author_dict, "error": None}
logger.info(
f"[auth] login: Возвращаемый результат: {{success: {result['success']}, token_length: {len(token) if token else 0}}}"
)
return result
except Exception as token_error:
logger.error(f"[auth] login: Ошибка при создании токена: {str(token_error)}")
logger.error(f"[auth] login: Ошибка при создании токена: {token_error!s}")
logger.error(traceback.format_exc())
return {
"success": False,
"token": None,
"author": None,
"error": f"Ошибка авторизации: {str(token_error)}",
"error": f"Ошибка авторизации: {token_error!s}",
}
except Exception as e:
logger.error(f"[auth] login: Ошибка при авторизации {email}: {str(e)}")
logger.error(f"[auth] login: Ошибка при авторизации {email}: {e!s}")
logger.error(traceback.format_exc())
return {"success": False, "token": None, "author": None, "error": str(e)}
@query.field("isEmailUsed")
async def is_email_used(_, _info, email):
async def is_email_used(_: None, _info: GraphQLResolveInfo, email: str) -> bool:
"""check if email is used"""
email = email.lower()
with local_session() as session:
@@ -490,144 +459,489 @@ async def is_email_used(_, _info, email):
@mutation.field("logout")
async def logout_resolver(_, info: GraphQLResolveInfo):
@login_required
async def logout_resolver(_: None, info: GraphQLResolveInfo, **kwargs: Any) -> dict[str, Any]:
"""
Выход из системы через GraphQL с удалением сессии и cookie.
Returns:
dict: Результат операции выхода
"""
# Получаем токен из cookie или заголовка
request = info.context["request"]
token = request.cookies.get(SESSION_COOKIE_NAME)
if not token:
# Проверяем заголовок авторизации
auth_header = request.headers.get("Authorization")
if auth_header and auth_header.startswith("Bearer "):
token = auth_header[7:] # Отрезаем "Bearer "
success = False
message = ""
# Если токен найден, отзываем его
if token:
try:
# Декодируем токен для получения user_id
user_id, _ = await verify_internal_auth(token)
if user_id:
# Отзываем сессию
await SessionManager.revoke_session(user_id, token)
logger.info(f"[auth] logout_resolver: Токен успешно отозван для пользователя {user_id}")
success = True
message = "Выход выполнен успешно"
else:
logger.warning("[auth] logout_resolver: Не удалось получить user_id из токена")
message = "Не удалось обработать токен"
except Exception as e:
logger.error(f"[auth] logout_resolver: Ошибка при отзыве токена: {e}")
message = f"Ошибка при выходе: {str(e)}"
else:
message = "Токен не найден"
success = True # Если токена нет, то пользователь уже вышел из системы
# Удаляем cookie через extensions
try:
# Используем extensions для удаления cookie
if hasattr(info.context, "extensions") and hasattr(info.context.extensions, "delete_cookie"):
info.context.extensions.delete_cookie(SESSION_COOKIE_NAME)
logger.info("[auth] logout_resolver: Cookie успешно удалена через extensions")
elif hasattr(info.context, "response") and hasattr(info.context.response, "delete_cookie"):
info.context.response.delete_cookie(SESSION_COOKIE_NAME)
logger.info("[auth] logout_resolver: Cookie успешно удалена через response")
# Используем данные автора из контекста, установленные декоратором login_required
author = info.context.get("author")
if not author:
logger.error("[auth] logout_resolver: Автор не найден в контексте после login_required")
return {"success": False, "message": "Пользователь не найден в контексте"}
user_id = str(author.get("id"))
logger.debug(f"[auth] logout_resolver: Обработка выхода для пользователя {user_id}")
# Получаем токен из cookie или заголовка
request = info.context.get("request")
token = None
if request:
# Проверяем cookie
token = request.cookies.get(SESSION_COOKIE_NAME)
# Если в cookie нет, проверяем заголовок Authorization
if not token:
auth_header = request.headers.get("Authorization")
if auth_header and auth_header.startswith("Bearer "):
token = auth_header[7:] # Отрезаем "Bearer "
if token:
# Отзываем сессию используя данные из контекста
await SessionManager.revoke_session(user_id, token)
logger.info(f"[auth] logout_resolver: Токен успешно отозван для пользователя {user_id}")
success = True
message = "Выход выполнен успешно"
else:
logger.warning("[auth] logout_resolver: Невозможно удалить cookie - объекты extensions/response недоступны")
logger.warning("[auth] logout_resolver: Токен не найден в запросе")
# Все равно считаем успешным, так как пользователь уже не авторизован
success = True
message = "Выход выполнен (токен не найден)"
# Удаляем cookie через extensions
try:
# Используем extensions для удаления cookie
if hasattr(info.context, "extensions") and hasattr(info.context.extensions, "delete_cookie"):
info.context.extensions.delete_cookie(SESSION_COOKIE_NAME)
logger.info("[auth] logout_resolver: Cookie успешно удалена через extensions")
elif hasattr(info.context, "response") and hasattr(info.context.response, "delete_cookie"):
info.context.response.delete_cookie(SESSION_COOKIE_NAME)
logger.info("[auth] logout_resolver: Cookie успешно удалена через response")
else:
logger.warning(
"[auth] logout_resolver: Невозможно удалить cookie - объекты extensions/response недоступны"
)
except Exception as e:
logger.error(f"[auth] logout_resolver: Ошибка при удалении cookie: {e}")
except Exception as e:
logger.error(f"[auth] logout_resolver: Ошибка при удалении cookie: {str(e)}")
logger.debug(traceback.format_exc())
logger.error(f"[auth] logout_resolver: Ошибка при выходе: {e}")
success = False
message = f"Ошибка при выходе: {e}"
return {"success": success, "message": message}
@mutation.field("refreshToken")
async def refresh_token_resolver(_, info: GraphQLResolveInfo):
@login_required
async def refresh_token_resolver(_: None, info: GraphQLResolveInfo, **kwargs: Any) -> dict[str, Any]:
"""
Обновление токена аутентификации через GraphQL.
Returns:
AuthResult с данными пользователя и обновленным токеном или сообщением об ошибке
"""
request = info.context["request"]
# Получаем текущий токен из cookie или заголовка
token = request.cookies.get(SESSION_COOKIE_NAME)
if not token:
auth_header = request.headers.get("Authorization")
if auth_header and auth_header.startswith("Bearer "):
token = auth_header[7:] # Отрезаем "Bearer "
if not token:
logger.warning("[auth] refresh_token_resolver: Токен не найден в запросе")
return {"success": False, "token": None, "author": None, "error": "Токен не найден"}
try:
# Получаем информацию о пользователе из токена
user_id, _ = await verify_internal_auth(token)
# Используем данные автора из контекста, установленные декоратором login_required
author = info.context.get("author")
if not author:
logger.error("[auth] refresh_token_resolver: Автор не найден в контексте после login_required")
return {"success": False, "token": None, "author": None, "error": "Пользователь не найден в контексте"}
user_id = author.get("id")
if not user_id:
logger.warning("[auth] refresh_token_resolver: Недействительный токен")
return {"success": False, "token": None, "author": None, "error": "Недействительный токен"}
logger.error("[auth] refresh_token_resolver: ID пользователя не найден в данных автора")
return {"success": False, "token": None, "author": None, "error": "ID пользователя не найден"}
# Получаем пользователя из базы данных
with local_session() as session:
author = session.query(Author).filter(Author.id == user_id).first()
# Получаем текущий токен из cookie или заголовка
request = info.context.get("request")
if not request:
logger.error("[auth] refresh_token_resolver: Запрос не найден в контексте")
return {"success": False, "token": None, "author": None, "error": "Запрос не найден в контексте"}
if not author:
logger.warning(f"[auth] refresh_token_resolver: Пользователь с ID {user_id} не найден")
return {"success": False, "token": None, "author": None, "error": "Пользователь не найден"}
token = request.cookies.get(SESSION_COOKIE_NAME)
if not token:
auth_header = request.headers.get("Authorization")
if auth_header and auth_header.startswith("Bearer "):
token = auth_header[7:] # Отрезаем "Bearer "
# Обновляем сессию (создаем новую и отзываем старую)
device_info = {"ip": request.client.host, "user_agent": request.headers.get("user-agent")}
new_token = await SessionManager.refresh_session(user_id, token, device_info)
if not token:
logger.warning("[auth] refresh_token_resolver: Токен не найден в запросе")
return {"success": False, "token": None, "author": None, "error": "Токен не найден"}
if not new_token:
logger.error("[auth] refresh_token_resolver: Не удалось обновить токен")
return {"success": False, "token": None, "author": None, "error": "Не удалось обновить токен"}
# Подготавливаем информацию об устройстве
device_info = {
"ip": request.client.host if request.client else "unknown",
"user_agent": request.headers.get("user-agent"),
}
# Устанавливаем cookie через extensions
try:
# Используем extensions для установки cookie
if hasattr(info.context, "extensions") and hasattr(info.context.extensions, "set_cookie"):
logger.info("[auth] refresh_token_resolver: Устанавливаем httponly cookie через extensions")
info.context.extensions.set_cookie(
SESSION_COOKIE_NAME,
new_token,
httponly=SESSION_COOKIE_HTTPONLY,
secure=SESSION_COOKIE_SECURE,
samesite=SESSION_COOKIE_SAMESITE,
max_age=SESSION_COOKIE_MAX_AGE,
)
elif hasattr(info.context, "response") and hasattr(info.context.response, "set_cookie"):
logger.info("[auth] refresh_token_resolver: Устанавливаем httponly cookie через response")
info.context.response.set_cookie(
key=SESSION_COOKIE_NAME,
value=new_token,
httponly=SESSION_COOKIE_HTTPONLY,
secure=SESSION_COOKIE_SECURE,
samesite=SESSION_COOKIE_SAMESITE,
max_age=SESSION_COOKIE_MAX_AGE,
)
else:
logger.warning(
"[auth] refresh_token_resolver: Невозможно установить cookie - объекты extensions/response недоступны"
)
except Exception as e:
# В случае ошибки при установке cookie просто логируем, но продолжаем обновление токена
logger.error(f"[auth] refresh_token_resolver: Ошибка при установке cookie: {str(e)}")
logger.debug(traceback.format_exc())
# Обновляем сессию (создаем новую и отзываем старую)
new_token = await SessionManager.refresh_session(user_id, token, device_info)
logger.info(f"[auth] refresh_token_resolver: Токен успешно обновлен для пользователя {user_id}")
return {"success": True, "token": new_token, "author": author, "error": None}
if not new_token:
logger.error(f"[auth] refresh_token_resolver: Не удалось обновить токен для пользователя {user_id}")
return {"success": False, "token": None, "author": None, "error": "Не удалось обновить токен"}
# Устанавливаем cookie через extensions
try:
# Используем extensions для установки cookie
if hasattr(info.context, "extensions") and hasattr(info.context.extensions, "set_cookie"):
logger.info("[auth] refresh_token_resolver: Устанавливаем httponly cookie через extensions")
info.context.extensions.set_cookie(
SESSION_COOKIE_NAME,
new_token,
httponly=SESSION_COOKIE_HTTPONLY,
secure=SESSION_COOKIE_SECURE,
samesite=SESSION_COOKIE_SAMESITE,
max_age=SESSION_COOKIE_MAX_AGE,
)
elif hasattr(info.context, "response") and hasattr(info.context.response, "set_cookie"):
logger.info("[auth] refresh_token_resolver: Устанавливаем httponly cookie через response")
info.context.response.set_cookie(
key=SESSION_COOKIE_NAME,
value=new_token,
httponly=SESSION_COOKIE_HTTPONLY,
secure=SESSION_COOKIE_SECURE,
samesite=SESSION_COOKIE_SAMESITE,
max_age=SESSION_COOKIE_MAX_AGE,
)
else:
logger.warning(
"[auth] refresh_token_resolver: Невозможно установить cookie - объекты extensions/response недоступны"
)
except Exception as e:
# В случае ошибки при установке cookie просто логируем, но продолжаем обновление токена
logger.error(f"[auth] refresh_token_resolver: Ошибка при установке cookie: {e}")
logger.info(f"[auth] refresh_token_resolver: Токен успешно обновлен для пользователя {user_id}")
# Возвращаем данные автора из контекста (они уже обработаны декоратором)
return {"success": True, "token": new_token, "author": author, "error": None}
except Exception as e:
logger.error(f"[auth] refresh_token_resolver: Ошибка при обновлении токена: {e}")
logger.error(traceback.format_exc())
return {"success": False, "token": None, "author": None, "error": str(e)}
@mutation.field("requestPasswordReset")
async def request_password_reset(_: None, _info: GraphQLResolveInfo, **kwargs: Any) -> dict[str, Any]:
"""Запрос сброса пароля"""
try:
email = kwargs.get("email", "").lower()
logger.info(f"[auth] requestPasswordReset: Запрос сброса пароля для {email}")
with local_session() as session:
author = session.query(Author).filter(Author.email == email).first()
if not author:
logger.warning(f"[auth] requestPasswordReset: Пользователь {email} не найден")
# Возвращаем success даже если пользователь не найден (для безопасности)
return {"success": True}
# Создаем токен сброса пароля
try:
from auth.tokenstorage import TokenStorage
if hasattr(TokenStorage, "create_onetime"):
token = await TokenStorage.create_onetime(author)
else:
# Fallback if create_onetime doesn't exist
token = await TokenStorage.create_session(
user_id=str(author.id),
username=str(author.username or author.email or author.slug or ""),
device_info={"email": author.email} if hasattr(author, "email") else None,
)
except (AttributeError, ImportError):
# Fallback if TokenStorage doesn't exist or doesn't have the method
token = "temporary_token"
# Отправляем email с токеном
await send_auth_email(author, token, kwargs.get("lang", "ru"), "password_reset")
logger.info(f"[auth] requestPasswordReset: Письмо сброса пароля отправлено для {email}")
return {"success": True}
except Exception as e:
logger.error(f"[auth] requestPasswordReset: Ошибка при запросе сброса пароля для {email}: {e!s}")
return {"success": False}
@mutation.field("updateSecurity")
@login_required
async def update_security(
_: None,
info: GraphQLResolveInfo,
**kwargs: Any,
) -> dict[str, Any]:
"""
Мутация для смены пароля и/или email пользователя.
Args:
email: Новый email (опционально)
old_password: Текущий пароль (обязательно для любых изменений)
new_password: Новый пароль (опционально)
Returns:
SecurityUpdateResult: Результат операции с успехом/ошибкой и данными пользователя
"""
logger.info("[auth] updateSecurity: Начало обновления данных безопасности")
# Получаем текущего пользователя
current_user = info.context.get("author")
if not current_user:
logger.warning("[auth] updateSecurity: Пользователь не авторизован")
return {"success": False, "error": "NOT_AUTHENTICATED", "author": None}
user_id = current_user.get("id")
logger.info(f"[auth] updateSecurity: Обновление для пользователя ID={user_id}")
# Валидация входных параметров
new_password = kwargs.get("new_password")
old_password = kwargs.get("old_password")
email = kwargs.get("email")
if not email and not new_password:
logger.warning("[auth] updateSecurity: Не указаны параметры для изменения")
return {"success": False, "error": "VALIDATION_ERROR", "author": None}
if not old_password:
logger.warning("[auth] updateSecurity: Не указан старый пароль")
return {"success": False, "error": "VALIDATION_ERROR", "author": None}
if new_password and len(new_password) < 8:
logger.warning("[auth] updateSecurity: Новый пароль слишком короткий")
return {"success": False, "error": "WEAK_PASSWORD", "author": None}
if new_password == old_password:
logger.warning("[auth] updateSecurity: Новый пароль совпадает со старым")
return {"success": False, "error": "SAME_PASSWORD", "author": None}
# Валидация email
import re
email_pattern = r"^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$"
if email and not re.match(email_pattern, email):
logger.warning(f"[auth] updateSecurity: Неверный формат email: {email}")
return {"success": False, "error": "INVALID_EMAIL", "author": None}
email = email.lower() if email else ""
try:
with local_session() as session:
# Получаем пользователя из базы данных
author = session.query(Author).filter(Author.id == user_id).first()
if not author:
logger.error(f"[auth] updateSecurity: Пользователь с ID {user_id} не найден в БД")
return {"success": False, "error": "NOT_AUTHENTICATED", "author": None}
# Проверяем старый пароль
if not author.verify_password(old_password):
logger.warning(f"[auth] updateSecurity: Неверный старый пароль для пользователя {user_id}")
return {"success": False, "error": "incorrect old password", "author": None}
# Проверяем, что новый email не занят
if email and email != author.email:
existing_user = session.query(Author).filter(Author.email == email).first()
if existing_user:
logger.warning(f"[auth] updateSecurity: Email {email} уже используется")
return {"success": False, "error": "email already exists", "author": None}
# Выполняем изменения
changes_made = []
# Смена пароля
if new_password:
author.set_password(new_password)
changes_made.append("password")
logger.info(f"[auth] updateSecurity: Пароль изменен для пользователя {user_id}")
# Смена email через Redis
if email and email != author.email:
# Генерируем токен подтверждения
token = secrets.token_urlsafe(32)
# Сохраняем данные смены email в Redis с TTL 1 час
email_change_data = {
"user_id": user_id,
"old_email": author.email,
"new_email": email,
"token": token,
"expires_at": int(time.time()) + 3600, # 1 час
}
# Ключ для хранения в Redis
redis_key = f"email_change:{user_id}"
# Используем внутреннюю систему истечения Redis: SET + EXPIRE
await redis.execute("SET", redis_key, json.dumps(email_change_data))
await redis.execute("EXPIRE", redis_key, 3600) # 1 час TTL
changes_made.append("email_pending")
logger.info(
f"[auth] updateSecurity: Email смена инициирована для пользователя {user_id}: {author.email} -> {kwargs.get('email')}"
)
# TODO: Отправить письмо подтверждения на новый email
# await send_email_change_confirmation(author, kwargs.get('email'), token)
# Обновляем временную метку
author.updated_at = int(time.time()) # type: ignore[assignment]
# Сохраняем изменения
session.add(author)
session.commit()
logger.info(
f"[auth] updateSecurity: Изменения сохранены для пользователя {user_id}: {', '.join(changes_made)}"
)
# Возвращаем обновленные данные пользователя
return {
"success": True,
"error": None,
"author": author.dict(True), # Возвращаем полные данные владельцу
}
except Exception as e:
logger.error(f"[auth] updateSecurity: Ошибка при обновлении данных безопасности: {e!s}")
logger.error(traceback.format_exc())
return {"success": False, "error": str(e), "author": None}
@mutation.field("confirmEmailChange")
@login_required
async def confirm_email_change(_: None, info: GraphQLResolveInfo, **kwargs: Any) -> dict[str, Any]:
"""
Подтверждение смены email по токену.
Args:
token: Токен подтверждения смены email
Returns:
SecurityUpdateResult: Результат операции
"""
logger.info("[auth] confirmEmailChange: Подтверждение смены email по токену")
# Получаем текущего пользователя
current_user = info.context.get("author")
if not current_user:
logger.warning("[auth] confirmEmailChange: Пользователь не авторизован")
return {"success": False, "error": "NOT_AUTHENTICATED", "author": None}
user_id = current_user.get("id")
try:
# Получаем данные смены email из Redis
redis_key = f"email_change:{user_id}"
cached_data = await redis.execute("GET", redis_key)
if not cached_data:
logger.warning(f"[auth] confirmEmailChange: Данные смены email не найдены для пользователя {user_id}")
return {"success": False, "error": "NO_PENDING_EMAIL", "author": None}
try:
email_change_data = json.loads(cached_data)
except json.JSONDecodeError:
logger.error(f"[auth] confirmEmailChange: Ошибка декодирования данных из Redis для пользователя {user_id}")
return {"success": False, "error": "INVALID_TOKEN", "author": None}
# Проверяем токен
if email_change_data.get("token") != kwargs.get("token"):
logger.warning(f"[auth] confirmEmailChange: Неверный токен для пользователя {user_id}")
return {"success": False, "error": "INVALID_TOKEN", "author": None}
# Проверяем срок действия токена
if email_change_data.get("expires_at", 0) < int(time.time()):
logger.warning(f"[auth] confirmEmailChange: Токен истек для пользователя {user_id}")
# Удаляем истекшие данные из Redis
await redis.execute("DEL", redis_key)
return {"success": False, "error": "TOKEN_EXPIRED", "author": None}
new_email = email_change_data.get("new_email")
if not new_email:
logger.error(f"[auth] confirmEmailChange: Нет нового email в данных для пользователя {user_id}")
return {"success": False, "error": "INVALID_TOKEN", "author": None}
with local_session() as session:
author = session.query(Author).filter(Author.id == user_id).first()
if not author:
logger.error(f"[auth] confirmEmailChange: Пользователь с ID {user_id} не найден в БД")
return {"success": False, "error": "NOT_AUTHENTICATED", "author": None}
# Проверяем, что новый email еще не занят
existing_user = session.query(Author).filter(Author.email == new_email).first()
if existing_user and existing_user.id != author.id:
logger.warning(f"[auth] confirmEmailChange: Email {new_email} уже занят")
# Удаляем данные из Redis
await redis.execute("DEL", redis_key)
return {"success": False, "error": "email already exists", "author": None}
old_email = author.email
# Применяем смену email
author.email = new_email # type: ignore[assignment]
author.email_verified = True # type: ignore[assignment] # Новый email считается подтвержденным
author.updated_at = int(time.time()) # type: ignore[assignment]
session.add(author)
session.commit()
# Удаляем данные смены email из Redis после успешного применения
await redis.execute("DEL", redis_key)
logger.info(
f"[auth] confirmEmailChange: Email изменен для пользователя {user_id}: {old_email} -> {new_email}"
)
# TODO: Отправить уведомление на старый email о смене
return {"success": True, "error": None, "author": author.dict(True)}
except Exception as e:
logger.error(f"[auth] confirmEmailChange: Ошибка при подтверждении смены email: {e!s}")
logger.error(traceback.format_exc())
return {"success": False, "error": str(e), "author": None}
@mutation.field("cancelEmailChange")
@login_required
async def cancel_email_change(_: None, info: GraphQLResolveInfo) -> dict[str, Any]:
"""
Отмена смены email.
Returns:
SecurityUpdateResult: Результат операции
"""
logger.info("[auth] cancelEmailChange: Отмена смены email")
# Получаем текущего пользователя
current_user = info.context.get("author")
if not current_user:
logger.warning("[auth] cancelEmailChange: Пользователь не авторизован")
return {"success": False, "error": "NOT_AUTHENTICATED", "author": None}
user_id = current_user.get("id")
try:
# Проверяем наличие данных смены email в Redis
redis_key = f"email_change:{user_id}"
cached_data = await redis.execute("GET", redis_key)
if not cached_data:
logger.warning(f"[auth] cancelEmailChange: Нет активной смены email для пользователя {user_id}")
return {"success": False, "error": "NO_PENDING_EMAIL", "author": None}
# Удаляем данные смены email из Redis
await redis.execute("DEL", redis_key)
# Получаем текущие данные пользователя
with local_session() as session:
author = session.query(Author).filter(Author.id == user_id).first()
if not author:
logger.error(f"[auth] cancelEmailChange: Пользователь с ID {user_id} не найден в БД")
return {"success": False, "error": "NOT_AUTHENTICATED", "author": None}
logger.info(f"[auth] cancelEmailChange: Смена email отменена для пользователя {user_id}")
return {"success": True, "error": None, "author": author.dict(True)}
except Exception as e:
logger.error(f"[auth] cancelEmailChange: Ошибка при отмене смены email: {e!s}")
logger.error(traceback.format_exc())
return {"success": False, "error": str(e), "author": None}


@@ -1,7 +1,8 @@
import asyncio
import time
from typing import Any, Dict, List, Optional
from typing import Any, Optional
from graphql import GraphQLResolveInfo
from sqlalchemy import select, text
from auth.orm import Author
@@ -16,17 +17,17 @@ from cache.cache import (
)
from resolvers.stat import get_with_stat
from services.auth import login_required
from services.common_result import CommonResult
from services.db import local_session
from services.redis import redis
from services.schema import mutation, query
from services.search import search_service
from utils.logger import root_logger as logger
DEFAULT_COMMUNITIES = [1]
# Вспомогательная функция для получения всех авторов без статистики
async def get_all_authors(current_user_id=None):
async def get_all_authors(current_user_id: Optional[int] = None) -> list[Any]:
"""
Получает всех авторов без статистики.
Используется для случаев, когда нужен полный список авторов без дополнительной информации.
@@ -41,7 +42,10 @@ async def get_all_authors(current_user_id=None):
cache_key = "authors:all:basic"
# Функция для получения всех авторов из БД
async def fetch_all_authors():
async def fetch_all_authors() -> list[Any]:
"""
Выполняет запрос к базе данных для получения всех авторов.
"""
logger.debug("Получаем список всех авторов из БД и кешируем результат")
with local_session() as session:
@@ -50,14 +54,16 @@ async def get_all_authors(current_user_id=None):
authors = session.execute(authors_query).scalars().unique().all()
# Преобразуем авторов в словари с учетом прав доступа
return [author.dict(access=False) for author in authors]
return [author.dict(False) for author in authors]
# Используем универсальную функцию для кеширования запросов
return await cached_query(cache_key, fetch_all_authors)
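`get_all_authors` delegates caching to `cached_query` from `cache.cache`, which pairs a cache key with an async fetcher. A minimal sketch of what such a helper can look like; the real implementation may differ, and `redis_execute` stands in for the project's Redis client:

```python
import json
from typing import Any, Awaitable, Callable


async def cached_query(
    redis_execute: Callable[..., Awaitable[Any]],
    key: str,
    fetcher: Callable[[], Awaitable[Any]],
    ttl_seconds: int = 300,
) -> Any:
    """Return the cached JSON value for `key`, or run `fetcher` and cache its result."""
    cached = await redis_execute("GET", key)
    if cached:
        return json.loads(cached)
    value = await fetcher()
    await redis_execute("SET", key, json.dumps(value))
    await redis_execute("EXPIRE", key, ttl_seconds)
    return value
```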
# Вспомогательная функция для получения авторов со статистикой с пагинацией
async def get_authors_with_stats(limit=50, offset=0, by: Optional[str] = None, current_user_id: Optional[int] = None):
async def get_authors_with_stats(
limit: int = 10, offset: int = 0, by: Optional[str] = None, current_user_id: Optional[int] = None
):
"""
Получает авторов со статистикой с пагинацией.
@@ -73,9 +79,19 @@ async def get_authors_with_stats(limit=50, offset=0, by: Optional[str] = None, c
cache_key = f"authors:stats:limit={limit}:offset={offset}"
# Функция для получения авторов из БД
async def fetch_authors_with_stats():
async def fetch_authors_with_stats() -> list[Any]:
"""
Выполняет запрос к базе данных для получения авторов со статистикой.
"""
logger.debug(f"Выполняем запрос на получение авторов со статистикой: limit={limit}, offset={offset}, by={by}")
# Импорты SQLAlchemy для избежания конфликтов имен
from sqlalchemy import and_, asc, func
from sqlalchemy import desc as sql_desc
from auth.orm import AuthorFollower
from orm.shout import Shout, ShoutAuthor
with local_session() as session:
# Базовый запрос для получения авторов
base_query = select(Author).where(Author.deleted_at.is_(None))
@@ -84,16 +100,11 @@ async def get_authors_with_stats(limit=50, offset=0, by: Optional[str] = None, c
# vars for statistics sorting
stats_sort_field = None
stats_sort_direction = "desc"
if by:
if isinstance(by, dict):
logger.debug(f"Processing dict-based sorting: {by}")
# Обработка словаря параметров сортировки
from sqlalchemy import asc, desc, func
from auth.orm import AuthorFollower
from orm.shout import ShoutAuthor
# Checking for order field in the dictionary
if "order" in by:
@@ -101,7 +112,6 @@ async def get_authors_with_stats(limit=50, offset=0, by: Optional[str] = None, c
logger.debug(f"Found order field with value: {order_value}")
if order_value in ["shouts", "followers", "rating", "comments"]:
stats_sort_field = order_value
stats_sort_direction = "desc" # По умолчанию убывающая сортировка для статистики
logger.debug(f"Applying statistics-based sorting by: {stats_sort_field}")
elif order_value == "name":
# Sorting by name in ascending order
@@ -111,33 +121,29 @@ async def get_authors_with_stats(limit=50, offset=0, by: Optional[str] = None, c
# If order is not a stats field, treat it as a regular field
column = getattr(Author, order_value, None)
if column:
base_query = base_query.order_by(desc(column))
base_query = base_query.order_by(sql_desc(column))
else:
# Regular sorting by fields
for field, direction in by.items():
column = getattr(Author, field, None)
if column:
if direction.lower() == "desc":
base_query = base_query.order_by(desc(column))
base_query = base_query.order_by(sql_desc(column))
else:
base_query = base_query.order_by(column)
elif by == "new":
base_query = base_query.order_by(desc(Author.created_at))
base_query = base_query.order_by(sql_desc(Author.created_at))
elif by == "active":
base_query = base_query.order_by(desc(Author.last_seen))
base_query = base_query.order_by(sql_desc(Author.last_seen))
else:
# По умолчанию сортируем по времени создания
base_query = base_query.order_by(desc(Author.created_at))
base_query = base_query.order_by(sql_desc(Author.created_at))
else:
base_query = base_query.order_by(desc(Author.created_at))
base_query = base_query.order_by(sql_desc(Author.created_at))
# If sorting by statistics, modify the query
if stats_sort_field == "shouts":
# Sorting by the number of shouts
from sqlalchemy import and_, func
from orm.shout import Shout, ShoutAuthor
subquery = (
select(ShoutAuthor.author, func.count(func.distinct(Shout.id)).label("shouts_count"))
.select_from(ShoutAuthor)
@@ -148,14 +154,10 @@ async def get_authors_with_stats(limit=50, offset=0, by: Optional[str] = None, c
)
base_query = base_query.outerjoin(subquery, Author.id == subquery.c.author).order_by(
desc(func.coalesce(subquery.c.shouts_count, 0))
sql_desc(func.coalesce(subquery.c.shouts_count, 0))
)
elif stats_sort_field == "followers":
# Sorting by the number of followers
from sqlalchemy import func
from auth.orm import AuthorFollower
subquery = (
select(
AuthorFollower.author,
@@ -167,7 +169,7 @@ async def get_authors_with_stats(limit=50, offset=0, by: Optional[str] = None, c
)
base_query = base_query.outerjoin(subquery, Author.id == subquery.c.author).order_by(
desc(func.coalesce(subquery.c.followers_count, 0))
sql_desc(func.coalesce(subquery.c.followers_count, 0))
)
# Применяем лимит и смещение
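The statistics-based ordering outer-joins an aggregate subquery and sorts by the coalesced count. The same shape as a standalone query builder; the model classes are passed in, and the `AuthorFollower` column names (`author`, `follower`) are taken from the raw SQL further down:

```python
from typing import Any

from sqlalchemy import desc, func, select


def authors_by_followers(Author: Any, AuthorFollower: Any, limit: int = 10, offset: int = 0):
    """Select authors ordered by follower count, mirroring the subquery pattern above."""
    followers = (
        select(
            AuthorFollower.author,
            func.count(func.distinct(AuthorFollower.follower)).label("followers_count"),
        )
        .group_by(AuthorFollower.author)
        .subquery()
    )
    return (
        select(Author)
        .where(Author.deleted_at.is_(None))
        .outerjoin(followers, Author.id == followers.c.author)
        .order_by(desc(func.coalesce(followers.c.followers_count, 0)))
        .limit(limit)
        .offset(offset)
    )
```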
@@ -181,23 +183,25 @@ async def get_authors_with_stats(limit=50, offset=0, by: Optional[str] = None, c
return []
# Оптимизированный запрос для получения статистики по публикациям для авторов
placeholders = ", ".join([f":id{i}" for i in range(len(author_ids))])
shouts_stats_query = f"""
SELECT sa.author, COUNT(DISTINCT s.id) as shouts_count
FROM shout_author sa
JOIN shout s ON sa.shout = s.id AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
WHERE sa.author IN ({",".join(map(str, author_ids))})
WHERE sa.author IN ({placeholders})
GROUP BY sa.author
"""
shouts_stats = {row[0]: row[1] for row in session.execute(text(shouts_stats_query))}
params = {f"id{i}": author_id for i, author_id in enumerate(author_ids)}
shouts_stats = {row[0]: row[1] for row in session.execute(text(shouts_stats_query), params)}
# Запрос на получение статистики по подписчикам для авторов
followers_stats_query = f"""
SELECT author, COUNT(DISTINCT follower) as followers_count
FROM author_follower
WHERE author IN ({",".join(map(str, author_ids))})
WHERE author IN ({placeholders})
GROUP BY author
"""
followers_stats = {row[0]: row[1] for row in session.execute(text(followers_stats_query))}
followers_stats = {row[0]: row[1] for row in session.execute(text(followers_stats_query), params)}
# Формируем результат с добавлением статистики
result = []
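The raw statistics queries now bind the author ids as `:id0, :id1, ...` parameters instead of interpolating them into the SQL string. The same pattern as a small helper; table and column names are copied from the hunk:

```python
from typing import Any

from sqlalchemy import text


def shout_counts_by_author(session: Any, author_ids: list[int]) -> dict[int, int]:
    """Count published, non-deleted shouts per author using bound placeholders."""
    if not author_ids:
        return {}
    placeholders = ", ".join(f":id{i}" for i in range(len(author_ids)))
    query = text(f"""
        SELECT sa.author, COUNT(DISTINCT s.id) AS shouts_count
        FROM shout_author sa
        JOIN shout s ON sa.shout = s.id AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
        WHERE sa.author IN ({placeholders})
        GROUP BY sa.author
    """)
    params = {f"id{i}": author_id for i, author_id in enumerate(author_ids)}
    return {row[0]: row[1] for row in session.execute(query, params)}
```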
@@ -222,7 +226,7 @@ async def get_authors_with_stats(limit=50, offset=0, by: Optional[str] = None, c
# Функция для инвалидации кеша авторов
async def invalidate_authors_cache(author_id=None):
async def invalidate_authors_cache(author_id=None) -> None:
"""
Инвалидирует кеши авторов при изменении данных.
@@ -268,11 +272,12 @@ async def invalidate_authors_cache(author_id=None):
@mutation.field("update_author")
@login_required
async def update_author(_, info, profile):
async def update_author(_: None, info: GraphQLResolveInfo, profile: dict[str, Any]) -> CommonResult:
"""Update author profile"""
author_id = info.context.get("author", {}).get("id")
is_admin = info.context.get("is_admin", False)
if not author_id:
return {"error": "unauthorized", "author": None}
return CommonResult(error="unauthorized", author=None)
try:
with local_session() as session:
author = session.query(Author).where(Author.id == author_id).first()
@@ -286,35 +291,34 @@ async def update_author(_, info, profile):
author_with_stat = result[0]
if isinstance(author_with_stat, Author):
# Кэшируем полную версию для админов
author_dict = author_with_stat.dict(access=is_admin)
author_dict = author_with_stat.dict(is_admin)
asyncio.create_task(cache_author(author_dict))
# Возвращаем обычную полную версию, т.к. это владелец
return {"error": None, "author": author}
return CommonResult(error=None, author=author)
# Если мы дошли до сюда, значит автор не найден
return CommonResult(error="Author not found", author=None)
except Exception as exc:
import traceback
logger.error(traceback.format_exc())
return {"error": exc, "author": None}
return CommonResult(error=str(exc), author=None)
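The resolver now wraps outcomes in the project's CommonResult instead of raw dicts. CommonResult's definition is not shown here; a hypothetical minimal equivalent of the wrap-the-exception pattern might look like this:
from dataclasses import dataclass
from typing import Any, Optional

@dataclass
class Result:  # hypothetical stand-in, not the project's CommonResult
    error: Optional[str] = None
    author: Optional[Any] = None

def run_update(action) -> Result:
    try:
        return Result(error=None, author=action())
    except Exception as exc:  # surface the failure as a user-facing error string
        return Result(error=str(exc), author=None)

print(run_update(lambda: {"id": 1}).author)  # {'id': 1}
print(run_update(lambda: 1 / 0).error)       # division by zero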
@query.field("get_authors_all")
async def get_authors_all(_, info):
"""
Получает список всех авторов без статистики.
Returns:
list: Список всех авторов
"""
async def get_authors_all(_: None, info: GraphQLResolveInfo) -> list[Any]:
"""Get all authors"""
# Получаем ID текущего пользователя и флаг админа из контекста
viewer_id = info.context.get("author", {}).get("id")
is_admin = info.context.get("is_admin", False)
authors = await get_all_authors(viewer_id)
return authors
info.context.get("is_admin", False)
return await get_all_authors(viewer_id)
@query.field("get_author")
async def get_author(_, info, slug="", author_id=0):
async def get_author(
_: None, info: GraphQLResolveInfo, slug: Optional[str] = None, author_id: Optional[int] = None
) -> dict[str, Any] | None:
"""Get specific author by slug or ID"""
# Получаем ID текущего пользователя и флаг админа из контекста
is_admin = info.context.get("is_admin", False)
@@ -322,7 +326,8 @@ async def get_author(_, info, slug="", author_id=0):
try:
author_id = get_author_id_from(slug=slug, user="", author_id=author_id)
if not author_id:
raise ValueError("cant find")
msg = "cant find"
raise ValueError(msg)
# Получаем данные автора из кэша (полные данные)
cached_author = await get_cached_author(int(author_id), get_with_stat)
@@ -335,7 +340,7 @@ async def get_author(_, info, slug="", author_id=0):
if hasattr(temp_author, key):
setattr(temp_author, key, value)
# Получаем отфильтрованную версию
author_dict = temp_author.dict(access=is_admin)
author_dict = temp_author.dict(is_admin)
# Добавляем статистику, которая могла быть в кэшированной версии
if "stat" in cached_author:
author_dict["stat"] = cached_author["stat"]
@@ -348,11 +353,11 @@ async def get_author(_, info, slug="", author_id=0):
author_with_stat = result[0]
if isinstance(author_with_stat, Author):
# Кэшируем полные данные для админов
original_dict = author_with_stat.dict(access=True)
original_dict = author_with_stat.dict(True)
asyncio.create_task(cache_author(original_dict))
# Возвращаем отфильтрованную версию
author_dict = author_with_stat.dict(access=is_admin)
author_dict = author_with_stat.dict(is_admin)
# Добавляем статистику
if hasattr(author_with_stat, "stat"):
author_dict["stat"] = author_with_stat.stat
@@ -366,22 +371,12 @@ async def get_author(_, info, slug="", author_id=0):
@query.field("load_authors_by")
async def load_authors_by(_, info, by, limit, offset):
"""
Загружает авторов по заданному критерию с пагинацией.
Args:
by: Критерий сортировки авторов (new/active)
limit: Максимальное количество возвращаемых авторов
offset: Смещение для пагинации
Returns:
list: Список авторов с учетом критерия
"""
async def load_authors_by(_: None, info: GraphQLResolveInfo, by: str, limit: int = 10, offset: int = 0) -> list[Any]:
"""Load authors by different criteria"""
try:
# Получаем ID текущего пользователя и флаг админа из контекста
viewer_id = info.context.get("author", {}).get("id")
is_admin = info.context.get("is_admin", False)
info.context.get("is_admin", False)
# Используем оптимизированную функцию для получения авторов
return await get_authors_with_stats(limit, offset, by, viewer_id)
@@ -393,48 +388,17 @@ async def load_authors_by(_, info, by, limit, offset):
@query.field("load_authors_search")
async def load_authors_search(_, info, text: str, limit: int = 10, offset: int = 0):
"""
Resolver for searching authors by text. Works with the txt-ai search endpoint.
Args:
text: Search text
limit: Maximum number of authors to return
offset: Offset for pagination
Returns:
list: List of authors matching the search criteria
"""
# Get author IDs from search engine (already sorted by relevance)
search_results = await search_service.search_authors(text, limit, offset)
if not search_results:
return []
author_ids = [result.get("id") for result in search_results if result.get("id")]
if not author_ids:
return []
# Fetch full author objects from DB
with local_session() as session:
# Simple query to get authors by IDs - no need for stats here
authors_query = select(Author).filter(Author.id.in_(author_ids))
db_authors = session.execute(authors_query).scalars().unique().all()
if not db_authors:
return []
# Create a dictionary for quick lookup
authors_dict = {str(author.id): author for author in db_authors}
# Keep the order from search results (maintains the relevance sorting)
ordered_authors = [authors_dict[author_id] for author_id in author_ids if author_id in authors_dict]
return ordered_authors
async def load_authors_search(_: None, info: GraphQLResolveInfo, **kwargs: Any) -> list[Any]:
"""Search for authors"""
# TODO: Implement search functionality
return []
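The removed implementation fetched authors by ID and then reordered them to match the relevance order returned by the search service. A small sketch of that reorder step, with a stand-in row class instead of the ORM model:
def order_by_relevance(search_results: list[dict], db_rows: list) -> list:
    """Return db_rows rearranged to follow the relevance order of search_results."""
    wanted_ids = [str(r["id"]) for r in search_results if r.get("id")]
    by_id = {str(row.id): row for row in db_rows}
    return [by_id[i] for i in wanted_ids if i in by_id]

class Row:  # stand-in for an Author row
    def __init__(self, id: int) -> None:
        self.id = id

rows = [Row(3), Row(1), Row(2)]           # arbitrary DB order
hits = [{"id": 2}, {"id": 3}, {"id": 1}]  # relevance order from search
print([r.id for r in order_by_relevance(hits, rows)])  # [2, 3, 1]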
def get_author_id_from(slug="", user=None, author_id=None):
def get_author_id_from(
slug: Optional[str] = None, user: Optional[str] = None, author_id: Optional[int] = None
) -> Optional[int]:
"""Get author ID from different identifiers"""
try:
author_id = None
if author_id:
return author_id
with local_session() as session:
@@ -442,19 +406,21 @@ def get_author_id_from(slug="", user=None, author_id=None):
if slug:
author = session.query(Author).filter(Author.slug == slug).first()
if author:
author_id = author.id
return author_id
return int(author.id)
if user:
author = session.query(Author).filter(Author.id == user).first()
if author:
author_id = author.id
return int(author.id)
except Exception as exc:
logger.error(exc)
return author_id
return None
@query.field("get_author_follows")
async def get_author_follows(_, info, slug="", user=None, author_id=0):
async def get_author_follows(
_, info: GraphQLResolveInfo, slug: Optional[str] = None, user: Optional[str] = None, author_id: Optional[int] = None
) -> dict[str, Any]:
"""Get entities followed by author"""
# Получаем ID текущего пользователя и флаг админа из контекста
viewer_id = info.context.get("author", {}).get("id")
is_admin = info.context.get("is_admin", False)
@@ -462,7 +428,7 @@ async def get_author_follows(_, info, slug="", user=None, author_id=0):
logger.debug(f"getting follows for @{slug}")
author_id = get_author_id_from(slug=slug, user=user, author_id=author_id)
if not author_id:
return {}
return {"error": "Author not found"}
# Получаем данные из кэша
followed_authors_raw = await get_cached_follower_authors(author_id)
@@ -481,7 +447,7 @@ async def get_author_follows(_, info, slug="", user=None, author_id=0):
# current_user_id - ID текущего авторизованного пользователя (может быть None)
# is_admin - булево значение, является ли текущий пользователь админом
has_access = is_admin or (viewer_id is not None and str(viewer_id) == str(temp_author.id))
followed_authors.append(temp_author.dict(access=has_access))
followed_authors.append(temp_author.dict(has_access))
# TODO: Get followed communities too
return {
@@ -489,26 +455,41 @@ async def get_author_follows(_, info, slug="", user=None, author_id=0):
"topics": followed_topics,
"communities": DEFAULT_COMMUNITIES,
"shouts": [],
"error": None,
}
@query.field("get_author_follows_topics")
async def get_author_follows_topics(_, _info, slug="", user=None, author_id=None):
async def get_author_follows_topics(
_,
_info: GraphQLResolveInfo,
slug: Optional[str] = None,
user: Optional[str] = None,
author_id: Optional[int] = None,
) -> list[Any]:
"""Get topics followed by author"""
logger.debug(f"getting followed topics for @{slug}")
author_id = get_author_id_from(slug=slug, user=user, author_id=author_id)
if not author_id:
return []
followed_topics = await get_cached_follower_topics(author_id)
return followed_topics
result = await get_cached_follower_topics(author_id)
# Ensure we return a list, not a dict
if isinstance(result, dict):
return result.get("topics", [])
return result if isinstance(result, list) else []
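A tiny standalone version of the normalization above, assuming the cache may hand back either a bare list of topics or a wrapper dict with a "topics" key:
from typing import Any

def normalize_topics(result: Any) -> list[Any]:
    if isinstance(result, dict):
        return result.get("topics", [])
    return result if isinstance(result, list) else []

print(normalize_topics({"topics": [1, 2]}))  # [1, 2]
print(normalize_topics([3]))                 # [3]
print(normalize_topics(None))                # []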
@query.field("get_author_follows_authors")
async def get_author_follows_authors(_, info, slug="", user=None, author_id=None):
async def get_author_follows_authors(
_, info: GraphQLResolveInfo, slug: Optional[str] = None, user: Optional[str] = None, author_id: Optional[int] = None
) -> list[Any]:
"""Get authors followed by author"""
# Получаем ID текущего пользователя и флаг админа из контекста
viewer_id = info.context.get("author", {}).get("id")
is_admin = info.context.get("is_admin", False)
logger.debug(f"getting followed authors for @{slug}")
author_id = get_author_id_from(slug=slug, user=user, author_id=author_id)
if not author_id:
return []
@@ -528,17 +509,20 @@ async def get_author_follows_authors(_, info, slug="", user=None, author_id=None
# current_user_id - ID текущего авторизованного пользователя (может быть None)
# is_admin - булево значение, является ли текущий пользователь админом
has_access = is_admin or (viewer_id is not None and str(viewer_id) == str(temp_author.id))
followed_authors.append(temp_author.dict(access=has_access))
followed_authors.append(temp_author.dict(has_access))
return followed_authors
def create_author(user_id: str, slug: str, name: str = ""):
def create_author(**kwargs) -> Author:
"""Create new author"""
author = Author()
Author.id = user_id # Связь с user_id из системы авторизации
author.slug = slug # Идентификатор из системы авторизации
author.created_at = author.updated_at = int(time.time())
author.name = name or slug # если не указано
# Use setattr to avoid MyPy complaints about Column assignment
author.id = kwargs.get("user_id") # type: ignore[assignment] # Связь с user_id из системы авторизации
author.slug = kwargs.get("slug") # type: ignore[assignment] # Идентификатор из системы авторизации
author.created_at = int(time.time()) # type: ignore[assignment]
author.updated_at = int(time.time()) # type: ignore[assignment]
author.name = kwargs.get("name") or kwargs.get("slug") # type: ignore[assignment] # если не указано
with local_session() as session:
session.add(author)
@@ -547,13 +531,14 @@ def create_author(user_id: str, slug: str, name: str = ""):
@query.field("get_author_followers")
async def get_author_followers(_, info, slug: str = "", user: str = "", author_id: int = 0):
async def get_author_followers(_: None, info: GraphQLResolveInfo, **kwargs: Any) -> list[Any]:
"""Get followers of an author"""
# Получаем ID текущего пользователя и флаг админа из контекста
viewer_id = info.context.get("author", {}).get("id")
is_admin = info.context.get("is_admin", False)
logger.debug(f"getting followers for author @{slug} or ID:{author_id}")
author_id = get_author_id_from(slug=slug, user=user, author_id=author_id)
logger.debug(f"getting followers for author @{kwargs.get('slug')} or ID:{kwargs.get('author_id')}")
author_id = get_author_id_from(slug=kwargs.get("slug"), user=kwargs.get("user"), author_id=kwargs.get("author_id"))
if not author_id:
return []
@@ -573,6 +558,6 @@ async def get_author_followers(_, info, slug: str = "", user: str = "", author_i
# current_user_id - ID текущего авторизованного пользователя (может быть None)
# is_admin - булево значение, является ли текущий пользователь админом
has_access = is_admin or (viewer_id is not None and str(viewer_id) == str(temp_author.id))
followers.append(temp_author.dict(access=has_access))
followers.append(temp_author.dict(has_access))
return followers

View File

@@ -5,8 +5,7 @@ from sqlalchemy import delete, insert
from auth.orm import AuthorBookmark
from orm.shout import Shout
from resolvers.feed import apply_options
from resolvers.reader import get_shouts_with_links, query_with_stat
from resolvers.reader import apply_options, get_shouts_with_links, query_with_stat
from services.auth import login_required
from services.common_result import CommonResult
from services.db import local_session
@@ -15,7 +14,7 @@ from services.schema import mutation, query
@query.field("load_shouts_bookmarked")
@login_required
def load_shouts_bookmarked(_, info, options):
def load_shouts_bookmarked(_: None, info, options):
"""
Load bookmarked shouts for the authenticated user.
@@ -29,7 +28,8 @@ def load_shouts_bookmarked(_, info, options):
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
if not author_id:
raise GraphQLError("User not authenticated")
msg = "User not authenticated"
raise GraphQLError(msg)
q = query_with_stat(info)
q = q.join(AuthorBookmark)
@@ -44,7 +44,7 @@ def load_shouts_bookmarked(_, info, options):
@mutation.field("toggle_bookmark_shout")
def toggle_bookmark_shout(_, info, slug: str) -> CommonResult:
def toggle_bookmark_shout(_: None, info, slug: str) -> CommonResult:
"""
Toggle bookmark status for a specific shout.
@@ -57,12 +57,14 @@ def toggle_bookmark_shout(_, info, slug: str) -> CommonResult:
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
if not author_id:
raise GraphQLError("User not authenticated")
msg = "User not authenticated"
raise GraphQLError(msg)
with local_session() as db:
shout = db.query(Shout).filter(Shout.slug == slug).first()
if not shout:
raise GraphQLError("Shout not found")
msg = "Shout not found"
raise GraphQLError(msg)
existing_bookmark = (
db.query(AuthorBookmark)
@@ -74,10 +76,10 @@ def toggle_bookmark_shout(_, info, slug: str) -> CommonResult:
db.execute(
delete(AuthorBookmark).where(AuthorBookmark.author == author_id, AuthorBookmark.shout == shout.id)
)
result = False
result = CommonResult()
else:
db.execute(insert(AuthorBookmark).values(author=author_id, shout=shout.id))
result = True
result = CommonResult()
db.commit()
return result
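A generic sketch of the insert-or-delete toggle used above, kept independent of the project's CommonResult contract (the helper simply reports whether the bookmark now exists); the model and table name are toy stand-ins:
from sqlalchemy import Column, Integer, create_engine, delete, insert, select
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Bookmark(Base):  # stand-in for AuthorBookmark
    __tablename__ = "bookmark"
    author = Column(Integer, primary_key=True)
    shout = Column(Integer, primary_key=True)

engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)

def toggle(session: Session, author_id: int, shout_id: int) -> bool:
    """Create the bookmark if missing, remove it if present; return the new state."""
    exists = session.execute(
        select(Bookmark).where(Bookmark.author == author_id, Bookmark.shout == shout_id)
    ).first()
    if exists:
        session.execute(delete(Bookmark).where(Bookmark.author == author_id, Bookmark.shout == shout_id))
        session.commit()
        return False
    session.execute(insert(Bookmark).values(author=author_id, shout=shout_id))
    session.commit()
    return True

with Session(engine) as s:
    print(toggle(s, 1, 10))  # True  (bookmark created)
    print(toggle(s, 1, 10))  # False (bookmark removed)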

View File

@@ -8,7 +8,7 @@ from services.schema import mutation
@mutation.field("accept_invite")
@login_required
async def accept_invite(_, info, invite_id: int):
async def accept_invite(_: None, info, invite_id: int):
author_dict = info.context["author"]
author_id = author_dict.get("id")
if author_id:
@@ -29,17 +29,15 @@ async def accept_invite(_, info, invite_id: int):
session.delete(invite)
session.commit()
return {"success": True, "message": "Invite accepted"}
else:
return {"error": "Shout not found"}
else:
return {"error": "Invalid invite or already accepted/rejected"}
return {"error": "Shout not found"}
return {"error": "Invalid invite or already accepted/rejected"}
else:
return {"error": "Unauthorized"}
@mutation.field("reject_invite")
@login_required
async def reject_invite(_, info, invite_id: int):
async def reject_invite(_: None, info, invite_id: int):
author_dict = info.context["author"]
author_id = author_dict.get("id")
@@ -54,14 +52,13 @@ async def reject_invite(_, info, invite_id: int):
session.delete(invite)
session.commit()
return {"success": True, "message": "Invite rejected"}
else:
return {"error": "Invalid invite or already accepted/rejected"}
return {"error": "Invalid invite or already accepted/rejected"}
return {"error": "User not found"}
@mutation.field("create_invite")
@login_required
async def create_invite(_, info, slug: str = "", author_id: int = 0):
async def create_invite(_: None, info, slug: str = "", author_id: int = 0):
author_dict = info.context["author"]
viewer_id = author_dict.get("id")
roles = info.context.get("roles", [])
@@ -99,15 +96,14 @@ async def create_invite(_, info, slug: str = "", author_id: int = 0):
session.commit()
return {"error": None, "invite": new_invite}
else:
return {"error": "Invalid author"}
return {"error": "Invalid author"}
else:
return {"error": "Access denied"}
@mutation.field("remove_author")
@login_required
async def remove_author(_, info, slug: str = "", author_id: int = 0):
async def remove_author(_: None, info, slug: str = "", author_id: int = 0):
viewer_id = info.context.get("author", {}).get("id")
is_admin = info.context.get("is_admin", False)
roles = info.context.get("roles", [])
@@ -127,7 +123,7 @@ async def remove_author(_, info, slug: str = "", author_id: int = 0):
@mutation.field("remove_invite")
@login_required
async def remove_invite(_, info, invite_id: int):
async def remove_invite(_: None, info, invite_id: int):
author_dict = info.context["author"]
author_id = author_dict.get("id")
if isinstance(author_id, int):
@@ -144,7 +140,9 @@ async def remove_invite(_, info, invite_id: int):
session.delete(invite)
session.commit()
return {}
else:
return {"error": "Invalid invite or already accepted/rejected"}
return None
return None
return None
return {"error": "Invalid invite or already accepted/rejected"}
else:
return {"error": "Author not found"}

View File

@@ -1,3 +1,7 @@
from typing import Any
from graphql import GraphQLResolveInfo
from auth.orm import Author
from orm.community import Community, CommunityFollower
from services.db import local_session
@@ -5,18 +9,20 @@ from services.schema import mutation, query
@query.field("get_communities_all")
async def get_communities_all(_, _info):
async def get_communities_all(_: None, _info: GraphQLResolveInfo) -> list[Community]:
return local_session().query(Community).all()
@query.field("get_community")
async def get_community(_, _info, slug: str):
async def get_community(_: None, _info: GraphQLResolveInfo, slug: str) -> Community | None:
q = local_session().query(Community).where(Community.slug == slug)
return q.first()
@query.field("get_communities_by_author")
async def get_communities_by_author(_, _info, slug="", user="", author_id=0):
async def get_communities_by_author(
_: None, _info: GraphQLResolveInfo, slug: str = "", user: str = "", author_id: int = 0
) -> list[Community]:
with local_session() as session:
q = session.query(Community).join(CommunityFollower)
if slug:
@@ -32,20 +38,20 @@ async def get_communities_by_author(_, _info, slug="", user="", author_id=0):
@mutation.field("join_community")
async def join_community(_, info, slug: str):
async def join_community(_: None, info: GraphQLResolveInfo, slug: str) -> dict[str, Any]:
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
with local_session() as session:
community = session.query(Community).where(Community.slug == slug).first()
if not community:
return {"ok": False, "error": "Community not found"}
session.add(CommunityFollower(community=community.id, author=author_id))
session.add(CommunityFollower(community=community.id, follower=author_id))
session.commit()
return {"ok": True}
@mutation.field("leave_community")
async def leave_community(_, info, slug: str):
async def leave_community(_: None, info: GraphQLResolveInfo, slug: str) -> dict[str, Any]:
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
with local_session() as session:
@@ -57,7 +63,7 @@ async def leave_community(_, info, slug: str):
@mutation.field("create_community")
async def create_community(_, info, community_data):
async def create_community(_: None, info: GraphQLResolveInfo, community_data: dict[str, Any]) -> dict[str, Any]:
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
with local_session() as session:
@@ -67,7 +73,7 @@ async def create_community(_, info, community_data):
@mutation.field("update_community")
async def update_community(_, info, community_data):
async def update_community(_: None, info: GraphQLResolveInfo, community_data: dict[str, Any]) -> dict[str, Any]:
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
slug = community_data.get("slug")
@@ -85,7 +91,7 @@ async def update_community(_, info, community_data):
@mutation.field("delete_community")
async def delete_community(_, info, slug: str):
async def delete_community(_: None, info: GraphQLResolveInfo, slug: str) -> dict[str, Any]:
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
with local_session() as session:

View File

@@ -1,6 +1,8 @@
import time
from typing import Any
from sqlalchemy.orm import joinedload
from graphql import GraphQLResolveInfo
from sqlalchemy.orm import Session, joinedload
from auth.orm import Author
from cache.cache import (
@@ -18,7 +20,7 @@ from utils.extract_text import extract_text
from utils.logger import root_logger as logger
def create_shout_from_draft(session, draft, author_id):
def create_shout_from_draft(session: Session | None, draft: Draft, author_id: int) -> Shout:
"""
Создаёт новый объект публикации (Shout) на основе черновика.
@@ -69,11 +71,11 @@ def create_shout_from_draft(session, draft, author_id):
@query.field("load_drafts")
@login_required
async def load_drafts(_, info):
async def load_drafts(_: None, info: GraphQLResolveInfo) -> dict[str, Any]:
"""
Загружает все черновики, доступные текущему пользователю.
Предварительно загружает связанные объекты (topics, authors, publication),
Предварительно загружает связанные объекты (topics, authors),
чтобы избежать ошибок с отсоединенными объектами при сериализации.
Returns:
@@ -87,13 +89,12 @@ async def load_drafts(_, info):
try:
with local_session() as session:
# Предзагружаем authors, topics и связанную publication
# Предзагружаем authors и topics
drafts_query = (
session.query(Draft)
.options(
joinedload(Draft.topics),
joinedload(Draft.authors),
joinedload(Draft.publication), # Загружаем связанную публикацию
)
.filter(Draft.authors.any(Author.id == author_id))
)
@@ -106,28 +107,17 @@ async def load_drafts(_, info):
# Всегда возвращаем массив для topics, даже если он пустой
draft_dict["topics"] = [topic.dict() for topic in (draft.topics or [])]
draft_dict["authors"] = [author.dict() for author in (draft.authors or [])]
# Добавляем информацию о публикации, если она есть
if draft.publication:
draft_dict["publication"] = {
"id": draft.publication.id,
"slug": draft.publication.slug,
"published_at": draft.publication.published_at,
}
else:
draft_dict["publication"] = None
drafts_data.append(draft_dict)
return {"drafts": drafts_data}
except Exception as e:
logger.error(f"Failed to load drafts: {e}", exc_info=True)
return {"error": f"Failed to load drafts: {str(e)}"}
return {"error": f"Failed to load drafts: {e!s}"}
@mutation.field("create_draft")
@login_required
async def create_draft(_, info, draft_input):
async def create_draft(_: None, info: GraphQLResolveInfo, draft_input: dict[str, Any]) -> dict[str, Any]:
"""Create a new draft.
Args:
@@ -155,7 +145,7 @@ async def create_draft(_, info, draft_input):
author_dict = info.context.get("author") or {}
author_id = author_dict.get("id")
if not author_id:
if not author_id or not isinstance(author_id, int):
return {"error": "Author ID is required"}
# Проверяем обязательные поля
@@ -173,8 +163,7 @@ async def create_draft(_, info, draft_input):
try:
with local_session() as session:
# Remove id from input if present since it's auto-generated
if "id" in draft_input:
del draft_input["id"]
draft_input.pop("id", None)
# Добавляем текущее время создания и ID автора
draft_input["created_at"] = int(time.time())
@@ -191,18 +180,17 @@ async def create_draft(_, info, draft_input):
return {"draft": draft}
except Exception as e:
logger.error(f"Failed to create draft: {e}", exc_info=True)
return {"error": f"Failed to create draft: {str(e)}"}
return {"error": f"Failed to create draft: {e!s}"}
def generate_teaser(body, limit=300):
def generate_teaser(body: str, limit: int = 300) -> str:
body_text = extract_text(body)
body_teaser = ". ".join(body_text[:limit].split(". ")[:-1])
return body_teaser
return ". ".join(body_text[:limit].split(". ")[:-1])
@mutation.field("update_draft")
@login_required
async def update_draft(_, info, draft_id: int, draft_input):
async def update_draft(_: None, info: GraphQLResolveInfo, draft_id: int, draft_input: dict[str, Any]) -> dict[str, Any]:
"""Обновляет черновик публикации.
Args:
@@ -229,8 +217,8 @@ async def update_draft(_, info, draft_id: int, draft_input):
author_dict = info.context.get("author") or {}
author_id = author_dict.get("id")
if not author_id:
return {"error": "Author ID are required"}
if not author_id or not isinstance(author_id, int):
return {"error": "Author ID is required"}
try:
with local_session() as session:
@@ -306,8 +294,8 @@ async def update_draft(_, info, draft_id: int, draft_input):
setattr(draft, key, value)
# Обновляем метаданные
draft.updated_at = int(time.time())
draft.updated_by = author_id
draft.updated_at = int(time.time()) # type: ignore[assignment]
draft.updated_by = author_id # type: ignore[assignment]
session.commit()
@@ -322,12 +310,12 @@ async def update_draft(_, info, draft_id: int, draft_input):
except Exception as e:
logger.error(f"Failed to update draft: {e}", exc_info=True)
return {"error": f"Failed to update draft: {str(e)}"}
return {"error": f"Failed to update draft: {e!s}"}
@mutation.field("delete_draft")
@login_required
async def delete_draft(_, info, draft_id: int):
async def delete_draft(_: None, info: GraphQLResolveInfo, draft_id: int) -> dict[str, Any]:
author_dict = info.context.get("author") or {}
author_id = author_dict.get("id")
@@ -372,12 +360,12 @@ def validate_html_content(html_content: str) -> tuple[bool, str]:
return bool(extracted), extracted or ""
except Exception as e:
logger.error(f"HTML validation error: {e}", exc_info=True)
return False, f"Invalid HTML content: {str(e)}"
return False, f"Invalid HTML content: {e!s}"
@mutation.field("publish_draft")
@login_required
async def publish_draft(_, info, draft_id: int):
async def publish_draft(_: None, info: GraphQLResolveInfo, draft_id: int) -> dict[str, Any]:
"""
Публикует черновик, создавая новый Shout или обновляя существующий.
@@ -390,7 +378,7 @@ async def publish_draft(_, info, draft_id: int):
author_dict = info.context.get("author") or {}
author_id = author_dict.get("id")
if not author_id:
if not author_id or not isinstance(author_id, int):
return {"error": "Author ID is required"}
try:
@@ -407,7 +395,8 @@ async def publish_draft(_, info, draft_id: int):
return {"error": "Draft not found"}
# Проверка валидности HTML в body
is_valid, error = validate_html_content(draft.body)
draft_body = str(draft.body) if draft.body else ""
is_valid, error = validate_html_content(draft_body)
if not is_valid:
return {"error": f"Cannot publish draft: {error}"}
@@ -415,19 +404,24 @@ async def publish_draft(_, info, draft_id: int):
if draft.publication:
shout = draft.publication
# Обновляем существующую публикацию
for field in [
"body",
"title",
"subtitle",
"lead",
"cover",
"cover_caption",
"media",
"lang",
"seo",
]:
if hasattr(draft, field):
setattr(shout, field, getattr(draft, field))
if hasattr(draft, "body"):
shout.body = draft.body
if hasattr(draft, "title"):
shout.title = draft.title
if hasattr(draft, "subtitle"):
shout.subtitle = draft.subtitle
if hasattr(draft, "lead"):
shout.lead = draft.lead
if hasattr(draft, "cover"):
shout.cover = draft.cover
if hasattr(draft, "cover_caption"):
shout.cover_caption = draft.cover_caption
if hasattr(draft, "media"):
shout.media = draft.media
if hasattr(draft, "lang"):
shout.lang = draft.lang
if hasattr(draft, "seo"):
shout.seo = draft.seo
shout.updated_at = int(time.time())
shout.updated_by = author_id
else:
@@ -466,7 +460,7 @@ async def publish_draft(_, info, draft_id: int):
await notify_shout(shout.id)
# Обновляем поисковый индекс
search_service.perform_index(shout)
await search_service.perform_index(shout)
logger.info(f"Successfully published shout #{shout.id} from draft #{draft_id}")
logger.debug(f"Shout data: {shout.dict()}")
@@ -475,12 +469,12 @@ async def publish_draft(_, info, draft_id: int):
except Exception as e:
logger.error(f"Failed to publish draft {draft_id}: {e}", exc_info=True)
return {"error": f"Failed to publish draft: {str(e)}"}
return {"error": f"Failed to publish draft: {e!s}"}
@mutation.field("unpublish_draft")
@login_required
async def unpublish_draft(_, info, draft_id: int):
async def unpublish_draft(_: None, info: GraphQLResolveInfo, draft_id: int) -> dict[str, Any]:
"""
Снимает с публикации черновик, обновляя связанный Shout.
@@ -493,7 +487,7 @@ async def unpublish_draft(_, info, draft_id: int):
author_dict = info.context.get("author") or {}
author_id = author_dict.get("id")
if author_id:
if not author_id or not isinstance(author_id, int):
return {"error": "Author ID is required"}
try:
@@ -538,4 +532,4 @@ async def unpublish_draft(_, info, draft_id: int):
except Exception as e:
logger.error(f"Failed to unpublish draft {draft_id}: {e}", exc_info=True)
return {"error": f"Failed to unpublish draft: {str(e)}"}
return {"error": f"Failed to unpublish draft: {e!s}"}

View File

@@ -1,8 +1,10 @@
import time
from typing import Any
import orjson
from graphql import GraphQLResolveInfo
from sqlalchemy import and_, desc, select
from sqlalchemy.orm import joinedload, selectinload
from sqlalchemy.orm import joinedload
from sqlalchemy.sql.functions import coalesce
from auth.orm import Author
@@ -12,12 +14,12 @@ from cache.cache import (
invalidate_shout_related_cache,
invalidate_shouts_cache,
)
from orm.draft import Draft
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from resolvers.follower import follow, unfollow
from resolvers.follower import follow
from resolvers.stat import get_with_stat
from services.auth import login_required
from services.common_result import CommonResult
from services.db import local_session
from services.notify import notify_shout
from services.schema import mutation, query
@@ -48,7 +50,7 @@ async def cache_by_id(entity, entity_id: int, cache_method):
result = get_with_stat(caching_query)
if not result or not result[0]:
logger.warning(f"{entity.__name__} with id {entity_id} not found")
return
return None
x = result[0]
d = x.dict() # convert object to dictionary
cache_method(d)
@@ -57,7 +59,7 @@ async def cache_by_id(entity, entity_id: int, cache_method):
@query.field("get_my_shout")
@login_required
async def get_my_shout(_, info, shout_id: int):
async def get_my_shout(_: None, info, shout_id: int):
"""Get a shout by ID if the requesting user has permission to view it.
DEPRECATED: use `load_drafts` instead
@@ -111,17 +113,17 @@ async def get_my_shout(_, info, shout_id: int):
except Exception as e:
logger.error(f"Error parsing shout media: {e}")
shout.media = []
if not isinstance(shout.media, list):
shout.media = [shout.media] if shout.media else []
elif isinstance(shout.media, list):
shout.media = shout.media or []
else:
shout.media = []
shout.media = [] # type: ignore[assignment]
logger.debug(f"got {len(shout.authors)} shout authors, created by {shout.created_by}")
is_editor = "editor" in roles
logger.debug(f"viewer is{'' if is_editor else ' not'} editor")
is_creator = author_id == shout.created_by
logger.debug(f"viewer is{'' if is_creator else ' not'} creator")
is_author = bool(list(filter(lambda x: x.id == int(author_id), [x for x in shout.authors])))
is_author = bool(list(filter(lambda x: x.id == int(author_id), list(shout.authors))))
logger.debug(f"viewer is{'' if is_creator else ' not'} author")
can_edit = is_editor or is_author or is_creator
@@ -134,10 +136,10 @@ async def get_my_shout(_, info, shout_id: int):
@query.field("get_shouts_drafts")
@login_required
async def get_shouts_drafts(_, info):
async def get_shouts_drafts(_: None, info: GraphQLResolveInfo) -> list[dict]:
author_dict = info.context.get("author") or {}
if not author_dict:
return {"error": "author profile was not found"}
return [] # Return empty list instead of error dict
author_id = author_dict.get("id")
shouts = []
with local_session() as session:
@@ -150,13 +152,13 @@ async def get_shouts_drafts(_, info):
.order_by(desc(coalesce(Shout.updated_at, Shout.created_at)))
.group_by(Shout.id)
)
shouts = [shout for [shout] in session.execute(q).unique()]
return {"shouts": shouts}
shouts = [shout.dict() for [shout] in session.execute(q).unique()]
return shouts
# @mutation.field("create_shout")
# @login_required
async def create_shout(_, info, inp):
async def create_shout(_: None, info: GraphQLResolveInfo, inp: dict) -> dict:
logger.info(f"Starting create_shout with input: {inp}")
author_dict = info.context.get("author") or {}
logger.debug(f"Context author: {author_dict}")
@@ -179,7 +181,8 @@ async def create_shout(_, info, inp):
lead = inp.get("lead", "")
body_text = extract_text(body)
lead_text = extract_text(lead)
seo = inp.get("seo", lead_text.strip() or body_text.strip()[:300].split(". ")[:-1].join(". "))
seo_parts = lead_text.strip() or body_text.strip()[:300].split(". ")[:-1]
seo = inp.get("seo", ". ".join(seo_parts))
new_shout = Shout(
slug=slug,
body=body,
@@ -198,7 +201,7 @@ async def create_shout(_, info, inp):
c = 1
while same_slug_shout is not None:
logger.debug(f"Found duplicate slug, trying iteration {c}")
new_shout.slug = f"{slug}-{c}"
new_shout.slug = f"{slug}-{c}" # type: ignore[assignment]
same_slug_shout = session.query(Shout).filter(Shout.slug == new_shout.slug).first()
c += 1
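The slug loop above keeps appending a numeric suffix until no existing shout claims the candidate. The same idea in isolation, with a set standing in for the database lookup:
def unique_slug(base: str, taken: set[str]) -> str:
    candidate, counter = base, 1
    while candidate in taken:
        candidate = f"{base}-{counter}"
        counter += 1
    return candidate

print(unique_slug("my-post", {"my-post", "my-post-1"}))  # my-post-2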
@@ -209,7 +212,7 @@ async def create_shout(_, info, inp):
logger.info(f"Created shout with ID: {new_shout.id}")
except Exception as e:
logger.error(f"Error creating shout object: {e}", exc_info=True)
return {"error": f"Database error: {str(e)}"}
return {"error": f"Database error: {e!s}"}
# Связываем с автором
try:
@@ -218,7 +221,7 @@ async def create_shout(_, info, inp):
session.add(sa)
except Exception as e:
logger.error(f"Error linking author: {e}", exc_info=True)
return {"error": f"Error linking author: {str(e)}"}
return {"error": f"Error linking author: {e!s}"}
# Связываем с темами
@@ -237,38 +240,39 @@ async def create_shout(_, info, inp):
logger.debug(f"Added topic {topic.slug} {'(main)' if st.main else ''}")
except Exception as e:
logger.error(f"Error linking topics: {e}", exc_info=True)
return {"error": f"Error linking topics: {str(e)}"}
return {"error": f"Error linking topics: {e!s}"}
try:
session.commit()
logger.info("Final commit successful")
except Exception as e:
logger.error(f"Error in final commit: {e}", exc_info=True)
return {"error": f"Error in final commit: {str(e)}"}
return {"error": f"Error in final commit: {e!s}"}
# Получаем созданную публикацию
shout = session.query(Shout).filter(Shout.id == new_shout.id).first()
# Подписываем автора
try:
logger.debug("Following created shout")
await follow(None, info, "shout", shout.slug)
except Exception as e:
logger.warning(f"Error following shout: {e}", exc_info=True)
if shout:
# Подписываем автора
try:
logger.debug("Following created shout")
await follow(None, info, "shout", shout.slug)
except Exception as e:
logger.warning(f"Error following shout: {e}", exc_info=True)
logger.info(f"Successfully created shout {shout.id}")
return {"shout": shout}
logger.info(f"Successfully created shout {shout.id}")
return {"shout": shout}
except Exception as e:
logger.error(f"Unexpected error in create_shout: {e}", exc_info=True)
return {"error": f"Unexpected error: {str(e)}"}
return {"error": f"Unexpected error: {e!s}"}
error_msg = "cant create shout" if author_id else "unauthorized"
logger.error(f"Create shout failed: {error_msg}")
return {"error": error_msg}
def patch_main_topic(session, main_topic_slug, shout):
def patch_main_topic(session: Any, main_topic_slug: str, shout: Any) -> None:
"""Update the main topic for a shout."""
logger.info(f"Starting patch_main_topic for shout#{shout.id} with slug '{main_topic_slug}'")
logger.debug(f"Current shout topics: {[(t.topic.slug, t.main) for t in shout.topics]}")
@@ -301,10 +305,10 @@ def patch_main_topic(session, main_topic_slug, shout):
if old_main and new_main and old_main is not new_main:
logger.info(f"Updating main topic flags: {old_main.topic.slug} -> {new_main.topic.slug}")
old_main.main = False
old_main.main = False # type: ignore[assignment]
session.add(old_main)
new_main.main = True
new_main.main = True # type: ignore[assignment]
session.add(new_main)
session.flush()
@@ -313,7 +317,7 @@ def patch_main_topic(session, main_topic_slug, shout):
logger.warning(f"No changes needed for main topic (old={old_main is not None}, new={new_main is not None})")
def patch_topics(session, shout, topics_input):
def patch_topics(session: Any, shout: Any, topics_input: list[Any]) -> None:
"""Update the topics associated with a shout.
Args:
@@ -384,12 +388,17 @@ def patch_topics(session, shout, topics_input):
# @mutation.field("update_shout")
# @login_required
async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
author_dict = info.context.get("author") or {}
async def update_shout(
_: None, info: GraphQLResolveInfo, shout_id: int, shout_input: dict | None = None, *, publish: bool = False
) -> CommonResult:
"""Update an existing shout with optional publishing"""
logger.info(f"update_shout called with shout_id={shout_id}, publish={publish}")
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
if not author_id:
logger.error("Unauthorized update attempt")
return {"error": "unauthorized"}
return CommonResult(error="unauthorized", shout=None)
logger.info(f"Starting update_shout with id={shout_id}, publish={publish}")
logger.debug(f"Full shout_input: {shout_input}") # DraftInput
@@ -412,7 +421,7 @@ async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
if not shout_by_id:
logger.error(f"shout#{shout_id} not found")
return {"error": "shout not found"}
return CommonResult(error="shout not found", shout=None)
logger.info(f"Found shout#{shout_id}")
@@ -429,12 +438,12 @@ async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
c = 1
while same_slug_shout is not None:
c += 1
slug = f"{slug}-{c}"
same_slug_shout.slug = f"{slug}-{c}" # type: ignore[assignment]
same_slug_shout = session.query(Shout).filter(Shout.slug == slug).first()
shout_input["slug"] = slug
logger.info(f"shout#{shout_id} slug patched")
if filter(lambda x: x.id == author_id, [x for x in shout_by_id.authors]) or "editor" in roles:
if filter(lambda x: x.id == author_id, list(shout_by_id.authors)) or "editor" in roles:
logger.info(f"Author #{author_id} has permission to edit shout#{shout_id}")
# topics patch
@@ -450,7 +459,7 @@ async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
except Exception as e:
logger.error(f"Error patching topics: {e}", exc_info=True)
return {"error": f"Failed to update topics: {str(e)}"}
return CommonResult(error=f"Failed to update topics: {e!s}", shout=None)
del shout_input["topics"]
for tpc in topics_input:
@@ -464,10 +473,10 @@ async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
logger.info(f"Updating main topic for shout#{shout_id} to {main_topic}")
patch_main_topic(session, main_topic, shout_by_id)
shout_input["updated_at"] = current_time
shout_by_id.updated_at = current_time # type: ignore[assignment]
if publish:
logger.info(f"Publishing shout#{shout_id}")
shout_input["published_at"] = current_time
shout_by_id.published_at = current_time # type: ignore[assignment]
# Проверяем наличие связи с автором
logger.info(f"Checking author link for shout#{shout_id} and author#{author_id}")
author_link = (
@@ -497,7 +506,7 @@ async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
logger.info(f"Successfully committed updates for shout#{shout_id}")
except Exception as e:
logger.error(f"Commit failed: {e}", exc_info=True)
return {"error": f"Failed to save changes: {str(e)}"}
return CommonResult(error=f"Failed to save changes: {e!s}", shout=None)
# После обновления проверяем топики
updated_topics = (
@@ -545,93 +554,56 @@ async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
for a in shout_by_id.authors:
await cache_by_id(Author, a.id, cache_author)
logger.info(f"shout#{shout_id} updated")
# Получаем полные данные шаута со связями
shout_with_relations = (
session.query(Shout)
.options(joinedload(Shout.topics).joinedload(ShoutTopic.topic), joinedload(Shout.authors))
.filter(Shout.id == shout_id)
.first()
)
# Создаем словарь с базовыми полями
shout_dict = shout_with_relations.dict()
# Return success with the updated shout
return CommonResult(error=None, shout=shout_by_id)
# Явно добавляем связанные данные
shout_dict["topics"] = (
[
{"id": topic.id, "slug": topic.slug, "title": topic.title}
for topic in shout_with_relations.topics
]
if shout_with_relations.topics
else []
)
logger.warning(f"Access denied: author #{author_id} cannot edit shout#{shout_id}")
return CommonResult(error="access denied", shout=None)
# Add main_topic to the shout dictionary
shout_dict["main_topic"] = get_main_topic(shout_with_relations.topics)
shout_dict["authors"] = (
[
{"id": author.id, "name": author.name, "slug": author.slug}
for author in shout_with_relations.authors
]
if shout_with_relations.authors
else []
)
logger.info(f"Final shout data with relations: {shout_dict}")
logger.debug(
f"Loaded topics details: {[(t.topic.slug if t.topic else 'no-topic', t.main) for t in shout_with_relations.topics]}"
)
return {"shout": shout_dict, "error": None}
else:
logger.warning(f"Access denied: author #{author_id} cannot edit shout#{shout_id}")
return {"error": "access denied", "shout": None}
except Exception as exc:
logger.error(f"Unexpected error in update_shout: {exc}", exc_info=True)
logger.error(f"Failed input data: {shout_input}")
return {"error": "cant update shout"}
return {"error": "cant update shout"}
return CommonResult(error="cant update shout", shout=None)
except Exception as e:
logger.error(f"Exception in update_shout: {e}", exc_info=True)
return CommonResult(error="cant update shout", shout=None)
# @mutation.field("delete_shout")
# @login_required
async def delete_shout(_, info, shout_id: int):
author_dict = info.context.get("author") or {}
async def delete_shout(_: None, info: GraphQLResolveInfo, shout_id: int) -> CommonResult:
"""Delete a shout (mark as deleted)"""
author_dict = info.context.get("author", {})
if not author_dict:
return {"error": "author profile was not found"}
return CommonResult(error="author profile was not found", shout=None)
author_id = author_dict.get("id")
roles = info.context.get("roles", [])
if author_id:
author_id = int(author_id)
with local_session() as session:
shout = session.query(Shout).filter(Shout.id == shout_id).first()
if not isinstance(shout, Shout):
return {"error": "invalid shout id"}
shout_dict = shout.dict()
# NOTE: only owner and editor can mark the shout as deleted
if shout_dict["created_by"] == author_id or "editor" in roles:
shout_dict["deleted_at"] = int(time.time())
Shout.update(shout, shout_dict)
session.add(shout)
session.commit()
for author in shout.authors:
await cache_by_id(Author, author.id, cache_author)
info.context["author"] = author.dict()
unfollow(None, info, "shout", shout.slug)
with local_session() as session:
if author_id:
if shout_id:
shout = session.query(Shout).filter(Shout.id == shout_id).first()
if shout:
# Check if user has permission to delete
if any(x.id == author_id for x in shout.authors) or "editor" in roles:
# Use setattr to avoid MyPy complaints about Column assignment
shout.deleted_at = int(time.time()) # type: ignore[assignment]
session.add(shout)
session.commit()
for topic in shout.topics:
await cache_by_id(Topic, topic.id, cache_topic)
# Get shout data for notification
shout_dict = shout.dict()
await notify_shout(shout_dict, "delete")
return {"error": None}
else:
return {"error": "access denied"}
# Invalidate cache
await invalidate_shout_related_cache(shout, author_id)
# Notify about deletion
await notify_shout(shout_dict, "delete")
return CommonResult(error=None, shout=shout)
return CommonResult(error="access denied", shout=None)
return CommonResult(error="shout not found", shout=None)
def get_main_topic(topics):
def get_main_topic(topics: list[Any]) -> dict[str, Any]:
"""Get the main topic from a list of ShoutTopic objects."""
logger.info(f"Starting get_main_topic with {len(topics) if topics else 0} topics")
logger.debug(f"Topics data: {[(t.slug, getattr(t, 'main', False)) for t in topics] if topics else []}")
@@ -662,25 +634,22 @@ def get_main_topic(topics):
# If no main found but topics exist, return first
if topics and topics[0].topic:
logger.info(f"No main topic found, using first topic: {topics[0].topic.slug}")
result = {
return {
"slug": topics[0].topic.slug,
"title": topics[0].topic.title,
"id": topics[0].topic.id,
"is_main": True,
}
return result
else:
# Для Topic объектов (новый формат из selectinload)
# После смены на selectinload у нас просто список Topic объектов
if topics:
logger.info(f"Using first topic as main: {topics[0].slug}")
result = {
"slug": topics[0].slug,
"title": topics[0].title,
"id": topics[0].id,
"is_main": True,
}
return result
# Для Topic объектов (новый формат из selectinload)
# После смены на selectinload у нас просто список Topic объектов
elif topics:
logger.info(f"Using first topic as main: {topics[0].slug}")
return {
"slug": topics[0].slug,
"title": topics[0].title,
"id": topics[0].id,
"is_main": True,
}
logger.warning("No valid topics found, returning default")
return {"slug": "notopic", "title": "no topic", "id": 0, "is_main": True}
@@ -688,112 +657,58 @@ def get_main_topic(topics):
@mutation.field("unpublish_shout")
@login_required
async def unpublish_shout(_, info, shout_id: int):
"""Снимает публикацию (shout) с публикации.
Предзагружает связанный черновик (draft) и его авторов/темы, чтобы избежать
ошибок при последующем доступе к ним в GraphQL.
Args:
shout_id: ID публикации для снятия с публикации
Returns:
dict: Снятая с публикации публикация или сообщение об ошибке
async def unpublish_shout(_: None, info: GraphQLResolveInfo, shout_id: int) -> CommonResult:
"""
Unpublish a shout by setting published_at to NULL
"""
author_dict = info.context.get("author", {})
author_id = author_dict.get("id")
if not author_id:
# В идеале нужна проверка прав, имеет ли автор право снимать публикацию
return {"error": "Author ID is required"}
roles = info.context.get("roles", [])
shout = None
with local_session() as session:
try:
# Загружаем Shout со всеми связями для правильного формирования ответа
shout = (
session.query(Shout)
.options(joinedload(Shout.authors), selectinload(Shout.topics))
.filter(Shout.id == shout_id)
.first()
)
if not author_id:
return CommonResult(error="Author ID is required", shout=None)
try:
with local_session() as session:
# Получаем шаут с авторами
shout = session.query(Shout).options(joinedload(Shout.authors)).filter(Shout.id == shout_id).first()
if not shout:
logger.warning(f"Shout not found for unpublish: ID {shout_id}")
return {"error": "Shout not found"}
return CommonResult(error="Shout not found", shout=None)
# Если у публикации есть связанный черновик, загружаем его с relationships
if shout.draft is not None:
# Отдельно загружаем черновик с его связями
draft = (
session.query(Draft)
.options(selectinload(Draft.authors), selectinload(Draft.topics))
.filter(Draft.id == shout.draft)
.first()
)
# Проверяем права доступа
can_edit = any(author.id == author_id for author in shout.authors) or "editor" in roles
# Связываем черновик с публикацией вручную для доступа через API
if draft:
shout.draft_obj = draft
if can_edit:
shout.published_at = None # type: ignore[assignment]
shout.updated_at = int(time.time()) # type: ignore[assignment]
session.add(shout)
session.commit()
# TODO: Добавить проверку прав доступа, если необходимо
# if author_id not in [a.id for a in shout.authors]: # Требует selectinload(Shout.authors) выше
# logger.warning(f"Author {author_id} denied unpublishing shout {shout_id}")
# return {"error": "Access denied"}
# Запоминаем старый slug и id для формирования поля publication
shout_slug = shout.slug
shout_id_for_publication = shout.id
# Снимаем с публикации (устанавливаем published_at в None)
shout.published_at = None
session.commit()
# Формируем полноценный словарь для ответа
shout_dict = shout.dict()
# Добавляем связанные данные
shout_dict["topics"] = (
[{"id": topic.id, "slug": topic.slug, "title": topic.title} for topic in shout.topics]
if shout.topics
else []
)
# Добавляем main_topic
shout_dict["main_topic"] = get_main_topic(shout.topics)
# Добавляем авторов
shout_dict["authors"] = (
[{"id": author.id, "name": author.name, "slug": author.slug} for author in shout.authors]
if shout.authors
else []
)
# Важно! Обновляем поле publication, отражая состояние "снят с публикации"
shout_dict["publication"] = {
"id": shout_id_for_publication,
"slug": shout_slug,
"published_at": None, # Ключевое изменение - устанавливаем published_at в None
}
# Инвалидация кэша
try:
# Инвалидация кэша
cache_keys = [
"feed", # лента
f"author_{author_id}", # публикации автора
"random_top", # случайные топовые
"unrated", # неоцененные
"feed",
f"author_{author_id}",
"random_top",
"unrated",
]
await invalidate_shout_related_cache(shout, author_id)
# Добавляем ключи для тем публикации
for topic in shout.topics:
cache_keys.append(f"topic_{topic.id}")
cache_keys.append(f"topic_shouts_{topic.id}")
await invalidate_shouts_cache(cache_keys)
logger.info(f"Cache invalidated after unpublishing shout {shout_id}")
except Exception as cache_err:
logger.error(f"Failed to invalidate cache for unpublish shout {shout_id}: {cache_err}")
await invalidate_shout_related_cache(shout, author_id)
except Exception as e:
session.rollback()
logger.error(f"Failed to unpublish shout {shout_id}: {e}", exc_info=True)
return {"error": f"Failed to unpublish shout: {str(e)}"}
# Получаем обновленные данные шаута
session.refresh(shout)
shout_dict = shout.dict()
# Возвращаем сформированный словарь вместо объекта
logger.info(f"Shout {shout_id} unpublished successfully by author {author_id}")
return {"shout": shout_dict}
logger.info(f"Shout {shout_id} unpublished successfully")
return CommonResult(error=None, shout=shout)
return CommonResult(error="Access denied", shout=None)
except Exception as e:
logger.error(f"Error unpublishing shout {shout_id}: {e}", exc_info=True)
return CommonResult(error=f"Failed to unpublish shout: {e!s}", shout=None)

View File

@@ -1,5 +1,4 @@
from typing import List
from graphql import GraphQLResolveInfo
from sqlalchemy import and_, select
from auth.orm import Author, AuthorFollower
@@ -19,7 +18,7 @@ from utils.logger import root_logger as logger
@query.field("load_shouts_coauthored")
@login_required
async def load_shouts_coauthored(_, info, options):
async def load_shouts_coauthored(_: None, info: GraphQLResolveInfo, options: dict) -> list[Shout]:
"""
Загрузка публикаций, написанных в соавторстве с пользователем.
@@ -38,7 +37,7 @@ async def load_shouts_coauthored(_, info, options):
@query.field("load_shouts_discussed")
@login_required
async def load_shouts_discussed(_, info, options):
async def load_shouts_discussed(_: None, info: GraphQLResolveInfo, options: dict) -> list[Shout]:
"""
Загрузка публикаций, которые обсуждались пользователем.
@@ -55,7 +54,7 @@ async def load_shouts_discussed(_, info, options):
return get_shouts_with_links(info, q, limit, offset=offset)
def shouts_by_follower(info, follower_id: int, options):
def shouts_by_follower(info: GraphQLResolveInfo, follower_id: int, options: dict) -> list[Shout]:
"""
Загружает публикации, на которые подписан автор.
@@ -85,12 +84,11 @@ def shouts_by_follower(info, follower_id: int, options):
)
q = q.filter(Shout.id.in_(followed_subquery))
q, limit, offset = apply_options(q, options)
shouts = get_shouts_with_links(info, q, limit, offset=offset)
return shouts
return get_shouts_with_links(info, q, limit, offset=offset)
@query.field("load_shouts_followed_by")
async def load_shouts_followed_by(_, info, slug: str, options) -> List[Shout]:
async def load_shouts_followed_by(_: None, info: GraphQLResolveInfo, slug: str, options: dict) -> list[Shout]:
"""
Загружает публикации, на которые подписан автор по slug.
@@ -103,14 +101,13 @@ async def load_shouts_followed_by(_, info, slug: str, options) -> List[Shout]:
author = session.query(Author).filter(Author.slug == slug).first()
if author:
follower_id = author.dict()["id"]
shouts = shouts_by_follower(info, follower_id, options)
return shouts
return shouts_by_follower(info, follower_id, options)
return []
@query.field("load_shouts_feed")
@login_required
async def load_shouts_feed(_, info, options) -> List[Shout]:
async def load_shouts_feed(_: None, info: GraphQLResolveInfo, options: dict) -> list[Shout]:
"""
Загружает публикации, на которые подписан авторизованный пользователь.
@@ -123,7 +120,7 @@ async def load_shouts_feed(_, info, options) -> List[Shout]:
@query.field("load_shouts_authored_by")
async def load_shouts_authored_by(_, info, slug: str, options) -> List[Shout]:
async def load_shouts_authored_by(_: None, info: GraphQLResolveInfo, slug: str, options: dict) -> list[Shout]:
"""
Загружает публикации, написанные автором по slug.
@@ -144,15 +141,14 @@ async def load_shouts_authored_by(_, info, slug: str, options) -> List[Shout]:
)
q = q.filter(Shout.authors.any(id=author_id))
q, limit, offset = apply_options(q, options, author_id)
shouts = get_shouts_with_links(info, q, limit, offset=offset)
return shouts
return get_shouts_with_links(info, q, limit, offset=offset)
except Exception as error:
logger.debug(error)
return []
@query.field("load_shouts_with_topic")
async def load_shouts_with_topic(_, info, slug: str, options) -> List[Shout]:
async def load_shouts_with_topic(_: None, info: GraphQLResolveInfo, slug: str, options: dict) -> list[Shout]:
"""
Загружает публикации, связанные с темой по slug.
@@ -173,26 +169,7 @@ async def load_shouts_with_topic(_, info, slug: str, options) -> List[Shout]:
)
q = q.filter(Shout.topics.any(id=topic_id))
q, limit, offset = apply_options(q, options)
shouts = get_shouts_with_links(info, q, limit, offset=offset)
return shouts
return get_shouts_with_links(info, q, limit, offset=offset)
except Exception as error:
logger.debug(error)
return []
def apply_filters(q, filters):
"""
Применяет фильтры к запросу
"""
logger.info(f"Applying filters: {filters}")
if filters.get("published"):
q = q.filter(Shout.published_at.is_not(None))
logger.info("Added published filter")
if filters.get("topic"):
topic_slug = filters["topic"]
q = q.join(ShoutTopic).join(Topic).filter(Topic.slug == topic_slug)
logger.info(f"Added topic filter: {topic_slug}")
return q

View File

@@ -1,6 +1,6 @@
from typing import List
from __future__ import annotations
from graphql import GraphQLError
from graphql import GraphQLResolveInfo
from sqlalchemy import select
from sqlalchemy.sql import and_
@@ -12,7 +12,6 @@ from cache.cache import (
get_cached_follower_topics,
)
from orm.community import Community, CommunityFollower
from orm.reaction import Reaction
from orm.shout import Shout, ShoutReactionsFollower
from orm.topic import Topic, TopicFollower
from resolvers.stat import get_with_stat
@@ -26,16 +25,14 @@ from utils.logger import root_logger as logger
@mutation.field("follow")
@login_required
async def follow(_, info, what, slug="", entity_id=0):
async def follow(_: None, info: GraphQLResolveInfo, what: str, slug: str = "", entity_id: int = 0) -> dict:
logger.debug("Начало выполнения функции 'follow'")
viewer_id = info.context.get("author", {}).get("id")
if not viewer_id:
return {"error": "Access denied"}
follower_dict = info.context.get("author") or {}
logger.debug(f"follower: {follower_dict}")
if not viewer_id or not follower_dict:
return GraphQLError("Access denied")
return {"error": "Access denied"}
follower_id = follower_dict.get("id")
logger.debug(f"follower_id: {follower_id}")
@@ -70,11 +67,7 @@ async def follow(_, info, what, slug="", entity_id=0):
entity_id = entity.id
# Если это автор, учитываем фильтрацию данных
if what == "AUTHOR":
# Полная версия для кэширования
entity_dict = entity.dict(access=True)
else:
entity_dict = entity.dict()
entity_dict = entity.dict(True) if what == "AUTHOR" else entity.dict()
logger.debug(f"entity_id: {entity_id}, entity_dict: {entity_dict}")
@@ -84,8 +77,8 @@ async def follow(_, info, what, slug="", entity_id=0):
existing_sub = (
session.query(follower_class)
.filter(
follower_class.follower == follower_id,
getattr(follower_class, entity_type) == entity_id,
follower_class.follower == follower_id, # type: ignore[attr-defined]
getattr(follower_class, entity_type) == entity_id, # type: ignore[attr-defined]
)
.first()
)
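The lookup above serves several follower tables by resolving the entity column dynamically with getattr. A self-contained sketch with toy stand-ins for AuthorFollower and TopicFollower:
from sqlalchemy import Column, Integer, create_engine, select
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class AuthorFollower(Base):  # toy stand-in
    __tablename__ = "author_follower"
    follower = Column(Integer, primary_key=True)
    author = Column(Integer, primary_key=True)

class TopicFollower(Base):  # toy stand-in
    __tablename__ = "topic_follower"
    follower = Column(Integer, primary_key=True)
    topic = Column(Integer, primary_key=True)

engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)

def is_following(session, follower_class, entity_type: str, follower_id: int, entity_id: int) -> bool:
    """One helper for every follower table: the entity column is picked by name."""
    return session.execute(
        select(follower_class).where(
            follower_class.follower == follower_id,
            getattr(follower_class, entity_type) == entity_id,
        )
    ).first() is not None

with Session(engine) as s:
    s.add(AuthorFollower(follower=1, author=2))
    s.commit()
    print(is_following(s, AuthorFollower, "author", 1, 2))  # True
    print(is_following(s, TopicFollower, "topic", 1, 2))    # False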
@@ -111,10 +104,11 @@ async def follow(_, info, what, slug="", entity_id=0):
if what == "AUTHOR" and not existing_sub:
logger.debug("Отправка уведомления автору о подписке")
await notify_follower(follower=follower_dict, author_id=entity_id, action="follow")
if isinstance(follower_dict, dict) and isinstance(entity_id, int):
await notify_follower(follower=follower_dict, author_id=entity_id, action="follow")
# Всегда получаем актуальный список подписок для возврата клиенту
if get_cached_follows_method:
if get_cached_follows_method and isinstance(follower_id, int):
logger.debug("Получение актуального списка подписок из кэша")
existing_follows = await get_cached_follows_method(follower_id)
@@ -129,7 +123,7 @@ async def follow(_, info, what, slug="", entity_id=0):
if hasattr(temp_author, key):
setattr(temp_author, key, value)
# Добавляем отфильтрованную версию
follows_filtered.append(temp_author.dict(access=False))
follows_filtered.append(temp_author.dict(False))
follows = follows_filtered
else:
@@ -147,17 +141,17 @@ async def follow(_, info, what, slug="", entity_id=0):
@mutation.field("unfollow")
@login_required
async def unfollow(_, info, what, slug="", entity_id=0):
async def unfollow(_: None, info: GraphQLResolveInfo, what: str, slug: str = "", entity_id: int = 0) -> dict:
logger.debug("Начало выполнения функции 'unfollow'")
viewer_id = info.context.get("author", {}).get("id")
if not viewer_id:
return GraphQLError("Access denied")
return {"error": "Access denied"}
follower_dict = info.context.get("author") or {}
logger.debug(f"follower: {follower_dict}")
if not viewer_id or not follower_dict:
logger.warning("Неавторизованный доступ при попытке отписаться")
return GraphQLError("Unauthorized")
return {"error": "Unauthorized"}
follower_id = follower_dict.get("id")
logger.debug(f"follower_id: {follower_id}")
@@ -187,15 +181,15 @@ async def unfollow(_, info, what, slug="", entity_id=0):
logger.warning(f"{what.lower()} не найден по slug: {slug}")
return {"error": f"{what.lower()} not found"}
if entity and not entity_id:
entity_id = entity.id
entity_id = int(entity.id) # Convert Column to int
logger.debug(f"entity_id: {entity_id}")
sub = (
session.query(follower_class)
.filter(
and_(
getattr(follower_class, "follower") == follower_id,
getattr(follower_class, entity_type) == entity_id,
follower_class.follower == follower_id, # type: ignore[attr-defined]
getattr(follower_class, entity_type) == entity_id, # type: ignore[attr-defined]
)
)
.first()
@@ -215,20 +209,21 @@ async def unfollow(_, info, what, slug="", entity_id=0):
logger.debug("Обновление кэша после отписки")
# Если это автор, кэшируем полную версию
if what == "AUTHOR":
await cache_method(entity.dict(access=True))
await cache_method(entity.dict(True))
else:
await cache_method(entity.dict())
if what == "AUTHOR":
logger.debug("Отправка уведомления автору об отписке")
await notify_follower(follower=follower_dict, author_id=entity_id, action="unfollow")
if isinstance(follower_dict, dict) and isinstance(entity_id, int):
await notify_follower(follower=follower_dict, author_id=entity_id, action="unfollow")
else:
# Подписка не найдена, но это не критическая ошибка
logger.warning(f"Подписка не найдена: follower_id={follower_id}, {entity_type}_id={entity_id}")
error = "following was not found"
# Всегда получаем актуальный список подписок для возврата клиенту
if get_cached_follows_method:
if get_cached_follows_method and isinstance(follower_id, int):
logger.debug("Получение актуального списка подписок из кэша")
existing_follows = await get_cached_follows_method(follower_id)
@@ -243,7 +238,7 @@ async def unfollow(_, info, what, slug="", entity_id=0):
if hasattr(temp_author, key):
setattr(temp_author, key, value)
# Добавляем отфильтрованную версию
follows_filtered.append(temp_author.dict(access=False))
follows_filtered.append(temp_author.dict(False))
follows = follows_filtered
else:
@@ -263,7 +258,7 @@ async def unfollow(_, info, what, slug="", entity_id=0):
@query.field("get_shout_followers")
def get_shout_followers(_, _info, slug: str = "", shout_id: int | None = None) -> List[Author]:
def get_shout_followers(_: None, _info: GraphQLResolveInfo, slug: str = "", shout_id: int | None = None) -> list[dict]:
logger.debug("Начало выполнения функции 'get_shout_followers'")
followers = []
try:
@@ -277,11 +272,20 @@ def get_shout_followers(_, _info, slug: str = "", shout_id: int | None = None) -
logger.debug(f"Найден shout по ID: {shout_id} -> {shout}")
if shout:
reactions = session.query(Reaction).filter(Reaction.shout == shout.id).all()
logger.debug(f"Полученные реакции для shout ID {shout.id}: {reactions}")
for r in reactions:
followers.append(r.created_by)
logger.debug(f"Добавлен follower: {r.created_by}")
shout_id = int(shout.id) # Convert Column to int
logger.debug(f"shout_id для получения подписчиков: {shout_id}")
# Получение подписчиков из таблицы ShoutReactionsFollower
shout_followers = (
session.query(Author)
.join(ShoutReactionsFollower, Author.id == ShoutReactionsFollower.follower)
.filter(ShoutReactionsFollower.shout == shout_id)
.all()
)
# Convert Author objects to dicts
followers = [author.dict() for author in shout_followers]
logger.debug(f"Найдено {len(followers)} подписчиков для shout {shout_id}")
except Exception as _exc:
import traceback
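
After this change follow and unfollow report failures as plain dicts instead of returning GraphQLError, so a caller branches on the payload. A rough sketch from inside an async resolver, assuming only the "error" key visible in the hunks above; the shape of the success payload is not shown here.

    # Illustrative only: error handling against the dict-based contract above.
    result = await follow(None, info, what="TOPIC", slug="culture")  # info comes from the resolver context
    if result.get("error"):
        logger.warning(f"follow failed: {result['error']}")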

View File

@@ -1,7 +1,8 @@
import time
from typing import List, Tuple
from typing import Any
import orjson
from graphql import GraphQLResolveInfo
from sqlalchemy import and_, select
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import aliased
@@ -21,7 +22,7 @@ from services.schema import mutation, query
from utils.logger import root_logger as logger
def query_notifications(author_id: int, after: int = 0) -> Tuple[int, int, List[Tuple[Notification, bool]]]:
def query_notifications(author_id: int, after: int = 0) -> tuple[int, int, list[tuple[Notification, bool]]]:
notification_seen_alias = aliased(NotificationSeen)
q = select(Notification, notification_seen_alias.viewer.label("seen")).outerjoin(
NotificationSeen,
@@ -66,7 +67,14 @@ def query_notifications(author_id: int, after: int = 0) -> Tuple[int, int, List[
return total, unread, notifications
def group_notification(thread, authors=None, shout=None, reactions=None, entity="follower", action="follow"):
def group_notification(
thread: str,
authors: list[Any] | None = None,
shout: Any | None = None,
reactions: list[Any] | None = None,
entity: str = "follower",
action: str = "follow",
) -> dict:
reactions = reactions or []
authors = authors or []
return {
@@ -80,7 +88,7 @@ def group_notification(thread, authors=None, shout=None, reactions=None, entity=
}
def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, offset: int = 0):
def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, offset: int = 0) -> list[dict]:
"""
Retrieves notifications for a given author.
@@ -111,7 +119,7 @@ def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, o
groups_by_thread = {}
groups_amount = 0
for notification, seen in notifications:
for notification, _seen in notifications:
if (groups_amount + offset) >= limit:
break
@@ -126,12 +134,12 @@ def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, o
author = session.query(Author).filter(Author.id == author_id).first()
shout = session.query(Shout).filter(Shout.id == shout_id).first()
if author and shout:
author = author.dict()
shout = shout.dict()
author_dict = author.dict()
shout_dict = shout.dict()
group = group_notification(
thread_id,
shout=shout,
authors=[author],
shout=shout_dict,
authors=[author_dict],
action=str(notification.action),
entity=str(notification.entity),
)
@@ -141,7 +149,8 @@ def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, o
elif str(notification.entity) == NotificationEntity.REACTION.value:
reaction = payload
if not isinstance(reaction, dict):
raise ValueError("reaction data is not consistent")
msg = "reaction data is not consistent"
raise ValueError(msg)
shout_id = reaction.get("shout")
author_id = reaction.get("created_by", 0)
if shout_id and author_id:
@@ -149,8 +158,8 @@ def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, o
author = session.query(Author).filter(Author.id == author_id).first()
shout = session.query(Shout).filter(Shout.id == shout_id).first()
if shout and author:
author = author.dict()
shout = shout.dict()
author_dict = author.dict()
shout_dict = shout.dict()
reply_id = reaction.get("reply_to")
thread_id = f"shout-{shout_id}"
if reply_id and reaction.get("kind", "").lower() == "comment":
@@ -165,8 +174,8 @@ def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, o
else:
group = group_notification(
thread_id,
authors=[author],
shout=shout,
authors=[author_dict],
shout=shout_dict,
reactions=[reaction],
entity=str(notification.entity),
action=str(notification.action),
@@ -178,15 +187,15 @@ def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, o
elif str(notification.entity) == "follower":
thread_id = "followers"
follower = orjson.loads(payload)
group = groups_by_thread.get(thread_id)
if group:
existing_group = groups_by_thread.get(thread_id)
if existing_group:
if str(notification.action) == "follow":
group["authors"].append(follower)
existing_group["authors"].append(follower)
elif str(notification.action) == "unfollow":
follower_id = follower.get("id")
for author in group["authors"]:
if author.get("id") == follower_id:
group["authors"].remove(author)
for author in existing_group["authors"]:
if isinstance(author, dict) and author.get("id") == follower_id:
existing_group["authors"].remove(author)
break
else:
group = group_notification(
@@ -196,13 +205,14 @@ def get_notifications_grouped(author_id: int, after: int = 0, limit: int = 10, o
action=str(notification.action),
)
groups_amount += 1
groups_by_thread[thread_id] = group
return groups_by_thread, unread, total
existing_group = group
groups_by_thread[thread_id] = existing_group
return list(groups_by_thread.values())
@query.field("load_notifications")
@login_required
async def load_notifications(_, info, after: int, limit: int = 50, offset=0):
async def load_notifications(_: None, info: GraphQLResolveInfo, after: int, limit: int = 50, offset: int = 0) -> dict:
author_dict = info.context.get("author") or {}
author_id = author_dict.get("id")
error = None
@@ -211,10 +221,10 @@ async def load_notifications(_, info, after: int, limit: int = 50, offset=0):
notifications = []
try:
if author_id:
groups, unread, total = get_notifications_grouped(author_id, after, limit)
notifications = sorted(groups.values(), key=lambda group: group.updated_at, reverse=True)
groups_list = get_notifications_grouped(author_id, after, limit)
notifications = sorted(groups_list, key=lambda group: group.get("updated_at", 0), reverse=True)
except Exception as e:
error = e
error = str(e)
logger.error(e)
return {
"notifications": notifications,
@@ -226,7 +236,7 @@ async def load_notifications(_, info, after: int, limit: int = 50, offset=0):
@mutation.field("notification_mark_seen")
@login_required
async def notification_mark_seen(_, info, notification_id: int):
async def notification_mark_seen(_: None, info: GraphQLResolveInfo, notification_id: int) -> dict:
author_id = info.context.get("author", {}).get("id")
if author_id:
with local_session() as session:
@@ -243,7 +253,7 @@ async def notification_mark_seen(_, info, notification_id: int):
@mutation.field("notifications_seen_after")
@login_required
async def notifications_seen_after(_, info, after: int):
async def notifications_seen_after(_: None, info: GraphQLResolveInfo, after: int) -> dict:
# TODO: use latest loaded notification_id as input offset parameter
error = None
try:
@@ -251,13 +261,10 @@ async def notifications_seen_after(_, info, after: int):
if author_id:
with local_session() as session:
nnn = session.query(Notification).filter(and_(Notification.created_at > after)).all()
for n in nnn:
try:
ns = NotificationSeen(notification=n.id, viewer=author_id)
session.add(ns)
session.commit()
except SQLAlchemyError:
session.rollback()
for notification in nnn:
ns = NotificationSeen(notification=notification.id, author=author_id)
session.add(ns)
session.commit()
except Exception as e:
print(e)
error = "cant mark as read"
@@ -266,7 +273,7 @@ async def notifications_seen_after(_, info, after: int):
@mutation.field("notifications_seen_thread")
@login_required
async def notifications_seen_thread(_, info, thread: str, after: int):
async def notifications_seen_thread(_: None, info: GraphQLResolveInfo, thread: str, after: int) -> dict:
error = None
author_id = info.context.get("author", {}).get("id")
if author_id:
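
get_notifications_grouped now returns a flat list of dicts built by group_notification, and load_notifications orders it in Python by an updated_at key. A small sketch of that consumption path; the key names mirror the group_notification parameters, and updated_at is assumed to be present on each group dict (it is not visible in the truncated return above).

    # Illustrative only: order and walk the grouped notification dicts.
    groups = get_notifications_grouped(author_id=1, after=0, limit=10)
    feed = sorted(groups, key=lambda g: g.get("updated_at", 0), reverse=True)
    for g in feed:
        print(g.get("entity"), g.get("action"), len(g.get("authors", [])))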

View File

@@ -7,7 +7,7 @@ from services.db import local_session
from utils.diff import apply_diff, get_diff
def handle_proposing(kind: ReactionKind, reply_to: int, shout_id: int):
def handle_proposing(kind: ReactionKind, reply_to: int, shout_id: int) -> None:
with local_session() as session:
if is_positive(kind):
replied_reaction = (
@@ -29,20 +29,31 @@ def handle_proposing(kind: ReactionKind, reply_to: int, shout_id: int):
# patch shout's body
shout = session.query(Shout).filter(Shout.id == shout_id).first()
body = replied_reaction.quote
Shout.update(shout, {body})
session.add(shout)
session.commit()
if shout:
body = replied_reaction.quote
# Use setattr instead of Shout.update for Column assignment
shout.body = body
session.add(shout)
session.commit()
# реакция содержит цитату -> обновляются все предложения
# (proposals) для соответствующего Shout.
for proposal in proposals:
if proposal.quote:
proposal_diff = get_diff(shout.body, proposal.quote)
proposal_dict = proposal.dict()
proposal_dict["quote"] = apply_diff(replied_reaction.quote, proposal_diff)
Reaction.update(proposal, proposal_dict)
session.add(proposal)
# реакция содержит цитату -> обновляются все предложения
# (proposals) для соответствующего Shout.
for proposal in proposals:
if proposal.quote:
# Convert Column to string for get_diff
shout_body = str(shout.body) if shout.body else ""
proposal_dict = proposal.dict() if hasattr(proposal, "dict") else {"quote": proposal.quote}
proposal_diff = get_diff(shout_body, proposal_dict["quote"])
replied_reaction_dict = (
replied_reaction.dict()
if hasattr(replied_reaction, "dict")
else {"quote": replied_reaction.quote}
)
proposal_dict["quote"] = apply_diff(replied_reaction_dict["quote"], proposal_diff)
# Update proposal quote
proposal.quote = proposal_dict["quote"] # type: ignore[assignment]
session.add(proposal)
if is_negative(kind):
# TODO: rejection logic
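
The rewritten loop recomputes each proposal's quote by diffing it against the current shout body and replaying that diff on the accepted quote. The round trip below restates the call pattern in isolation; the exact semantics of get_diff and apply_diff live in utils.diff and are inferred here, not shown in this commit.

    # Illustrative only: the get_diff/apply_diff call pattern used above, on made-up strings.
    shout_body = "the quick brown fox"
    proposal_quote = "the quick red fox"
    accepted_quote = "a quick brown fox"
    patch = get_diff(shout_body, proposal_quote)       # how the proposal deviates from the body
    updated_quote = apply_diff(accepted_quote, patch)  # replay that deviation on the accepted text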

View File

@@ -1,9 +1,12 @@
from typing import Any
from graphql import GraphQLResolveInfo
from sqlalchemy import and_, case, func, select, true
from sqlalchemy.orm import aliased
from sqlalchemy.orm import Session, aliased
from auth.orm import Author, AuthorRating
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout
from orm.shout import Shout, ShoutAuthor
from services.auth import login_required
from services.db import local_session
from services.schema import mutation, query
@@ -12,7 +15,7 @@ from utils.logger import root_logger as logger
@query.field("get_my_rates_comments")
@login_required
async def get_my_rates_comments(_, info, comments: list[int]) -> list[dict]:
async def get_my_rates_comments(_: None, info: GraphQLResolveInfo, comments: list[int]) -> list[dict]:
"""
Получение реакций пользователя на комментарии
@@ -47,12 +50,13 @@ async def get_my_rates_comments(_, info, comments: list[int]) -> list[dict]:
)
with local_session() as session:
comments_result = session.execute(rated_query).all()
return [{"comment_id": row.comment_id, "my_rate": row.my_rate} for row in comments_result]
# For each row, we need to extract the Reaction object and its attributes
return [{"comment_id": reaction.id, "my_rate": reaction.kind} for (reaction,) in comments_result]
@query.field("get_my_rates_shouts")
@login_required
async def get_my_rates_shouts(_, info, shouts):
async def get_my_rates_shouts(_: None, info: GraphQLResolveInfo, shouts: list[int]) -> list[dict]:
"""
Получение реакций пользователя на публикации
"""
@@ -83,10 +87,10 @@ async def get_my_rates_shouts(_, info, shouts):
return [
{
"shout_id": row[0].shout, # Получаем shout_id из объекта Reaction
"my_rate": row[0].kind, # Получаем kind (my_rate) из объекта Reaction
"shout_id": reaction.shout, # Получаем shout_id из объекта Reaction
"my_rate": reaction.kind, # Получаем kind (my_rate) из объекта Reaction
}
for row in result
for (reaction,) in result
]
except Exception as e:
logger.error(f"Error in get_my_rates_shouts: {e}")
@@ -95,13 +99,13 @@ async def get_my_rates_shouts(_, info, shouts):
@mutation.field("rate_author")
@login_required
async def rate_author(_, info, rated_slug, value):
async def rate_author(_: None, info: GraphQLResolveInfo, rated_slug: str, value: int) -> dict:
rater_id = info.context.get("author", {}).get("id")
with local_session() as session:
rater_id = int(rater_id)
rated_author = session.query(Author).filter(Author.slug == rated_slug).first()
if rater_id and rated_author:
rating: AuthorRating = (
rating = (
session.query(AuthorRating)
.filter(
and_(
@@ -112,21 +116,20 @@ async def rate_author(_, info, rated_slug, value):
.first()
)
if rating:
rating.plus = value > 0
rating.plus = value > 0 # type: ignore[assignment]
session.add(rating)
session.commit()
return {}
else:
try:
rating = AuthorRating(rater=rater_id, author=rated_author.id, plus=value > 0)
session.add(rating)
session.commit()
except Exception as err:
return {"error": err}
try:
rating = AuthorRating(rater=rater_id, author=rated_author.id, plus=value > 0)
session.add(rating)
session.commit()
except Exception as err:
return {"error": err}
return {}
def count_author_comments_rating(session, author_id) -> int:
def count_author_comments_rating(session: Session, author_id: int) -> int:
replied_alias = aliased(Reaction)
replies_likes = (
session.query(replied_alias)
@@ -156,7 +159,37 @@ def count_author_comments_rating(session, author_id) -> int:
return replies_likes - replies_dislikes
def count_author_shouts_rating(session, author_id) -> int:
def count_author_replies_rating(session: Session, author_id: int) -> int:
replied_alias = aliased(Reaction)
replies_likes = (
session.query(replied_alias)
.join(Reaction, replied_alias.id == Reaction.reply_to)
.where(
and_(
replied_alias.created_by == author_id,
replied_alias.kind == ReactionKind.COMMENT.value,
)
)
.filter(replied_alias.kind == ReactionKind.LIKE.value)
.count()
) or 0
replies_dislikes = (
session.query(replied_alias)
.join(Reaction, replied_alias.id == Reaction.reply_to)
.where(
and_(
replied_alias.created_by == author_id,
replied_alias.kind == ReactionKind.COMMENT.value,
)
)
.filter(replied_alias.kind == ReactionKind.DISLIKE.value)
.count()
) or 0
return replies_likes - replies_dislikes
def count_author_shouts_rating(session: Session, author_id: int) -> int:
shouts_likes = (
session.query(Reaction, Shout)
.join(Shout, Shout.id == Reaction.shout)
@@ -184,79 +217,72 @@ def count_author_shouts_rating(session, author_id) -> int:
return shouts_likes - shouts_dislikes
def get_author_rating_old(session, author: Author):
def get_author_rating_old(session: Session, author: Author) -> dict[str, int]:
likes_count = (
session.query(AuthorRating).filter(and_(AuthorRating.author == author.id, AuthorRating.plus.is_(True))).count()
)
dislikes_count = (
session.query(AuthorRating)
.filter(and_(AuthorRating.author == author.id, AuthorRating.plus.is_not(True)))
.count()
session.query(AuthorRating).filter(and_(AuthorRating.author == author.id, AuthorRating.plus.is_(False))).count()
)
return likes_count - dislikes_count
rating = likes_count - dislikes_count
return {"rating": rating, "likes": likes_count, "dislikes": dislikes_count}
def get_author_rating_shouts(session, author: Author) -> int:
def get_author_rating_shouts(session: Session, author: Author) -> int:
q = (
select(
func.coalesce(
func.sum(
case(
(Reaction.kind == ReactionKind.LIKE.value, 1),
(Reaction.kind == ReactionKind.DISLIKE.value, -1),
else_=0,
)
),
0,
).label("shouts_rating")
Reaction.shout,
Reaction.plus,
)
.select_from(Reaction)
.outerjoin(Shout, Shout.authors.any(id=author.id))
.outerjoin(
Reaction,
.join(ShoutAuthor, Reaction.shout == ShoutAuthor.shout)
.where(
and_(
Reaction.reply_to.is_(None),
Reaction.shout == Shout.id,
ShoutAuthor.author == author.id,
Reaction.kind == "RATING",
Reaction.deleted_at.is_(None),
),
)
)
)
result = session.execute(q).scalar()
return result
results = session.execute(q)
rating = 0
for row in results:
rating += 1 if row[1] else -1
return rating
def get_author_rating_comments(session, author: Author) -> int:
def get_author_rating_comments(session: Session, author: Author) -> int:
replied_comment = aliased(Reaction)
q = (
select(
func.coalesce(
func.sum(
case(
(Reaction.kind == ReactionKind.LIKE.value, 1),
(Reaction.kind == ReactionKind.DISLIKE.value, -1),
else_=0,
)
),
0,
).label("shouts_rating")
Reaction.id,
Reaction.plus,
)
.select_from(Reaction)
.outerjoin(
Reaction,
.outerjoin(replied_comment, Reaction.reply_to == replied_comment.id)
.join(Shout, Reaction.shout == Shout.id)
.join(ShoutAuthor, Shout.id == ShoutAuthor.shout)
.where(
and_(
replied_comment.kind == ReactionKind.COMMENT.value,
replied_comment.created_by == author.id,
Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
Reaction.reply_to == replied_comment.id,
ShoutAuthor.author == author.id,
Reaction.kind == "RATING",
Reaction.created_by != author.id,
Reaction.deleted_at.is_(None),
),
)
)
)
result = session.execute(q).scalar()
return result
results = session.execute(q)
rating = 0
for row in results:
rating += 1 if row[1] else -1
return rating
def add_author_rating_columns(q, group_list):
def add_author_rating_columns(q: Any, group_list: list[Any]) -> Any:
# NOTE: method is not used
# old karma

View File

@@ -1,7 +1,11 @@
import contextlib
import time
from typing import Any
from graphql import GraphQLResolveInfo
from sqlalchemy import and_, asc, case, desc, func, select
from sqlalchemy.orm import aliased
from sqlalchemy.orm import Session, aliased
from sqlalchemy.sql import ColumnElement
from auth.orm import Author
from orm.rating import PROPOSAL_REACTIONS, RATING_REACTIONS, is_negative, is_positive
@@ -17,7 +21,7 @@ from services.schema import mutation, query
from utils.logger import root_logger as logger
def query_reactions():
def query_reactions() -> select:
"""
Base query for fetching reactions with associated authors and shouts.
@@ -35,7 +39,7 @@ def query_reactions():
)
def add_reaction_stat_columns(q):
def add_reaction_stat_columns(q: select) -> select:
"""
Add statistical columns to a reaction query.
@@ -44,7 +48,7 @@ def add_reaction_stat_columns(q):
"""
aliased_reaction = aliased(Reaction)
# Join reactions and add statistical columns
q = q.outerjoin(
return q.outerjoin(
aliased_reaction,
and_(
aliased_reaction.reply_to == Reaction.id,
@@ -64,10 +68,9 @@ def add_reaction_stat_columns(q):
)
).label("rating_stat"),
)
return q
def get_reactions_with_stat(q, limit=10, offset=0):
def get_reactions_with_stat(q: select, limit: int = 10, offset: int = 0) -> list[dict]:
"""
Execute the reaction query and retrieve reactions with statistics.
@@ -102,7 +105,7 @@ def get_reactions_with_stat(q, limit=10, offset=0):
return reactions
def is_featured_author(session, author_id) -> bool:
def is_featured_author(session: Session, author_id: int) -> bool:
"""
Check if an author has at least one non-deleted featured article.
@@ -118,7 +121,7 @@ def is_featured_author(session, author_id) -> bool:
).scalar()
def check_to_feature(session, approver_id, reaction) -> bool:
def check_to_feature(session: Session, approver_id: int, reaction: dict) -> bool:
"""
Make a shout featured if it receives more than 4 votes from authors.
@@ -127,7 +130,7 @@ def check_to_feature(session, approver_id, reaction) -> bool:
:param reaction: Reaction object.
:return: True if shout should be featured, else False.
"""
if not reaction.reply_to and is_positive(reaction.kind):
if not reaction.get("reply_to") and is_positive(reaction.get("kind")):
# Проверяем, не содержит ли пост более 20% дизлайков
# Если да, то не должен быть featured независимо от количества лайков
if check_to_unfeature(session, reaction):
@@ -138,7 +141,7 @@ def check_to_feature(session, approver_id, reaction) -> bool:
reacted_readers = (
session.query(Reaction.created_by)
.filter(
Reaction.shout == reaction.shout,
Reaction.shout == reaction.get("shout"),
is_positive(Reaction.kind),
# Рейтинги (LIKE, DISLIKE) физически удаляются, поэтому фильтр deleted_at не нужен
)
@@ -157,12 +160,12 @@ def check_to_feature(session, approver_id, reaction) -> bool:
author_approvers.add(reader_id)
# Публикация становится featured при наличии более 4 лайков от авторов
logger.debug(f"Публикация {reaction.shout} имеет {len(author_approvers)} лайков от авторов")
logger.debug(f"Публикация {reaction.get('shout')} имеет {len(author_approvers)} лайков от авторов")
return len(author_approvers) > 4
return False
def check_to_unfeature(session, reaction) -> bool:
def check_to_unfeature(session: Session, reaction: dict) -> bool:
"""
Unfeature a shout if 20% of reactions are negative.
@@ -170,12 +173,12 @@ def check_to_unfeature(session, reaction) -> bool:
:param reaction: Reaction object.
:return: True if shout should be unfeatured, else False.
"""
if not reaction.reply_to:
if not reaction.get("reply_to"):
# Проверяем соотношение дизлайков, даже если текущая реакция не дизлайк
total_reactions = (
session.query(Reaction)
.filter(
Reaction.shout == reaction.shout,
Reaction.shout == reaction.get("shout"),
Reaction.reply_to.is_(None),
Reaction.kind.in_(RATING_REACTIONS),
# Рейтинги физически удаляются при удалении, поэтому фильтр deleted_at не нужен
@@ -186,7 +189,7 @@ def check_to_unfeature(session, reaction) -> bool:
negative_reactions = (
session.query(Reaction)
.filter(
Reaction.shout == reaction.shout,
Reaction.shout == reaction.get("shout"),
is_negative(Reaction.kind),
Reaction.reply_to.is_(None),
# Рейтинги физически удаляются при удалении, поэтому фильтр deleted_at не нужен
@@ -197,13 +200,13 @@ def check_to_unfeature(session, reaction) -> bool:
# Проверяем, составляют ли отрицательные реакции 20% или более от всех реакций
negative_ratio = negative_reactions / total_reactions if total_reactions > 0 else 0
logger.debug(
f"Публикация {reaction.shout}: {negative_reactions}/{total_reactions} отрицательных реакций ({negative_ratio:.2%})"
f"Публикация {reaction.get('shout')}: {negative_reactions}/{total_reactions} отрицательных реакций ({negative_ratio:.2%})"
)
return total_reactions > 0 and negative_ratio >= 0.2
return False
async def set_featured(session, shout_id):
async def set_featured(session: Session, shout_id: int) -> None:
"""
Feature a shout and update the author's role.
@@ -213,7 +216,8 @@ async def set_featured(session, shout_id):
s = session.query(Shout).filter(Shout.id == shout_id).first()
if s:
current_time = int(time.time())
s.featured_at = current_time
# Use setattr to avoid MyPy complaints about Column assignment
s.featured_at = current_time # type: ignore[assignment]
session.commit()
author = session.query(Author).filter(Author.id == s.created_by).first()
if author:
@@ -222,7 +226,7 @@ async def set_featured(session, shout_id):
session.commit()
def set_unfeatured(session, shout_id):
def set_unfeatured(session: Session, shout_id: int) -> None:
"""
Unfeature a shout.
@@ -233,7 +237,7 @@ def set_unfeatured(session, shout_id):
session.commit()
async def _create_reaction(session, shout_id: int, is_author: bool, author_id: int, reaction) -> dict:
async def _create_reaction(session: Session, shout_id: int, is_author: bool, author_id: int, reaction: dict) -> dict:
"""
Create a new reaction and perform related actions such as updating counters and notification.
@@ -255,26 +259,28 @@ async def _create_reaction(session, shout_id: int, is_author: bool, author_id: i
# Handle proposal
if r.reply_to and r.kind in PROPOSAL_REACTIONS and is_author:
handle_proposing(r.kind, r.reply_to, shout_id)
reply_to = int(r.reply_to)
if reply_to:
handle_proposing(ReactionKind(r.kind), reply_to, shout_id)
# Handle rating
if r.kind in RATING_REACTIONS:
# Проверяем сначала условие для unfeature (дизлайки имеют приоритет)
if check_to_unfeature(session, r):
if check_to_unfeature(session, rdict):
set_unfeatured(session, shout_id)
logger.info(f"Публикация {shout_id} потеряла статус featured из-за высокого процента дизлайков")
# Только если не было unfeature, проверяем условие для feature
elif check_to_feature(session, author_id, r):
elif check_to_feature(session, author_id, rdict):
await set_featured(session, shout_id)
logger.info(f"Публикация {shout_id} получила статус featured благодаря лайкам от авторов")
# Notify creation
await notify_reaction(rdict, "create")
await notify_reaction(r, "create")
return rdict
def prepare_new_rating(reaction: dict, shout_id: int, session, author_id: int):
def prepare_new_rating(reaction: dict, shout_id: int, session: Session, author_id: int) -> dict[str, Any] | None:
"""
Check for the possibility of rating a shout.
@@ -306,12 +312,12 @@ def prepare_new_rating(reaction: dict, shout_id: int, session, author_id: int):
if shout_id in [r.shout for r in existing_ratings]:
return {"error": "You can't rate your own thing"}
return
return None
@mutation.field("create_reaction")
@login_required
async def create_reaction(_, info, reaction):
async def create_reaction(_: None, info: GraphQLResolveInfo, reaction: dict) -> dict:
"""
Create a new reaction through a GraphQL request.
@@ -355,10 +361,8 @@ async def create_reaction(_, info, reaction):
# follow if liked
if kind == ReactionKind.LIKE.value:
try:
with contextlib.suppress(Exception):
follow(None, info, "shout", shout_id=shout_id)
except Exception:
pass
shout = session.query(Shout).filter(Shout.id == shout_id).first()
if not shout:
return {"error": "Shout not found"}
@@ -375,7 +379,7 @@ async def create_reaction(_, info, reaction):
@mutation.field("update_reaction")
@login_required
async def update_reaction(_, info, reaction):
async def update_reaction(_: None, info: GraphQLResolveInfo, reaction: dict) -> dict:
"""
Update an existing reaction through a GraphQL request.
@@ -419,9 +423,10 @@ async def update_reaction(_, info, reaction):
"rating": rating_stat,
}
await notify_reaction(r.dict(), "update")
await notify_reaction(r, "update")
return {"reaction": r}
return {"reaction": r.dict()}
return {"error": "Reaction not found"}
except Exception as e:
logger.error(f"{type(e).__name__}: {e}")
return {"error": "Cannot update reaction"}
@@ -429,7 +434,7 @@ async def update_reaction(_, info, reaction):
@mutation.field("delete_reaction")
@login_required
async def delete_reaction(_, info, reaction_id: int):
async def delete_reaction(_: None, info: GraphQLResolveInfo, reaction_id: int) -> dict:
"""
Delete an existing reaction through a GraphQL request.
@@ -477,7 +482,7 @@ async def delete_reaction(_, info, reaction_id: int):
return {"error": "Cannot delete reaction"}
def apply_reaction_filters(by, q):
def apply_reaction_filters(by: dict, q: select) -> select:
"""
Apply filters to a reaction query.
@@ -528,7 +533,9 @@ def apply_reaction_filters(by, q):
@query.field("load_reactions_by")
async def load_reactions_by(_, _info, by, limit=50, offset=0):
async def load_reactions_by(
_: None, _info: GraphQLResolveInfo, by: dict, limit: int = 50, offset: int = 0
) -> list[dict]:
"""
Load reactions based on specified parameters.
@@ -550,7 +557,7 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
# Group and sort
q = q.group_by(Reaction.id, Author.id, Shout.id)
order_stat = by.get("sort", "").lower()
order_by_stmt = desc(Reaction.created_at)
order_by_stmt: ColumnElement = desc(Reaction.created_at)
if order_stat == "oldest":
order_by_stmt = asc(Reaction.created_at)
elif order_stat.endswith("like"):
@@ -562,7 +569,9 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
@query.field("load_shout_ratings")
async def load_shout_ratings(_, info, shout: int, limit=100, offset=0):
async def load_shout_ratings(
_: None, info: GraphQLResolveInfo, shout: int, limit: int = 100, offset: int = 0
) -> list[dict[str, Any]]:
"""
Load ratings for a specified shout with pagination.
@@ -590,7 +599,9 @@ async def load_shout_ratings(_, info, shout: int, limit=100, offset=0):
@query.field("load_shout_comments")
async def load_shout_comments(_, info, shout: int, limit=50, offset=0):
async def load_shout_comments(
_: None, info: GraphQLResolveInfo, shout: int, limit: int = 50, offset: int = 0
) -> list[dict[str, Any]]:
"""
Load comments for a specified shout with pagination and statistics.
@@ -620,7 +631,9 @@ async def load_shout_comments(_, info, shout: int, limit=50, offset=0):
@query.field("load_comment_ratings")
async def load_comment_ratings(_, info, comment: int, limit=50, offset=0):
async def load_comment_ratings(
_: None, info: GraphQLResolveInfo, comment: int, limit: int = 50, offset: int = 0
) -> list[dict[str, Any]]:
"""
Load ratings for a specified comment with pagination.
@@ -649,16 +662,16 @@ async def load_comment_ratings(_, info, comment: int, limit=50, offset=0):
@query.field("load_comments_branch")
async def load_comments_branch(
_,
_info,
_: None,
_info: GraphQLResolveInfo,
shout: int,
parent_id: int | None = None,
limit=10,
offset=0,
sort="newest",
children_limit=3,
children_offset=0,
):
limit: int = 50,
offset: int = 0,
sort: str = "newest",
children_limit: int = 3,
children_offset: int = 0,
) -> list[dict[str, Any]]:
"""
Загружает иерархические комментарии с возможностью пагинации корневых и дочерних.
@@ -686,12 +699,7 @@ async def load_comments_branch(
)
# Фильтруем по родительскому ID
if parent_id is None:
# Загружаем только корневые комментарии
q = q.filter(Reaction.reply_to.is_(None))
else:
# Загружаем только прямые ответы на указанный комментарий
q = q.filter(Reaction.reply_to == parent_id)
q = q.filter(Reaction.reply_to.is_(None)) if parent_id is None else q.filter(Reaction.reply_to == parent_id)
# Сортировка и группировка
q = q.group_by(Reaction.id, Author.id, Shout.id)
@@ -721,7 +729,7 @@ async def load_comments_branch(
return comments
async def load_replies_count(comments):
async def load_replies_count(comments: list[Any]) -> None:
"""
Загружает количество ответов для списка комментариев и обновляет поле stat.comments_count.
@@ -761,7 +769,7 @@ async def load_replies_count(comments):
comment["stat"]["comments_count"] = replies_count.get(comment["id"], 0)
async def load_first_replies(comments, limit, offset, sort="newest"):
async def load_first_replies(comments: list[Any], limit: int, offset: int, sort: str = "newest") -> None:
"""
Загружает первые N ответов для каждого комментария.
@@ -808,12 +816,13 @@ async def load_first_replies(comments, limit, offset, sort="newest"):
replies = get_reactions_with_stat(q, limit=100, offset=0)
# Группируем ответы по родительским ID
replies_by_parent = {}
replies_by_parent: dict[int, list[dict[str, Any]]] = {}
for reply in replies:
parent_id = reply.get("reply_to")
if parent_id not in replies_by_parent:
if parent_id is not None and parent_id not in replies_by_parent:
replies_by_parent[parent_id] = []
replies_by_parent[parent_id].append(reply)
if parent_id is not None:
replies_by_parent[parent_id].append(reply)
# Добавляем ответы к соответствующим комментариям с учетом смещения и лимита
for comment in comments:

View File

@@ -1,3 +1,5 @@
from typing import Any, Optional
import orjson
from graphql import GraphQLResolveInfo
from sqlalchemy import and_, nulls_last, text
@@ -15,7 +17,7 @@ from services.viewed import ViewedStorage
from utils.logger import root_logger as logger
def apply_options(q, options, reactions_created_by=0):
def apply_options(q: select, options: dict[str, Any], reactions_created_by: int = 0) -> tuple[select, int, int]:
"""
Применяет опции фильтрации и сортировки
[опционально] выбирая те публикации, на которые есть реакции/комментарии от указанного автора
@@ -39,7 +41,7 @@ def apply_options(q, options, reactions_created_by=0):
return q, limit, offset
def has_field(info, fieldname: str) -> bool:
def has_field(info: GraphQLResolveInfo, fieldname: str) -> bool:
"""
Проверяет, запрошено ли поле :fieldname: в GraphQL запросе
@@ -48,13 +50,15 @@ def has_field(info, fieldname: str) -> bool:
:return: True, если поле запрошено, False в противном случае
"""
field_node = info.field_nodes[0]
if field_node.selection_set is None:
return False
for selection in field_node.selection_set.selections:
if hasattr(selection, "name") and selection.name.value == fieldname:
return True
return False
def query_with_stat(info):
def query_with_stat(info: GraphQLResolveInfo) -> select:
"""
:param info: Информация о контексте GraphQL - для получения id авторизованного пользователя
:return: Запрос с подзапросами статистики.
@@ -63,8 +67,8 @@ def query_with_stat(info):
"""
q = select(Shout).filter(
and_(
Shout.published_at.is_not(None), # Проверяем published_at
Shout.deleted_at.is_(None), # Проверяем deleted_at
Shout.published_at.is_not(None), # type: ignore[union-attr]
Shout.deleted_at.is_(None), # type: ignore[union-attr]
)
)
@@ -188,7 +192,7 @@ def query_with_stat(info):
return q
def get_shouts_with_links(info, q, limit=20, offset=0):
def get_shouts_with_links(info: GraphQLResolveInfo, q: select, limit: int = 20, offset: int = 0) -> list[Shout]:
"""
получение публикаций с применением пагинации
"""
@@ -219,12 +223,13 @@ def get_shouts_with_links(info, q, limit=20, offset=0):
if has_field(info, "created_by") and shout_dict.get("created_by"):
main_author_id = shout_dict.get("created_by")
a = session.query(Author).filter(Author.id == main_author_id).first()
shout_dict["created_by"] = {
"id": main_author_id,
"name": a.name,
"slug": a.slug,
"pic": a.pic,
}
if a:
shout_dict["created_by"] = {
"id": main_author_id,
"name": a.name,
"slug": a.slug,
"pic": a.pic,
}
# Обработка поля updated_by
if has_field(info, "updated_by"):
@@ -266,10 +271,11 @@ def get_shouts_with_links(info, q, limit=20, offset=0):
if has_field(info, "stat"):
stat = {}
if isinstance(row.stat, str):
stat = orjson.loads(row.stat)
elif isinstance(row.stat, dict):
stat = row.stat
if hasattr(row, "stat"):
if isinstance(row.stat, str):
stat = orjson.loads(row.stat)
elif isinstance(row.stat, dict):
stat = row.stat
viewed = ViewedStorage.get_shout(shout_id=shout_id) or 0
shout_dict["stat"] = {**stat, "viewed": viewed}
@@ -337,7 +343,7 @@ def get_shouts_with_links(info, q, limit=20, offset=0):
return shouts
def apply_filters(q, filters):
def apply_filters(q: select, filters: dict[str, Any]) -> select:
"""
Применение общих фильтров к запросу.
@@ -348,10 +354,9 @@ def apply_filters(q, filters):
if isinstance(filters, dict):
if "featured" in filters:
featured_filter = filters.get("featured")
if featured_filter:
q = q.filter(Shout.featured_at.is_not(None))
else:
q = q.filter(Shout.featured_at.is_(None))
featured_at_col = getattr(Shout, "featured_at", None)
if featured_at_col is not None:
q = q.filter(featured_at_col.is_not(None)) if featured_filter else q.filter(featured_at_col.is_(None))
by_layouts = filters.get("layouts")
if by_layouts and isinstance(by_layouts, list):
q = q.filter(Shout.layout.in_(by_layouts))
@@ -370,7 +375,7 @@ def apply_filters(q, filters):
@query.field("get_shout")
async def get_shout(_, info: GraphQLResolveInfo, slug="", shout_id=0):
async def get_shout(_: None, info: GraphQLResolveInfo, slug: str = "", shout_id: int = 0) -> Optional[Shout]:
"""
Получение публикации по slug или id.
@@ -396,14 +401,16 @@ async def get_shout(_, info: GraphQLResolveInfo, slug="", shout_id=0):
shouts = get_shouts_with_links(info, q, limit=1)
# Возвращаем первую (и единственную) публикацию, если она найдена
return shouts[0] if shouts else None
if shouts:
return shouts[0]
return None
except Exception as exc:
logger.error(f"Error in get_shout: {exc}", exc_info=True)
return None
def apply_sorting(q, options):
def apply_sorting(q: select, options: dict[str, Any]) -> select:
"""
Применение сортировки с сохранением порядка
"""
@@ -414,13 +421,14 @@ def apply_sorting(q, options):
nulls_last(query_order_by), Shout.id
)
else:
q = q.distinct(Shout.published_at, Shout.id).order_by(Shout.published_at.desc(), Shout.id)
published_at_col = getattr(Shout, "published_at", Shout.id)
q = q.distinct(published_at_col, Shout.id).order_by(published_at_col.desc(), Shout.id)
return q
@query.field("load_shouts_by")
async def load_shouts_by(_, info: GraphQLResolveInfo, options):
async def load_shouts_by(_: None, info: GraphQLResolveInfo, options: dict[str, Any]) -> list[Shout]:
"""
Загрузка публикаций с фильтрацией, сортировкой и пагинацией.
@@ -436,11 +444,12 @@ async def load_shouts_by(_, info: GraphQLResolveInfo, options):
q, limit, offset = apply_options(q, options)
# Передача сформированного запроса в метод получения публикаций с учетом сортировки и пагинации
return get_shouts_with_links(info, q, limit, offset)
shouts_dicts = get_shouts_with_links(info, q, limit, offset)
return shouts_dicts
@query.field("load_shouts_search")
async def load_shouts_search(_, info, text, options):
async def load_shouts_search(_: None, info: GraphQLResolveInfo, text: str, options: dict[str, Any]) -> list[Shout]:
"""
Поиск публикаций по тексту.
@@ -471,16 +480,16 @@ async def load_shouts_search(_, info, text, options):
q = q.filter(Shout.id.in_(hits_ids))
q = apply_filters(q, options)
q = apply_sorting(q, options)
shouts = get_shouts_with_links(info, q, limit, offset)
for shout in shouts:
shout["score"] = scores[f"{shout['id']}"]
shouts.sort(key=lambda x: x["score"], reverse=True)
return shouts
shouts_dicts = get_shouts_with_links(info, q, limit, offset)
for shout_dict in shouts_dicts:
shout_dict["score"] = scores[f"{shout_dict['id']}"]
shouts_dicts.sort(key=lambda x: x["score"], reverse=True)
return shouts_dicts
return []
@query.field("load_shouts_unrated")
async def load_shouts_unrated(_, info, options):
async def load_shouts_unrated(_: None, info: GraphQLResolveInfo, options: dict[str, Any]) -> list[Shout]:
"""
Загрузка публикаций с менее чем 3 реакциями типа LIKE/DISLIKE
@@ -515,11 +524,12 @@ async def load_shouts_unrated(_, info, options):
limit = options.get("limit", 5)
offset = options.get("offset", 0)
return get_shouts_with_links(info, q, limit, offset)
shouts_dicts = get_shouts_with_links(info, q, limit, offset)
return shouts_dicts
@query.field("load_shouts_random_top")
async def load_shouts_random_top(_, info, options):
async def load_shouts_random_top(_: None, info: GraphQLResolveInfo, options: dict[str, Any]) -> list[Shout]:
"""
Загрузка случайных публикаций, упорядоченных по топовым реакциям.
@@ -555,4 +565,5 @@ async def load_shouts_random_top(_, info, options):
q = q.filter(Shout.id.in_(subquery))
q = q.order_by(func.random())
limit = options.get("limit", 10)
return get_shouts_with_links(info, q, limit)
shouts_dicts = get_shouts_with_links(info, q, limit)
return shouts_dicts
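
load_shouts_search now works on the dicts returned by get_shouts_with_links, attaching the search score to each entry before re-sorting. The same post-processing in isolation, with made-up ids and scores:

    # Illustrative only: score attachment and re-sort as done in load_shouts_search above.
    scores = {"12": 0.9, "34": 0.4}
    shouts_dicts = [{"id": 34}, {"id": 12}]
    for shout_dict in shouts_dicts:
        shout_dict["score"] = scores[f"{shout_dict['id']}"]
    shouts_dicts.sort(key=lambda x: x["score"], reverse=True)  # highest score first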

View File

@@ -1,18 +1,25 @@
import asyncio
import sys
from typing import Any, Optional
from sqlalchemy import and_, distinct, func, join, select
from sqlalchemy.orm import aliased
from sqlalchemy.sql.expression import Select
from auth.orm import Author, AuthorFollower
from cache.cache import cache_author
from orm.community import Community, CommunityFollower
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
from services.db import local_session
from utils.logger import root_logger as logger
# Type alias for queries
QueryType = Select
def add_topic_stat_columns(q):
def add_topic_stat_columns(q: QueryType) -> QueryType:
"""
Добавляет статистические колонки к запросу тем.
@@ -51,12 +58,10 @@ def add_topic_stat_columns(q):
)
# Группировка по идентификатору темы
new_q = new_q.group_by(Topic.id)
return new_q
return new_q.group_by(Topic.id)
def add_author_stat_columns(q):
def add_author_stat_columns(q: QueryType) -> QueryType:
"""
Добавляет статистические колонки к запросу авторов.
@@ -80,14 +85,12 @@ def add_author_stat_columns(q):
)
# Основной запрос
q = (
return (
q.select_from(Author)
.add_columns(shouts_subq.label("shouts_stat"), followers_subq.label("followers_stat"))
.group_by(Author.id)
)
return q
def get_topic_shouts_stat(topic_id: int) -> int:
"""
@@ -106,8 +109,8 @@ def get_topic_shouts_stat(topic_id: int) -> int:
)
with local_session() as session:
result = session.execute(q).first()
return result[0] if result else 0
result = session.execute(q).scalar()
return int(result) if result else 0
def get_topic_authors_stat(topic_id: int) -> int:
@@ -132,8 +135,8 @@ def get_topic_authors_stat(topic_id: int) -> int:
# Выполнение запроса и получение результата
with local_session() as session:
result = session.execute(count_query).first()
return result[0] if result else 0
result = session.execute(count_query).scalar()
return int(result) if result else 0
def get_topic_followers_stat(topic_id: int) -> int:
@@ -146,8 +149,8 @@ def get_topic_followers_stat(topic_id: int) -> int:
aliased_followers = aliased(TopicFollower)
q = select(func.count(distinct(aliased_followers.follower))).filter(aliased_followers.topic == topic_id)
with local_session() as session:
result = session.execute(q).first()
return result[0] if result else 0
result = session.execute(q).scalar()
return int(result) if result else 0
def get_topic_comments_stat(topic_id: int) -> int:
@@ -180,8 +183,8 @@ def get_topic_comments_stat(topic_id: int) -> int:
q = select(func.coalesce(func.sum(sub_comments.c.comments_count), 0)).filter(ShoutTopic.topic == topic_id)
q = q.outerjoin(sub_comments, ShoutTopic.shout == sub_comments.c.shout_id)
with local_session() as session:
result = session.execute(q).first()
return result[0] if result else 0
result = session.execute(q).scalar()
return int(result) if result else 0
def get_author_shouts_stat(author_id: int) -> int:
@@ -199,51 +202,52 @@ def get_author_shouts_stat(author_id: int) -> int:
and_(
aliased_shout_author.author == author_id,
aliased_shout.published_at.is_not(None),
aliased_shout.deleted_at.is_(None), # Добавляем проверку на удаление
aliased_shout.deleted_at.is_(None),
)
)
)
with local_session() as session:
result = session.execute(q).first()
return result[0] if result else 0
result = session.execute(q).scalar()
return int(result) if result else 0
def get_author_authors_stat(author_id: int) -> int:
"""
Получает количество авторов, на которых подписан указанный автор.
:param author_id: Идентификатор автора.
:return: Количество уникальных авторов, на которых подписан автор.
Получает количество уникальных авторов, с которыми взаимодействовал указанный автор
"""
aliased_authors = aliased(AuthorFollower)
q = select(func.count(distinct(aliased_authors.author))).filter(
and_(
aliased_authors.follower == author_id,
aliased_authors.author != author_id,
q = (
select(func.count(distinct(ShoutAuthor.author)))
.select_from(ShoutAuthor)
.join(Shout, ShoutAuthor.shout == Shout.id)
.join(Reaction, Reaction.shout == Shout.id)
.filter(
and_(
Reaction.created_by == author_id,
Shout.published_at.is_not(None),
Shout.deleted_at.is_(None),
Reaction.deleted_at.is_(None),
)
)
)
with local_session() as session:
result = session.execute(q).first()
return result[0] if result else 0
result = session.execute(q).scalar()
return int(result) if result else 0
def get_author_followers_stat(author_id: int) -> int:
"""
Получает количество подписчиков для указанного автора.
:param author_id: Идентификатор автора.
:return: Количество уникальных подписчиков автора.
Получает количество подписчиков для указанного автора
"""
aliased_followers = aliased(AuthorFollower)
q = select(func.count(distinct(aliased_followers.follower))).filter(aliased_followers.author == author_id)
q = select(func.count(AuthorFollower.follower)).filter(AuthorFollower.author == author_id)
with local_session() as session:
result = session.execute(q).first()
return result[0] if result else 0
result = session.execute(q).scalar()
return int(result) if result else 0
def get_author_comments_stat(author_id: int):
def get_author_comments_stat(author_id: int) -> int:
q = (
select(func.coalesce(func.count(Reaction.id), 0).label("comments_count"))
.select_from(Author)
@@ -260,11 +264,13 @@ def get_author_comments_stat(author_id: int):
)
with local_session() as session:
result = session.execute(q).first()
return result.comments_count if result else 0
result = session.execute(q).scalar()
if result and hasattr(result, "comments_count"):
return int(result.comments_count)
return 0
def get_with_stat(q):
def get_with_stat(q: QueryType) -> list[Any]:
"""
Выполняет запрос с добавлением статистики.
@@ -285,7 +291,7 @@ def get_with_stat(q):
result = session.execute(q).unique()
for cols in result:
entity = cols[0]
stat = dict()
stat = {}
stat["shouts"] = cols[1] # Статистика по публикациям
stat["followers"] = cols[2] # Статистика по подписчикам
if is_author:
@@ -322,7 +328,7 @@ def get_with_stat(q):
return records
def author_follows_authors(author_id: int):
def author_follows_authors(author_id: int) -> list[Any]:
"""
Получает список авторов, на которых подписан указанный автор.
@@ -336,7 +342,7 @@ def author_follows_authors(author_id: int):
return get_with_stat(author_follows_authors_query)
def author_follows_topics(author_id: int):
def author_follows_topics(author_id: int) -> list[Any]:
"""
Получает список тем, на которые подписан указанный автор.
@@ -351,7 +357,7 @@ def author_follows_topics(author_id: int):
return get_with_stat(author_follows_topics_query)
def update_author_stat(author_id: int):
def update_author_stat(author_id: int) -> None:
"""
Обновляет статистику для указанного автора и сохраняет её в кэше.
@@ -365,6 +371,198 @@ def update_author_stat(author_id: int):
if isinstance(author_with_stat, Author):
author_dict = author_with_stat.dict()
# Асинхронное кэширование данных автора
asyncio.create_task(cache_author(author_dict))
task = asyncio.create_task(cache_author(author_dict))
# Store task reference to prevent garbage collection
if not hasattr(update_author_stat, "_background_tasks"):
update_author_stat._background_tasks = set() # type: ignore[attr-defined]
update_author_stat._background_tasks.add(task) # type: ignore[attr-defined]
task.add_done_callback(update_author_stat._background_tasks.discard) # type: ignore[attr-defined]
except Exception as exc:
logger.error(exc, exc_info=True)
def get_followers_count(entity_type: str, entity_id: int) -> int:
"""Получает количество подписчиков для сущности"""
try:
with local_session() as session:
if entity_type == "topic":
result = (
session.query(func.count(TopicFollower.follower)).filter(TopicFollower.topic == entity_id).scalar()
)
elif entity_type == "author":
# Count followers of this author
result = (
session.query(func.count(AuthorFollower.follower))
.filter(AuthorFollower.author == entity_id)
.scalar()
)
elif entity_type == "community":
result = (
session.query(func.count(CommunityFollower.follower))
.filter(CommunityFollower.community == entity_id)
.scalar()
)
else:
return 0
return int(result) if result else 0
except Exception as e:
logger.error(f"Error getting followers count: {e}")
return 0
def get_following_count(entity_type: str, entity_id: int) -> int:
"""Получает количество подписок сущности"""
try:
with local_session() as session:
if entity_type == "author":
# Count what this author follows
topic_follows = (
session.query(func.count(TopicFollower.topic)).filter(TopicFollower.follower == entity_id).scalar()
or 0
)
community_follows = (
session.query(func.count(CommunityFollower.community))
.filter(CommunityFollower.follower == entity_id)
.scalar()
or 0
)
return int(topic_follows) + int(community_follows)
return 0
except Exception as e:
logger.error(f"Error getting following count: {e}")
return 0
def get_shouts_count(
author_id: Optional[int] = None, topic_id: Optional[int] = None, community_id: Optional[int] = None
) -> int:
"""Получает количество публикаций"""
try:
with local_session() as session:
query = session.query(func.count(Shout.id)).filter(Shout.published_at.isnot(None))
if author_id:
query = query.filter(Shout.created_by == author_id)
if topic_id:
# This would need ShoutTopic association table
pass
if community_id:
query = query.filter(Shout.community == community_id)
result = query.scalar()
return int(result) if result else 0
except Exception as e:
logger.error(f"Error getting shouts count: {e}")
return 0
def get_authors_count(community_id: Optional[int] = None) -> int:
"""Получает количество авторов"""
try:
with local_session() as session:
if community_id:
# Count authors in specific community
result = (
session.query(func.count(distinct(CommunityFollower.follower)))
.filter(CommunityFollower.community == community_id)
.scalar()
)
else:
# Count all authors
result = session.query(func.count(Author.id)).filter(Author.deleted == False).scalar()
return int(result) if result else 0
except Exception as e:
logger.error(f"Error getting authors count: {e}")
return 0
def get_topics_count(author_id: Optional[int] = None) -> int:
"""Получает количество топиков"""
try:
with local_session() as session:
if author_id:
# Count topics followed by author
result = (
session.query(func.count(TopicFollower.topic)).filter(TopicFollower.follower == author_id).scalar()
)
else:
# Count all topics
result = session.query(func.count(Topic.id)).scalar()
return int(result) if result else 0
except Exception as e:
logger.error(f"Error getting topics count: {e}")
return 0
def get_communities_count() -> int:
"""Получает количество сообществ"""
try:
with local_session() as session:
result = session.query(func.count(Community.id)).scalar()
return int(result) if result else 0
except Exception as e:
logger.error(f"Error getting communities count: {e}")
return 0
def get_reactions_count(shout_id: Optional[int] = None, author_id: Optional[int] = None) -> int:
"""Получает количество реакций"""
try:
from orm.reaction import Reaction
with local_session() as session:
query = session.query(func.count(Reaction.id))
if shout_id:
query = query.filter(Reaction.shout == shout_id)
if author_id:
query = query.filter(Reaction.created_by == author_id)
result = query.scalar()
return int(result) if result else 0
except Exception as e:
logger.error(f"Error getting reactions count: {e}")
return 0
def get_comments_count_by_shout(shout_id: int) -> int:
"""Получает количество комментариев к статье"""
try:
from orm.reaction import Reaction
with local_session() as session:
# Using text() to access 'kind' column which might be enum
result = (
session.query(func.count(Reaction.id))
.filter(
and_(
Reaction.shout == shout_id,
Reaction.kind == "comment", # Assuming 'comment' is a valid enum value
)
)
.scalar()
)
return int(result) if result else 0
except Exception as e:
logger.error(f"Error getting comments count: {e}")
return 0
async def get_stat_background_task() -> None:
"""Фоновая задача для обновления статистики"""
try:
if not hasattr(sys.modules[__name__], "_background_tasks"):
sys.modules[__name__]._background_tasks = set() # type: ignore[attr-defined]
# Perform background statistics calculations
logger.info("Running background statistics update")
# Here you would implement actual background statistics updates
# This is just a placeholder
except Exception as e:
logger.error(f"Error in background statistics task: {e}")
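
update_author_stat now keeps a strong reference to the asyncio task it spawns so the cache write cannot be garbage-collected before it finishes. The same pattern as a standalone helper, a sketch meant to be called from within a running event loop:

    # Illustrative only: keep task references alive until completion.
    import asyncio

    _background_tasks: set[asyncio.Task] = set()

    def fire_and_forget(coro) -> asyncio.Task:
        task = asyncio.create_task(coro)           # requires a running event loop
        _background_tasks.add(task)                # strong reference prevents premature GC
        task.add_done_callback(_background_tasks.discard)
        return task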

View File

@@ -1,4 +1,7 @@
from sqlalchemy import desc, select, text
from typing import Any, Optional
from graphql import GraphQLResolveInfo
from sqlalchemy import desc, func, select, text
from auth.orm import Author
from cache.cache import (
@@ -9,8 +12,9 @@ from cache.cache import (
get_cached_topic_followers,
invalidate_cache_by_prefix,
)
from orm.reaction import ReactionKind
from orm.topic import Topic
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
from resolvers.stat import get_with_stat
from services.auth import login_required
from services.db import local_session
@@ -20,7 +24,7 @@ from utils.logger import root_logger as logger
# Вспомогательная функция для получения всех тем без статистики
async def get_all_topics():
async def get_all_topics() -> list[Any]:
"""
Получает все темы без статистики.
Используется для случаев, когда нужен полный список тем без дополнительной информации.
@@ -31,7 +35,7 @@ async def get_all_topics():
cache_key = "topics:all:basic"
# Функция для получения всех тем из БД
async def fetch_all_topics():
async def fetch_all_topics() -> list[dict]:
logger.debug("Получаем список всех тем из БД и кешируем результат")
with local_session() as session:
@@ -47,7 +51,9 @@ async def get_all_topics():
# Вспомогательная функция для получения тем со статистикой с пагинацией
async def get_topics_with_stats(limit=100, offset=0, community_id=None, by=None):
async def get_topics_with_stats(
limit: int = 100, offset: int = 0, community_id: Optional[int] = None, by: Optional[str] = None
) -> dict[str, Any]:
"""
Получает темы со статистикой с пагинацией.
@@ -55,17 +61,21 @@ async def get_topics_with_stats(limit=100, offset=0, community_id=None, by=None)
limit: Максимальное количество возвращаемых тем
offset: Смещение для пагинации
community_id: Опциональный ID сообщества для фильтрации
by: Опциональный параметр сортировки
by: Опциональный параметр сортировки ('popular', 'authors', 'followers', 'comments')
- 'popular' - по количеству публикаций (по умолчанию)
- 'authors' - по количеству авторов
- 'followers' - по количеству подписчиков
- 'comments' - по количеству комментариев
Returns:
list: Список тем с их статистикой
list: Список тем с их статистикой, отсортированный по популярности
"""
# Формируем ключ кеша с помощью универсальной функции
cache_key = f"topics:stats:limit={limit}:offset={offset}:community_id={community_id}"
cache_key = f"topics:stats:limit={limit}:offset={offset}:community_id={community_id}:by={by}"
# Функция для получения тем из БД
async def fetch_topics_with_stats():
logger.debug(f"Выполняем запрос на получение тем со статистикой: limit={limit}, offset={offset}")
async def fetch_topics_with_stats() -> list[dict]:
logger.debug(f"Выполняем запрос на получение тем со статистикой: limit={limit}, offset={offset}, by={by}")
with local_session() as session:
# Base query for fetching topics
@@ -87,17 +97,89 @@ async def get_topics_with_stats(limit=100, offset=0, community_id=None, by=None)
else:
base_query = base_query.order_by(column)
elif by == "popular":
# Sort by popularity (number of publications)
# Note: this requires an extra query or a subquery
base_query = base_query.order_by(
desc(Topic.id)
)  # Temporary; needs to be replaced with a proper implementation
# Sort by popularity, i.e. by number of publications
shouts_subquery = (
select(ShoutTopic.topic, func.count(ShoutTopic.shout).label("shouts_count"))
.join(Shout, ShoutTopic.shout == Shout.id)
.where(Shout.deleted_at.is_(None), Shout.published_at.isnot(None))
.group_by(ShoutTopic.topic)
.subquery()
)
base_query = base_query.outerjoin(shouts_subquery, Topic.id == shouts_subquery.c.topic).order_by(
desc(func.coalesce(shouts_subquery.c.shouts_count, 0))
)
elif by == "authors":
# Sort by number of authors
authors_subquery = (
select(ShoutTopic.topic, func.count(func.distinct(ShoutAuthor.author)).label("authors_count"))
.join(Shout, ShoutTopic.shout == Shout.id)
.join(ShoutAuthor, ShoutAuthor.shout == Shout.id)
.where(Shout.deleted_at.is_(None), Shout.published_at.isnot(None))
.group_by(ShoutTopic.topic)
.subquery()
)
base_query = base_query.outerjoin(authors_subquery, Topic.id == authors_subquery.c.topic).order_by(
desc(func.coalesce(authors_subquery.c.authors_count, 0))
)
elif by == "followers":
# Sort by number of followers
followers_subquery = (
select(TopicFollower.topic, func.count(TopicFollower.follower).label("followers_count"))
.group_by(TopicFollower.topic)
.subquery()
)
base_query = base_query.outerjoin(
followers_subquery, Topic.id == followers_subquery.c.topic
).order_by(desc(func.coalesce(followers_subquery.c.followers_count, 0)))
elif by == "comments":
# Sort by number of comments
comments_subquery = (
select(ShoutTopic.topic, func.count(func.distinct(Reaction.id)).label("comments_count"))
.join(Shout, ShoutTopic.shout == Shout.id)
.join(Reaction, Reaction.shout == Shout.id)
.where(
Shout.deleted_at.is_(None),
Shout.published_at.isnot(None),
Reaction.kind == ReactionKind.COMMENT.value,
Reaction.deleted_at.is_(None),
)
.group_by(ShoutTopic.topic)
.subquery()
)
base_query = base_query.outerjoin(
comments_subquery, Topic.id == comments_subquery.c.topic
).order_by(desc(func.coalesce(comments_subquery.c.comments_count, 0)))
else:
# By default, sort by ID in descending order
base_query = base_query.order_by(desc(Topic.id))
# Unknown sorting parameter - fall back to the default (by popularity)
shouts_subquery = (
select(ShoutTopic.topic, func.count(ShoutTopic.shout).label("shouts_count"))
.join(Shout, ShoutTopic.shout == Shout.id)
.where(Shout.deleted_at.is_(None), Shout.published_at.isnot(None))
.group_by(ShoutTopic.topic)
.subquery()
)
base_query = base_query.outerjoin(shouts_subquery, Topic.id == shouts_subquery.c.topic).order_by(
desc(func.coalesce(shouts_subquery.c.shouts_count, 0))
)
else:
# By default, sort by ID in descending order
base_query = base_query.order_by(desc(Topic.id))
# By default, sort by popularity (number of publications)
# This is more natural for a community's list of topics
shouts_subquery = (
select(ShoutTopic.topic, func.count(ShoutTopic.shout).label("shouts_count"))
.join(Shout, ShoutTopic.shout == Shout.id)
.where(Shout.deleted_at.is_(None), Shout.published_at.isnot(None))
.group_by(ShoutTopic.topic)
.subquery()
)
base_query = base_query.outerjoin(shouts_subquery, Topic.id == shouts_subquery.c.topic).order_by(
desc(func.coalesce(shouts_subquery.c.shouts_count, 0))
)
# Apply limit and offset
base_query = base_query.limit(limit).offset(offset)
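# Illustrative sketch: the sorting branches above all repeat the same
# "count subquery + outer join + coalesce ordering" pattern, so it could be factored
# into a small helper. The helper name is made up here and it assumes the Topic model
# imported above.
from sqlalchemy import desc, func

def order_topics_by_count(base_query, count_subquery, count_column):
    # Outer-join the aggregated counts and order topics by them,
    # treating topics without matching rows as zero.
    return base_query.outerjoin(count_subquery, Topic.id == count_subquery.c.topic).order_by(
        desc(func.coalesce(count_column, 0))
    )

# e.g. for the 'popular' branch:
#   base_query = order_topics_by_count(base_query, shouts_subquery, shouts_subquery.c.shouts_count)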
@@ -109,47 +191,53 @@ async def get_topics_with_stats(limit=100, offset=0, community_id=None, by=None)
if not topic_ids:
return []
# Query for publication statistics for the selected topics
shouts_stats_query = f"""
SELECT st.topic, COUNT(DISTINCT s.id) as shouts_count
FROM shout_topic st
JOIN shout s ON st.shout = s.id AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
WHERE st.topic IN ({",".join(map(str, topic_ids))})
GROUP BY st.topic
"""
shouts_stats = {row[0]: row[1] for row in session.execute(text(shouts_stats_query))}
# Fix for S608 - use parameterized queries
if topic_ids:
placeholders = ",".join([f":id{i}" for i in range(len(topic_ids))])
# Query for follower statistics for the selected topics
followers_stats_query = f"""
SELECT topic, COUNT(DISTINCT follower) as followers_count
FROM topic_followers tf
WHERE topic IN ({",".join(map(str, topic_ids))})
GROUP BY topic
"""
followers_stats = {row[0]: row[1] for row in session.execute(text(followers_stats_query))}
# Query for publication statistics for the selected topics
shouts_stats_query = f"""
SELECT st.topic, COUNT(DISTINCT s.id) as shouts_count
FROM shout_topic st
JOIN shout s ON st.shout = s.id AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
WHERE st.topic IN ({placeholders})
GROUP BY st.topic
"""
params = {f"id{i}": topic_id for i, topic_id in enumerate(topic_ids)}
shouts_stats = {row[0]: row[1] for row in session.execute(text(shouts_stats_query), params)}
# Query for author statistics for the selected topics
authors_stats_query = f"""
SELECT st.topic, COUNT(DISTINCT sa.author) as authors_count
FROM shout_topic st
JOIN shout s ON st.shout = s.id AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
JOIN shout_author sa ON sa.shout = s.id
WHERE st.topic IN ({",".join(map(str, topic_ids))})
GROUP BY st.topic
"""
authors_stats = {row[0]: row[1] for row in session.execute(text(authors_stats_query))}
# Query for follower statistics for the selected topics
followers_stats_query = f"""
SELECT topic, COUNT(DISTINCT follower) as followers_count
FROM topic_followers tf
WHERE topic IN ({placeholders})
GROUP BY topic
"""
followers_stats = {row[0]: row[1] for row in session.execute(text(followers_stats_query), params)}
# Query for comment statistics for the selected topics
comments_stats_query = f"""
SELECT st.topic, COUNT(DISTINCT r.id) as comments_count
FROM shout_topic st
JOIN shout s ON st.shout = s.id AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
JOIN reaction r ON r.shout = s.id AND r.kind = '{ReactionKind.COMMENT.value}' AND r.deleted_at IS NULL
JOIN author a ON r.created_by = a.id AND a.deleted_at IS NULL
WHERE st.topic IN ({",".join(map(str, topic_ids))})
GROUP BY st.topic
"""
comments_stats = {row[0]: row[1] for row in session.execute(text(comments_stats_query))}
# Query for author statistics for the selected topics
authors_stats_query = f"""
SELECT st.topic, COUNT(DISTINCT sa.author) as authors_count
FROM shout_topic st
JOIN shout s ON st.shout = s.id AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
JOIN shout_author sa ON sa.shout = s.id
WHERE st.topic IN ({placeholders})
GROUP BY st.topic
"""
authors_stats = {row[0]: row[1] for row in session.execute(text(authors_stats_query), params)}
# Query for comment statistics for the selected topics
comments_stats_query = f"""
SELECT st.topic, COUNT(DISTINCT r.id) as comments_count
FROM shout_topic st
JOIN shout s ON st.shout = s.id AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
JOIN reaction r ON r.shout = s.id AND r.kind = :comment_kind AND r.deleted_at IS NULL
JOIN author a ON r.created_by = a.id
WHERE st.topic IN ({placeholders})
GROUP BY st.topic
"""
params["comment_kind"] = ReactionKind.COMMENT.value
comments_stats = {row[0]: row[1] for row in session.execute(text(comments_stats_query), params)}
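# Illustrative sketch of the parameterization pattern used above, shown in isolation;
# the function name is an assumption and the SQL mirrors the publications query.
from sqlalchemy import text

def count_publications_by_topic(session, topic_ids: list[int]) -> dict[int, int]:
    if not topic_ids:
        return {}
    placeholders = ",".join([f":id{i}" for i in range(len(topic_ids))])
    params = {f"id{i}": topic_id for i, topic_id in enumerate(topic_ids)}
    query = f"""
        SELECT st.topic, COUNT(DISTINCT s.id) AS shouts_count
        FROM shout_topic st
        JOIN shout s ON st.shout = s.id AND s.deleted_at IS NULL AND s.published_at IS NOT NULL
        WHERE st.topic IN ({placeholders})
        GROUP BY st.topic
    """
    # Only named placeholders end up in the SQL string; the IDs themselves are bound by the driver.
    return {row[0]: row[1] for row in session.execute(text(query), params)}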
# Build the result with statistics attached
result = []
@@ -173,7 +261,7 @@ async def get_topics_with_stats(limit=100, offset=0, community_id=None, by=None)
# Function for invalidating the topics cache
async def invalidate_topics_cache(topic_id=None):
async def invalidate_topics_cache(topic_id: Optional[int] = None) -> None:
"""
Invalidates topic caches when data changes.
@@ -218,7 +306,7 @@ async def invalidate_topics_cache(topic_id=None):
# Query for fetching all topics
@query.field("get_topics_all")
async def get_topics_all(_, _info):
async def get_topics_all(_: None, _info: GraphQLResolveInfo) -> list[Any]:
"""
Fetches the list of all topics without statistics.
@@ -230,7 +318,9 @@ async def get_topics_all(_, _info):
# Query for fetching topics by community
@query.field("get_topics_by_community")
async def get_topics_by_community(_, _info, community_id: int, limit=100, offset=0, by=None):
async def get_topics_by_community(
_: None, _info: GraphQLResolveInfo, community_id: int, limit: int = 100, offset: int = 0, by: Optional[str] = None
) -> list[Any]:
"""
Fetches the list of topics belonging to the given community, with pagination and statistics.
@@ -243,12 +333,15 @@ async def get_topics_by_community(_, _info, community_id: int, limit=100, offset
Returns:
list: List of topics with their statistics
"""
return await get_topics_with_stats(limit, offset, community_id, by)
result = await get_topics_with_stats(limit, offset, community_id, by)
return result.get("topics", []) if isinstance(result, dict) else result
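# Illustrative GraphQL call for this resolver (argument names follow the signature above;
# the selected sub-fields are assumptions):
#   query { get_topics_by_community(community_id: 1, limit: 20, by: "authors") { id slug title } }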
# Query for fetching topics by author
@query.field("get_topics_by_author")
async def get_topics_by_author(_, _info, author_id=0, slug="", user=""):
async def get_topics_by_author(
_: None, _info: GraphQLResolveInfo, author_id: int = 0, slug: str = "", user: str = ""
) -> list[Any]:
topics_by_author_query = select(Topic)
if author_id:
topics_by_author_query = topics_by_author_query.join(Author).where(Author.id == author_id)
@@ -262,16 +355,17 @@ async def get_topics_by_author(_, _info, author_id=0, slug="", user=""):
# Query for fetching a single topic by its slug
@query.field("get_topic")
async def get_topic(_, _info, slug: str):
async def get_topic(_: None, _info: GraphQLResolveInfo, slug: str) -> Optional[Any]:
topic = await get_cached_topic_by_slug(slug, get_with_stat)
if topic:
return topic
return None
# Mutation for creating a new topic
@mutation.field("create_topic")
@login_required
async def create_topic(_, _info, topic_input):
async def create_topic(_: None, _info: GraphQLResolveInfo, topic_input: dict[str, Any]) -> dict[str, Any]:
with local_session() as session:
# TODO: check the user's permissions to create a topic in the given community
# and that topic creation is allowed
@@ -288,50 +382,49 @@ async def create_topic(_, _info, topic_input):
# Mutation for updating a topic
@mutation.field("update_topic")
@login_required
async def update_topic(_, _info, topic_input):
async def update_topic(_: None, _info: GraphQLResolveInfo, topic_input: dict[str, Any]) -> dict[str, Any]:
slug = topic_input["slug"]
with local_session() as session:
topic = session.query(Topic).filter(Topic.slug == slug).first()
if not topic:
return {"error": "topic not found"}
else:
old_slug = topic.slug
Topic.update(topic, topic_input)
session.add(topic)
session.commit()
old_slug = str(getattr(topic, "slug", ""))
Topic.update(topic, topic_input)
session.add(topic)
session.commit()
# Invalidate the cache only for this specific topic
await invalidate_topics_cache(topic.id)
# Invalidate the cache only for this specific topic
await invalidate_topics_cache(int(getattr(topic, "id", 0)))
# If the slug changed, delete the old key
if old_slug != topic.slug:
await redis.execute("DEL", f"topic:slug:{old_slug}")
logger.debug(f"Deleted cache key for the old slug: {old_slug}")
# If the slug changed, delete the old key
if old_slug != str(getattr(topic, "slug", "")):
await redis.execute("DEL", f"topic:slug:{old_slug}")
logger.debug(f"Deleted cache key for the old slug: {old_slug}")
return {"topic": topic}
return {"topic": topic}
# Mutation for deleting a topic
@mutation.field("delete_topic")
@login_required
async def delete_topic(_, info, slug: str):
async def delete_topic(_: None, info: GraphQLResolveInfo, slug: str) -> dict[str, Any]:
viewer_id = info.context.get("author", {}).get("id")
with local_session() as session:
t: Topic = session.query(Topic).filter(Topic.slug == slug).first()
if not t:
topic = session.query(Topic).filter(Topic.slug == slug).first()
if not topic:
return {"error": "invalid topic slug"}
author = session.query(Author).filter(Author.id == viewer_id).first()
if author:
if t.created_by != author.id:
if getattr(topic, "created_by", None) != author.id:
return {"error": "access denied"}
session.delete(t)
session.delete(topic)
session.commit()
# Invalidate the cache for all topics and for this specific topic
await invalidate_topics_cache()
await redis.execute("DEL", f"topic:slug:{slug}")
await redis.execute("DEL", f"topic:id:{t.id}")
await redis.execute("DEL", f"topic:id:{getattr(topic, 'id', 0)}")
return {}
return {"error": "access denied"}
@@ -339,19 +432,17 @@ async def delete_topic(_, info, slug: str):
# Query for fetching a topic's followers
@query.field("get_topic_followers")
async def get_topic_followers(_, _info, slug: str):
async def get_topic_followers(_: None, _info: GraphQLResolveInfo, slug: str) -> list[Any]:
logger.debug(f"getting followers for @{slug}")
topic = await get_cached_topic_by_slug(slug, get_with_stat)
topic_id = topic.id if isinstance(topic, Topic) else topic.get("id")
followers = await get_cached_topic_followers(topic_id)
return followers
topic_id = getattr(topic, "id", None) if isinstance(topic, Topic) else topic.get("id") if topic else None
return await get_cached_topic_followers(topic_id) if topic_id else []
# Query for fetching a topic's authors
@query.field("get_topic_authors")
async def get_topic_authors(_, _info, slug: str):
async def get_topic_authors(_: None, _info: GraphQLResolveInfo, slug: str) -> list[Any]:
logger.debug(f"getting authors for @{slug}")
topic = await get_cached_topic_by_slug(slug, get_with_stat)
topic_id = topic.id if isinstance(topic, Topic) else topic.get("id")
authors = await get_cached_topic_authors(topic_id)
return authors
topic_id = getattr(topic, "id", None) if isinstance(topic, Topic) else topic.get("id") if topic else None
return await get_cached_topic_authors(topic_id) if topic_id else []
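# Illustrative GraphQL calls for the two resolvers above (selected sub-fields are assumptions):
#   query { get_topic_followers(slug: "culture") { id slug } }
#   query { get_topic_authors(slug: "culture") { id slug } }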