core/resolvers/editor.py

813 lines
35 KiB
Python
Raw Normal View History

2024-02-02 20:38:16 +00:00
import time
2023-12-17 20:30:20 +00:00
2025-03-20 08:55:21 +00:00
import orjson
2025-04-15 17:16:01 +00:00
import trafilatura
2024-04-08 07:38:58 +00:00
from sqlalchemy import and_, desc, select
2025-05-03 07:53:40 +00:00
from sqlalchemy.orm import joinedload, selectinload
2024-02-29 12:52:36 +00:00
from sqlalchemy.sql.functions import coalesce
2023-11-23 23:00:28 +00:00
2025-02-11 09:00:35 +00:00
from cache.cache import (
cache_author,
cache_topic,
invalidate_shout_related_cache,
invalidate_shouts_cache,
)
2024-05-06 21:06:31 +00:00
from orm.author import Author
2025-04-26 12:35:31 +00:00
from orm.draft import Draft
2024-02-02 16:36:30 +00:00
from orm.shout import Shout, ShoutAuthor, ShoutTopic
2023-05-14 17:02:26 +00:00
from orm.topic import Topic
2024-06-05 15:29:15 +00:00
from resolvers.follower import follow, unfollow
2024-05-05 15:46:16 +00:00
from resolvers.stat import get_with_stat
2023-12-17 20:30:20 +00:00
from services.auth import login_required
from services.db import local_session
2023-10-25 18:33:53 +00:00
from services.notify import notify_shout
from services.schema import mutation, query
2024-01-29 03:42:02 +00:00
from services.search import search_service
2025-04-27 09:53:49 +00:00
from utils.html_wrapper import wrap_html_fragment
2024-08-09 06:37:06 +00:00
from utils.logger import root_logger as logger
2024-02-17 18:04:01 +00:00
2024-06-05 18:04:48 +00:00
async def cache_by_id(entity, entity_id: int, cache_method):
    """Cache an entity by its ID using the provided cache method.

    Args:
        entity: The SQLAlchemy model class to query (e.g. Author, Topic)
        entity_id (int): The ID of the entity to cache
        cache_method: The caching function to call with the entity's dict.
            May be sync or async; an async result is awaited.

    Returns:
        dict: The cached entity data if successful, None if entity not found

    Example:
        >>> async def test_cache():
        ...     author = await cache_by_id(Author, 1, cache_author)
        ...     assert author['id'] == 1
        ...     assert 'name' in author
        ...     return author
    """
    caching_query = select(entity).filter(entity.id == entity_id)
    result = get_with_stat(caching_query)
    if not result or not result[0]:
        logger.warning(f"{entity.__name__} with id {entity_id} not found")
        return
    x = result[0]
    d = x.dict()  # convert ORM object to a plain dictionary
    # FIX: cache_author/cache_topic are coroutine functions (they are awaited
    # elsewhere in this module); calling them without awaiting silently dropped
    # the cache write. Await the result when it is awaitable.
    maybe_coro = cache_method(d)
    if hasattr(maybe_coro, "__await__"):
        await maybe_coro
    return d
2024-04-17 15:32:23 +00:00
@query.field("get_my_shout")
@login_required
async def get_my_shout(_, info, shout_id: int):
    """Get a shout by ID if the requesting user has permission to view it.

    DEPRECATED: use `load_drafts` instead

    Args:
        info: GraphQL resolver info containing context
        shout_id (int): ID of the shout to retrieve

    Returns:
        dict: Contains either:
            - error (str): Error message if retrieval failed
            - shout (Shout): The requested shout if found and accessible

    Permissions:
        User must be:
        - The shout creator
        - Listed as an author
        - Have editor role
    """
    user_id = info.context.get("user_id", "")
    author_dict = info.context.get("author", {})
    author_id = author_dict.get("id")
    roles = info.context.get("roles", [])
    shout = None
    if not user_id or not author_id:
        return {"error": "unauthorized", "shout": None}

    with local_session() as session:
        shout = (
            session.query(Shout)
            .filter(Shout.id == shout_id)
            .options(joinedload(Shout.authors), joinedload(Shout.topics))
            .filter(Shout.deleted_at.is_(None))
            .first()
        )
        if not shout:
            return {"error": "no shout found", "shout": None}

        # Normalize the media JSON field into a list of MediaItem-like dicts
        if hasattr(shout, "media") and shout.media:
            if isinstance(shout.media, str):
                try:
                    shout.media = orjson.loads(shout.media)
                except Exception as e:
                    logger.error(f"Error parsing shout media: {e}")
                    shout.media = []
            if not isinstance(shout.media, list):
                shout.media = [shout.media] if shout.media else []
        else:
            shout.media = []

        logger.debug(f"got {len(shout.authors)} shout authors, created by {shout.created_by}")
        is_editor = "editor" in roles
        logger.debug(f"viewer is{'' if is_editor else ' not'} editor")
        is_creator = author_id == shout.created_by
        logger.debug(f"viewer is{'' if is_creator else ' not'} creator")
        is_author = any(a.id == int(author_id) for a in shout.authors)
        # FIX: this log line previously checked is_creator, not is_author
        logger.debug(f"viewer is{'' if is_author else ' not'} author")
        can_edit = is_editor or is_author or is_creator
        if not can_edit:
            return {"error": "forbidden", "shout": None}

        logger.debug("got shout editor with data")
        return {"error": None, "shout": shout}
2024-03-05 17:12:17 +00:00
2024-04-17 15:32:23 +00:00
@query.field("get_shouts_drafts")
@login_required
async def get_shouts_drafts(_, info):
    """Return the calling author's unpublished (draft) shouts, newest first."""
    profile = info.context.get("author")
    if not profile:
        return {"error": "author profile was not found"}

    creator_id = profile.get("id")
    drafts = []
    with local_session() as session:
        if creator_id:
            drafts_query = (
                select(Shout)
                .options(joinedload(Shout.authors), joinedload(Shout.topics))
                .filter(and_(Shout.deleted_at.is_(None), Shout.created_by == int(creator_id)))
                .filter(Shout.published_at.is_(None))
                .order_by(desc(coalesce(Shout.updated_at, Shout.created_at)))
                .group_by(Shout.id)
            )
            drafts = [row[0] for row in session.execute(drafts_query).unique()]
    return {"shouts": drafts}
2023-10-23 14:47:11 +00:00
2023-11-22 16:38:39 +00:00
2025-02-09 14:18:01 +00:00
# @mutation.field("create_shout")
# @login_required
async def create_shout(_, info, inp):
    """Create a new shout (publication) from the given input.

    Args:
        info: GraphQL resolver info with context (user_id, author)
        inp (dict): Input fields: body, lead, slug, layout, title, seo,
            topics, main_topic

    Returns:
        dict: {"shout": Shout} on success or {"error": str} on failure
    """
    logger.info(f"Starting create_shout with input: {inp}")
    user_id = info.context.get("user_id")
    author_dict = info.context.get("author")
    logger.debug(f"Context user_id: {user_id}, author: {author_dict}")

    if not author_dict:
        logger.error("Author profile not found in context")
        return {"error": "author profile was not found"}

    author_id = author_dict.get("id")
    if user_id and author_id:
        try:
            with local_session() as session:
                author_id = int(author_id)
                current_time = int(time.time())
                slug = inp.get("slug") or f"draft-{current_time}"

                logger.info(f"Creating shout with input: {inp}")
                # Create the publication without topics first
                body = inp.get("body", "")
                lead = inp.get("lead", "")
                body_html = wrap_html_fragment(body)
                lead_html = wrap_html_fragment(lead)
                # trafilatura.extract may return None for empty/invalid HTML
                body_text = trafilatura.extract(body_html) or ""
                lead_text = trafilatura.extract(lead_html) or ""
                # FIX: the old fallback called `.join` on a list (AttributeError)
                # and was evaluated eagerly even when "seo" was supplied.
                # Compute it lazily: prefer the lead text, otherwise join the
                # complete sentences from the first 300 chars of the body.
                seo = inp.get("seo")
                if not seo:
                    seo = lead_text.strip() or ". ".join(body_text.strip()[:300].split(". ")[:-1])

                new_shout = Shout(
                    slug=slug,
                    body=body,
                    seo=seo,
                    lead=lead,
                    layout=inp.get("layout", "article"),
                    title=inp.get("title", ""),
                    created_by=author_id,
                    created_at=current_time,
                    community=1,
                )

                # Ensure slug uniqueness by appending an increasing suffix
                logger.debug(f"Checking for existing slug: {slug}")
                same_slug_shout = session.query(Shout).filter(Shout.slug == new_shout.slug).first()
                c = 1
                while same_slug_shout is not None:
                    logger.debug(f"Found duplicate slug, trying iteration {c}")
                    new_shout.slug = f"{slug}-{c}"
                    same_slug_shout = session.query(Shout).filter(Shout.slug == new_shout.slug).first()
                    c += 1

                try:
                    logger.info("Creating new shout object")
                    session.add(new_shout)
                    session.commit()
                    logger.info(f"Created shout with ID: {new_shout.id}")
                except Exception as e:
                    logger.error(f"Error creating shout object: {e}", exc_info=True)
                    return {"error": f"Database error: {str(e)}"}

                # Link the author
                try:
                    logger.debug(f"Linking author {author_id} to shout {new_shout.id}")
                    sa = ShoutAuthor(shout=new_shout.id, author=author_id)
                    session.add(sa)
                except Exception as e:
                    logger.error(f"Error linking author: {e}", exc_info=True)
                    return {"error": f"Error linking author: {str(e)}"}

                # Link the topics
                input_topics = inp.get("topics", [])
                if input_topics:
                    try:
                        logger.debug(f"Linking topics: {[t.slug for t in input_topics]}")
                        main_topic = inp.get("main_topic")
                        for topic in input_topics:
                            st = ShoutTopic(
                                topic=topic.id,
                                shout=new_shout.id,
                                main=(topic.slug == main_topic) if main_topic else False,
                            )
                            session.add(st)
                            logger.debug(f"Added topic {topic.slug} {'(main)' if st.main else ''}")
                    except Exception as e:
                        logger.error(f"Error linking topics: {e}", exc_info=True)
                        return {"error": f"Error linking topics: {str(e)}"}

                try:
                    session.commit()
                    logger.info("Final commit successful")
                except Exception as e:
                    logger.error(f"Error in final commit: {e}", exc_info=True)
                    return {"error": f"Error in final commit: {str(e)}"}

                # Re-read the created publication
                shout = session.query(Shout).filter(Shout.id == new_shout.id).first()

                # Subscribe the author to the created shout (best effort)
                try:
                    logger.debug("Following created shout")
                    await follow(None, info, "shout", shout.slug)
                except Exception as e:
                    logger.warning(f"Error following shout: {e}", exc_info=True)

                logger.info(f"Successfully created shout {shout.id}")
                return {"shout": shout}
        except Exception as e:
            logger.error(f"Unexpected error in create_shout: {e}", exc_info=True)
            return {"error": f"Unexpected error: {str(e)}"}

    error_msg = "cant create shout" if user_id else "unauthorized"
    logger.error(f"Create shout failed: {error_msg}")
    return {"error": error_msg}
2022-09-03 10:50:14 +00:00
2023-11-22 16:38:39 +00:00
2025-02-02 18:41:03 +00:00
def patch_main_topic(session, main_topic_slug, shout):
    """Re-point the main-topic flag of *shout* to the topic with the given slug."""
    logger.info(f"Starting patch_main_topic for shout#{shout.id} with slug '{main_topic_slug}'")
    logger.debug(f"Current shout topics: {[(t.topic.slug, t.main) for t in shout.topics]}")

    with session.begin():
        # Link row currently flagged as main (may be absent)
        current_link = (
            session.query(ShoutTopic).filter(and_(ShoutTopic.shout == shout.id, ShoutTopic.main.is_(True))).first()
        )
        if current_link:
            logger.info(f"Found current main topic: {current_link.topic.slug}")
        else:
            logger.info("No current main topic found")

        # Topic requested to become the new main
        target_topic = session.query(Topic).filter(Topic.slug == main_topic_slug).first()
        if not target_topic:
            logger.error(f"Main topic with slug '{main_topic_slug}' not found")
            return

        logger.info(f"Found new main topic: {target_topic.slug} (id={target_topic.id})")

        # Link row joining the shout with the requested topic
        target_link = (
            session.query(ShoutTopic)
            .filter(and_(ShoutTopic.shout == shout.id, ShoutTopic.topic == target_topic.id))
            .first()
        )
        logger.debug(f"Found new main topic relation: {target_link is not None}")

        # Swap flags only when both links exist and actually differ
        if current_link and target_link and current_link is not target_link:
            logger.info(f"Updating main topic flags: {current_link.topic.slug} -> {target_link.topic.slug}")
            current_link.main = False
            session.add(current_link)

            target_link.main = True
            session.add(target_link)

            session.flush()
            logger.info(f"Main topic updated for shout#{shout.id}")
        else:
            logger.warning(f"No changes needed for main topic (old={current_link is not None}, new={target_link is not None})")
2024-02-02 20:59:42 +00:00
def patch_topics(session, shout, topics_input):
    """Update the topics associated with a shout.

    Args:
        session: SQLAlchemy session
        shout (Shout): The shout to update
        topics_input (list): List of topic dicts with fields:
            - id (int): Topic ID (<0 for new topics)
            - slug (str): Topic slug
            - title (str): Topic title (for new topics)

    Side Effects:
        - Creates new topics if needed
        - Updates shout-topic associations
        - Refreshes shout object with new topics

    Example:
        >>> def test_patch_topics():
        ...     topics = [
        ...         {'id': -1, 'slug': 'new-topic', 'title': 'New Topic'},
        ...         {'id': 1, 'slug': 'existing-topic'}
        ...     ]
        ...     with local_session() as session:
        ...         shout = session.query(Shout).first()
        ...         patch_topics(session, shout, topics)
        ...         assert len(shout.topics) == 2
        ...         assert any(t.slug == 'new-topic' for t in shout.topics)
        ...         return shout.topics
    """
    logger.info(f"Starting patch_topics for shout#{shout.id}")
    logger.info(f"Received topics_input: {topics_input}")

    # Create brand-new topics (negative ids are client-side placeholders).
    # FIX: strip the placeholder id so the database assigns a real primary key;
    # previously the negative id was inserted as-is, which pollutes the PK
    # space and collides when several new topics share the same placeholder.
    new_topics_to_link = [
        Topic(**{k: v for k, v in new_topic.items() if k != "id"})
        for new_topic in topics_input
        if new_topic["id"] < 0
    ]
    if new_topics_to_link:
        logger.info(f"Creating new topics: {[t.dict() for t in new_topics_to_link]}")
        session.add_all(new_topics_to_link)
        session.flush()  # populate ids of the freshly created topics

    # Current associations
    current_links = session.query(ShoutTopic).filter(ShoutTopic.shout == shout.id).all()
    logger.info(f"Current topic links: {[{t.topic: t.main} for t in current_links]}")

    # Drop the old associations
    if current_links:
        logger.info(f"Removing old topic links for shout#{shout.id}")
        for link in current_links:
            session.delete(link)
        session.flush()

    # Create the new associations
    for topic_input in topics_input:
        topic_id = topic_input["id"]
        if topic_id < 0:
            # Match the freshly created topic by slug to get its real id
            topic = next(t for t in new_topics_to_link if t.slug == topic_input["slug"])
            topic_id = topic.id
        logger.info(f"Creating new topic link: shout#{shout.id} -> topic#{topic_id}")
        new_link = ShoutTopic(shout=shout.id, topic=topic_id, main=False)
        session.add(new_link)

    session.flush()
    # Refresh relationship collections on the shout object
    session.refresh(shout)

    logger.info(f"Successfully updated topics for shout#{shout.id}")
    logger.info(f"Final shout topics: {[t.dict() for t in shout.topics]}")
2024-02-02 20:59:42 +00:00
2025-02-09 14:18:01 +00:00
# @mutation.field("update_shout")
# @login_required
async def update_shout(_, info, shout_id: int, shout_input=None, publish=False):
    """Update an existing shout and optionally publish it.

    Args:
        info: GraphQL resolver info with context (user_id, roles, author)
        shout_id (int): ID of the shout to update
        shout_input (dict | None): Fields to patch (DraftInput-like)
        publish (bool): When True, set published_at and notify as "published"

    Returns:
        dict: {"shout": dict, "error": None} on success,
              {"error": str, ...} on failure
    """
    logger.info(f"Starting update_shout with id={shout_id}, publish={publish}")
    logger.debug(f"Full shout_input: {shout_input}")  # DraftInput

    user_id = info.context.get("user_id")
    roles = info.context.get("roles", [])
    author_dict = info.context.get("author")
    if not author_dict:
        logger.error("Author profile not found")
        return {"error": "author profile was not found"}

    author_id = author_dict.get("id")
    shout_input = shout_input or {}
    current_time = int(time.time())
    shout_id = shout_id or shout_input.get("id", shout_id)
    slug = shout_input.get("slug")

    if not user_id:
        logger.error("Unauthorized update attempt")
        return {"error": "unauthorized"}

    try:
        with local_session() as session:
            if author_id:
                logger.info(f"Processing update for shout#{shout_id} by author #{author_id}")
                shout_by_id = (
                    session.query(Shout)
                    .options(joinedload(Shout.topics).joinedload(ShoutTopic.topic), joinedload(Shout.authors))
                    .filter(Shout.id == shout_id)
                    .first()
                )

                if not shout_by_id:
                    logger.error(f"shout#{shout_id} not found")
                    return {"error": "shout not found"}

                logger.info(f"Found shout#{shout_id}")
                # Log the current topics
                current_topics = (
                    [{"id": t.id, "slug": t.slug, "title": t.title} for t in shout_by_id.topics]
                    if shout_by_id.topics
                    else []
                )
                logger.info(f"Current topics for shout#{shout_id}: {current_topics}")

                # FIX: guard on `slug` — previously a missing slug (None) also
                # entered this branch and could write shout_input["slug"] = None.
                if slug and slug != shout_by_id.slug:
                    same_slug_shout = session.query(Shout).filter(Shout.slug == slug).first()
                    c = 1
                    while same_slug_shout is not None:
                        c += 1
                        slug = f"{slug}-{c}"
                        same_slug_shout = session.query(Shout).filter(Shout.slug == slug).first()
                    shout_input["slug"] = slug
                    logger.info(f"shout#{shout_id} slug patched")

                # FIX: the old check was `if filter(lambda ...)` — a filter
                # object is always truthy, so every caller passed; use any()
                # to actually test authorship.
                if any(x.id == author_id for x in shout_by_id.authors) or "editor" in roles:
                    logger.info(f"Author #{author_id} has permission to edit shout#{shout_id}")

                    # topics patch
                    topics_input = shout_input.get("topics")
                    if topics_input:
                        logger.info(f"Received topics_input for shout#{shout_id}: {topics_input}")
                        try:
                            patch_topics(session, shout_by_id, topics_input)
                            logger.info(f"Successfully patched topics for shout#{shout_id}")
                            # Refresh relations in the session after patch_topics
                            session.refresh(shout_by_id)
                        except Exception as e:
                            logger.error(f"Error patching topics: {e}", exc_info=True)
                            return {"error": f"Failed to update topics: {str(e)}"}
                        del shout_input["topics"]
                        for tpc in topics_input:
                            await cache_by_id(Topic, tpc["id"], cache_topic)
                    else:
                        logger.warning(f"No topics_input received for shout#{shout_id}")

                    # main topic
                    main_topic = shout_input.get("main_topic")
                    if main_topic:
                        logger.info(f"Updating main topic for shout#{shout_id} to {main_topic}")
                        patch_main_topic(session, main_topic, shout_by_id)

                    shout_input["updated_at"] = current_time
                    if publish:
                        logger.info(f"Publishing shout#{shout_id}")
                        shout_input["published_at"] = current_time
                        # Ensure the author link exists before publishing
                        logger.info(f"Checking author link for shout#{shout_id} and author#{author_id}")
                        author_link = (
                            session.query(ShoutAuthor)
                            .filter(and_(ShoutAuthor.shout == shout_id, ShoutAuthor.author == author_id))
                            .first()
                        )
                        if not author_link:
                            logger.info(f"Adding missing author link for shout#{shout_id}")
                            sa = ShoutAuthor(shout=shout_id, author=author_id)
                            session.add(sa)
                            session.flush()
                            logger.info("Author link added successfully")
                        else:
                            logger.info("Author link already exists")

                    # Log the final state before saving
                    logger.info(f"Final shout_input for update: {shout_input}")
                    Shout.update(shout_by_id, shout_input)
                    session.add(shout_by_id)

                    try:
                        session.commit()
                        # Refresh after commit to pick up all relations
                        session.refresh(shout_by_id)
                        logger.info(f"Successfully committed updates for shout#{shout_id}")
                    except Exception as e:
                        logger.error(f"Commit failed: {e}", exc_info=True)
                        return {"error": f"Failed to save changes: {str(e)}"}

                    # Verify topics after the update
                    updated_topics = (
                        [{"id": t.id, "slug": t.slug, "title": t.title} for t in shout_by_id.topics]
                        if shout_by_id.topics
                        else []
                    )
                    logger.info(f"Updated topics for shout#{shout_id}: {updated_topics}")

                    # Invalidate caches touched by this update (best effort)
                    try:
                        logger.info("Invalidating cache after shout update")

                        cache_keys = [
                            "feed",  # feed
                            f"author_{author_id}",  # author's publications
                            "random_top",  # random top shouts
                            "unrated",  # unrated shouts
                        ]
                        # Add keys for the shout's topics
                        for topic in shout_by_id.topics:
                            cache_keys.append(f"topic_{topic.id}")
                            cache_keys.append(f"topic_shouts_{topic.id}")

                        await invalidate_shouts_cache(cache_keys)
                        await invalidate_shout_related_cache(shout_by_id, author_id)

                        # Refresh topic and author caches
                        for topic in shout_by_id.topics:
                            await cache_by_id(Topic, topic.id, cache_topic)
                        for author in shout_by_id.authors:
                            await cache_author(author.dict())

                        logger.info("Cache invalidated successfully")
                    except Exception as cache_error:
                        logger.warning(f"Cache invalidation error: {cache_error}", exc_info=True)

                    if not publish:
                        await notify_shout(shout_by_id.dict(), "update")
                    else:
                        await notify_shout(shout_by_id.dict(), "published")
                    # search service indexing
                    search_service.index(shout_by_id)
                    for a in shout_by_id.authors:
                        await cache_by_id(Author, a.id, cache_author)
                    logger.info(f"shout#{shout_id} updated")

                    # Fetch the full shout data with relations for the response
                    shout_with_relations = (
                        session.query(Shout)
                        .options(joinedload(Shout.topics).joinedload(ShoutTopic.topic), joinedload(Shout.authors))
                        .filter(Shout.id == shout_id)
                        .first()
                    )

                    # Base fields dictionary
                    shout_dict = shout_with_relations.dict()

                    # Explicitly add related data
                    shout_dict["topics"] = (
                        [
                            {"id": topic.id, "slug": topic.slug, "title": topic.title}
                            for topic in shout_with_relations.topics
                        ]
                        if shout_with_relations.topics
                        else []
                    )

                    # Add main_topic to the shout dictionary
                    shout_dict["main_topic"] = get_main_topic(shout_with_relations.topics)

                    shout_dict["authors"] = (
                        [
                            {"id": author.id, "name": author.name, "slug": author.slug}
                            for author in shout_with_relations.authors
                        ]
                        if shout_with_relations.authors
                        else []
                    )

                    logger.info(f"Final shout data with relations: {shout_dict}")
                    logger.debug(
                        f"Loaded topics details: {[(t.topic.slug if t.topic else 'no-topic', t.main) for t in shout_with_relations.topics]}"
                    )
                    return {"shout": shout_dict, "error": None}
                else:
                    logger.warning(f"Access denied: author #{author_id} cannot edit shout#{shout_id}")
                    return {"error": "access denied", "shout": None}
    except Exception as exc:
        logger.error(f"Unexpected error in update_shout: {exc}", exc_info=True)
        logger.error(f"Failed input data: {shout_input}")
        return {"error": "cant update shout"}

    return {"error": "cant update shout"}
2022-09-03 10:50:14 +00:00
2023-11-22 16:38:39 +00:00
2025-02-09 14:18:01 +00:00
# @mutation.field("delete_shout")
# @login_required
async def delete_shout(_, info, shout_id: int):
    """Soft-delete a shout: set deleted_at, refresh caches and notify.

    Only the shout's creator or a user with the "editor" role may delete.

    Args:
        info: GraphQL resolver info with context (user_id, roles, author)
        shout_id (int): ID of the shout to mark as deleted

    Returns:
        dict: {"error": None} on success, {"error": str} otherwise
    """
    user_id = info.context.get("user_id")
    roles = info.context.get("roles", [])
    author_dict = info.context.get("author")
    if not author_dict:
        return {"error": "author profile was not found"}

    author_id = author_dict.get("id")
    if user_id and author_id:
        author_id = int(author_id)
        with local_session() as session:
            shout = session.query(Shout).filter(Shout.id == shout_id).first()
            if not isinstance(shout, Shout):
                return {"error": "invalid shout id"}
            shout_dict = shout.dict()
            # NOTE: only owner and editor can mark the shout as deleted
            if shout_dict["created_by"] == author_id or "editor" in roles:
                shout_dict["deleted_at"] = int(time.time())
                Shout.update(shout, shout_dict)
                session.add(shout)
                session.commit()

                for author in shout.authors:
                    await cache_by_id(Author, author.id, cache_author)
                    # Unfollow the deleted shout on behalf of each author
                    info.context["author"] = author.dict()
                    info.context["user_id"] = author.user
                    # FIX: unfollow's counterpart `follow` is awaited elsewhere
                    # in this module; without awaiting, the coroutine was
                    # created and discarded, so the unfollow never ran.
                    result = unfollow(None, info, "shout", shout.slug)
                    if hasattr(result, "__await__"):
                        await result

                for topic in shout.topics:
                    await cache_by_id(Topic, topic.id, cache_topic)

                await notify_shout(shout_dict, "delete")
                return {"error": None}
            else:
                return {"error": "access denied"}
    # FIX: previously fell through returning None when credentials were missing
    return {"error": "access denied"}
2025-02-11 21:31:18 +00:00
2025-02-12 16:33:02 +00:00
def get_main_topic(topics):
    """Get the main topic from a list of ShoutTopic objects.

    Falls back to the first relation when none is flagged as main, and to a
    placeholder "no topic" entry when the list is empty or has no topic rows.
    """
    logger.info(f"Starting get_main_topic with {len(topics) if topics else 0} topics")
    logger.debug(
        f"Topics data: {[(t.topic.slug if t.topic else 'no-topic', t.main) for t in topics] if topics else []}"
    )

    if not topics:
        logger.warning("No topics provided to get_main_topic")
        return {"id": 0, "title": "no topic", "slug": "notopic", "is_main": True}

    # Scan for the first relation flagged as main, preserving input order
    main_topic_rel = None
    for rel in topics:
        if rel.main:
            main_topic_rel = rel
            break
    logger.debug(
        f"Found main topic relation: {main_topic_rel.topic.slug if main_topic_rel and main_topic_rel.topic else None}"
    )

    if main_topic_rel and main_topic_rel.topic:
        chosen = main_topic_rel.topic
        result = {
            "slug": chosen.slug,
            "title": chosen.title,
            "id": chosen.id,
            "is_main": True,
        }
        logger.info(f"Returning main topic: {result}")
        return result

    # No main flag set: fall back to the first relation that has a topic
    first_rel = topics[0]
    if first_rel.topic:
        logger.info(f"No main topic found, using first topic: {first_rel.topic.slug}")
        return {
            "slug": first_rel.topic.slug,
            "title": first_rel.topic.title,
            "id": first_rel.topic.id,
            "is_main": True,
        }

    logger.warning("No valid topics found, returning default")
    return {"slug": "notopic", "title": "no topic", "id": 0, "is_main": True}
@mutation.field("unpublish_shout")
@login_required
async def unpublish_shout(_, info, shout_id: int):
    """Take a shout (publication) off publication.

    Preloads the related draft with its authors/topics so that later access
    through GraphQL does not fail.

    Args:
        shout_id: ID of the publication to unpublish

    Returns:
        dict: The unpublished shout data or an error message
    """
    author_dict = info.context.get("author", {})
    author_id = author_dict.get("id")
    if not author_id:
        # Ideally this should also verify the author's right to unpublish
        return {"error": "Author ID is required"}

    shout = None
    with local_session() as session:
        try:
            # Load the shout with all relations needed to build the response
            shout = (
                session.query(Shout)
                .options(
                    joinedload(Shout.authors),
                    joinedload(Shout.topics).joinedload(ShoutTopic.topic)
                )
                .filter(Shout.id == shout_id)
                .first()
            )
            if not shout:
                logger.warning(f"Shout not found for unpublish: ID {shout_id}")
                return {"error": "Shout not found"}

            # If the shout has an associated draft, load it with relationships
            if shout.draft:
                draft = (
                    session.query(Draft)
                    .options(
                        selectinload(Draft.authors),
                        selectinload(Draft.topics)
                    )
                    .filter(Draft.id == shout.draft)
                    .first()
                )
                # Attach the draft manually so it is reachable via the API
                if draft:
                    shout.draft_obj = draft

            # TODO: add an access-rights check if required
            # if author_id not in [a.id for a in shout.authors]:
            #     logger.warning(f"Author {author_id} denied unpublishing shout {shout_id}")
            #     return {"error": "Access denied"}

            # Remember slug and id for the "publication" response field
            shout_slug = shout.slug
            shout_id_for_publication = shout.id

            # Unpublish: reset published_at to None
            shout.published_at = None
            session.commit()

            # Build the full response dictionary
            shout_dict = shout.dict()
            shout_dict["topics"] = (
                [
                    {"id": topic.topic.id, "slug": topic.topic.slug, "title": topic.topic.title}
                    for topic in shout.topics if topic.topic
                ]
                if shout.topics
                else []
            )
            shout_dict["main_topic"] = get_main_topic(shout.topics)
            shout_dict["authors"] = (
                [
                    {"id": author.id, "name": author.name, "slug": author.slug}
                    for author in shout.authors
                ]
                if shout.authors
                else []
            )
            # Reflect the unpublished state in the publication field
            shout_dict["publication"] = {
                "id": shout_id_for_publication,
                "slug": shout_slug,
                "published_at": None  # key change: published_at is reset
            }

            # Cache invalidation (best effort)
            try:
                cache_keys = [
                    "feed",  # feed
                    f"author_{author_id}",  # author's publications
                    "random_top",  # random top shouts
                    "unrated",  # unrated shouts
                ]
                await invalidate_shout_related_cache(shout, author_id)
                await invalidate_shouts_cache(cache_keys)
                logger.info(f"Cache invalidated after unpublishing shout {shout_id}")
            except Exception as cache_err:
                logger.error(f"Failed to invalidate cache for unpublish shout {shout_id}: {cache_err}")
        except Exception as e:
            session.rollback()
            logger.error(f"Failed to unpublish shout {shout_id}: {e}", exc_info=True)
            return {"error": f"Failed to unpublish shout: {str(e)}"}

    logger.info(f"Shout {shout_id} unpublished successfully by author {author_id}")
    # Return the assembled dictionary instead of the ORM object
    return {"shout": shout_dict}