This commit is contained in: parent 4f26812340, commit 7cf702eb98
@ -1,3 +1,6 @@
[0.3.2]
- redis cache for what author follows

[0.3.1]
- enabling sentry
- long query log report added
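A note on the 0.3.2 entry: later in this commit the follows cache is kept per user under a `user:{user_id}:follows` hash key. A minimal read-side sketch, assuming the project's services.rediscache wrapper and the same two-argument execute("HGET", key) call style that appears in the hunks below:

from services.rediscache import redis

async def read_cached_follows(user_id: str) -> dict:
    # falls back to the same empty shape that orm/author_events.py seeds below
    follows = await redis.execute("HGET", f"user:{user_id}:follows")
    return follows or {"topics": [], "authors": [], "shouts": [], "communities": []}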
main.py (23 changed lines)
@ -14,33 +14,32 @@ from services.viewed import ViewedStorage
|
|||
from services.webhook import WebhookEndpoint
|
||||
from settings import DEV_SERVER_PID_FILE_NAME, MODE
|
||||
|
||||
import_module('resolvers')
|
||||
schema = make_executable_schema(load_schema_from_path('schema/'), resolvers)
|
||||
import_module("resolvers")
|
||||
schema = make_executable_schema(load_schema_from_path("schema/"), resolvers)
|
||||
|
||||
|
||||
async def start():
|
||||
if MODE == 'development':
|
||||
if MODE == "development":
|
||||
if not exists(DEV_SERVER_PID_FILE_NAME):
|
||||
# pid file management
|
||||
with open(DEV_SERVER_PID_FILE_NAME, 'w', encoding='utf-8') as f:
|
||||
with open(DEV_SERVER_PID_FILE_NAME, "w", encoding="utf-8") as f:
|
||||
f.write(str(os.getpid()))
|
||||
print(f'[main] process started in {MODE} mode')
|
||||
print(f"[main] process started in {MODE} mode")
|
||||
|
||||
|
||||
# main starlette app object with ariadne mounted in root
|
||||
app = Starlette(
|
||||
routes=[
|
||||
Route('/', GraphQL(schema, debug=True)),
|
||||
Route('/new-author', WebhookEndpoint),
|
||||
Route("/", GraphQL(schema, debug=True)),
|
||||
Route("/new-author", WebhookEndpoint),
|
||||
],
|
||||
on_startup=[
|
||||
redis.connect,
|
||||
ViewedStorage.init,
|
||||
search_service.info,
|
||||
# start_sentry,
|
||||
start
|
||||
start,
|
||||
],
|
||||
on_shutdown=[
|
||||
redis.disconnect
|
||||
],
|
||||
debug=True
|
||||
on_shutdown=[redis.disconnect],
|
||||
debug=True,
|
||||
)
|
||||
|
|
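Usage sketch (not part of the diff): the Starlette object assembled above is a plain ASGI application, so any ASGI server can run it. uvicorn is assumed here only for illustration; the repository may use a different runner or entry point.

import uvicorn  # assumed server, not pinned by this commit

from main import app  # the Starlette app defined in the hunk above

if __name__ == "__main__":
    uvicorn.run(app, host="0.0.0.0", port=8000)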
|
@ -1,75 +1,44 @@
|
|||
import time
|
||||
from importlib import import_module
|
||||
|
||||
from sqlalchemy import JSON, Boolean, Column, ForeignKey, Integer, String
|
||||
from sqlalchemy import event
|
||||
|
||||
from services.db import Base
|
||||
|
||||
|
||||
class AuthorRating(Base):
|
||||
__tablename__ = 'author_rating'
|
||||
__tablename__ = "author_rating"
|
||||
|
||||
id = None # type: ignore
|
||||
rater = Column(ForeignKey('author.id'), primary_key=True)
|
||||
author = Column(ForeignKey('author.id'), primary_key=True)
|
||||
rater = Column(ForeignKey("author.id"), primary_key=True)
|
||||
author = Column(ForeignKey("author.id"), primary_key=True)
|
||||
plus = Column(Boolean)
|
||||
|
||||
|
||||
class AuthorFollower(Base):
|
||||
__tablename__ = 'author_follower'
|
||||
__tablename__ = "author_follower"
|
||||
|
||||
id = None # type: ignore
|
||||
follower = Column(ForeignKey('author.id'), primary_key=True)
|
||||
author = Column(ForeignKey('author.id'), primary_key=True)
|
||||
follower = Column(ForeignKey("author.id"), primary_key=True)
|
||||
author = Column(ForeignKey("author.id"), primary_key=True)
|
||||
created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
|
||||
auto = Column(Boolean, nullable=False, default=False)
|
||||
|
||||
|
||||
class Author(Base):
|
||||
__tablename__ = 'author'
|
||||
__tablename__ = "author"
|
||||
|
||||
user = Column(String,
|
||||
) # unbounded link with authorizer's User type
|
||||
user = Column(String) # unbounded link with authorizer's User type
|
||||
|
||||
name = Column(String, nullable=True, comment='Display name')
|
||||
name = Column(String, nullable=True, comment="Display name")
|
||||
slug = Column(String, unique=True, comment="Author's slug")
|
||||
bio = Column(String, nullable=True, comment='Bio') # status description
|
||||
about = Column(String, nullable=True, comment='About') # long and formatted
|
||||
pic = Column(String, nullable=True, comment='Picture')
|
||||
links = Column(JSON, nullable=True, comment='Links')
|
||||
bio = Column(String, nullable=True, comment="Bio") # status description
|
||||
about = Column(String, nullable=True, comment="About") # long and formatted
|
||||
pic = Column(String, nullable=True, comment="Picture")
|
||||
links = Column(JSON, nullable=True, comment="Links")
|
||||
created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
|
||||
last_seen = Column(Integer, nullable=False, default=lambda: int(time.time()))
|
||||
updated_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
|
||||
deleted_at = Column(Integer, nullable=True, comment='Deleted at')
|
||||
|
||||
def get_object(connection, table_name, object_id):
|
||||
return connection.execute(f"SELECT * FROM {table_name} WHERE id = :object_id", {"object_id": object_id}).fetchone()
|
||||
|
||||
def update_app_data(connection, user_id, app_data):
|
||||
connection.execute("UPDATE authorizer_users SET app_data = :app_data WHERE id = :user_id", {"app_data": app_data, "user_id": user_id})
|
||||
|
||||
def update_follows(user, entity_type, entity):
|
||||
app_data = user.app_data or {}
|
||||
app_data['follows'] = user.app_data or {"topics": [], "authors": [], "shouts": [], "communities": []}
|
||||
app_data['follows'][f'{entity_type}s'].append(vars(entity))
|
||||
return app_data
|
||||
|
||||
@event.listens_for(Author, 'after_insert')
|
||||
@event.listens_for(Author, 'after_update')
|
||||
def after_author_update(mapper, connection, target):
|
||||
user_id = target.user
|
||||
user = get_object(connection, 'authorizer_users', user_id)
|
||||
if user:
|
||||
app_data = update_follows(user, 'author', target)
|
||||
update_app_data(connection, user_id, app_data)
|
||||
deleted_at = Column(Integer, nullable=True, comment="Deleted at")
|
||||
|
||||
|
||||
@event.listens_for(AuthorFollower, 'after_insert')
|
||||
@event.listens_for(AuthorFollower, 'after_delete')
|
||||
def after_author_follower_change(mapper, connection, target):
|
||||
author_id = target.author
|
||||
follower_id = target.follower
|
||||
user = get_object(connection, 'authorizer_users', follower_id)
|
||||
if user:
|
||||
app_data = update_follows(user, 'author', get_object(connection, 'author', author_id))
|
||||
update_app_data(connection, follower_id, app_data)
|
||||
import_module("orm.author_events")
|
||||
|
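The hunk above strips the inline listeners out of orm/author.py and replaces them with a single import_module("orm.author_events"). That is enough because @event.listens_for registers its hooks at import time. A minimal sketch of the pattern, with an illustrative handler name that is not taken from the project:

from sqlalchemy import event

from orm.author import Author

@event.listens_for(Author, "after_insert")
def log_new_author(mapper, connection, target):
    # fires inside the flush that inserted the Author row
    print(f"author inserted: {target.slug}")

# a single import activates every decorated listener in a module,
# which is exactly what orm/author.py now relies on:
# import_module("orm.author_events")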
|
orm/author_events.py (new file, 104 lines)
@ -0,0 +1,104 @@
|
|||
from sqlalchemy import event, select
|
||||
|
||||
from services.rediscache import redis
|
||||
from orm.author import Author, AuthorFollower
|
||||
from orm.topic import Topic, TopicFollower
|
||||
from orm.shout import Shout, ShoutReactionsFollower
|
||||
|
||||
|
||||
@event.listens_for(Author, "after_insert")
|
||||
@event.listens_for(Author, "after_update")
|
||||
async def after_author_update(mapper, connection, target):
|
||||
redis_key = f"user:{target.user}:author"
|
||||
await redis.execute("HSET", redis_key, vars(target))
|
||||
|
||||
|
||||
async def update_follows_for_user(connection, user_id, entity_type, entity, is_insert):
|
||||
redis_key = f"user:{user_id}:follows"
|
||||
follows = await redis.execute("HGET", redis_key)
|
||||
if not follows:
|
||||
follows = {
|
||||
"topics": [],
|
||||
"authors": [],
|
||||
"shouts": [],
|
||||
"communities": [
|
||||
{"slug": "discours", "name": "Дискурс", "id": 1, "desc": ""}
|
||||
],
|
||||
}
|
||||
entity_type = "communitie" if entity_type == "community" else entity_type
|
||||
if is_insert:
|
||||
follows[f"{entity_type}s"].append(entity)
|
||||
else:
|
||||
# Remove the entity from follows
|
||||
follows[f"{entity_type}s"] = [
|
||||
e for e in follows[f"{entity_type}s"] if e["id"] != entity.id
|
||||
]
|
||||
|
||||
await redis.execute("HSET", redis_key, vars(follows))
|
||||
|
||||
|
||||
async def handle_author_follower_change(connection, author_id, follower_id, is_insert):
|
||||
author = connection.execute(select(Author).filter(Author.id == author_id)).first()
|
||||
follower = connection.execute(
|
||||
select(Author).filter(Author.id == follower_id)
|
||||
).first()
|
||||
if follower and author:
|
||||
await update_follows_for_user(
|
||||
connection, follower.user, "author", author, is_insert
|
||||
)
|
||||
|
||||
|
||||
async def handle_shout_follower_change(connection, shout_id, follower_id, is_insert):
|
||||
shout = connection.execute(select(Topic).filter(Shout.id == shout_id)).first()
|
||||
follower = connection.execute(
|
||||
select(Author).filter(Author.id == follower_id)
|
||||
).first()
|
||||
if follower and shout:
|
||||
await update_follows_for_user(
|
||||
connection, follower.user, "shout", shout, is_insert
|
||||
)
|
||||
|
||||
|
||||
async def handle_topic_follower_change(connection, topic_id, follower_id, is_insert):
|
||||
topic = connection.execute(select(Topic).filter(Topic.id == topic_id)).first()
|
||||
follower = connection.execute(
|
||||
select(Author).filter(Author.id == follower_id)
|
||||
).first()
|
||||
if follower and topic:
|
||||
await update_follows_for_user(
|
||||
connection, follower.user, "topic", topic, is_insert
|
||||
)
|
||||
|
||||
|
||||
@event.listens_for(TopicFollower, "after_insert")
|
||||
async def after_topic_follower_insert(mapper, connection, target):
|
||||
await handle_topic_follower_change(connection, target.topic, target.follower, True)
|
||||
|
||||
|
||||
@event.listens_for(TopicFollower, "after_delete")
|
||||
async def after_topic_follower_delete(mapper, connection, target):
|
||||
await handle_topic_follower_change(connection, target.topic, target.follower, False)
|
||||
|
||||
|
||||
@event.listens_for(ShoutReactionsFollower, "after_insert")
|
||||
async def after_shout_follower_insert(mapper, connection, target):
|
||||
await handle_shout_follower_change(connection, target.shout, target.follower, True)
|
||||
|
||||
|
||||
@event.listens_for(ShoutReactionsFollower, "after_delete")
|
||||
async def after_shout_follower_delete(mapper, connection, target):
|
||||
await handle_shout_follower_change(connection, target.shout, target.follower, False)
|
||||
|
||||
|
||||
@event.listens_for(AuthorFollower, "after_insert")
|
||||
async def after_author_follower_insert(mapper, connection, target):
|
||||
await handle_author_follower_change(
|
||||
connection, target.author, target.follower, True
|
||||
)
|
||||
|
||||
|
||||
@event.listens_for(AuthorFollower, "after_delete")
|
||||
async def after_author_follower_delete(mapper, connection, target):
|
||||
await handle_author_follower_change(
|
||||
connection, target.author, target.follower, False
|
||||
)
|
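Usage sketch for the listeners defined above: they fire automatically during a session flush, so following an author is an ordinary ORM insert. local_session is the project's session factory from services.db; nothing else is introduced here.

from orm.author import AuthorFollower
from services.db import local_session

def follow_author(follower_id: int, author_id: int) -> None:
    with local_session() as session:
        session.add(AuthorFollower(follower=follower_id, author=author_id))
        # the commit flushes the row and fires the after_insert hook
        # registered above (after_author_follower_insert)
        session.commit()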
|
@ -6,20 +6,20 @@ from services.db import Base
|
|||
|
||||
|
||||
class ShoutCollection(Base):
|
||||
__tablename__ = 'shout_collection'
|
||||
__tablename__ = "shout_collection"
|
||||
|
||||
id = None # type: ignore
|
||||
shout = Column(ForeignKey('shout.id'), primary_key=True)
|
||||
collection = Column(ForeignKey('collection.id'), primary_key=True)
|
||||
shout = Column(ForeignKey("shout.id"), primary_key=True)
|
||||
collection = Column(ForeignKey("collection.id"), primary_key=True)
|
||||
|
||||
|
||||
class Collection(Base):
|
||||
__tablename__ = 'collection'
|
||||
__tablename__ = "collection"
|
||||
|
||||
slug = Column(String, unique=True)
|
||||
title = Column(String, nullable=False, comment='Title')
|
||||
body = Column(String, nullable=True, comment='Body')
|
||||
pic = Column(String, nullable=True, comment='Picture')
|
||||
title = Column(String, nullable=False, comment="Title")
|
||||
body = Column(String, nullable=True, comment="Body")
|
||||
pic = Column(String, nullable=True, comment="Picture")
|
||||
created_at = Column(Integer, default=lambda: int(time.time()))
|
||||
created_by = Column(ForeignKey('author.id'), comment='Created By')
|
||||
created_by = Column(ForeignKey("author.id"), comment="Created By")
|
||||
published_at = Column(Integer, default=lambda: int(time.time()))
|
||||
|
|
|
@ -8,22 +8,22 @@ from orm.author import Author
|
|||
|
||||
|
||||
class CommunityAuthor(Base):
|
||||
__tablename__ = 'community_author'
|
||||
__tablename__ = "community_author"
|
||||
|
||||
id = None # type: ignore
|
||||
author = Column(ForeignKey('author.id'), primary_key=True)
|
||||
community = Column(ForeignKey('community.id'), primary_key=True)
|
||||
author = Column(ForeignKey("author.id"), primary_key=True)
|
||||
community = Column(ForeignKey("community.id"), primary_key=True)
|
||||
joined_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
|
||||
role = Column(String, nullable=False)
|
||||
|
||||
|
||||
class Community(Base):
|
||||
__tablename__ = 'community'
|
||||
__tablename__ = "community"
|
||||
|
||||
name = Column(String, nullable=False)
|
||||
slug = Column(String, nullable=False, unique=True)
|
||||
desc = Column(String, nullable=False, default='')
|
||||
pic = Column(String, nullable=False, default='')
|
||||
desc = Column(String, nullable=False, default="")
|
||||
pic = Column(String, nullable=False, default="")
|
||||
created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
|
||||
|
||||
authors = relationship(Author, secondary='community_author')
|
||||
authors = relationship(Author, secondary="community_author")
|
||||
|
|
|
@ -7,19 +7,19 @@ from services.db import Base
|
|||
|
||||
|
||||
class InviteStatus(Enumeration):
|
||||
PENDING = 'PENDING'
|
||||
ACCEPTED = 'ACCEPTED'
|
||||
REJECTED = 'REJECTED'
|
||||
PENDING = "PENDING"
|
||||
ACCEPTED = "ACCEPTED"
|
||||
REJECTED = "REJECTED"
|
||||
|
||||
|
||||
class Invite(Base):
|
||||
__tablename__ = 'invite'
|
||||
__tablename__ = "invite"
|
||||
|
||||
inviter_id = Column(ForeignKey('author.id'), primary_key=True)
|
||||
author_id = Column(ForeignKey('author.id'), primary_key=True)
|
||||
shout_id = Column(ForeignKey('shout.id'), primary_key=True)
|
||||
inviter_id = Column(ForeignKey("author.id"), primary_key=True)
|
||||
author_id = Column(ForeignKey("author.id"), primary_key=True)
|
||||
shout_id = Column(ForeignKey("shout.id"), primary_key=True)
|
||||
status = Column(String, default=InviteStatus.PENDING.value)
|
||||
|
||||
inviter = relationship('author', foreign_keys=[inviter_id])
|
||||
author = relationship('author', foreign_keys=[author_id])
|
||||
shout = relationship('shout')
|
||||
inviter = relationship("author", foreign_keys=[inviter_id])
|
||||
author = relationship("author", foreign_keys=[author_id])
|
||||
shout = relationship("shout")
|
||||
|
|
|
@ -10,34 +10,34 @@ class ReactionKind(Enumeration):
|
|||
# TYPE = <reaction index> # rating diff
|
||||
|
||||
# editor mode
|
||||
AGREE = 'AGREE' # +1
|
||||
DISAGREE = 'DISAGREE' # -1
|
||||
ASK = 'ASK' # +0
|
||||
PROPOSE = 'PROPOSE' # +0
|
||||
PROOF = 'PROOF' # +1
|
||||
DISPROOF = 'DISPROOF' # -1
|
||||
ACCEPT = 'ACCEPT' # +1
|
||||
REJECT = 'REJECT' # -1
|
||||
AGREE = "AGREE" # +1
|
||||
DISAGREE = "DISAGREE" # -1
|
||||
ASK = "ASK" # +0
|
||||
PROPOSE = "PROPOSE" # +0
|
||||
PROOF = "PROOF" # +1
|
||||
DISPROOF = "DISPROOF" # -1
|
||||
ACCEPT = "ACCEPT" # +1
|
||||
REJECT = "REJECT" # -1
|
||||
|
||||
# public feed
|
||||
QUOTE = 'QUOTE' # +0 TODO: use to bookmark in collection
|
||||
COMMENT = 'COMMENT' # +0
|
||||
LIKE = 'LIKE' # +1
|
||||
DISLIKE = 'DISLIKE' # -1
|
||||
QUOTE = "QUOTE" # +0 TODO: use to bookmark in collection
|
||||
COMMENT = "COMMENT" # +0
|
||||
LIKE = "LIKE" # +1
|
||||
DISLIKE = "DISLIKE" # -1
|
||||
|
||||
|
||||
class Reaction(Base):
|
||||
__tablename__ = 'reaction'
|
||||
__tablename__ = "reaction"
|
||||
|
||||
body = Column(String, default='', comment='Reaction Body')
|
||||
body = Column(String, default="", comment="Reaction Body")
|
||||
created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
|
||||
updated_at = Column(Integer, nullable=True, comment='Updated at')
|
||||
deleted_at = Column(Integer, nullable=True, comment='Deleted at')
|
||||
deleted_by = Column(ForeignKey('author.id'), nullable=True)
|
||||
reply_to = Column(ForeignKey('reaction.id'), nullable=True)
|
||||
quote = Column(String, nullable=True, comment='Original quoted text')
|
||||
shout = Column(ForeignKey('shout.id'), nullable=False)
|
||||
created_by = Column(ForeignKey('author.id'), nullable=False)
|
||||
updated_at = Column(Integer, nullable=True, comment="Updated at")
|
||||
deleted_at = Column(Integer, nullable=True, comment="Deleted at")
|
||||
deleted_by = Column(ForeignKey("author.id"), nullable=True)
|
||||
reply_to = Column(ForeignKey("reaction.id"), nullable=True)
|
||||
quote = Column(String, nullable=True, comment="Original quoted text")
|
||||
shout = Column(ForeignKey("shout.id"), nullable=False)
|
||||
created_by = Column(ForeignKey("author.id"), nullable=False)
|
||||
kind = Column(String, nullable=False)
|
||||
|
||||
oid = Column(String)
|
||||
|
|
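The +1/-1/+0 comments above encode a weight per reaction kind. A small helper that mirrors those annotations, purely as an illustration (it is not part of the commit):

RATING_WEIGHTS = {
    "AGREE": 1, "DISAGREE": -1,
    "PROOF": 1, "DISPROOF": -1,
    "ACCEPT": 1, "REJECT": -1,
    "LIKE": 1, "DISLIKE": -1,
    # ASK, PROPOSE, QUOTE and COMMENT are +0 and contribute nothing
}

def rating_from_kinds(kinds: list[str]) -> int:
    return sum(RATING_WEIGHTS.get(kind, 0) for kind in kinds)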
orm/shout.py (65 changed lines)
@ -1,56 +1,54 @@
|
|||
import time
|
||||
|
||||
from sqlalchemy import JSON, Boolean, Column, ForeignKey, Integer, String
|
||||
from sqlalchemy import event
|
||||
from sqlalchemy.orm import relationship
|
||||
|
||||
from services.db import Base
|
||||
from orm.community import Community
|
||||
from orm.author import Author
|
||||
from orm.author import get_object, update_follows, update_app_data
|
||||
from orm.reaction import Reaction
|
||||
from orm.topic import Topic
|
||||
|
||||
|
||||
class ShoutTopic(Base):
|
||||
__tablename__ = 'shout_topic'
|
||||
__tablename__ = "shout_topic"
|
||||
|
||||
id = None # type: ignore
|
||||
shout = Column(ForeignKey('shout.id'), primary_key=True)
|
||||
topic = Column(ForeignKey('topic.id'), primary_key=True)
|
||||
shout = Column(ForeignKey("shout.id"), primary_key=True)
|
||||
topic = Column(ForeignKey("topic.id"), primary_key=True)
|
||||
main = Column(Boolean, nullable=True)
|
||||
|
||||
|
||||
class ShoutReactionsFollower(Base):
|
||||
__tablename__ = 'shout_reactions_followers'
|
||||
__tablename__ = "shout_reactions_followers"
|
||||
|
||||
id = None # type: ignore
|
||||
follower = Column(ForeignKey('author.id'), primary_key=True)
|
||||
shout = Column(ForeignKey('shout.id'), primary_key=True)
|
||||
follower = Column(ForeignKey("author.id"), primary_key=True)
|
||||
shout = Column(ForeignKey("shout.id"), primary_key=True)
|
||||
auto = Column(Boolean, nullable=False, default=False)
|
||||
created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
|
||||
deleted_at = Column(Integer, nullable=True)
|
||||
|
||||
|
||||
class ShoutAuthor(Base):
|
||||
__tablename__ = 'shout_author'
|
||||
__tablename__ = "shout_author"
|
||||
|
||||
id = None # type: ignore
|
||||
shout = Column(ForeignKey('shout.id'), primary_key=True)
|
||||
author = Column(ForeignKey('author.id'), primary_key=True)
|
||||
caption = Column(String, nullable=True, default='')
|
||||
shout = Column(ForeignKey("shout.id"), primary_key=True)
|
||||
author = Column(ForeignKey("author.id"), primary_key=True)
|
||||
caption = Column(String, nullable=True, default="")
|
||||
|
||||
|
||||
class ShoutCommunity(Base):
|
||||
__tablename__ = 'shout_community'
|
||||
__tablename__ = "shout_community"
|
||||
|
||||
id = None # type: ignore
|
||||
shout = Column(ForeignKey('shout.id'), primary_key=True)
|
||||
community = Column(ForeignKey('community.id'), primary_key=True)
|
||||
shout = Column(ForeignKey("shout.id"), primary_key=True)
|
||||
community = Column(ForeignKey("community.id"), primary_key=True)
|
||||
|
||||
|
||||
class Shout(Base):
|
||||
__tablename__ = 'shout'
|
||||
__tablename__ = "shout"
|
||||
|
||||
created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
|
||||
updated_at = Column(Integer, nullable=True)
|
||||
|
@ -58,39 +56,28 @@ class Shout(Base):
|
|||
featured_at = Column(Integer, nullable=True)
|
||||
deleted_at = Column(Integer, nullable=True)
|
||||
|
||||
created_by = Column(ForeignKey('author.id'), nullable=False)
|
||||
updated_by = Column(ForeignKey('author.id'), nullable=True)
|
||||
deleted_by = Column(ForeignKey('author.id'), nullable=True)
|
||||
created_by = Column(ForeignKey("author.id"), nullable=False)
|
||||
updated_by = Column(ForeignKey("author.id"), nullable=True)
|
||||
deleted_by = Column(ForeignKey("author.id"), nullable=True)
|
||||
|
||||
body = Column(String, nullable=False, comment='Body')
|
||||
body = Column(String, nullable=False, comment="Body")
|
||||
slug = Column(String, unique=True)
|
||||
cover = Column(String, nullable=True, comment='Cover image url')
|
||||
cover_caption = Column(String, nullable=True, comment='Cover image alt caption')
|
||||
cover = Column(String, nullable=True, comment="Cover image url")
|
||||
cover_caption = Column(String, nullable=True, comment="Cover image alt caption")
|
||||
lead = Column(String, nullable=True)
|
||||
description = Column(String, nullable=True)
|
||||
title = Column(String, nullable=False)
|
||||
subtitle = Column(String, nullable=True)
|
||||
layout = Column(String, nullable=False, default='article')
|
||||
layout = Column(String, nullable=False, default="article")
|
||||
media = Column(JSON, nullable=True)
|
||||
|
||||
authors = relationship(Author, secondary='shout_author')
|
||||
topics = relationship(Topic, secondary='shout_topic')
|
||||
communities = relationship(Community, secondary='shout_community')
|
||||
authors = relationship(Author, secondary="shout_author")
|
||||
topics = relationship(Topic, secondary="shout_topic")
|
||||
communities = relationship(Community, secondary="shout_community")
|
||||
reactions = relationship(Reaction)
|
||||
|
||||
lang = Column(String, nullable=False, default='ru', comment='Language')
|
||||
version_of = Column(ForeignKey('shout.id'), nullable=True)
|
||||
lang = Column(String, nullable=False, default="ru", comment="Language")
|
||||
version_of = Column(ForeignKey("shout.id"), nullable=True)
|
||||
oid = Column(String, nullable=True)
|
||||
|
||||
seo = Column(String, nullable=True) # JSON
|
||||
|
||||
|
||||
@event.listens_for(ShoutReactionsFollower, 'after_insert')
|
||||
@event.listens_for(ShoutReactionsFollower, 'after_delete')
|
||||
def after_topic_follower_change(mapper, connection, target):
|
||||
shout_id = target.shout
|
||||
follower_id = target.follower
|
||||
user = get_object(connection, 'authorizer_users', follower_id)
|
||||
if user:
|
||||
app_data = update_follows(user, 'shout', get_object(connection, 'shout', shout_id))
|
||||
update_app_data(connection, follower_id, app_data)
|
||||
|
|
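The Shout model above wires its many-to-many links through association tables via secondary="shout_author", "shout_topic" and "shout_community". A short consumer sketch, assuming only the models in this diff and the project's local_session factory:

from orm.shout import Shout
from services.db import local_session

def shout_slugs(slug: str) -> dict:
    with local_session() as session:
        shout = session.query(Shout).filter(Shout.slug == slug).first()
        if not shout:
            return {}
        # authors and topics are loaded through the secondary tables declared above
        return {
            "authors": [a.slug for a in shout.authors],
            "topics": [t.slug for t in shout.topics],
        }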
orm/topic.py (32 changed lines)
@ -1,38 +1,26 @@
|
|||
import time
|
||||
|
||||
from sqlalchemy import Boolean, Column, ForeignKey, Integer, String
|
||||
from sqlalchemy import event
|
||||
|
||||
from services.db import Base
|
||||
from orm.author import get_object, update_follows, update_app_data
|
||||
|
||||
|
||||
class TopicFollower(Base):
|
||||
__tablename__ = 'topic_followers'
|
||||
__tablename__ = "topic_followers"
|
||||
|
||||
id = None # type: ignore
|
||||
follower = Column(ForeignKey('author.id'), primary_key=True)
|
||||
topic = Column(ForeignKey('topic.id'), primary_key=True)
|
||||
follower = Column(ForeignKey("author.id"), primary_key=True)
|
||||
topic = Column(ForeignKey("topic.id"), primary_key=True)
|
||||
created_at = Column(Integer, nullable=False, default=lambda: int(time.time()))
|
||||
auto = Column(Boolean, nullable=False, default=False)
|
||||
|
||||
|
||||
class Topic(Base):
|
||||
__tablename__ = 'topic'
|
||||
__tablename__ = "topic"
|
||||
|
||||
slug = Column(String, unique=True)
|
||||
title = Column(String, nullable=False, comment='Title')
|
||||
body = Column(String, nullable=True, comment='Body')
|
||||
pic = Column(String, nullable=True, comment='Picture')
|
||||
community = Column(ForeignKey('community.id'), default=1)
|
||||
oid = Column(String, nullable=True, comment='Old ID')
|
||||
|
||||
|
||||
@event.listens_for(TopicFollower, 'after_insert')
|
||||
@event.listens_for(TopicFollower, 'after_delete')
|
||||
def after_topic_follower_change(mapper, connection, target):
|
||||
topic_id = target.topic
|
||||
follower_id = target.follower
|
||||
user = get_object(connection, 'authorizer_users', follower_id)
|
||||
if user:
|
||||
app_data = update_follows(user, 'topic', get_object(connection, 'topic', topic_id))
|
||||
update_app_data(connection, follower_id, app_data)
|
||||
title = Column(String, nullable=False, comment="Title")
|
||||
body = Column(String, nullable=True, comment="Body")
|
||||
pic = Column(String, nullable=True, comment="Picture")
|
||||
community = Column(ForeignKey("community.id"), default=1)
|
||||
oid = Column(String, nullable=True, comment="Old ID")
|
||||
|
|
|
@ -6,7 +6,7 @@ from services.db import Base
|
|||
|
||||
|
||||
class User(Base):
|
||||
__tablename__ = 'authorizer_users'
|
||||
__tablename__ = "authorizer_users"
|
||||
|
||||
id = Column(String, primary_key=True, unique=True, nullable=False, default=None)
|
||||
key = Column(String)
|
||||
|
@ -24,7 +24,7 @@ class User(Base):
|
|||
# preferred_username = Column(String, nullable=False)
|
||||
picture = Column(String)
|
||||
revoked_timestamp = Column(Integer)
|
||||
roles = Column(String, default='author, reader')
|
||||
signup_methods = Column(String, default='magic_link_login')
|
||||
roles = Column(String, default="author, reader")
|
||||
signup_methods = Column(String, default="magic_link_login")
|
||||
created_at = Column(Integer, default=lambda: int(time.time()))
|
||||
updated_at = Column(Integer, default=lambda: int(time.time()))
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
from resolvers.author import (
|
||||
get_author,
|
||||
get_author_followed,
|
||||
get_author_follows,
|
||||
get_author_followers,
|
||||
get_author_id,
|
||||
get_authors_all,
|
||||
|
@ -27,46 +27,52 @@ from resolvers.reader import (
|
|||
load_shouts_search,
|
||||
load_shouts_unrated,
|
||||
)
|
||||
from resolvers.topic import get_topic, get_topics_all, get_topics_by_author, get_topics_by_community
|
||||
from resolvers.topic import (
|
||||
get_topic,
|
||||
get_topics_all,
|
||||
get_topics_by_author,
|
||||
get_topics_by_community,
|
||||
)
|
||||
|
||||
|
||||
__all__ = [
|
||||
# author
|
||||
'get_author',
|
||||
'get_author_id',
|
||||
'get_authors_all',
|
||||
'get_author_followers',
|
||||
'get_author_followed',
|
||||
'load_authors_by',
|
||||
'rate_author',
|
||||
'update_author',
|
||||
"get_author",
|
||||
"get_author_id",
|
||||
"get_authors_all",
|
||||
"get_author_followers",
|
||||
"get_author_follows",
|
||||
"load_authors_by",
|
||||
"rate_author",
|
||||
"update_author",
|
||||
# community
|
||||
'get_community',
|
||||
'get_communities_all',
|
||||
"get_community",
|
||||
"get_communities_all",
|
||||
# topic
|
||||
'get_topic',
|
||||
'get_topics_all',
|
||||
'get_topics_by_community',
|
||||
'get_topics_by_author',
|
||||
"get_topic",
|
||||
"get_topics_all",
|
||||
"get_topics_by_community",
|
||||
"get_topics_by_author",
|
||||
# reader
|
||||
'get_shout',
|
||||
'load_shouts_by',
|
||||
'load_shouts_feed',
|
||||
'load_shouts_search',
|
||||
'load_shouts_followed',
|
||||
'load_shouts_unrated',
|
||||
'load_shouts_random_top',
|
||||
'load_shouts_random_topic',
|
||||
"get_shout",
|
||||
"load_shouts_by",
|
||||
"load_shouts_feed",
|
||||
"load_shouts_search",
|
||||
"load_shouts_followed",
|
||||
"load_shouts_unrated",
|
||||
"load_shouts_random_top",
|
||||
"load_shouts_random_topic",
|
||||
# follower
|
||||
'follow',
|
||||
'unfollow',
|
||||
'get_my_followed',
|
||||
"follow",
|
||||
"unfollow",
|
||||
"get_my_followed",
|
||||
# editor
|
||||
'create_shout',
|
||||
'update_shout',
|
||||
'delete_shout',
|
||||
"create_shout",
|
||||
"update_shout",
|
||||
"delete_shout",
|
||||
# reaction
|
||||
'create_reaction',
|
||||
'update_reaction',
|
||||
'delete_reaction',
|
||||
'load_reactions_by',
|
||||
"create_reaction",
|
||||
"update_reaction",
|
||||
"delete_reaction",
|
||||
"load_reactions_by",
|
||||
]
|
||||
|
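The __all__ list above is what import_module("resolvers") in main.py pulls in before make_executable_schema(...) binds the schema. The binding itself happens through ariadne's QueryType/MutationType decorators; a minimal sketch of that chain, with the type instance assumed to come from services/schema.py as the query/mutation imports elsewhere in this commit suggest:

from ariadne import QueryType

query = QueryType()  # in the project this instance comes from services.schema

@query.field("get_topic")
def resolve_get_topic(_, info, slug: str):
    ...  # the real resolver is one of the names re-exported above

# importing the resolvers package runs every decorator, after which main.py does:
# schema = make_executable_schema(load_schema_from_path("schema/"), resolvers)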
|
|
@ -1,21 +1,18 @@
|
|||
import time
|
||||
from typing import List
|
||||
|
||||
from sqlalchemy import and_, desc, distinct, func, select
|
||||
from sqlalchemy import and_, desc, distinct, func, select, or_
|
||||
from sqlalchemy.orm import aliased
|
||||
|
||||
from orm.author import Author, AuthorFollower, AuthorRating
|
||||
from orm.community import Community
|
||||
from orm.reaction import Reaction, ReactionKind
|
||||
from orm.shout import Shout, ShoutAuthor, ShoutTopic
|
||||
from orm.topic import Topic
|
||||
from resolvers.community import followed_communities
|
||||
from resolvers.reaction import reacted_shouts_updates as followed_reactions
|
||||
from resolvers.topic import followed_topics
|
||||
from resolvers.follower import get_follows_by_user_id
|
||||
from services.auth import login_required
|
||||
from services.db import local_session
|
||||
from services.rediscache import redis
|
||||
from services.schema import mutation, query
|
||||
from services.unread import get_total_unread_counter
|
||||
from services.viewed import ViewedStorage
|
||||
from services.logger import root_logger as logger
|
||||
|
||||
|
@ -23,18 +20,18 @@ from services.logger import root_logger as logger
|
|||
def add_author_stat_columns(q):
|
||||
shout_author_aliased = aliased(ShoutAuthor)
|
||||
q = q.outerjoin(shout_author_aliased).add_columns(
|
||||
func.count(distinct(shout_author_aliased.shout)).label('shouts_stat')
|
||||
func.count(distinct(shout_author_aliased.shout)).label("shouts_stat")
|
||||
)
|
||||
|
||||
followers_table = aliased(AuthorFollower)
|
||||
q = q.outerjoin(followers_table, followers_table.author == Author.id).add_columns(
|
||||
func.count(distinct(followers_table.follower)).label('followers_stat')
|
||||
func.count(distinct(followers_table.follower)).label("followers_stat")
|
||||
)
|
||||
|
||||
followings_table = aliased(AuthorFollower)
|
||||
q = q.outerjoin(followings_table, followings_table.follower == Author.id).add_columns(
|
||||
func.count(distinct(followers_table.author)).label('followings_stat')
|
||||
)
|
||||
q = q.outerjoin(
|
||||
followings_table, followings_table.follower == Author.id
|
||||
).add_columns(func.count(distinct(followers_table.author)).label("followings_stat"))
|
||||
|
||||
q = q.group_by(Author.id)
|
||||
return q
|
||||
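The stat columns above are built from aliased outer joins plus labelled count(distinct(...)) expressions. The same pattern reduced to a single stat, using only names that appear in this diff:

from sqlalchemy import distinct, func, select
from sqlalchemy.orm import aliased

from orm.author import Author, AuthorFollower

def with_followers_stat(q):
    followers = aliased(AuthorFollower)
    q = q.outerjoin(followers, followers.author == Author.id).add_columns(
        func.count(distinct(followers.follower)).label("followers_stat")
    )
    return q.group_by(Author.id)

followers_query = with_followers_stat(select(Author))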
|
@ -43,42 +40,33 @@ def add_author_stat_columns(q):
|
|||
async def get_authors_from_query(q):
|
||||
authors = []
|
||||
with local_session() as session:
|
||||
for [author, shouts_stat, followers_stat, followings_stat] in session.execute(q):
|
||||
for [author, shouts_stat, followers_stat, followings_stat] in session.execute(
|
||||
q
|
||||
):
|
||||
author.stat = {
|
||||
'shouts': shouts_stat,
|
||||
'viewed': await ViewedStorage.get_author(author.slug),
|
||||
'followers': followers_stat,
|
||||
'followings': followings_stat,
|
||||
"shouts": shouts_stat,
|
||||
"viewed": await ViewedStorage.get_author(author.slug),
|
||||
"followers": followers_stat,
|
||||
"followings": followings_stat,
|
||||
}
|
||||
authors.append(author)
|
||||
return authors
|
||||
|
||||
|
||||
async def author_followings(author_id: int):
|
||||
# NOTE: topics, authors, shout-reactions and communities slugs list
|
||||
return {
|
||||
'unread': await get_total_unread_counter(author_id),
|
||||
'topics': [t.slug for t in await followed_topics(author_id)],
|
||||
'authors': [a.slug for a in await followed_authors(author_id)],
|
||||
'reactions': [s.slug for s in await followed_reactions(author_id)],
|
||||
'communities': [c.slug for c in [followed_communities(author_id)] if isinstance(c, Community)],
|
||||
}
|
||||
|
||||
|
||||
@mutation.field('update_author')
|
||||
@mutation.field("update_author")
|
||||
@login_required
|
||||
async def update_author(_, info, profile):
|
||||
user_id = info.context['user_id']
|
||||
user_id = info.context["user_id"]
|
||||
with local_session() as session:
|
||||
author = session.query(Author).where(Author.user == user_id).first()
|
||||
Author.update(author, profile)
|
||||
session.add(author)
|
||||
session.commit()
|
||||
return {'error': None, 'author': author}
|
||||
return {"error": None, "author": author}
|
||||
|
||||
|
||||
# TODO: caching query
|
||||
@query.field('get_authors_all')
|
||||
@query.field("get_authors_all")
|
||||
async def get_authors_all(_, _info):
|
||||
authors = []
|
||||
with local_session() as session:
|
||||
|
@ -165,23 +153,33 @@ async def load_author_with_stats(q):
|
|||
)
|
||||
likes_count = (
|
||||
session.query(AuthorRating)
|
||||
.filter(and_(AuthorRating.author == author.id, AuthorRating.plus.is_(True)))
|
||||
.filter(
|
||||
and_(AuthorRating.author == author.id, AuthorRating.plus.is_(True))
|
||||
)
|
||||
.count()
|
||||
)
|
||||
dislikes_count = (
|
||||
session.query(AuthorRating)
|
||||
.filter(and_(AuthorRating.author == author.id, AuthorRating.plus.is_not(True)))
|
||||
.filter(
|
||||
and_(
|
||||
AuthorRating.author == author.id, AuthorRating.plus.is_not(True)
|
||||
)
|
||||
)
|
||||
.count()
|
||||
)
|
||||
author.stat['rating'] = likes_count - dislikes_count
|
||||
author.stat['rating_shouts'] = count_author_shouts_rating(session, author.id)
|
||||
author.stat['rating_comments'] = count_author_comments_rating(session, author.id)
|
||||
author.stat['commented'] = comments_count
|
||||
author.stat["rating"] = likes_count - dislikes_count
|
||||
author.stat["rating_shouts"] = count_author_shouts_rating(
|
||||
session, author.id
|
||||
)
|
||||
author.stat["rating_comments"] = count_author_comments_rating(
|
||||
session, author.id
|
||||
)
|
||||
author.stat["commented"] = comments_count
|
||||
return author
|
||||
|
||||
|
||||
@query.field('get_author')
|
||||
async def get_author(_, _info, slug='', author_id=None):
|
||||
@query.field("get_author")
|
||||
async def get_author(_, _info, slug="", author_id=None):
|
||||
q = None
|
||||
if slug or author_id:
|
||||
if bool(slug):
|
||||
|
@ -192,34 +190,51 @@ async def get_author(_, _info, slug='', author_id=None):
|
|||
return await load_author_with_stats(q)
|
||||
|
||||
|
||||
@query.field('get_author_id')
|
||||
async def get_author_by_user_id(user_id: str):
|
||||
redis_key = f"user:{user_id}:author"
|
||||
res = await redis.execute("HGET", redis_key)
|
||||
if res:
|
||||
return res
|
||||
|
||||
logger.info(f"getting author id for {user_id}")
|
||||
q = select(Author).filter(Author.user == user_id)
|
||||
author = await load_author_with_stats(q)
|
||||
await redis.execute("HSET", redis_key, author.dict())
|
||||
|
||||
return author
|
||||
|
||||
|
||||
@query.field("get_author_id")
|
||||
async def get_author_id(_, _info, user: str):
|
||||
logger.info(f'getting author id for {user}')
|
||||
q = select(Author).filter(Author.user == user)
|
||||
return await load_author_with_stats(q)
|
||||
return get_author_by_user_id(user)
|
||||
|
||||
|
||||
@query.field('load_authors_by')
|
||||
@query.field("load_authors_by")
|
||||
async def load_authors_by(_, _info, by, limit, offset):
|
||||
q = select(Author)
|
||||
q = add_author_stat_columns(q)
|
||||
if by.get('slug'):
|
||||
if by.get("slug"):
|
||||
q = q.filter(Author.slug.ilike(f"%{by['slug']}%"))
|
||||
elif by.get('name'):
|
||||
elif by.get("name"):
|
||||
q = q.filter(Author.name.ilike(f"%{by['name']}%"))
|
||||
elif by.get('topic'):
|
||||
q = q.join(ShoutAuthor).join(ShoutTopic).join(Topic).where(Topic.slug == by['topic'])
|
||||
elif by.get("topic"):
|
||||
q = (
|
||||
q.join(ShoutAuthor)
|
||||
.join(ShoutTopic)
|
||||
.join(Topic)
|
||||
.where(Topic.slug == by["topic"])
|
||||
)
|
||||
|
||||
if by.get('last_seen'): # in unixtime
|
||||
before = int(time.time()) - by['last_seen']
|
||||
if by.get("last_seen"): # in unix time
|
||||
before = int(time.time()) - by["last_seen"]
|
||||
q = q.filter(Author.last_seen > before)
|
||||
elif by.get('created_at'): # in unixtime
|
||||
before = int(time.time()) - by['created_at']
|
||||
elif by.get("created_at"): # in unix time
|
||||
before = int(time.time()) - by["created_at"]
|
||||
q = q.filter(Author.created_at > before)
|
||||
|
||||
order = by.get('order')
|
||||
if order == 'followers' or order == 'shouts':
|
||||
q = q.order_by(desc(f'{order}_stat'))
|
||||
order = by.get("order")
|
||||
if order == "followers" or order == "shouts":
|
||||
q = q.order_by(desc(f"{order}_stat"))
|
||||
|
||||
q = q.limit(limit).offset(offset)
|
||||
|
||||
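The get_author_by_user_id helper earlier in this hunk is a cache-aside lookup: try user:{user_id}:author in redis, fall back to the database, then write the result back. A generic sketch of that shape, keeping the diff's redis.execute call style; the loader argument is illustrative and not a project API:

from services.rediscache import redis

async def cached(key: str, loader):
    value = await redis.execute("HGET", key)
    if value:
        return value               # cache hit
    value = await loader()         # cache miss: load from the database
    await redis.execute("HSET", key, value)
    return value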
|
@ -228,24 +243,28 @@ async def load_authors_by(_, _info, by, limit, offset):
|
|||
return authors
|
||||
|
||||
|
||||
@query.field('get_author_followed')
|
||||
async def get_author_followed(_, _info, slug='', user=None, author_id=None) -> List[Author]:
|
||||
author_id_query = None
|
||||
if slug:
|
||||
author_id_query = select(Author.id).where(Author.slug == slug)
|
||||
elif user:
|
||||
author_id_query = select(Author.id).where(Author.user == user)
|
||||
if author_id_query is not None and not author_id:
|
||||
@query.field("get_author_follows")
|
||||
async def get_author_follows(
|
||||
_, _info, slug="", user=None, author_id=None
|
||||
) -> List[Author]:
|
||||
user_id = user
|
||||
if author_id or slug:
|
||||
with local_session() as session:
|
||||
author_id = session.execute(author_id_query).scalar()
|
||||
author = (
|
||||
session.query(Author)
|
||||
.where(or_(Author.id == author_id, Author.slug == slug))
|
||||
.first()
|
||||
)
|
||||
user_id = author.user
|
||||
|
||||
if author_id is None:
|
||||
raise ValueError('Author not found')
|
||||
if user_id:
|
||||
follows = await get_follows_by_user_id(user)
|
||||
return follows
|
||||
else:
|
||||
return await followed_authors(author_id) # Author[]
|
||||
raise ValueError("Author not found")
|
||||
|
||||
|
||||
@query.field('get_author_followers')
|
||||
@query.field("get_author_followers")
|
||||
async def get_author_followers(_, _info, slug) -> List[Author]:
|
||||
q = select(Author)
|
||||
q = add_author_stat_columns(q)
|
||||
|
@ -260,18 +279,10 @@ async def get_author_followers(_, _info, slug) -> List[Author]:
|
|||
return await get_authors_from_query(q)
|
||||
|
||||
|
||||
async def followed_authors(follower_id):
|
||||
q = select(Author)
|
||||
q = add_author_stat_columns(q)
|
||||
q = q.join(AuthorFollower, AuthorFollower.author == Author.id).where(AuthorFollower.follower == follower_id)
|
||||
# Pass the query to the get_authors_from_query function and return the results
|
||||
return await get_authors_from_query(q)
|
||||
|
||||
|
||||
@mutation.field('rate_author')
|
||||
@mutation.field("rate_author")
|
||||
@login_required
|
||||
async def rate_author(_, info, rated_slug, value):
|
||||
user_id = info.context['user_id']
|
||||
user_id = info.context["user_id"]
|
||||
|
||||
with local_session() as session:
|
||||
rated_author = session.query(Author).filter(Author.slug == rated_slug).first()
|
||||
|
@ -294,17 +305,19 @@ async def rate_author(_, info, rated_slug, value):
|
|||
return {}
|
||||
else:
|
||||
try:
|
||||
rating = AuthorRating(rater=rater.id, author=rated_author.id, plus=value > 0)
|
||||
rating = AuthorRating(
|
||||
rater=rater.id, author=rated_author.id, plus=value > 0
|
||||
)
|
||||
session.add(rating)
|
||||
session.commit()
|
||||
except Exception as err:
|
||||
return {'error': err}
|
||||
return {"error": err}
|
||||
return {}
|
||||
|
||||
|
||||
async def create_author(user_id: str, slug: str, name: str = ''):
|
||||
async def create_author(user_id: str, slug: str, name: str = ""):
|
||||
with local_session() as session:
|
||||
new_author = Author(user=user_id, slug=slug, name=name)
|
||||
session.add(new_author)
|
||||
session.commit()
|
||||
logger.info(f'author created by webhook {new_author.dict()}')
|
||||
logger.info(f"author created by webhook {new_author.dict()}")
|
||||
|
|
|
@ -6,10 +6,10 @@ from services.db import local_session
|
|||
from services.schema import mutation
|
||||
|
||||
|
||||
@mutation.field('accept_invite')
|
||||
@mutation.field("accept_invite")
|
||||
@login_required
|
||||
async def accept_invite(_, info, invite_id: int):
|
||||
user_id = info.context['user_id']
|
||||
user_id = info.context["user_id"]
|
||||
|
||||
# Check if the user exists
|
||||
with local_session() as session:
|
||||
|
@ -17,7 +17,11 @@ async def accept_invite(_, info, invite_id: int):
|
|||
if author:
|
||||
# Check if the invite exists
|
||||
invite = session.query(Invite).filter(Invite.id == invite_id).first()
|
||||
if invite and invite.author_id is author.id and invite.status is InviteStatus.PENDING.value:
|
||||
if (
|
||||
invite
|
||||
and invite.author_id is author.id
|
||||
and invite.status is InviteStatus.PENDING.value
|
||||
):
|
||||
# Add the user to the shout authors
|
||||
shout = session.query(Shout).filter(Shout.id == invite.shout_id).first()
|
||||
if shout:
|
||||
|
@ -26,19 +30,19 @@ async def accept_invite(_, info, invite_id: int):
|
|||
session.delete(invite)
|
||||
session.add(shout)
|
||||
session.commit()
|
||||
return {'success': True, 'message': 'Invite accepted'}
|
||||
return {"success": True, "message": "Invite accepted"}
|
||||
else:
|
||||
return {'error': 'Shout not found'}
|
||||
return {"error": "Shout not found"}
|
||||
else:
|
||||
return {'error': 'Invalid invite or already accepted/rejected'}
|
||||
return {"error": "Invalid invite or already accepted/rejected"}
|
||||
else:
|
||||
return {'error': 'User not found'}
|
||||
return {"error": "User not found"}
|
||||
|
||||
|
||||
@mutation.field('reject_invite')
|
||||
@mutation.field("reject_invite")
|
||||
@login_required
|
||||
async def reject_invite(_, info, invite_id: int):
|
||||
user_id = info.context['user_id']
|
||||
user_id = info.context["user_id"]
|
||||
|
||||
# Check if the user exists
|
||||
with local_session() as session:
|
||||
|
@ -46,21 +50,25 @@ async def reject_invite(_, info, invite_id: int):
|
|||
if author:
|
||||
# Check if the invite exists
|
||||
invite = session.query(Invite).filter(Invite.id == invite_id).first()
|
||||
if invite and invite.author_id is author.id and invite.status is InviteStatus.PENDING.value:
|
||||
if (
|
||||
invite
|
||||
and invite.author_id is author.id
|
||||
and invite.status is InviteStatus.PENDING.value
|
||||
):
|
||||
# Delete the invite
|
||||
session.delete(invite)
|
||||
session.commit()
|
||||
return {'success': True, 'message': 'Invite rejected'}
|
||||
return {"success": True, "message": "Invite rejected"}
|
||||
else:
|
||||
return {'error': 'Invalid invite or already accepted/rejected'}
|
||||
return {"error": "Invalid invite or already accepted/rejected"}
|
||||
else:
|
||||
return {'error': 'User not found'}
|
||||
return {"error": "User not found"}
|
||||
|
||||
|
||||
@mutation.field('create_invite')
|
||||
@mutation.field("create_invite")
|
||||
@login_required
|
||||
async def create_invite(_, info, slug: str = '', author_id: int = 0):
|
||||
user_id = info.context['user_id']
|
||||
async def create_invite(_, info, slug: str = "", author_id: int = 0):
|
||||
user_id = info.context["user_id"]
|
||||
|
||||
# Check if the inviter is the owner of the shout
|
||||
with local_session() as session:
|
||||
|
@ -82,42 +90,47 @@ async def create_invite(_, info, slug: str = '', author_id: int = 0):
|
|||
.first()
|
||||
)
|
||||
if existing_invite:
|
||||
return {'error': 'Invite already sent'}
|
||||
return {"error": "Invite already sent"}
|
||||
|
||||
# Create a new invite
|
||||
new_invite = Invite(
|
||||
inviter_id=user_id, author_id=author_id, shout_id=shout.id, status=InviteStatus.PENDING.value
|
||||
inviter_id=user_id,
|
||||
author_id=author_id,
|
||||
shout_id=shout.id,
|
||||
status=InviteStatus.PENDING.value,
|
||||
)
|
||||
session.add(new_invite)
|
||||
session.commit()
|
||||
|
||||
return {'error': None, 'invite': new_invite}
|
||||
return {"error": None, "invite": new_invite}
|
||||
else:
|
||||
return {'error': 'Invalid author'}
|
||||
return {"error": "Invalid author"}
|
||||
else:
|
||||
return {'error': 'Access denied'}
|
||||
return {"error": "Access denied"}
|
||||
|
||||
|
||||
@mutation.field('remove_author')
|
||||
@mutation.field("remove_author")
|
||||
@login_required
|
||||
async def remove_author(_, info, slug: str = '', author_id: int = 0):
|
||||
user_id = info.context['user_id']
|
||||
async def remove_author(_, info, slug: str = "", author_id: int = 0):
|
||||
user_id = info.context["user_id"]
|
||||
with local_session() as session:
|
||||
author = session.query(Author).filter(Author.user == user_id).first()
|
||||
if author:
|
||||
shout = session.query(Shout).filter(Shout.slug == slug).first()
|
||||
# NOTE: owner should be first in a list
|
||||
if shout and author.id is shout.created_by:
|
||||
shout.authors = [author for author in shout.authors if author.id != author_id]
|
||||
shout.authors = [
|
||||
author for author in shout.authors if author.id != author_id
|
||||
]
|
||||
session.commit()
|
||||
return {}
|
||||
return {'error': 'Access denied'}
|
||||
return {"error": "Access denied"}
|
||||
|
||||
|
||||
@mutation.field('remove_invite')
|
||||
@mutation.field("remove_invite")
|
||||
@login_required
|
||||
async def remove_invite(_, info, invite_id: int):
|
||||
user_id = info.context['user_id']
|
||||
user_id = info.context["user_id"]
|
||||
|
||||
# Check if the user exists
|
||||
with local_session() as session:
|
||||
|
@ -135,6 +148,6 @@ async def remove_invite(_, info, invite_id: int):
|
|||
session.commit()
|
||||
return {}
|
||||
else:
|
||||
return {'error': 'Invalid invite or already accepted/rejected'}
|
||||
return {"error": "Invalid invite or already accepted/rejected"}
|
||||
else:
|
||||
return {'error': 'Author not found'}
|
||||
return {"error": "Author not found"}
|
||||
|
|
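For reference, the mutations above map onto GraphQL operations roughly like the following. Field and argument names come from the resolver signatures in this diff; the selection sets are assumptions and may not match schema/ exactly.

CREATE_INVITE = """
mutation {
  create_invite(slug: "some-shout", author_id: 42) { error }
}
"""

ACCEPT_INVITE = """
mutation {
  accept_invite(invite_id: 1) { error }
}
"""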
|
@ -14,10 +14,12 @@ def add_community_stat_columns(q):
|
|||
shout_community_aliased = aliased(ShoutCommunity)
|
||||
|
||||
q = q.outerjoin(shout_community_aliased).add_columns(
|
||||
func.count(distinct(shout_community_aliased.shout)).label('shouts_stat')
|
||||
func.count(distinct(shout_community_aliased.shout)).label("shouts_stat")
|
||||
)
|
||||
q = q.outerjoin(community_followers, community_followers.author == Author.id).add_columns(
|
||||
func.count(distinct(community_followers.follower)).label('followers_stat')
|
||||
q = q.outerjoin(
|
||||
community_followers, community_followers.author == Author.id
|
||||
).add_columns(
|
||||
func.count(distinct(community_followers.follower)).label("followers_stat")
|
||||
)
|
||||
|
||||
q = q.group_by(Author.id)
|
||||
|
@ -30,8 +32,8 @@ def get_communities_from_query(q):
|
|||
with local_session() as session:
|
||||
for [c, shouts_stat, followers_stat] in session.execute(q):
|
||||
c.stat = {
|
||||
'shouts': shouts_stat,
|
||||
'followers': followers_stat,
|
||||
"shouts": shouts_stat,
|
||||
"followers": followers_stat,
|
||||
# "commented": commented_stat,
|
||||
}
|
||||
ccc.append(c)
|
||||
|
@ -39,26 +41,6 @@ def get_communities_from_query(q):
|
|||
return ccc
|
||||
|
||||
|
||||
SINGLE_COMMUNITY = True
|
||||
|
||||
|
||||
def followed_communities(follower_id):
|
||||
if SINGLE_COMMUNITY:
|
||||
with local_session() as session:
|
||||
c = session.query(Community).first()
|
||||
return [
|
||||
c,
|
||||
]
|
||||
else:
|
||||
q = select(Community)
|
||||
q = add_community_stat_columns(q)
|
||||
q = q.join(CommunityAuthor, CommunityAuthor.community == Community.id).where(
|
||||
CommunityAuthor.author == follower_id
|
||||
)
|
||||
# 3. Pass the query to the get_authors_from_query function and return the results
|
||||
return get_communities_from_query(q)
|
||||
|
||||
|
||||
# for mutation.field("follow")
|
||||
def community_follow(follower_id, slug):
|
||||
try:
|
||||
|
@ -90,7 +72,7 @@ def community_unfollow(follower_id, slug):
|
|||
return False
|
||||
|
||||
|
||||
@query.field('get_communities_all')
|
||||
@query.field("get_communities_all")
|
||||
async def get_communities_all(_, _info):
|
||||
q = select(Author)
|
||||
q = add_community_stat_columns(q)
|
||||
|
@ -98,7 +80,7 @@ async def get_communities_all(_, _info):
|
|||
return get_communities_from_query(q)
|
||||
|
||||
|
||||
@query.field('get_community')
|
||||
@query.field("get_community")
|
||||
async def get_community(_, _info, slug):
|
||||
q = select(Community).where(Community.slug == slug)
|
||||
q = add_community_stat_columns(q)
|
||||
|
|
|
@ -18,10 +18,10 @@ from services.search import search_service
|
|||
from services.logger import root_logger as logger
|
||||
|
||||
|
||||
@query.field('get_shouts_drafts')
|
||||
@query.field("get_shouts_drafts")
|
||||
@login_required
|
||||
async def get_shouts_drafts(_, info):
|
||||
user_id = info.context['user_id']
|
||||
user_id = info.context["user_id"]
|
||||
shouts = []
|
||||
with local_session() as session:
|
||||
author = session.query(Author).filter(Author.user == user_id).first()
|
||||
|
@ -40,28 +40,28 @@ async def get_shouts_drafts(_, info):
|
|||
return shouts
|
||||
|
||||
|
||||
@mutation.field('create_shout')
|
||||
@mutation.field("create_shout")
|
||||
@login_required
|
||||
async def create_shout(_, info, inp):
|
||||
user_id = info.context['user_id']
|
||||
user_id = info.context["user_id"]
|
||||
with local_session() as session:
|
||||
author = session.query(Author).filter(Author.user == user_id).first()
|
||||
if isinstance(author, Author):
|
||||
current_time = int(time.time())
|
||||
slug = inp.get('slug') or f'draft-{current_time}'
|
||||
slug = inp.get("slug") or f"draft-{current_time}"
|
||||
shout_dict = {
|
||||
'title': inp.get('title', ''),
|
||||
'subtitle': inp.get('subtitle', ''),
|
||||
'lead': inp.get('lead', ''),
|
||||
'description': inp.get('description', ''),
|
||||
'body': inp.get('body', ''),
|
||||
'layout': inp.get('layout', 'article'),
|
||||
'created_by': author.id,
|
||||
'authors': [],
|
||||
'slug': slug,
|
||||
'topics': inp.get('topics', []),
|
||||
'published_at': None,
|
||||
'created_at': current_time, # Set created_at as Unix timestamp
|
||||
"title": inp.get("title", ""),
|
||||
"subtitle": inp.get("subtitle", ""),
|
||||
"lead": inp.get("lead", ""),
|
||||
"description": inp.get("description", ""),
|
||||
"body": inp.get("body", ""),
|
||||
"layout": inp.get("layout", "article"),
|
||||
"created_by": author.id,
|
||||
"authors": [],
|
||||
"slug": slug,
|
||||
"topics": inp.get("topics", []),
|
||||
"published_at": None,
|
||||
"created_at": current_time, # Set created_at as Unix timestamp
|
||||
}
|
||||
|
||||
new_shout = Shout(**shout_dict)
|
||||
|
@ -75,7 +75,11 @@ async def create_shout(_, info, inp):
|
|||
sa = ShoutAuthor(shout=shout.id, author=author.id)
|
||||
session.add(sa)
|
||||
|
||||
topics = session.query(Topic).filter(Topic.slug.in_(inp.get('topics', []))).all()
|
||||
topics = (
|
||||
session.query(Topic)
|
||||
.filter(Topic.slug.in_(inp.get("topics", [])))
|
||||
.all()
|
||||
)
|
||||
for topic in topics:
|
||||
t = ShoutTopic(topic=topic.id, shout=shout.id)
|
||||
session.add(t)
|
||||
|
@ -85,9 +89,9 @@ async def create_shout(_, info, inp):
|
|||
# notifier
|
||||
# await notify_shout(shout_dict, 'create')
|
||||
|
||||
return { 'shout': shout.dict() }
|
||||
return {"shout": shout.dict()}
|
||||
|
||||
return {'error': 'cant create shout'}
|
||||
return {"error": "cant create shout"}
|
||||
|
||||
|
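An example input for create_shout, assembled only from the keys the resolver reads above; any additional fields the real schema accepts are not shown.

draft_input = {
    "title": "Draft title",
    "subtitle": "",
    "lead": "",
    "description": "",
    "body": "<p>draft body</p>",
    "layout": "article",      # default used above when the key is missing
    "slug": "",               # empty slug falls back to f"draft-{current_time}"
    "topics": [],
}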
||||
def patch_main_topic(session, main_topic, shout):
|
||||
|
@ -117,15 +121,17 @@ def patch_main_topic(session, main_topic, shout):
|
|||
)
|
||||
|
||||
if old_main_topic and new_main_topic and old_main_topic is not new_main_topic:
|
||||
ShoutTopic.update(old_main_topic, {'main': False})
|
||||
ShoutTopic.update(old_main_topic, {"main": False})
|
||||
session.add(old_main_topic)
|
||||
|
||||
ShoutTopic.update(new_main_topic, {'main': True})
|
||||
ShoutTopic.update(new_main_topic, {"main": True})
|
||||
session.add(new_main_topic)
|
||||
|
||||
|
||||
def patch_topics(session, shout, topics_input):
|
||||
new_topics_to_link = [Topic(**new_topic) for new_topic in topics_input if new_topic['id'] < 0]
|
||||
new_topics_to_link = [
|
||||
Topic(**new_topic) for new_topic in topics_input if new_topic["id"] < 0
|
||||
]
|
||||
if new_topics_to_link:
|
||||
session.add_all(new_topics_to_link)
|
||||
session.commit()
|
||||
|
@ -134,21 +140,25 @@ def patch_topics(session, shout, topics_input):
|
|||
created_unlinked_topic = ShoutTopic(shout=shout.id, topic=new_topic_to_link.id)
|
||||
session.add(created_unlinked_topic)
|
||||
|
||||
existing_topics_input = [topic_input for topic_input in topics_input if topic_input.get('id', 0) > 0]
|
||||
existing_topics_input = [
|
||||
topic_input for topic_input in topics_input if topic_input.get("id", 0) > 0
|
||||
]
|
||||
existing_topic_to_link_ids = [
|
||||
existing_topic_input['id']
|
||||
existing_topic_input["id"]
|
||||
for existing_topic_input in existing_topics_input
|
||||
if existing_topic_input['id'] not in [topic.id for topic in shout.topics]
|
||||
if existing_topic_input["id"] not in [topic.id for topic in shout.topics]
|
||||
]
|
||||
|
||||
for existing_topic_to_link_id in existing_topic_to_link_ids:
|
||||
created_unlinked_topic = ShoutTopic(shout=shout.id, topic=existing_topic_to_link_id)
|
||||
created_unlinked_topic = ShoutTopic(
|
||||
shout=shout.id, topic=existing_topic_to_link_id
|
||||
)
|
||||
session.add(created_unlinked_topic)
|
||||
|
||||
topic_to_unlink_ids = [
|
||||
topic.id
|
||||
for topic in shout.topics
|
||||
if topic.id not in [topic_input['id'] for topic_input in existing_topics_input]
|
||||
if topic.id not in [topic_input["id"] for topic_input in existing_topics_input]
|
||||
]
|
||||
|
||||
session.query(ShoutTopic).filter(
|
||||
|
@ -159,16 +169,16 @@ def patch_topics(session, shout, topics_input):
|
|||
).delete(synchronize_session=False)
|
||||
|
||||
|
||||
@mutation.field('update_shout')
|
||||
@mutation.field("update_shout")
|
||||
@login_required
|
||||
async def update_shout(_, info, shout_id, shout_input=None, publish=False):
|
||||
user_id = info.context['user_id']
|
||||
roles = info.context['roles']
|
||||
user_id = info.context["user_id"]
|
||||
roles = info.context["roles"]
|
||||
shout_input = shout_input or {}
|
||||
with local_session() as session:
|
||||
author = session.query(Author).filter(Author.user == user_id).first()
|
||||
current_time = int(time.time())
|
||||
shout_id = shout_id or shout_input.get('id')
|
||||
shout_id = shout_id or shout_input.get("id")
|
||||
if isinstance(author, Author) and isinstance(shout_id, int):
|
||||
shout = (
|
||||
session.query(Shout)
|
||||
|
@ -181,23 +191,27 @@ async def update_shout(_, info, shout_id, shout_input=None, publish=False):
|
|||
)
|
||||
|
||||
if not shout:
|
||||
return {'error': 'shout not found'}
|
||||
if shout.created_by is not author.id and author.id not in shout.authors and 'editor' not in roles:
|
||||
return {'error': 'access denied'}
|
||||
return {"error": "shout not found"}
|
||||
if (
|
||||
shout.created_by is not author.id
|
||||
and author.id not in shout.authors
|
||||
and "editor" not in roles
|
||||
):
|
||||
return {"error": "access denied"}
|
||||
|
||||
# topics patch
|
||||
topics_input = shout_input.get('topics')
|
||||
topics_input = shout_input.get("topics")
|
||||
if topics_input:
|
||||
patch_topics(session, shout, topics_input)
|
||||
del shout_input['topics']
|
||||
del shout_input["topics"]
|
||||
|
||||
# main topic
|
||||
main_topic = shout_input.get('main_topic')
|
||||
main_topic = shout_input.get("main_topic")
|
||||
if main_topic:
|
||||
patch_main_topic(session, main_topic, shout)
|
||||
|
||||
shout_input['updated_at'] = current_time
|
||||
shout_input['published_at'] = current_time if publish else None
|
||||
shout_input["updated_at"] = current_time
|
||||
shout_input["published_at"] = current_time if publish else None
|
||||
Shout.update(shout, shout_input)
|
||||
session.add(shout)
|
||||
session.commit()
|
||||
|
@ -205,50 +219,61 @@ async def update_shout(_, info, shout_id, shout_input=None, publish=False):
|
|||
shout_dict = shout.dict()
|
||||
|
||||
if not publish:
|
||||
await notify_shout(shout_dict, 'update')
|
||||
await notify_shout(shout_dict, "update")
|
||||
else:
|
||||
await notify_shout(shout_dict, 'published')
|
||||
await notify_shout(shout_dict, "published")
|
||||
# search service indexing
|
||||
search_service.index(shout)
|
||||
|
||||
return {'shout': shout_dict}
|
||||
logger.debug(f' cannot update with data: {shout_input}')
|
||||
return { 'error': 'not enough data' }
|
||||
return {'error': 'cannot update'}
|
||||
return {"shout": shout_dict}
|
||||
logger.debug(f" cannot update with data: {shout_input}")
|
||||
return {"error": "cant update shout"}
|
||||
|
||||
|
||||
@mutation.field('delete_shout')
|
||||
@mutation.field("delete_shout")
|
||||
@login_required
|
||||
async def delete_shout(_, info, shout_id):
|
||||
user_id = info.context['user_id']
|
||||
roles = info.context['roles']
|
||||
user_id = info.context["user_id"]
|
||||
roles = info.context["roles"]
|
||||
with local_session() as session:
|
||||
author = session.query(Author).filter(Author.user == user_id).first()
|
||||
shout = session.query(Shout).filter(Shout.id == shout_id).first()
|
||||
if not shout:
|
||||
return {'error': 'invalid shout id'}
|
||||
return {"error": "invalid shout id"}
|
||||
if author and shout:
|
||||
if shout.created_by is not author.id and author.id not in shout.authors and 'editor' not in roles:
|
||||
return {'error': 'access denied'}
|
||||
if (
|
||||
shout.created_by is not author.id
|
||||
and author.id not in shout.authors
|
||||
and "editor" not in roles
|
||||
):
|
||||
return {"error": "access denied"}
|
||||
|
||||
for author_id in shout.authors:
|
||||
reactions_unfollow(author_id, shout_id)
|
||||
|
||||
shout_dict = shout.dict()
|
||||
shout_dict['deleted_at'] = int(time.time())
|
||||
shout_dict["deleted_at"] = int(time.time())
|
||||
Shout.update(shout, shout_dict)
|
||||
session.add(shout)
|
||||
session.commit()
|
||||
await notify_shout(shout_dict, 'delete')
|
||||
await notify_shout(shout_dict, "delete")
|
||||
|
||||
return {}
|
||||
|
||||
|
||||
def handle_proposing(session, r, shout):
|
||||
if is_positive(r.kind):
|
||||
replied_reaction = session.query(Reaction).filter(Reaction.id == r.reply_to, Reaction.shout == r.shout).first()
|
||||
replied_reaction = (
|
||||
session.query(Reaction)
|
||||
.filter(Reaction.id == r.reply_to, Reaction.shout == r.shout)
|
||||
.first()
|
||||
)
|
||||
|
||||
if replied_reaction and replied_reaction.kind is ReactionKind.PROPOSE.value and replied_reaction.quote:
|
||||
if (
|
||||
replied_reaction
|
||||
and replied_reaction.kind is ReactionKind.PROPOSE.value
|
||||
and replied_reaction.quote
|
||||
):
|
||||
# patch all the proposals' quotes
|
||||
proposals = (
|
||||
session.query(Reaction)
|
||||
|
@ -265,13 +290,15 @@ def handle_proposing(session, r, shout):
|
|||
if proposal.quote:
|
||||
proposal_diff = get_diff(shout.body, proposal.quote)
|
||||
proposal_dict = proposal.dict()
|
||||
proposal_dict['quote'] = apply_diff(replied_reaction.quote, proposal_diff)
|
||||
proposal_dict["quote"] = apply_diff(
|
||||
replied_reaction.quote, proposal_diff
|
||||
)
|
||||
Reaction.update(proposal, proposal_dict)
|
||||
session.add(proposal)
|
||||
|
||||
# patch shout's body
|
||||
shout_dict = shout.dict()
|
||||
shout_dict['body'] = replied_reaction.quote
|
||||
shout_dict["body"] = replied_reaction.quote
|
||||
Shout.update(shout, shout_dict)
|
||||
session.add(shout)
|
||||
session.commit()
|
||||
|
|
|
@ -15,66 +15,72 @@ from services.db import local_session
|
|||
from services.notify import notify_follower
|
||||
from services.schema import mutation, query
|
||||
from services.logger import root_logger as logger
|
||||
from services.rediscache import redis
|
||||
|
||||
|
||||
@mutation.field('follow')
|
||||
@mutation.field("follow")
|
||||
@login_required
|
||||
async def follow(_, info, what, slug):
|
||||
try:
|
||||
user_id = info.context['user_id']
|
||||
user_id = info.context["user_id"]
|
||||
with local_session() as session:
|
||||
actor = session.query(Author).filter(Author.user == user_id).first()
|
||||
if actor:
|
||||
follower_id = actor.id
|
||||
if what == 'AUTHOR':
|
||||
if what == "AUTHOR":
|
||||
if author_follow(follower_id, slug):
|
||||
author = session.query(Author.id).where(Author.slug == slug).one()
|
||||
follower = session.query(Author).where(Author.id == follower_id).one()
|
||||
author = (
|
||||
session.query(Author.id).where(Author.slug == slug).one()
|
||||
)
|
||||
follower = (
|
||||
session.query(Author).where(Author.id == follower_id).one()
|
||||
)
|
||||
await notify_follower(follower.dict(), author.id)
|
||||
elif what == 'TOPIC':
|
||||
elif what == "TOPIC":
|
||||
topic_follow(follower_id, slug)
|
||||
elif what == 'COMMUNITY':
|
||||
elif what == "COMMUNITY":
|
||||
community_follow(follower_id, slug)
|
||||
elif what == 'REACTIONS':
|
||||
elif what == "REACTIONS":
|
||||
reactions_follow(follower_id, slug)
|
||||
except Exception as e:
|
||||
logger.debug(info, what, slug)
|
||||
logger.error(e)
|
||||
return {'error': str(e)}
|
||||
return {"error": str(e)}
|
||||
|
||||
return {}
|
||||
|
||||
|
||||
@mutation.field('unfollow')
|
||||
@mutation.field("unfollow")
|
||||
@login_required
|
||||
async def unfollow(_, info, what, slug):
|
||||
user_id = info.context['user_id']
|
||||
user_id = info.context["user_id"]
|
||||
try:
|
||||
with local_session() as session:
|
||||
actor = session.query(Author).filter(Author.user == user_id).first()
|
||||
if actor:
|
||||
follower_id = actor.id
|
||||
if what == 'AUTHOR':
|
||||
if what == "AUTHOR":
|
||||
if author_unfollow(follower_id, slug):
|
||||
author = session.query(Author.id).where(Author.slug == slug).one()
|
||||
follower = session.query(Author).where(Author.id == follower_id).one()
|
||||
await notify_follower(follower.dict(), author.id, 'unfollow')
|
||||
elif what == 'TOPIC':
|
||||
author = (
|
||||
session.query(Author.id).where(Author.slug == slug).one()
|
||||
)
|
||||
follower = (
|
||||
session.query(Author).where(Author.id == follower_id).one()
|
||||
)
|
||||
await notify_follower(follower.dict(), author.id, "unfollow")
|
||||
elif what == "TOPIC":
|
||||
topic_unfollow(follower_id, slug)
|
||||
elif what == 'COMMUNITY':
|
||||
elif what == "COMMUNITY":
|
||||
community_unfollow(follower_id, slug)
|
||||
elif what == 'REACTIONS':
|
||||
elif what == "REACTIONS":
|
||||
reactions_unfollow(follower_id, slug)
|
||||
except Exception as e:
|
||||
return {'error': str(e)}
|
||||
return {"error": str(e)}
|
||||
|
||||
return {}
|
||||
|
||||
|
||||
@query.field('get_my_followed')
|
||||
@login_required
|
||||
async def get_my_followed(_, info):
|
||||
user_id = info.context['user_id']
|
||||
def query_follows(user_id: str):
|
||||
topics = set()
|
||||
authors = set()
|
||||
communities = []
|
||||
|
@ -99,11 +105,37 @@ async def get_my_followed(_, info):
|
|||
topics = set(session.execute(topics_query).scalars())
|
||||
communities = session.query(Community).all()
|
||||
|
||||
return {'topics': list(topics), 'authors': list(authors), 'communities': communities}
|
||||
return {
|
||||
"topics": list(topics),
|
||||
"authors": list(authors),
|
||||
"communities": communities,
|
||||
}
|
||||
|
||||
|
||||
@query.field('get_shout_followers')
|
||||
def get_shout_followers(_, _info, slug: str = '', shout_id: int | None = None) -> List[Author]:
|
||||
async def get_follows_by_user_id(user_id: str):
|
||||
redis_key = f"user:{user_id}:follows"
|
||||
res = await redis.execute("HGET", redis_key)
|
||||
if res:
|
||||
return res
|
||||
|
||||
logger.info(f"getting follows for {user_id}")
|
||||
follows = query_follows(user_id)
|
||||
await redis.execute("HSET", redis_key, follows)
|
||||
|
||||
return follows
|
||||
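# Review sketch (not part of the commit): `redis.execute("HGET", redis_key)` above passes no
# field argument, and the follows dict is written back unserialized, so the wrapper's exact
# behaviour is not visible from this diff. A minimal read-through cache with a plain string
# key and JSON values could look like the following; the redis-py asyncio client, the TTL
# and the `default=str` fallback are illustrative assumptions, not project API.
import json

from redis.asyncio import Redis

client = Redis()  # hypothetical connection; the module above imports services.rediscache instead


async def get_follows_cached(user_id: str, ttl: int = 300) -> dict:
    key = f"user:{user_id}:follows"
    cached = await client.get(key)
    if cached:
        return json.loads(cached)  # stored as a JSON string
    follows = query_follows(user_id)  # the DB helper defined above
    await client.set(key, json.dumps(follows, default=str), ex=ttl)
    return follows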
|
||||
|
||||
@query.field("get_my_followed")
|
||||
@login_required
|
||||
async def get_my_followed(_, info):
|
||||
user_id = info.context["user_id"]
|
||||
return await get_follows_by_user_id(user_id)
|
||||
|
||||
|
||||
@query.field("get_shout_followers")
|
||||
def get_shout_followers(
|
||||
_, _info, slug: str = "", shout_id: int | None = None
|
||||
) -> List[Author]:
|
||||
followers = []
|
||||
with local_session() as session:
|
||||
shout = None
|
||||
|
@ -136,7 +168,9 @@ def reactions_follow(author_id, shout_id, auto=False):
|
|||
)
|
||||
|
||||
if not following:
|
||||
following = ShoutReactionsFollower(follower=author_id, shout=shout.id, auto=auto)
|
||||
following = ShoutReactionsFollower(
|
||||
follower=author_id, shout=shout.id, auto=auto
|
||||
)
|
||||
session.add(following)
|
||||
session.commit()
|
||||
return True
|
||||
|
|
|
@ -21,16 +21,22 @@ from services.logger import root_logger as logger
|
|||
|
||||
def add_stat_columns(q, aliased_reaction):
|
||||
q = q.outerjoin(aliased_reaction).add_columns(
|
||||
func.sum(aliased_reaction.id).label('reacted_stat'),
|
||||
func.sum(case((aliased_reaction.kind == ReactionKind.COMMENT.value, 1), else_=0)).label('comments_stat'),
|
||||
func.sum(case((aliased_reaction.kind == ReactionKind.LIKE.value, 1), else_=0)).label('likes_stat'),
|
||||
func.sum(case((aliased_reaction.kind == ReactionKind.DISLIKE.value, 1), else_=0)).label('dislikes_stat'),
|
||||
func.sum(aliased_reaction.id).label("reacted_stat"),
|
||||
func.sum(
|
||||
case((aliased_reaction.kind == ReactionKind.COMMENT.value, 1), else_=0)
|
||||
).label("comments_stat"),
|
||||
func.sum(
|
||||
case((aliased_reaction.kind == ReactionKind.LIKE.value, 1), else_=0)
|
||||
).label("likes_stat"),
|
||||
func.sum(
|
||||
case((aliased_reaction.kind == ReactionKind.DISLIKE.value, 1), else_=0)
|
||||
).label("dislikes_stat"),
|
||||
func.max(
|
||||
case(
|
||||
(aliased_reaction.kind != ReactionKind.COMMENT.value, None),
|
||||
else_=aliased_reaction.created_at,
|
||||
)
|
||||
).label('last_comment'),
|
||||
).label("last_comment"),
|
||||
)
|
||||
|
||||
return q
|
||||
|
@ -54,7 +60,9 @@ def check_to_feature(session, approver_id, reaction):
|
|||
approvers = []
|
||||
approvers.append(approver_id)
|
||||
# now count how many approvers have already voted
|
||||
reacted_readers = session.query(Reaction).where(Reaction.shout == reaction.shout).all()
|
||||
reacted_readers = (
|
||||
session.query(Reaction).where(Reaction.shout == reaction.shout).all()
|
||||
)
|
||||
for reacted_reader in reacted_readers:
|
||||
if is_featured_author(session, reacted_reader.id):
|
||||
approvers.append(reacted_reader.id)
|
||||
|
@ -69,12 +77,19 @@ def check_to_unfeature(session, rejecter_id, reaction):
|
|||
if is_featured_author(session, rejecter_id):
|
||||
reactions = (
|
||||
session.query(Reaction)
|
||||
.where(and_(Reaction.shout == reaction.shout, Reaction.kind.in_(RATING_REACTIONS)))
|
||||
.where(
|
||||
and_(
|
||||
Reaction.shout == reaction.shout,
|
||||
Reaction.kind.in_(RATING_REACTIONS),
|
||||
)
|
||||
)
|
||||
.all()
|
||||
)
|
||||
rejects = 0
|
||||
for r in reactions:
|
||||
approver = session.query(Author).filter(Author.id == r.created_by).first()
|
||||
approver = (
|
||||
session.query(Author).filter(Author.id == r.created_by).first()
|
||||
)
|
||||
if is_featured_author(session, approver):
|
||||
if is_negative(r.kind):
|
||||
rejects += 1
|
||||
|
@ -86,7 +101,7 @@ def check_to_unfeature(session, rejecter_id, reaction):
|
|||
async def set_featured(session, shout_id):
|
||||
s = session.query(Shout).where(Shout.id == shout_id).first()
|
||||
s.featured_at = int(time.time())
|
||||
Shout.update(s, {'featured_at': int(time.time())})
|
||||
Shout.update(s, {"featured_at": int(time.time())})
|
||||
author = session.query(Author).filter(Author.id == s.created_by).first()
|
||||
if author:
|
||||
await add_user_role(str(author.user))
|
||||
|
@ -96,7 +111,7 @@ async def set_featured(session, shout_id):
|
|||
|
||||
def set_unfeatured(session, shout_id):
|
||||
s = session.query(Shout).where(Shout.id == shout_id).first()
|
||||
Shout.update(s, {'featured_at': None})
|
||||
Shout.update(s, {"featured_at": None})
|
||||
session.add(s)
|
||||
session.commit()
|
||||
|
||||
|
@ -108,7 +123,11 @@ async def _create_reaction(session, shout, author, reaction):
|
|||
rdict = r.dict()
|
||||
|
||||
# collaborative editing
|
||||
if rdict.get('reply_to') and r.kind in RATING_REACTIONS and author.id in shout.authors:
|
||||
if (
|
||||
rdict.get("reply_to")
|
||||
and r.kind in RATING_REACTIONS
|
||||
and author.id in shout.authors
|
||||
):
|
||||
handle_proposing(session, r, shout)
|
||||
|
||||
# self-regulation mechanics
|
||||
|
@ -118,92 +137,95 @@ async def _create_reaction(session, shout, author, reaction):
|
|||
await set_featured(session, shout.id)
|
||||
|
||||
# reactions auto-following
|
||||
reactions_follow(author.id, reaction['shout'], True)
|
||||
reactions_follow(author.id, reaction["shout"], True)
|
||||
|
||||
rdict['shout'] = shout.dict()
|
||||
rdict['created_by'] = author.dict()
|
||||
rdict['stat'] = {'commented': 0, 'reacted': 0, 'rating': 0}
|
||||
rdict["shout"] = shout.dict()
|
||||
rdict["created_by"] = author.dict()
|
||||
rdict["stat"] = {"commented": 0, "reacted": 0, "rating": 0}
|
||||
|
||||
# notifications call
|
||||
await notify_reaction(rdict, 'create')
|
||||
await notify_reaction(rdict, "create")
|
||||
|
||||
return rdict
|
||||
|
||||
|
||||
@mutation.field('create_reaction')
|
||||
@mutation.field("create_reaction")
|
||||
@login_required
|
||||
async def create_reaction(_, info, reaction):
|
||||
user_id = info.context['user_id']
|
||||
user_id = info.context["user_id"]
|
||||
|
||||
shout_id = reaction.get('shout')
|
||||
shout_id = reaction.get("shout")
|
||||
|
||||
if not shout_id:
|
||||
return {'error': 'Shout ID is required to create a reaction.'}
|
||||
return {"error": "Shout ID is required to create a reaction."}
|
||||
|
||||
try:
|
||||
with local_session() as session:
|
||||
shout = session.query(Shout).filter(Shout.id == shout_id).first()
|
||||
author = session.query(Author).filter(Author.user == user_id).first()
|
||||
if shout and author:
|
||||
reaction['created_by'] = author.id
|
||||
kind = reaction.get('kind')
|
||||
reaction["created_by"] = author.id
|
||||
kind = reaction.get("kind")
|
||||
shout_id = shout.id
|
||||
|
||||
if not kind and isinstance(reaction.get('body'), str):
|
||||
if not kind and isinstance(reaction.get("body"), str):
|
||||
kind = ReactionKind.COMMENT.value
|
||||
|
||||
if not kind:
|
||||
return {'error': 'cannot create reaction without a kind'}
|
||||
return {"error": "cannot create reaction without a kind"}
|
||||
|
||||
if kind in RATING_REACTIONS:
|
||||
|
||||
opposite_kind = ( ReactionKind.DISLIKE.value
|
||||
opposite_kind = (
|
||||
ReactionKind.DISLIKE.value
|
||||
if is_positive(kind)
|
||||
else ReactionKind.LIKE.value
|
||||
)
|
||||
|
||||
q = (
|
||||
select(Reaction)
|
||||
.filter(
|
||||
and_(
|
||||
Reaction.shout == shout_id,
|
||||
Reaction.created_by == author.id,
|
||||
Reaction.kind.in_(RATING_REACTIONS),
|
||||
)
|
||||
q = select(Reaction).filter(
|
||||
and_(
|
||||
Reaction.shout == shout_id,
|
||||
Reaction.created_by == author.id,
|
||||
Reaction.kind.in_(RATING_REACTIONS),
|
||||
)
|
||||
)
|
||||
reply_to = reaction.get('reply_to')
|
||||
reply_to = reaction.get("reply_to")
|
||||
if reply_to:
|
||||
q = q.filter(Reaction.reply_to == reply_to)
|
||||
rating_reactions = session.execute(q).all()
|
||||
same_rating = filter(lambda r: r.created_by == author.id and r.kind == opposite_kind, rating_reactions)
|
||||
opposite_rating = filter(lambda r: r.created_by == author.id and r.kind == opposite_kind, rating_reactions)
|
||||
same_rating = filter(
|
||||
lambda r: r.created_by == author.id and r.kind == opposite_kind,
|
||||
rating_reactions,
|
||||
)
|
||||
opposite_rating = filter(
|
||||
lambda r: r.created_by == author.id and r.kind == opposite_kind,
|
||||
rating_reactions,
|
||||
)
|
||||
if same_rating:
|
||||
return {'error': "You can't rate the same thing twice"}
|
||||
return {"error": "You can't rate the same thing twice"}
|
||||
elif opposite_rating:
|
||||
return {'error': 'Remove opposite vote first'}
|
||||
return {"error": "Remove opposite vote first"}
|
||||
elif filter(lambda r: r.created_by == author.id, rating_reactions):
|
||||
return {'error': "You can't rate your own thing"}
|
||||
return {"error": "You can't rate your own thing"}
|
||||
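# Review sketch (not part of the commit): both `same_rating` and `opposite_rating` above
# filter on `opposite_kind`, and `filter()` returns a lazy object that is always truthy,
# so the duplicate-vote checks may not behave as intended. A plainer version, assuming
# `rating_reactions` holds Reaction objects (e.g. via session.execute(q).scalars().all()):
same_votes = [
    r for r in rating_reactions
    if r.created_by == author.id and r.kind == kind
]
opposite_votes = [
    r for r in rating_reactions
    if r.created_by == author.id and r.kind == opposite_kind
]
if same_votes:
    return {"error": "You can't rate the same thing twice"}
if opposite_votes:
    return {"error": "Remove opposite vote first"}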
|
||||
rdict = await _create_reaction(session, shout, author, reaction)
|
||||
return {'reaction': rdict}
|
||||
return {"reaction": rdict}
|
||||
except Exception as e:
|
||||
import traceback
|
||||
|
||||
traceback.print_exc()
|
||||
logger.error(f'{type(e).__name__}: {e}')
|
||||
logger.error(f"{type(e).__name__}: {e}")
|
||||
|
||||
return {'error': 'Cannot create reaction.'}
|
||||
return {"error": "Cannot create reaction."}
|
||||
|
||||
|
||||
@mutation.field('update_reaction')
|
||||
@mutation.field("update_reaction")
|
||||
@login_required
|
||||
async def update_reaction(_, info, reaction):
|
||||
user_id = info.context.get('user_id')
|
||||
roles = info.context.get('roles')
|
||||
rid = reaction.get('id')
|
||||
user_id = info.context.get("user_id")
|
||||
roles = info.context.get("roles")
|
||||
rid = reaction.get("id")
|
||||
if rid and user_id and roles:
|
||||
del reaction['id']
|
||||
del reaction["id"]
|
||||
with local_session() as session:
|
||||
reaction_query = select(Reaction).filter(Reaction.id == int(rid))
|
||||
aliased_reaction = aliased(Reaction)
|
||||
|
@ -211,22 +233,24 @@ async def update_reaction(_, info, reaction):
|
|||
reaction_query = reaction_query.group_by(Reaction.id)
|
||||
|
||||
try:
|
||||
[r, reacted_stat, commented_stat, likes_stat, dislikes_stat, _l] = session.execute(reaction_query).unique().first()
|
||||
[r, reacted_stat, commented_stat, likes_stat, dislikes_stat, _l] = (
|
||||
session.execute(reaction_query).unique().first()
|
||||
)
|
||||
|
||||
if not r:
|
||||
return {'error': 'invalid reaction id'}
|
||||
return {"error": "invalid reaction id"}
|
||||
|
||||
author = session.query(Author).filter(Author.user == user_id).first()
|
||||
if author:
|
||||
if r.created_by != author.id and 'editor' not in roles:
|
||||
return {'error': 'access denied'}
|
||||
if r.created_by != author.id and "editor" not in roles:
|
||||
return {"error": "access denied"}
|
||||
|
||||
body = reaction.get('body')
|
||||
body = reaction.get("body")
|
||||
if body:
|
||||
r.body = body
|
||||
r.updated_at = int(time.time())
|
||||
|
||||
if r.kind != reaction['kind']:
|
||||
if r.kind != reaction["kind"]:
|
||||
# Detecting a change of opinion could be implemented here
|
||||
pass
|
||||
|
||||
|
@ -235,79 +259,79 @@ async def update_reaction(_, info, reaction):
|
|||
session.commit()
|
||||
|
||||
r.stat = {
|
||||
'reacted': reacted_stat,
|
||||
'commented': commented_stat,
|
||||
'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
|
||||
"reacted": reacted_stat,
|
||||
"commented": commented_stat,
|
||||
"rating": int(likes_stat or 0) - int(dislikes_stat or 0),
|
||||
}
|
||||
|
||||
await notify_reaction(r.dict(), 'update')
|
||||
await notify_reaction(r.dict(), "update")
|
||||
|
||||
return {'reaction': r}
|
||||
return {"reaction": r}
|
||||
else:
|
||||
return {'error': 'not authorized'}
|
||||
return {"error": "not authorized"}
|
||||
except Exception:
|
||||
import traceback
|
||||
|
||||
traceback.print_exc()
|
||||
return {'error': 'cannot create reaction'}
|
||||
return {"error": "cannot create reaction"}
|
||||
|
||||
|
||||
@mutation.field('delete_reaction')
|
||||
@mutation.field("delete_reaction")
|
||||
@login_required
|
||||
async def delete_reaction(_, info, reaction_id: int):
|
||||
user_id = info.context['user_id']
|
||||
roles = info.context['roles']
|
||||
user_id = info.context["user_id"]
|
||||
roles = info.context["roles"]
|
||||
if isinstance(reaction_id, int) and user_id and isinstance(roles, list):
|
||||
with local_session() as session:
|
||||
try:
|
||||
author = session.query(Author).filter(Author.user == user_id).one()
|
||||
r = session.query(Reaction).filter(Reaction.id == reaction_id).one()
|
||||
if r and author:
|
||||
if r.created_by is author.id and 'editor' not in roles:
|
||||
return {'error': 'access denied'}
|
||||
if r.created_by is author.id and "editor" not in roles:
|
||||
return {"error": "access denied"}
|
||||
|
||||
if r.kind in [ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]:
|
||||
session.delete(r)
|
||||
session.commit()
|
||||
await notify_reaction(r.dict(), 'delete')
|
||||
await notify_reaction(r.dict(), "delete")
|
||||
except Exception as exc:
|
||||
return {'error': f'cannot delete reaction: {exc}'}
|
||||
return {'error': 'cannot delete reaction'}
|
||||
return {"error": f"cannot delete reaction: {exc}"}
|
||||
return {"error": "cannot delete reaction"}
|
||||
|
||||
|
||||
def apply_reaction_filters(by, q):
|
||||
shout_slug = by.get('shout', None)
|
||||
shout_slug = by.get("shout", None)
|
||||
if shout_slug:
|
||||
q = q.filter(Shout.slug == shout_slug)
|
||||
|
||||
elif by.get('shouts'):
|
||||
q = q.filter(Shout.slug.in_(by.get('shouts', [])))
|
||||
elif by.get("shouts"):
|
||||
q = q.filter(Shout.slug.in_(by.get("shouts", [])))
|
||||
|
||||
created_by = by.get('created_by', None)
|
||||
created_by = by.get("created_by", None)
|
||||
if created_by:
|
||||
q = q.filter(Author.id == created_by)
|
||||
|
||||
topic = by.get('topic', None)
|
||||
topic = by.get("topic", None)
|
||||
if topic:
|
||||
q = q.filter(Shout.topics.contains(topic))
|
||||
|
||||
if by.get('comment', False):
|
||||
if by.get("comment", False):
|
||||
q = q.filter(Reaction.kind == ReactionKind.COMMENT.value)
|
||||
if by.get('rating', False):
|
||||
if by.get("rating", False):
|
||||
q = q.filter(Reaction.kind.in_(RATING_REACTIONS))
|
||||
|
||||
by_search = by.get('search', '')
|
||||
by_search = by.get("search", "")
|
||||
if len(by_search) > 2:
|
||||
q = q.filter(Reaction.body.ilike(f'%{by_search}%'))
|
||||
q = q.filter(Reaction.body.ilike(f"%{by_search}%"))
|
||||
|
||||
after = by.get('after', None)
|
||||
after = by.get("after", None)
|
||||
if isinstance(after, int):
|
||||
q = q.filter(Reaction.created_at > after)
|
||||
|
||||
return q
|
||||
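# Illustrative `by` payloads accepted by apply_reaction_filters / load_reactions_by; the
# field names come from the .get() calls above, the concrete values are made up.
by_comments_on_shout = {"shout": "some-shout-slug", "comment": True}
by_recent_ratings = {"rating": True, "after": 1700000000}
by_author_search = {"created_by": 1, "search": "climate", "stat": "rating"}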
|
||||
|
||||
@query.field('load_reactions_by')
|
||||
@query.field("load_reactions_by")
|
||||
async def load_reactions_by(_, info, by, limit=50, offset=0):
|
||||
"""
|
||||
:param info: graphql meta
|
||||
|
@ -344,7 +368,7 @@ async def load_reactions_by(_, info, by, limit=50, offset=0):
|
|||
q = q.group_by(Reaction.id, Author.id, Shout.id, aliased_reaction.id)
|
||||
|
||||
# order by
|
||||
q = q.order_by(desc('created_at'))
|
||||
q = q.order_by(desc("created_at"))
|
||||
|
||||
# pagination
|
||||
q = q.limit(limit).offset(offset)
|
||||
|
@ -365,19 +389,19 @@ async def load_reactions_by(_, info, by, limit=50, offset=0):
|
|||
reaction.created_by = author
|
||||
reaction.shout = shout
|
||||
reaction.stat = {
|
||||
'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
|
||||
'reacted': reacted_stat,
|
||||
'commented': commented_stat,
|
||||
"rating": int(likes_stat or 0) - int(dislikes_stat or 0),
|
||||
"reacted": reacted_stat,
|
||||
"commented": commented_stat,
|
||||
}
|
||||
reactions.add(reaction)
|
||||
|
||||
# sort if by stat is present
|
||||
stat_sort = by.get('stat')
|
||||
stat_sort = by.get("stat")
|
||||
if stat_sort:
|
||||
reactions = sorted(
|
||||
reactions,
|
||||
key=lambda r: r.stat.get(stat_sort) or r.created_at,
|
||||
reverse=stat_sort.startswith('-'),
|
||||
reverse=stat_sort.startswith("-"),
|
||||
)
|
||||
|
||||
return reactions
|
||||
|
@ -415,7 +439,9 @@ async def reacted_shouts_updates(follower_id: int, limit=50, offset=0) -> List[S
|
|||
q2 = add_stat_columns(q2, aliased(Reaction))
|
||||
|
||||
# Sort shouts by the `last_comment` field
|
||||
combined_query = union(q1, q2).order_by(desc('last_comment')).limit(limit).offset(offset)
|
||||
combined_query = (
|
||||
union(q1, q2).order_by(desc("last_comment")).limit(limit).offset(offset)
|
||||
)
|
||||
results = session.execute(combined_query).scalars()
|
||||
with local_session() as session:
|
||||
for [
|
||||
|
@ -427,26 +453,26 @@ async def reacted_shouts_updates(follower_id: int, limit=50, offset=0) -> List[S
|
|||
last_comment,
|
||||
] in results:
|
||||
shout.stat = {
|
||||
'viewed': await ViewedStorage.get_shout(shout.slug),
|
||||
'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
|
||||
'reacted': reacted_stat,
|
||||
'commented': commented_stat,
|
||||
'last_comment': last_comment,
|
||||
"viewed": await ViewedStorage.get_shout(shout.slug),
|
||||
"rating": int(likes_stat or 0) - int(dislikes_stat or 0),
|
||||
"reacted": reacted_stat,
|
||||
"commented": commented_stat,
|
||||
"last_comment": last_comment,
|
||||
}
|
||||
shouts.append(shout)
|
||||
|
||||
return shouts
|
||||
|
||||
|
||||
@query.field('load_shouts_followed')
|
||||
@query.field("load_shouts_followed")
|
||||
@login_required
|
||||
async def load_shouts_followed(_, info, limit=50, offset=0) -> List[Shout]:
|
||||
user_id = info.context['user_id']
|
||||
user_id = info.context["user_id"]
|
||||
with local_session() as session:
|
||||
author = session.query(Author).filter(Author.user == user_id).first()
|
||||
if author:
|
||||
try:
|
||||
author_id: int = author.dict()['id']
|
||||
author_id: int = author.dict()["id"]
|
||||
shouts = await reacted_shouts_updates(author_id, limit, offset)
|
||||
return shouts
|
||||
except Exception as error:
|
||||
|
|
|
@ -18,22 +18,22 @@ from services.logger import root_logger as logger
|
|||
|
||||
|
||||
def apply_filters(q, filters, author_id=None):
|
||||
if filters.get('reacted') and author_id:
|
||||
if filters.get("reacted") and author_id:
|
||||
q.join(Reaction, Reaction.created_by == author_id)
|
||||
|
||||
by_featured = filters.get('featured')
|
||||
by_featured = filters.get("featured")
|
||||
if by_featured:
|
||||
q = q.filter(Shout.featured_at.is_not(None))
|
||||
by_layouts = filters.get('layouts')
|
||||
by_layouts = filters.get("layouts")
|
||||
if by_layouts:
|
||||
q = q.filter(Shout.layout.in_(by_layouts))
|
||||
by_author = filters.get('author')
|
||||
by_author = filters.get("author")
|
||||
if by_author:
|
||||
q = q.filter(Shout.authors.any(slug=by_author))
|
||||
by_topic = filters.get('topic')
|
||||
by_topic = filters.get("topic")
|
||||
if by_topic:
|
||||
q = q.filter(Shout.topics.any(slug=by_topic))
|
||||
by_after = filters.get('after')
|
||||
by_after = filters.get("after")
|
||||
if by_after:
|
||||
ts = int(by_after)
|
||||
q = q.filter(Shout.created_at > ts)
|
||||
|
@ -41,7 +41,7 @@ def apply_filters(q, filters, author_id=None):
|
|||
return q
|
||||
|
||||
|
||||
@query.field('get_shout')
|
||||
@query.field("get_shout")
|
||||
async def get_shout(_, _info, slug=None, shout_id=None):
|
||||
with local_session() as session:
|
||||
q = select(Shout).options(
|
||||
|
@ -72,13 +72,15 @@ async def get_shout(_, _info, slug=None, shout_id=None):
|
|||
] = results
|
||||
|
||||
shout.stat = {
|
||||
'viewed': await ViewedStorage.get_shout(shout.slug),
|
||||
'reacted': reacted_stat,
|
||||
'commented': commented_stat,
|
||||
'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
|
||||
"viewed": await ViewedStorage.get_shout(shout.slug),
|
||||
"reacted": reacted_stat,
|
||||
"commented": commented_stat,
|
||||
"rating": int(likes_stat or 0) - int(dislikes_stat or 0),
|
||||
}
|
||||
|
||||
for author_caption in session.query(ShoutAuthor).join(Shout).where(Shout.slug == slug):
|
||||
for author_caption in (
|
||||
session.query(ShoutAuthor).join(Shout).where(Shout.slug == slug)
|
||||
):
|
||||
for author in shout.authors:
|
||||
if author.id == author_caption.author:
|
||||
author.caption = author_caption.caption
|
||||
|
@ -99,10 +101,12 @@ async def get_shout(_, _info, slug=None, shout_id=None):
|
|||
shout.main_topic = main_topic[0]
|
||||
return shout
|
||||
except Exception:
|
||||
raise HTTPException(status_code=404, detail=f'shout {slug or shout_id} not found')
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"shout {slug or shout_id} not found"
|
||||
)
|
||||
|
||||
|
||||
@query.field('load_shouts_by')
|
||||
@query.field("load_shouts_by")
|
||||
async def load_shouts_by(_, _info, options):
|
||||
"""
|
||||
:param options: {
|
||||
|
@ -138,20 +142,24 @@ async def load_shouts_by(_, _info, options):
|
|||
q = add_stat_columns(q, aliased_reaction)
|
||||
|
||||
# filters
|
||||
filters = options.get('filters', {})
|
||||
filters = options.get("filters", {})
|
||||
q = apply_filters(q, filters)
|
||||
|
||||
# group
|
||||
q = q.group_by(Shout.id)
|
||||
|
||||
# order
|
||||
order_by = options.get('order_by', Shout.featured_at if filters.get('featured') else Shout.published_at)
|
||||
query_order_by = desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
|
||||
order_by = options.get(
|
||||
"order_by", Shout.featured_at if filters.get("featured") else Shout.published_at
|
||||
)
|
||||
query_order_by = (
|
||||
desc(order_by) if options.get("order_by_desc", True) else asc(order_by)
|
||||
)
|
||||
q = q.order_by(nulls_last(query_order_by))
|
||||
|
||||
# limit offset
|
||||
offset = options.get('offset', 0)
|
||||
limit = options.get('limit', 10)
|
||||
offset = options.get("offset", 0)
|
||||
limit = options.get("limit", 10)
|
||||
q = q.limit(limit).offset(offset)
|
||||
|
||||
shouts = []
|
||||
|
@ -180,20 +188,20 @@ async def load_shouts_by(_, _info, options):
|
|||
if main_topic:
|
||||
shout.main_topic = main_topic[0]
|
||||
shout.stat = {
|
||||
'viewed': await ViewedStorage.get_shout(shout.slug),
|
||||
'reacted': reacted_stat,
|
||||
'commented': commented_stat,
|
||||
'rating': int(likes_stat) - int(dislikes_stat),
|
||||
"viewed": await ViewedStorage.get_shout(shout.slug),
|
||||
"reacted": reacted_stat,
|
||||
"commented": commented_stat,
|
||||
"rating": int(likes_stat) - int(dislikes_stat),
|
||||
}
|
||||
shouts.append(shout)
|
||||
|
||||
return shouts
|
||||
|
||||
|
||||
@query.field('load_shouts_drafts')
|
||||
@query.field("load_shouts_drafts")
|
||||
@login_required
|
||||
async def load_shouts_drafts(_, info):
|
||||
user_id = info.context['user_id']
|
||||
user_id = info.context["user_id"]
|
||||
|
||||
q = (
|
||||
select(Shout)
|
||||
|
@ -231,24 +239,29 @@ async def load_shouts_drafts(_, info):
|
|||
return shouts
|
||||
|
||||
|
||||
@query.field('load_shouts_feed')
|
||||
@query.field("load_shouts_feed")
|
||||
@login_required
|
||||
async def load_shouts_feed(_, info, options):
|
||||
user_id = info.context['user_id']
|
||||
user_id = info.context["user_id"]
|
||||
|
||||
shouts = []
|
||||
with local_session() as session:
|
||||
reader = session.query(Author).filter(Author.user == user_id).first()
|
||||
if reader:
|
||||
reader_followed_authors = select(AuthorFollower.author).where(AuthorFollower.follower == reader.id)
|
||||
reader_followed_topics = select(TopicFollower.topic).where(TopicFollower.follower == reader.id)
|
||||
reader_followed_authors = select(AuthorFollower.author).where(
|
||||
AuthorFollower.follower == reader.id
|
||||
)
|
||||
reader_followed_topics = select(TopicFollower.topic).where(
|
||||
TopicFollower.follower == reader.id
|
||||
)
|
||||
|
||||
subquery = (
|
||||
select(Shout.id)
|
||||
.where(Shout.id == ShoutAuthor.shout)
|
||||
.where(Shout.id == ShoutTopic.shout)
|
||||
.where(
|
||||
(ShoutAuthor.author.in_(reader_followed_authors)) | (ShoutTopic.topic.in_(reader_followed_topics))
|
||||
(ShoutAuthor.author.in_(reader_followed_authors))
|
||||
| (ShoutTopic.topic.in_(reader_followed_topics))
|
||||
)
|
||||
)
|
||||
|
||||
|
@ -269,22 +282,37 @@ async def load_shouts_feed(_, info, options):
|
|||
|
||||
aliased_reaction = aliased(Reaction)
|
||||
q = add_stat_columns(q, aliased_reaction)
|
||||
filters = options.get('filters', {})
|
||||
filters = options.get("filters", {})
|
||||
q = apply_filters(q, filters, reader.id)
|
||||
|
||||
order_by = options.get('order_by', Shout.featured_at if filters.get('featured') else Shout.published_at)
|
||||
order_by = options.get(
|
||||
"order_by",
|
||||
Shout.featured_at if filters.get("featured") else Shout.published_at,
|
||||
)
|
||||
|
||||
query_order_by = desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
|
||||
offset = options.get('offset', 0)
|
||||
limit = options.get('limit', 10)
|
||||
query_order_by = (
|
||||
desc(order_by) if options.get("order_by_desc", True) else asc(order_by)
|
||||
)
|
||||
offset = options.get("offset", 0)
|
||||
limit = options.get("limit", 10)
|
||||
|
||||
q = q.group_by(Shout.id).order_by(nulls_last(query_order_by)).limit(limit).offset(offset)
|
||||
q = (
|
||||
q.group_by(Shout.id)
|
||||
.order_by(nulls_last(query_order_by))
|
||||
.limit(limit)
|
||||
.offset(offset)
|
||||
)
|
||||
|
||||
# print(q.compile(compile_kwargs={"literal_binds": True}))
|
||||
|
||||
for [shout, reacted_stat, commented_stat, likes_stat, dislikes_stat, _last_comment] in session.execute(
|
||||
q
|
||||
).unique():
|
||||
for [
|
||||
shout,
|
||||
reacted_stat,
|
||||
commented_stat,
|
||||
likes_stat,
|
||||
dislikes_stat,
|
||||
_last_comment,
|
||||
] in session.execute(q).unique():
|
||||
main_topic = (
|
||||
session.query(Topic.slug)
|
||||
.join(
|
||||
|
@ -301,17 +329,17 @@ async def load_shouts_feed(_, info, options):
|
|||
if main_topic:
|
||||
shout.main_topic = main_topic[0]
|
||||
shout.stat = {
|
||||
'viewed': await ViewedStorage.get_shout(shout.slug),
|
||||
'reacted': reacted_stat,
|
||||
'commented': commented_stat,
|
||||
'rating': likes_stat - dislikes_stat,
|
||||
"viewed": await ViewedStorage.get_shout(shout.slug),
|
||||
"reacted": reacted_stat,
|
||||
"commented": commented_stat,
|
||||
"rating": likes_stat - dislikes_stat,
|
||||
}
|
||||
shouts.append(shout)
|
||||
|
||||
return shouts
|
||||
|
||||
|
||||
@query.field('load_shouts_search')
|
||||
@query.field("load_shouts_search")
|
||||
async def load_shouts_search(_, _info, text, limit=50, offset=0):
|
||||
if isinstance(text, str) and len(text) > 2:
|
||||
results = await search_text(text, limit, offset)
|
||||
|
@ -321,7 +349,7 @@ async def load_shouts_search(_, _info, text, limit=50, offset=0):
|
|||
|
||||
|
||||
@login_required
|
||||
@query.field('load_shouts_unrated')
|
||||
@query.field("load_shouts_unrated")
|
||||
async def load_shouts_unrated(_, info, limit: int = 50, offset: int = 0):
|
||||
q = (
|
||||
select(Shout)
|
||||
|
@ -334,10 +362,12 @@ async def load_shouts_unrated(_, info, limit: int = 50, offset: int = 0):
|
|||
and_(
|
||||
Reaction.shout == Shout.id,
|
||||
Reaction.replyTo.is_(None),
|
||||
Reaction.kind.in_([ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]),
|
||||
Reaction.kind.in_(
|
||||
[ReactionKind.LIKE.value, ReactionKind.DISLIKE.value]
|
||||
),
|
||||
),
|
||||
)
|
||||
.outerjoin(Author, Author.user == bindparam('user_id'))
|
||||
.outerjoin(Author, Author.user == bindparam("user_id"))
|
||||
.where(
|
||||
and_(
|
||||
Shout.deleted_at.is_(None),
|
||||
|
@ -354,7 +384,7 @@ async def load_shouts_unrated(_, info, limit: int = 50, offset: int = 0):
|
|||
q = add_stat_columns(q, aliased_reaction)
|
||||
|
||||
q = q.group_by(Shout.id).order_by(func.random()).limit(limit).offset(offset)
|
||||
user_id = info.context.get('user_id')
|
||||
user_id = info.context.get("user_id")
|
||||
if user_id:
|
||||
with local_session() as session:
|
||||
author = session.query(Author).filter(Author.user == user_id).first()
|
||||
|
@ -374,20 +404,20 @@ async def get_shouts_from_query(q, author_id=None):
|
|||
likes_stat,
|
||||
dislikes_stat,
|
||||
last_comment,
|
||||
] in session.execute(q, {'author_id': author_id}).unique():
|
||||
] in session.execute(q, {"author_id": author_id}).unique():
|
||||
shouts.append(shout)
|
||||
shout.stat = {
|
||||
'viewed': await ViewedStorage.get_shout(shout_slug=shout.slug),
|
||||
'reacted': reacted_stat,
|
||||
'commented': commented_stat,
|
||||
'rating': int(likes_stat or 0) - int(dislikes_stat or 0),
|
||||
'last_comment': last_comment,
|
||||
"viewed": await ViewedStorage.get_shout(shout_slug=shout.slug),
|
||||
"reacted": reacted_stat,
|
||||
"commented": commented_stat,
|
||||
"rating": int(likes_stat or 0) - int(dislikes_stat or 0),
|
||||
"last_comment": last_comment,
|
||||
}
|
||||
|
||||
return shouts
|
||||
|
||||
|
||||
@query.field('load_shouts_random_top')
|
||||
@query.field("load_shouts_random_top")
|
||||
async def load_shouts_random_top(_, _info, options):
|
||||
"""
|
||||
:param _
|
||||
|
@ -406,9 +436,11 @@ async def load_shouts_random_top(_, _info, options):
|
|||
|
||||
aliased_reaction = aliased(Reaction)
|
||||
|
||||
subquery = select(Shout.id).outerjoin(aliased_reaction).where(Shout.deleted_at.is_(None))
|
||||
subquery = (
|
||||
select(Shout.id).outerjoin(aliased_reaction).where(Shout.deleted_at.is_(None))
|
||||
)
|
||||
|
||||
subquery = apply_filters(subquery, options.get('filters', {}))
|
||||
subquery = apply_filters(subquery, options.get("filters", {}))
|
||||
subquery = subquery.group_by(Shout.id).order_by(
|
||||
desc(
|
||||
func.sum(
|
||||
|
@ -423,7 +455,7 @@ async def load_shouts_random_top(_, _info, options):
|
|||
)
|
||||
)
|
||||
|
||||
random_limit = options.get('random_limit')
|
||||
random_limit = options.get("random_limit")
|
||||
if random_limit:
|
||||
subquery = subquery.limit(random_limit)
|
||||
|
||||
|
@ -438,20 +470,24 @@ async def load_shouts_random_top(_, _info, options):
|
|||
aliased_reaction = aliased(Reaction)
|
||||
q = add_stat_columns(q, aliased_reaction)
|
||||
|
||||
limit = options.get('limit', 10)
|
||||
limit = options.get("limit", 10)
|
||||
q = q.group_by(Shout.id).order_by(func.random()).limit(limit)
|
||||
|
||||
return await get_shouts_from_query(q)
|
||||
|
||||
|
||||
@query.field('load_shouts_random_topic')
|
||||
@query.field("load_shouts_random_topic")
|
||||
async def load_shouts_random_topic(_, info, limit: int = 10):
|
||||
topic = get_random_topic()
|
||||
if topic:
|
||||
shouts = fetch_shouts_by_topic(topic, limit)
|
||||
if shouts:
|
||||
return {'topic': topic, 'shouts': shouts}
|
||||
return { 'error': 'failed to get random topic after few retries', shouts: [], topic: {} }
|
||||
return {"topic": topic, "shouts": shouts}
|
||||
return {
|
||||
"error": "failed to get random topic after few retries",
|
||||
shouts: [],
|
||||
topic: {},
|
||||
}
|
||||
|
||||
|
||||
def fetch_shouts_by_topic(topic, limit):
|
||||
|
|
|
@ -11,25 +11,23 @@ from services.viewed import ViewedStorage
|
|||
from services.logger import root_logger as logger
|
||||
|
||||
|
||||
async def followed_topics(follower_id):
|
||||
q = select(Author)
|
||||
q = add_topic_stat_columns(q)
|
||||
q = q.join(TopicFollower, TopicFollower.author == Author.id).where(TopicFollower.follower == follower_id)
|
||||
# Pass the query to the get_topics_from_query function and return the results
|
||||
return await get_topics_from_query(q)
|
||||
|
||||
|
||||
def add_topic_stat_columns(q):
|
||||
aliased_shout_author = aliased(ShoutAuthor)
|
||||
aliased_topic_follower = aliased(TopicFollower)
|
||||
|
||||
q = (
|
||||
q.outerjoin(ShoutTopic, Topic.id == ShoutTopic.topic)
|
||||
.add_columns(func.count(distinct(ShoutTopic.shout)).label('shouts_stat'))
|
||||
.add_columns(func.count(distinct(ShoutTopic.shout)).label("shouts_stat"))
|
||||
.outerjoin(aliased_shout_author, ShoutTopic.shout == aliased_shout_author.shout)
|
||||
.add_columns(func.count(distinct(aliased_shout_author.author)).label('authors_stat'))
|
||||
.add_columns(
|
||||
func.count(distinct(aliased_shout_author.author)).label("authors_stat")
|
||||
)
|
||||
.outerjoin(aliased_topic_follower)
|
||||
.add_columns(func.count(distinct(aliased_topic_follower.follower)).label('followers_stat'))
|
||||
.add_columns(
|
||||
func.count(distinct(aliased_topic_follower.follower)).label(
|
||||
"followers_stat"
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
q = q.group_by(Topic.id)
|
||||
|
@ -42,17 +40,17 @@ async def get_topics_from_query(q):
|
|||
with local_session() as session:
|
||||
for [topic, shouts_stat, authors_stat, followers_stat] in session.execute(q):
|
||||
topic.stat = {
|
||||
'shouts': shouts_stat,
|
||||
'authors': authors_stat,
|
||||
'followers': followers_stat,
|
||||
'viewed': await ViewedStorage.get_topic(topic.slug),
|
||||
"shouts": shouts_stat,
|
||||
"authors": authors_stat,
|
||||
"followers": followers_stat,
|
||||
"viewed": await ViewedStorage.get_topic(topic.slug),
|
||||
}
|
||||
topics.append(topic)
|
||||
|
||||
return topics
|
||||
|
||||
|
||||
@query.field('get_topics_all')
|
||||
@query.field("get_topics_all")
|
||||
async def get_topics_all(_, _info):
|
||||
q = select(Topic)
|
||||
q = add_topic_stat_columns(q)
|
||||
|
@ -68,7 +66,7 @@ async def topics_followed_by(author_id):
|
|||
return await get_topics_from_query(q)
|
||||
|
||||
|
||||
@query.field('get_topics_by_community')
|
||||
@query.field("get_topics_by_community")
|
||||
async def get_topics_by_community(_, _info, community_id: int):
|
||||
q = select(Topic).where(Topic.community == community_id)
|
||||
q = add_topic_stat_columns(q)
|
||||
|
@ -76,8 +74,8 @@ async def get_topics_by_community(_, _info, community_id: int):
|
|||
return await get_topics_from_query(q)
|
||||
|
||||
|
||||
@query.field('get_topics_by_author')
|
||||
async def get_topics_by_author(_, _info, author_id=None, slug='', user=''):
|
||||
@query.field("get_topics_by_author")
|
||||
async def get_topics_by_author(_, _info, author_id=None, slug="", user=""):
|
||||
q = select(Topic)
|
||||
q = add_topic_stat_columns(q)
|
||||
if author_id:
|
||||
|
@ -90,7 +88,7 @@ async def get_topics_by_author(_, _info, author_id=None, slug='', user=''):
|
|||
return await get_topics_from_query(q)
|
||||
|
||||
|
||||
@query.field('get_topic')
|
||||
@query.field("get_topic")
|
||||
async def get_topic(_, _info, slug):
|
||||
q = select(Topic).where(Topic.slug == slug)
|
||||
q = add_topic_stat_columns(q)
|
||||
|
@ -100,7 +98,7 @@ async def get_topic(_, _info, slug):
|
|||
return topics[0]
|
||||
|
||||
|
||||
@mutation.field('create_topic')
|
||||
@mutation.field("create_topic")
|
||||
@login_required
|
||||
async def create_topic(_, _info, inp):
|
||||
with local_session() as session:
|
||||
|
@ -110,45 +108,43 @@ async def create_topic(_, _info, inp):
|
|||
session.add(new_topic)
|
||||
session.commit()
|
||||
|
||||
return {'topic': new_topic}
|
||||
return {'error': 'cannot create topic'}
|
||||
return {"topic": new_topic}
|
||||
|
||||
|
||||
@mutation.field('update_topic')
|
||||
@mutation.field("update_topic")
|
||||
@login_required
|
||||
async def update_topic(_, _info, inp):
|
||||
slug = inp['slug']
|
||||
slug = inp["slug"]
|
||||
with local_session() as session:
|
||||
topic = session.query(Topic).filter(Topic.slug == slug).first()
|
||||
if not topic:
|
||||
return {'error': 'topic not found'}
|
||||
return {"error": "topic not found"}
|
||||
else:
|
||||
Topic.update(topic, inp)
|
||||
session.add(topic)
|
||||
session.commit()
|
||||
|
||||
return {'topic': topic}
|
||||
return {'error': 'cannot update'}
|
||||
return {"topic": topic}
|
||||
|
||||
|
||||
@mutation.field('delete_topic')
|
||||
@mutation.field("delete_topic")
|
||||
@login_required
|
||||
async def delete_topic(_, info, slug: str):
|
||||
user_id = info.context['user_id']
|
||||
user_id = info.context["user_id"]
|
||||
with local_session() as session:
|
||||
t: Topic = session.query(Topic).filter(Topic.slug == slug).first()
|
||||
if not t:
|
||||
return {'error': 'invalid topic slug'}
|
||||
return {"error": "invalid topic slug"}
|
||||
author = session.query(Author).filter(Author.user == user_id).first()
|
||||
if author:
|
||||
if t.created_by != author.id:
|
||||
return {'error': 'access denied'}
|
||||
return {"error": "access denied"}
|
||||
|
||||
session.delete(t)
|
||||
session.commit()
|
||||
|
||||
return {}
|
||||
return {'error': 'access denied'}
|
||||
return {"error": "access denied"}
|
||||
|
||||
|
||||
def topic_follow(follower_id, slug):
|
||||
|
@ -179,7 +175,7 @@ def topic_unfollow(follower_id, slug):
|
|||
return False
|
||||
|
||||
|
||||
@query.field('get_topics_random')
|
||||
@query.field("get_topics_random")
|
||||
async def get_topics_random(_, info, amount=12):
|
||||
q = select(Topic)
|
||||
q = q.join(ShoutTopic)
|
||||
|
|
|
@ -4,7 +4,7 @@ type Query {
|
|||
get_author_id(user: String!): Author
|
||||
get_authors_all: [Author]
|
||||
get_author_followers(slug: String, user: String, author_id: Int): [Author]
|
||||
get_author_followed(slug: String, user: String, author_id: Int): [Author]
|
||||
get_author_follows(slug: String, user: String, author_id: Int): AuthorFollows!
|
||||
load_authors_by(by: AuthorsBy!, limit: Int, offset: Int): [Author]
|
||||
|
||||
# community
|
||||
|
|
|
@ -179,3 +179,10 @@ type Invite {
|
|||
shout_id: Int!
|
||||
status: InviteStatus
|
||||
}
|
||||
|
||||
type AuthorFollows {
|
||||
topics: [Topic]
|
||||
authors: [Author]
|
||||
shouts: [Shout]
|
||||
communities: [Community]
|
||||
}
|
||||
|
|
10
server.py
10
server.py
|
@ -2,15 +2,15 @@ from granian.constants import Interfaces
|
|||
from granian.server import Granian
|
||||
from services.logger import root_logger as logger
|
||||
|
||||
if __name__ == '__main__':
|
||||
logger.info('started')
|
||||
if __name__ == "__main__":
|
||||
logger.info("started")
|
||||
|
||||
granian_instance = Granian(
|
||||
'main:app',
|
||||
address='0.0.0.0', # noqa S104
|
||||
"main:app",
|
||||
address="0.0.0.0", # noqa S104
|
||||
port=8000,
|
||||
threads=4,
|
||||
websockets=False,
|
||||
interface=Interfaces.ASGI
|
||||
interface=Interfaces.ASGI,
|
||||
)
|
||||
granian_instance.serve()
|
||||
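# Usage sketch: with the configuration above, the API can be started locally with
# `python server.py`; Granian then serves "main:app" over ASGI on 0.0.0.0:8000 with 4 threads.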
|
|
|
@ -6,33 +6,36 @@ from dogpile.cache import make_region
|
|||
from settings import ADMIN_SECRET, AUTH_URL
|
||||
from services.logger import root_logger as logger
|
||||
|
||||
|
||||
async def request_data(gql, headers=None):
|
||||
if headers is None:
|
||||
headers = {'Content-Type': 'application/json'}
|
||||
headers = {"Content-Type": "application/json"}
|
||||
try:
|
||||
async with httpx.AsyncClient() as client:
|
||||
response = await client.post(AUTH_URL, json=gql, headers=headers)
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
errors = data.get('errors')
|
||||
errors = data.get("errors")
|
||||
if errors:
|
||||
logger.error(f'HTTP Errors: {errors}')
|
||||
logger.error(f"HTTP Errors: {errors}")
|
||||
else:
|
||||
return data
|
||||
except Exception as e:
|
||||
# Handling and logging exceptions during authentication check
|
||||
logger.error(f'request_data error: {e}')
|
||||
logger.error(f"request_data error: {e}")
|
||||
return None
|
||||
|
||||
|
||||
# Create a cache region with a 30-second TTL
|
||||
region = make_region().configure('dogpile.cache.memory', expiration_time=30)
|
||||
region = make_region().configure("dogpile.cache.memory", expiration_time=30)
|
||||
|
||||
|
||||
# Key function used for caching
|
||||
def auth_cache_key(req):
|
||||
token = req.headers.get('Authorization')
|
||||
token = req.headers.get("Authorization")
|
||||
return f"auth_token:{token}"
|
||||
|
||||
|
||||
# Decorator that caches the token validation request
|
||||
def cache_auth_request(f):
|
||||
@wraps(f)
|
||||
|
@ -41,41 +44,43 @@ def cache_auth_request(f):
|
|||
cache_key = auth_cache_key(req)
|
||||
result = region.get(cache_key)
|
||||
if result is None:
|
||||
[user_id, user_roles] = await f(*args, **kwargs)
|
||||
[user_id, user_roles] = await f(*args, **kwargs)
|
||||
if user_id:
|
||||
region.set(cache_key, [user_id, user_roles])
|
||||
return result
|
||||
|
||||
return decorated_function
|
||||
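# Review sketch (not part of the commit): the decorator above compares `region.get()` to
# None, while dogpile returns its NO_VALUE sentinel on a miss, and it returns `result`
# even when the value was just computed, so a cold cache would hand back nothing. One
# possible shape, reusing the `region` and `auth_cache_key` helpers defined in this module:
from functools import wraps

from dogpile.cache.api import NO_VALUE


def cache_auth_request_sketch(f):
    @wraps(f)
    async def decorated_function(*args, **kwargs):
        req = args[0]  # assumption: the request is the first positional argument
        cache_key = auth_cache_key(req)
        cached = region.get(cache_key)
        if cached is not NO_VALUE:
            return cached  # cache hit
        user_id, user_roles = await f(*args, **kwargs)
        if user_id:
            region.set(cache_key, [user_id, user_roles])
        return [user_id, user_roles]  # return the fresh value, not the missed lookup

    return decorated_function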
|
||||
|
||||
# Authentication check, now with caching
|
||||
@cache_auth_request
|
||||
async def check_auth(req):
|
||||
token = req.headers.get('Authorization')
|
||||
user_id = ''
|
||||
token = req.headers.get("Authorization")
|
||||
user_id = ""
|
||||
user_roles = []
|
||||
if token:
|
||||
try:
|
||||
# Logging the authentication token
|
||||
logger.debug(f'{token}')
|
||||
query_name = 'validate_jwt_token'
|
||||
operation = 'ValidateToken'
|
||||
logger.debug(f"{token}")
|
||||
query_name = "validate_jwt_token"
|
||||
operation = "ValidateToken"
|
||||
variables = {
|
||||
'params': {
|
||||
'token_type': 'access_token',
|
||||
'token': token,
|
||||
"params": {
|
||||
"token_type": "access_token",
|
||||
"token": token,
|
||||
}
|
||||
}
|
||||
|
||||
gql = {
|
||||
'query': f'query {operation}($params: ValidateJWTTokenInput!) {{ {query_name}(params: $params) {{ is_valid claims }} }}',
|
||||
'variables': variables,
|
||||
'operationName': operation,
|
||||
"query": f"query {operation}($params: ValidateJWTTokenInput!) {{ {query_name}(params: $params) {{ is_valid claims }} }}",
|
||||
"variables": variables,
|
||||
"operationName": operation,
|
||||
}
|
||||
data = await request_data(gql)
|
||||
if data:
|
||||
user_data = data.get('data', {}).get(query_name, {}).get('claims', {})
|
||||
user_id = user_data.get('sub')
|
||||
user_roles = user_data.get('allowed_roles')
|
||||
user_data = data.get("data", {}).get(query_name, {}).get("claims", {})
|
||||
user_id = user_data.get("sub")
|
||||
user_roles = user_data.get("allowed_roles")
|
||||
except Exception as e:
|
||||
import traceback
|
||||
|
||||
|
@ -87,41 +92,41 @@ async def check_auth(req):
|
|||
|
||||
|
||||
async def add_user_role(user_id):
|
||||
logger.info(f'add author role for user_id: {user_id}')
|
||||
query_name = '_update_user'
|
||||
operation = 'UpdateUserRoles'
|
||||
logger.info(f"add author role for user_id: {user_id}")
|
||||
query_name = "_update_user"
|
||||
operation = "UpdateUserRoles"
|
||||
headers = {
|
||||
'Content-Type': 'application/json',
|
||||
'x-authorizer-admin-secret': ADMIN_SECRET,
|
||||
"Content-Type": "application/json",
|
||||
"x-authorizer-admin-secret": ADMIN_SECRET,
|
||||
}
|
||||
variables = {'params': {'roles': 'author, reader', 'id': user_id}}
|
||||
variables = {"params": {"roles": "author, reader", "id": user_id}}
|
||||
gql = {
|
||||
'query': f'mutation {operation}($params: UpdateUserInput!) {{ {query_name}(params: $params) {{ id roles }} }}',
|
||||
'variables': variables,
|
||||
'operationName': operation,
|
||||
"query": f"mutation {operation}($params: UpdateUserInput!) {{ {query_name}(params: $params) {{ id roles }} }}",
|
||||
"variables": variables,
|
||||
"operationName": operation,
|
||||
}
|
||||
data = await request_data(gql, headers)
|
||||
if data:
|
||||
user_id = data.get('data', {}).get(query_name, {}).get('id')
|
||||
user_id = data.get("data", {}).get(query_name, {}).get("id")
|
||||
return user_id
|
||||
|
||||
|
||||
def login_required(f):
|
||||
@wraps(f)
|
||||
async def decorated_function(*args, **kwargs):
|
||||
user_id = ''
|
||||
user_id = ""
|
||||
user_roles = []
|
||||
info = args[1]
|
||||
|
||||
try:
|
||||
req = info.context.get('request')
|
||||
req = info.context.get("request")
|
||||
[user_id, user_roles] = await check_auth(req)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to authenticate user: {e}")
|
||||
if user_id:
|
||||
logger.info(f' got {user_id} roles: {user_roles}')
|
||||
info.context['user_id'] = user_id.strip()
|
||||
info.context['roles'] = user_roles
|
||||
logger.info(f" got {user_id} roles: {user_roles}")
|
||||
info.context["user_id"] = user_id.strip()
|
||||
info.context["roles"] = user_roles
|
||||
return await f(*args, **kwargs)
|
||||
|
||||
return decorated_function
|
||||
|
@ -130,7 +135,7 @@ def login_required(f):
|
|||
def auth_request(f):
|
||||
@wraps(f)
|
||||
async def decorated_function(*args, **kwargs):
|
||||
user_id = ''
|
||||
user_id = ""
|
||||
user_roles = []
|
||||
req = {}
|
||||
try:
|
||||
|
@ -142,9 +147,9 @@ def auth_request(f):
|
|||
traceback.print_exc()
|
||||
logger.error(f"Failed to authenticate user: {args} {e}")
|
||||
if user_id:
|
||||
logger.info(f' got {user_id} roles: {user_roles}')
|
||||
req['user_id'] = user_id.strip()
|
||||
req['roles'] = user_roles
|
||||
logger.info(f" got {user_id} roles: {user_roles}")
|
||||
req["user_id"] = user_id.strip()
|
||||
req["roles"] = user_roles
|
||||
return await f(*args, **kwargs)
|
||||
|
||||
return decorated_function
|
||||
|
|
|
@ -13,32 +13,33 @@ from services.logger import root_logger as logger
|
|||
from settings import DB_URL
|
||||
|
||||
# Create a cache region with a 300-second TTL
|
||||
cache_region = make_region().configure(
|
||||
'dogpile.cache.memory',
|
||||
expiration_time=300
|
||||
)
|
||||
cache_region = make_region().configure("dogpile.cache.memory", expiration_time=300)
|
||||
|
||||
# SQLAlchemy database connection
|
||||
engine = create_engine(DB_URL, echo=False, pool_size=10, max_overflow=20)
|
||||
T = TypeVar('T')
|
||||
T = TypeVar("T")
|
||||
REGISTRY: Dict[str, type] = {}
|
||||
Base = declarative_base()
|
||||
|
||||
|
||||
# Hooks for logging SQLAlchemy queries
|
||||
@event.listens_for(Engine, 'before_cursor_execute')
|
||||
@event.listens_for(Engine, "before_cursor_execute")
|
||||
def before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
|
||||
conn.info.setdefault('query_start_time', []).append(time.time())
|
||||
conn.info.setdefault("query_start_time", []).append(time.time())
|
||||
|
||||
@event.listens_for(Engine, 'after_cursor_execute')
|
||||
|
||||
@event.listens_for(Engine, "after_cursor_execute")
|
||||
def after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
|
||||
total = time.time() - conn.info['query_start_time'].pop(-1)
|
||||
stars = '*' * math.floor(total*1000)
|
||||
total = time.time() - conn.info["query_start_time"].pop(-1)
|
||||
stars = "*" * math.floor(total * 1000)
|
||||
if stars:
|
||||
logger.debug(f'{statement}\n{stars} {total*1000} s\n')
|
||||
logger.debug(f"{statement}\n{stars} {total*1000} s\n")
|
||||
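# Note on the log line above: `total` is measured in seconds, so `total * 1000` is
# milliseconds even though the message labels it "s"; a 4 ms query would be rendered
# roughly as "**** 4.0… s", and the star bar length also scales with milliseconds.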
|
||||
def local_session(src=''):
|
||||
|
||||
def local_session(src=""):
|
||||
return Session(bind=engine, expire_on_commit=False)
|
||||
|
||||
|
||||
class Base(declarative_base()):
|
||||
__table__: Table
|
||||
__tablename__: str
|
||||
|
@ -46,7 +47,7 @@ class Base(declarative_base()):
|
|||
__init__: Callable
|
||||
__allow_unmapped__ = True
|
||||
__abstract__ = True
|
||||
__table_args__ = {'extend_existing': True}
|
||||
__table_args__ = {"extend_existing": True}
|
||||
|
||||
id = Column(Integer, primary_key=True)
|
||||
|
||||
|
@ -55,12 +56,12 @@ class Base(declarative_base()):
|
|||
|
||||
def dict(self) -> Dict[str, Any]:
|
||||
column_names = self.__table__.columns.keys()
|
||||
if '_sa_instance_state' in column_names:
|
||||
column_names.remove('_sa_instance_state')
|
||||
if "_sa_instance_state" in column_names:
|
||||
column_names.remove("_sa_instance_state")
|
||||
try:
|
||||
return {c: getattr(self, c) for c in column_names}
|
||||
except Exception as e:
|
||||
logger.error(f'Error occurred while converting object to dictionary: {e}')
|
||||
logger.error(f"Error occurred while converting object to dictionary: {e}")
|
||||
return {}
|
||||
|
||||
def update(self, values: Dict[str, Any]) -> None:
|
||||
|
@ -68,6 +69,7 @@ class Base(declarative_base()):
|
|||
if hasattr(self, key):
|
||||
setattr(self, key, value)
|
||||
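# Usage sketch for the dict()/update() helpers above, which the resolvers rely on as
# `Shout.update(shout, shout_dict)`; the import path and slug below are illustrative
# assumptions, not taken from this diff.
from orm.author import Author  # assumed model import path

with local_session() as session:
    author = session.query(Author).filter(Author.slug == "some-author").first()
    if author:
        author.update({"bio": "updated bio", "unknown_field": "skipped by hasattr()"})
        session.commit()
        print(author.dict())  # plain dict of column values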
|
||||
|
||||
# Decorator for caching method results
|
||||
def cache_method(cache_key: str):
|
||||
def decorator(f):
|
||||
|
@ -82,5 +84,7 @@ def cache_method(cache_key: str):
|
|||
result = f(*args, **kwargs)
|
||||
cache_region.set(key, result)
|
||||
return result
|
||||
|
||||
return decorated_function
|
||||
|
||||
return decorator
|
||||
|
|
|
@ -29,19 +29,19 @@ def apply_diff(original, diff):
|
|||
The modified string.
|
||||
"""
|
||||
result = []
|
||||
pattern = re.compile(r'^(\+|-) ')
|
||||
pattern = re.compile(r"^(\+|-) ")
|
||||
|
||||
for line in diff:
|
||||
match = pattern.match(line)
|
||||
if match:
|
||||
op = match.group(1)
|
||||
content = line[2:]
|
||||
if op == '+':
|
||||
if op == "+":
|
||||
result.append(content)
|
||||
elif op == '-':
|
||||
elif op == "-":
|
||||
# Ignore deleted lines
|
||||
pass
|
||||
else:
|
||||
result.append(line)
|
||||
|
||||
return ' '.join(result)
|
||||
return " ".join(result)
|
||||
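# Usage sketch for apply_diff(): lines prefixed with "+ " are kept (prefix stripped),
# "- " lines are dropped, anything else passes through unchanged before being joined
# with spaces. The inputs below are made up.
diff = ["the", "- quick", "+ slow", "brown", "fox"]
print(apply_diff("the quick brown fox", diff))  # -> "the slow brown fox"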
|
|
|
@@ -3,19 +3,19 @@ import colorlog

# Define the color scheme
color_scheme = {
    'DEBUG': 'light_black',
    'INFO': 'green',
    'WARNING': 'yellow',
    'ERROR': 'red',
    'CRITICAL': 'red,bg_white',
    "DEBUG": "light_black",
    "INFO": "green",
    "WARNING": "yellow",
    "ERROR": "red",
    "CRITICAL": "red,bg_white",
}

# Define secondary log colors
secondary_colors = {
    'log_name': {'DEBUG': 'blue'},
    'asctime': {'DEBUG': 'cyan'},
    'process': {'DEBUG': 'purple'},
    'module': {'DEBUG': 'light_black,bg_blue'},
    "log_name": {"DEBUG": "blue"},
    "asctime": {"DEBUG": "cyan"},
    "process": {"DEBUG": "purple"},
    "module": {"DEBUG": "light_black,bg_blue"},
}

# Define the log format string

@@ -23,30 +23,30 @@ fmt_string = "%(log_color)s%(levelname)s: %(log_color)s[%(module)s]%(reset)s %(w

# Define formatting configuration
fmt_config = {
    'log_colors': color_scheme,
    'secondary_log_colors': secondary_colors,
    'style': '%',
    'reset': True
    "log_colors": color_scheme,
    "secondary_log_colors": secondary_colors,
    "style": "%",
    "reset": True,
}


class MultilineColoredFormatter(colorlog.ColoredFormatter):
    def format(self, record):
        # Check if the message is multiline
        if record.getMessage() and '\n' in record.getMessage():
        if record.getMessage() and "\n" in record.getMessage():
            # Split the message into lines
            lines = record.getMessage().split('\n')
            lines = record.getMessage().split("\n")
            formatted_lines = []
            for line in lines:
                # Format each line with the provided format
                formatted_lines.append(super().format(record))
            # Join the formatted lines
            return '\n'.join(formatted_lines)
            return "\n".join(formatted_lines)
        else:
            # If not multiline or no message, use the default formatting
            return super().format(record)


# Create a MultilineColoredFormatter object for colorized logging
formatter = MultilineColoredFormatter(fmt_string, **fmt_config)

@@ -55,8 +55,7 @@ stream = logging.StreamHandler()
stream.setFormatter(formatter)


def get_colorful_logger(name='main'):
def get_colorful_logger(name="main"):
    # Create and configure the logger
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)

@@ -64,6 +63,7 @@ def get_colorful_logger(name='main'):

    return logger


# Set up the root logger with the same formatting
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)

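A short usage sketch for this logger module, assuming it lives at services/logger.py as the imports elsewhere in this diff suggest; the messages are illustrative.

# Usage sketch; assumes the services package from this repo is on the import path.
from services.logger import root_logger as logger

logger.info("starting up")
logger.debug("step one\nstep two")  # multiline messages go through MultilineColoredFormatter
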
@@ -3,43 +3,43 @@ import json
from services.rediscache import redis


async def notify_reaction(reaction, action: str = 'create'):
    channel_name = 'reaction'
    data = {'payload': reaction, 'action': action}
async def notify_reaction(reaction, action: str = "create"):
    channel_name = "reaction"
    data = {"payload": reaction, "action": action}
    try:
        await redis.publish(channel_name, json.dumps(data))
    except Exception as e:
        print(f'[services.notify] Failed to publish to channel {channel_name}: {e}')
        print(f"[services.notify] Failed to publish to channel {channel_name}: {e}")


async def notify_shout(shout, action: str = 'update'):
    channel_name = 'shout'
    data = {'payload': shout, 'action': action}
async def notify_shout(shout, action: str = "update"):
    channel_name = "shout"
    data = {"payload": shout, "action": action}
    try:
        await redis.publish(channel_name, json.dumps(data))
    except Exception as e:
        print(f'[services.notify] Failed to publish to channel {channel_name}: {e}')
        print(f"[services.notify] Failed to publish to channel {channel_name}: {e}")


async def notify_follower(follower: dict, author_id: int, action: str = 'follow'):
    channel_name = f'follower:{author_id}'
async def notify_follower(follower: dict, author_id: int, action: str = "follow"):
    channel_name = f"follower:{author_id}"
    try:
        # Simplify dictionary before publishing
        simplified_follower = {k: follower[k] for k in ['id', 'name', 'slug', 'pic']}
        simplified_follower = {k: follower[k] for k in ["id", "name", "slug", "pic"]}

        data = {'payload': simplified_follower, 'action': action}
        data = {"payload": simplified_follower, "action": action}

        # Convert data to JSON string
        json_data = json.dumps(data)

        # Ensure the data is not empty before publishing
        if not json_data:
            raise ValueError('Empty data to publish.')
            raise ValueError("Empty data to publish.")

        # Use the 'await' keyword when publishing
        await redis.publish(channel_name, json_data)

    except Exception as e:
        # Log the error and re-raise it
        print(f'[services.notify] Failed to publish to channel {channel_name}: {e}')
        print(f"[services.notify] Failed to publish to channel {channel_name}: {e}")
        raise

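A hedged sketch of a typical call site for these publishers. The follower dict shape follows the keys filtered above; the module path services.notify, the author id, and the event-loop wrapper are illustrative and assume a reachable Redis.

# Illustrative call site; assumes Redis is reachable and the services package is importable.
import asyncio

from services.notify import notify_follower
from services.rediscache import redis


async def on_follow():
    await redis.connect()
    follower = {"id": 1, "name": "Anna", "slug": "anna", "pic": ""}
    # Publishes {"payload": {...}, "action": "follow"} to the "follower:42" channel
    await notify_follower(follower, 42, "follow")
    await redis.disconnect()


asyncio.run(on_follow())
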
@@ -4,7 +4,6 @@ from services.logger import root_logger as logger
from settings import REDIS_URL


class RedisCache:
    def __init__(self, uri=REDIS_URL):
        self._uri: str = uri

@@ -21,7 +20,7 @@ class RedisCache:
    async def execute(self, command, *args, **kwargs):
        if self._client:
            try:
                logger.debug(f'{command} {args} {kwargs}')
                logger.debug(f"{command} {args} {kwargs}")
                r = await self._client.execute_command(command, *args, **kwargs)
                logger.debug(type(r))
                logger.debug(r)

@@ -52,4 +51,4 @@ class RedisCache:

redis = RedisCache()

__all__ = ['redis']
__all__ = ["redis"]

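A usage sketch for the shared client, mirroring the SETEX-style calls made elsewhere in this diff. It assumes redis.connect() has the same role as in the application's startup hooks and that a Redis server is reachable; the key and payload are illustrative.

import asyncio
import json

from services.rediscache import redis


async def demo():
    await redis.connect()
    # Cache a payload for an hour, then read it back
    await redis.execute("SETEX", "search:example", 3600, json.dumps([{"slug": "example"}]))
    raw = await redis.execute("GET", "search:example")
    print(json.loads(raw) if raw else [])
    await redis.disconnect()


asyncio.run(demo())
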
@@ -7,67 +7,69 @@ from opensearchpy import OpenSearch
from services.logger import root_logger as logger
from services.rediscache import redis

ELASTIC_HOST = os.environ.get('ELASTIC_HOST', '').replace('https://', '')
ELASTIC_USER = os.environ.get('ELASTIC_USER', '')
ELASTIC_PASSWORD = os.environ.get('ELASTIC_PASSWORD', '')
ELASTIC_PORT = os.environ.get('ELASTIC_PORT', 9200)
ELASTIC_AUTH = f'{ELASTIC_USER}:{ELASTIC_PASSWORD}' if ELASTIC_USER else ''
ELASTIC_URL = os.environ.get('ELASTIC_URL', f'https://{ELASTIC_AUTH}@{ELASTIC_HOST}:{ELASTIC_PORT}')
ELASTIC_HOST = os.environ.get("ELASTIC_HOST", "").replace("https://", "")
ELASTIC_USER = os.environ.get("ELASTIC_USER", "")
ELASTIC_PASSWORD = os.environ.get("ELASTIC_PASSWORD", "")
ELASTIC_PORT = os.environ.get("ELASTIC_PORT", 9200)
ELASTIC_AUTH = f"{ELASTIC_USER}:{ELASTIC_PASSWORD}" if ELASTIC_USER else ""
ELASTIC_URL = os.environ.get(
    "ELASTIC_URL", f"https://{ELASTIC_AUTH}@{ELASTIC_HOST}:{ELASTIC_PORT}"
)
REDIS_TTL = 86400  # 1 day in seconds


index_settings = {
    'settings': {
        'index': {
            'number_of_shards': 1,
            'auto_expand_replicas': '0-all',
    "settings": {
        "index": {
            "number_of_shards": 1,
            "auto_expand_replicas": "0-all",
        },
        'analysis': {
            'analyzer': {
                'ru': {
                    'tokenizer': 'standard',
                    'filter': ['lowercase', 'ru_stop', 'ru_stemmer'],
        "analysis": {
            "analyzer": {
                "ru": {
                    "tokenizer": "standard",
                    "filter": ["lowercase", "ru_stop", "ru_stemmer"],
                }
            },
            'filter': {
                'ru_stemmer': {
                    'type': 'stemmer',
                    'language': 'russian',
            "filter": {
                "ru_stemmer": {
                    "type": "stemmer",
                    "language": "russian",
                },
                'ru_stop': {
                    'type': 'stop',
                    'stopwords': '_russian_',
                "ru_stop": {
                    "type": "stop",
                    "stopwords": "_russian_",
                },
            },
        },
    },
    'mappings': {
        'properties': {
            'body': {'type': 'text', 'analyzer': 'ru'},
            'title': {'type': 'text', 'analyzer': 'ru'},
    "mappings": {
        "properties": {
            "body": {"type": "text", "analyzer": "ru"},
            "title": {"type": "text", "analyzer": "ru"},
            # 'author': {'type': 'text'},
        }
    },
}

expected_mapping = index_settings['mappings']
expected_mapping = index_settings["mappings"]


class SearchService:
    def __init__(self, index_name='search_index'):
    def __init__(self, index_name="search_index"):
        self.index_name = index_name
        self.manager = Manager()
        self.client = None

        # Используем менеджер для создания Lock и Value
        self.lock = self.manager.Lock()
        self.initialized_flag = self.manager.Value('i', 0)
        self.initialized_flag = self.manager.Value("i", 0)

        # Only initialize the instance if it's not already initialized
        if not self.initialized_flag.value and ELASTIC_HOST:
            try:
                self.client = OpenSearch(
                    hosts=[{'host': ELASTIC_HOST, 'port': ELASTIC_PORT}],
                    hosts=[{"host": ELASTIC_HOST, "port": ELASTIC_PORT}],
                    http_compress=True,
                    http_auth=(ELASTIC_USER, ELASTIC_PASSWORD),
                    use_ssl=True,
@@ -76,46 +78,50 @@ class SearchService:
                    ssl_show_warn=False,
                    # ca_certs = ca_certs_path
                )
                logger.info(' Клиент OpenSearch.org подключен')
                logger.info(" Клиент OpenSearch.org подключен")
                if self.lock.acquire(blocking=False):
                    try:
                        self.check_index()
                    finally:
                        self.lock.release()
                else:
                    logger.debug(' проверка пропущена')
                    logger.debug(" проверка пропущена")
            except Exception as exc:
                logger.error(f' {exc}')
                logger.error(f" {exc}")
                self.client = None

    def info(self):
        if isinstance(self.client, OpenSearch):
            logger.info(' Поиск подключен') # : {self.client.info()}')
            logger.info(" Поиск подключен") # : {self.client.info()}')
        else:
            logger.info(' * Задайте переменные среды для подключения к серверу поиска')
            logger.info(" * Задайте переменные среды для подключения к серверу поиска")

    def delete_index(self):
        if self.client:
            logger.debug(f' Удаляем индекс {self.index_name}')
            logger.debug(f" Удаляем индекс {self.index_name}")
            self.client.indices.delete(index=self.index_name, ignore_unavailable=True)

    def create_index(self):
        if self.client:
            if self.lock.acquire(blocking=False):
                try:
                    logger.debug(f' Создаём новый индекс: {self.index_name} ')
                    self.client.indices.create(index=self.index_name, body=index_settings)
                    logger.debug(f" Создаём новый индекс: {self.index_name} ")
                    self.client.indices.create(
                        index=self.index_name, body=index_settings
                    )
                    self.client.indices.close(index=self.index_name)
                    self.client.indices.open(index=self.index_name)
                finally:
                    self.lock.release()
            else:
                logger.debug(' ..')
                logger.debug(" ..")

    def put_mapping(self):
        if self.client:
            logger.debug(f' Разметка индекации {self.index_name}')
            self.client.indices.put_mapping(index=self.index_name, body=expected_mapping)
            logger.debug(f" Разметка индекации {self.index_name}")
            self.client.indices.put_mapping(
                index=self.index_name, body=expected_mapping
            )

    def check_index(self):
        if self.client:
@@ -136,34 +142,36 @@ class SearchService:
                    finally:
                        self.lock.release()
                else:
                    logger.debug(' ..')
                    logger.debug(" ..")

    def index(self, shout):
        if self.client:
            id_ = str(shout.id)
            logger.debug(f' Индексируем пост {id_}')
            logger.debug(f" Индексируем пост {id_}")
            self.client.index(index=self.index_name, id=id_, body=shout.dict())

    async def search(self, text, limit, offset):
        logger.debug(f' Ищем: {text}')
        logger.debug(f" Ищем: {text}")
        search_body = {
            'query': {'match': {'_all': text}},
            "query": {"match": {"_all": text}},
        }
        if self.client:
            search_response = self.client.search(index=self.index_name, body=search_body, size=limit, from_=offset)
            hits = search_response['hits']['hits']
            search_response = self.client.search(
                index=self.index_name, body=search_body, size=limit, from_=offset
            )
            hits = search_response["hits"]["hits"]

            results = [
                {
                    **hit['_source'],
                    'score': hit['_score'],
                    **hit["_source"],
                    "score": hit["_score"],
                }
                for hit in hits
            ]

            # Use Redis as cache with TTL
            redis_key = f'search:{text}'
            await redis.execute('SETEX', redis_key, REDIS_TTL, json.dumps(results))
            redis_key = f"search:{text}"
            await redis.execute("SETEX", redis_key, REDIS_TTL, json.dumps(results))
        return []

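A sketch of how a caller might use this search path: search() writes the scored hits to Redis under "search:<text>" with a one-day TTL and itself returns [], so the cached JSON is read back explicitly. The module paths, the query text, and the connected services are assumptions.

# Sketch under assumptions: ELASTIC_* variables configured, Redis reachable,
# and module paths services.search / services.rediscache as in this repo.
import asyncio
import json

from services.rediscache import redis
from services.search import search_service


async def find(text: str, limit: int = 10, offset: int = 0):
    await redis.connect()
    # search() caches results under "search:<text>" and returns [],
    # so the cached JSON is read back explicitly here.
    await search_service.search(text, limit, offset)
    cached = await redis.execute("GET", f"search:{text}")
    return json.loads(cached) if cached else []


print(asyncio.run(find("культура")))
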
@@ -22,9 +22,9 @@ def start_sentry():
            integrations=[
                StarletteIntegration(),
                AriadneIntegration(),
                SqlalchemyIntegration()
            ]
                SqlalchemyIntegration(),
            ],
        )
    except Exception as e:
        print('[services.sentry] init error')
        print("[services.sentry] init error")
        print(e)

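For completeness, an illustrative startup wiring for this helper; the module path services.sentry and the presence of SENTRY_DSN in settings are assumptions.

# Illustrative only: enable Sentry before the app starts serving requests.
from services.sentry import start_sentry

start_sentry()  # any init failure is caught and printed as "[services.sentry] init error"
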
@@ -4,7 +4,7 @@ from services.rediscache import redis


async def get_unread_counter(chat_id: str, author_id: int) -> int:
    r = await redis.execute('LLEN', f'chats/{chat_id}/unread/{author_id}')
    r = await redis.execute("LLEN", f"chats/{chat_id}/unread/{author_id}")
    if isinstance(r, str):
        return int(r)
    elif isinstance(r, int):

@@ -14,7 +14,7 @@ async def get_unread_counter(chat_id: str, author_id: int) -> int:


async def get_total_unread_counter(author_id: int) -> int:
    chats_set = await redis.execute('SMEMBERS', f'chats_by_author/{author_id}')
    chats_set = await redis.execute("SMEMBERS", f"chats_by_author/{author_id}")
    s = 0
    if isinstance(chats_set, str):
        chats_set = json.loads(chats_set)

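A hedged call-site sketch for the two counters above; the module path services.unread, the chat id, and the author id are illustrative, and a reachable Redis is assumed.

import asyncio

from services.rediscache import redis
from services.unread import get_total_unread_counter, get_unread_counter


async def demo(author_id: int = 1):
    await redis.connect()
    # LLEN chats/<chat_id>/unread/<author_id> for one chat,
    # SMEMBERS chats_by_author/<author_id> to enumerate chats for the total
    per_chat = await get_unread_counter("chat-uuid", author_id)
    total = await get_total_unread_counter(author_id)
    print(per_chat, total)


asyncio.run(demo())
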
@@ -7,7 +7,12 @@ from typing import Dict

# ga
from google.analytics.data_v1beta import BetaAnalyticsDataClient
from google.analytics.data_v1beta.types import DateRange, Dimension, Metric, RunReportRequest
from google.analytics.data_v1beta.types import (
    DateRange,
    Dimension,
    Metric,
    RunReportRequest,
)

from orm.author import Author
from orm.shout import Shout, ShoutAuthor, ShoutTopic

@@ -15,9 +20,9 @@ from orm.topic import Topic
from services.db import local_session
from services.logger import root_logger as logger

GOOGLE_KEYFILE_PATH = os.environ.get('GOOGLE_KEYFILE_PATH', '/dump/google-service.json')
GOOGLE_PROPERTY_ID = os.environ.get('GOOGLE_PROPERTY_ID', '')
VIEWS_FILEPATH = '/dump/views.json'
GOOGLE_KEYFILE_PATH = os.environ.get("GOOGLE_KEYFILE_PATH", "/dump/google-service.json")
GOOGLE_PROPERTY_ID = os.environ.get("GOOGLE_PROPERTY_ID", "")
VIEWS_FILEPATH = "/dump/views.json"


class ViewedStorage:

@@ -37,30 +42,32 @@ class ViewedStorage:
        """Подключение к клиенту Google Analytics с использованием аутентификации"""
        self = ViewedStorage
        async with self.lock:
            os.environ.setdefault('GOOGLE_APPLICATION_CREDENTIALS', GOOGLE_KEYFILE_PATH)
            os.environ.setdefault("GOOGLE_APPLICATION_CREDENTIALS", GOOGLE_KEYFILE_PATH)
            if GOOGLE_KEYFILE_PATH and os.path.isfile(GOOGLE_KEYFILE_PATH):
                # Using a default constructor instructs the client to use the credentials
                # specified in GOOGLE_APPLICATION_CREDENTIALS environment variable.
                self.analytics_client = BetaAnalyticsDataClient()
                logger.info(' * Клиент Google Analytics успешно авторизован')
                logger.info(" * Клиент Google Analytics успешно авторизован")

                # Загрузка предварительно подсчитанных просмотров из файла JSON
                self.load_precounted_views()

                if os.path.exists(VIEWS_FILEPATH):
                    file_timestamp = os.path.getctime(VIEWS_FILEPATH)
                    self.start_date = datetime.fromtimestamp(file_timestamp).strftime('%Y-%m-%d')
                    now_date = datetime.now().strftime('%Y-%m-%d')
                    self.start_date = datetime.fromtimestamp(file_timestamp).strftime(
                        "%Y-%m-%d"
                    )
                    now_date = datetime.now().strftime("%Y-%m-%d")

                    if now_date == self.start_date:
                        logger.info(' * Данные актуализованы!')
                        logger.info(" * Данные актуализованы!")
                    else:
                        logger.info(f' * Миграция проводилась: {self.start_date}')
                        logger.info(f" * Миграция проводилась: {self.start_date}")

                # Запуск фоновой задачи
                asyncio.create_task(self.worker())
            else:
                logger.info(' * Пожалуйста, добавьте ключевой файл Google Analytics')
                logger.info(" * Пожалуйста, добавьте ключевой файл Google Analytics")
                self.disabled = True

    @staticmethod

@@ -68,28 +75,32 @@ class ViewedStorage:
        """Загрузка предварительно подсчитанных просмотров из файла JSON"""
        self = ViewedStorage
        try:
            with open(VIEWS_FILEPATH, 'r') as file:
            with open(VIEWS_FILEPATH, "r") as file:
                precounted_views = json.load(file)
                self.views_by_shout.update(precounted_views)
                logger.info(f' * {len(precounted_views)} публикаций с просмотрами успешно загружены.')
                logger.info(
                    f" * {len(precounted_views)} публикаций с просмотрами успешно загружены."
                )
        except Exception as e:
            logger.error(f'Ошибка загрузки предварительно подсчитанных просмотров: {e}')
            logger.error(f"Ошибка загрузки предварительно подсчитанных просмотров: {e}")

    @staticmethod
    async def update_pages():
        """Запрос всех страниц от Google Analytics, отсортированных по количеству просмотров"""
        self = ViewedStorage
        logger.info(' ⎧ Обновление данных просмотров от Google Analytics ---')
        logger.info(" ⎧ Обновление данных просмотров от Google Analytics ---")
        if not self.disabled:
            try:
                start = time.time()
                async with self.lock:
                    if self.analytics_client:
                        request = RunReportRequest(
                            property=f'properties/{GOOGLE_PROPERTY_ID}',
                            dimensions=[Dimension(name='pagePath')],
                            metrics=[Metric(name='screenPageViews')],
                            date_ranges=[DateRange(start_date=self.start_date, end_date='today')],
                            property=f"properties/{GOOGLE_PROPERTY_ID}",
                            dimensions=[Dimension(name="pagePath")],
                            metrics=[Metric(name="screenPageViews")],
                            date_ranges=[
                                DateRange(start_date=self.start_date, end_date="today")
                            ],
                        )
                        response = self.analytics_client.run_report(request)
                        if response and isinstance(response.rows, list):
@@ -102,21 +113,23 @@ class ViewedStorage:
                            # Извлечение путей страниц из ответа Google Analytics
                            if isinstance(row.dimension_values, list):
                                page_path = row.dimension_values[0].value
                                slug = page_path.split('discours.io/')[-1]
                                slug = page_path.split("discours.io/")[-1]
                                views_count = int(row.metric_values[0].value)

                                # Обновление данных в хранилище
                                self.views_by_shout[slug] = self.views_by_shout.get(slug, 0)
                                self.views_by_shout[slug] = self.views_by_shout.get(
                                    slug, 0
                                )
                                self.views_by_shout[slug] += views_count
                                self.update_topics(slug)

                                # Запись путей страниц для логирования
                                slugs.add(slug)

                        logger.info(f' ⎪ Собрано страниц: {len(slugs)} ')
                        logger.info(f" ⎪ Собрано страниц: {len(slugs)} ")

                end = time.time()
                logger.info(' ⎪ Обновление страниц заняло %fs ' % (end - start))
                logger.info(" ⎪ Обновление страниц заняло %fs " % (end - start))
            except Exception as error:
                logger.error(error)

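To make the transform in this hunk concrete: each Analytics row carries a pagePath dimension and a screenPageViews metric, which are reduced to a slug and an accumulated counter. A stand-alone sketch with plain values in place of the GA row object:

from typing import Dict

views_by_shout: Dict[str, int] = {}


def ingest(page_path: str, views: str) -> None:
    # Mirrors the loop above: strip the site prefix, accumulate a per-slug counter.
    slug = page_path.split("discours.io/")[-1]
    views_by_shout[slug] = views_by_shout.get(slug, 0) + int(views)


ingest("https://discours.io/some-article", "42")
ingest("https://discours.io/some-article", "3")
print(views_by_shout)  # {'some-article': 45}
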
@@ -165,12 +178,20 @@ class ViewedStorage:

            # Обновление тем и авторов с использованием вспомогательной функции
            for [_shout_topic, topic] in (
                session.query(ShoutTopic, Topic).join(Topic).join(Shout).where(Shout.slug == shout_slug).all()
                session.query(ShoutTopic, Topic)
                .join(Topic)
                .join(Shout)
                .where(Shout.slug == shout_slug)
                .all()
            ):
                update_groups(self.shouts_by_topic, topic.slug, shout_slug)

            for [_shout_topic, author] in (
                session.query(ShoutAuthor, Author).join(Author).join(Shout).where(Shout.slug == shout_slug).all()
                session.query(ShoutAuthor, Author)
                .join(Author)
                .join(Shout)
                .where(Shout.slug == shout_slug)
                .all()
            ):
                update_groups(self.shouts_by_author, author.slug, shout_slug)

@@ -188,15 +209,18 @@ class ViewedStorage:
                failed = 0
            except Exception as _exc:
                failed += 1
                logger.info(' - Обновление не удалось #%d, ожидание 10 секунд' % failed)
                logger.info(" - Обновление не удалось #%d, ожидание 10 секунд" % failed)
                if failed > 3:
                    logger.info(' - Больше не пытаемся обновить')
                    logger.info(" - Больше не пытаемся обновить")
                    break
            if failed == 0:
                when = datetime.now(timezone.utc) + timedelta(seconds=self.period)
                t = format(when.astimezone().isoformat())
                logger.info(' ⎩ Следующее обновление: %s' % (t.split('T')[0] + ' ' + t.split('T')[1].split('.')[0]))
                logger.info(
                    " ⎩ Следующее обновление: %s"
                    % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0])
                )
                await asyncio.sleep(self.period)
            else:
                await asyncio.sleep(10)
                logger.info(' - Попытка снова обновить данные')
                logger.info(" - Попытка снова обновить данные")

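The worker hunk above implements a simple retry policy: consecutive failures back off for 10 seconds and give up after more than three, while successful runs sleep until the next period. A compressed, self-contained sketch of that control flow, with the update call stubbed out; in the real class the coroutine is launched via asyncio.create_task(self.worker()) as shown in init above.

import asyncio


async def worker(period: int = 5, max_failures: int = 3):
    failed = 0
    while True:
        try:
            await asyncio.sleep(0)  # stand-in for ViewedStorage.update_pages()
            failed = 0
        except Exception:
            failed += 1
            if failed > max_failures:
                break  # give up, as in the hunk above
        if failed == 0:
            await asyncio.sleep(period)  # wait for the next scheduled update
        else:
            await asyncio.sleep(10)  # brief pause before retrying
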
@@ -15,22 +15,26 @@ class WebhookEndpoint(HTTPEndpoint):
        try:
            data = await request.json()
            if data:
                auth = request.headers.get('Authorization')
                auth = request.headers.get("Authorization")
                if auth:
                    if auth == os.environ.get('WEBHOOK_SECRET'):
                        user_id: str = data['user']['id']
                        name: str = data['user']['given_name']
                        slug: str = data['user']['email'].split('@')[0]
                        slug: str = re.sub('[^0-9a-z]+', '-', slug.lower())
                    if auth == os.environ.get("WEBHOOK_SECRET"):
                        user_id: str = data["user"]["id"]
                        name: str = data["user"]["given_name"]
                        slug: str = data["user"]["email"].split("@")[0]
                        slug: str = re.sub("[^0-9a-z]+", "-", slug.lower())
                        with local_session() as session:
                            author = session.query(Author).filter(Author.slug == slug).first()
                            author = (
                                session.query(Author)
                                .filter(Author.slug == slug)
                                .first()
                            )
                            if author:
                                slug = slug + '-' + user_id.split('-').pop()
                                slug = slug + "-" + user_id.split("-").pop()
                            await create_author(user_id, slug, name)

            return JSONResponse({'status': 'success'})
            return JSONResponse({"status": "success"})
        except Exception as e:
            import traceback

            traceback.print_exc()
            return JSONResponse({'status': 'error', 'message': str(e)}, status_code=500)
            return JSONResponse({"status": "error", "message": str(e)}, status_code=500)

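For reference, a hedged sketch of the request this endpoint expects: a JSON body with user.id, user.given_name and user.email, plus the shared secret in the Authorization header. The host, port, secret value, and the requests client are placeholders for illustration only.

import requests  # illustrative client; any HTTP client works

payload = {
    "user": {
        "id": "auth-user-uuid",
        "given_name": "Anna",
        "email": "anna@example.com",
    }
}
resp = requests.post(
    "http://localhost:8080/new-author",  # route registered for WebhookEndpoint
    json=payload,
    headers={"Authorization": "<WEBHOOK_SECRET value>"},  # must match os.environ["WEBHOOK_SECRET"]
)
print(resp.status_code, resp.json())  # {"status": "success"} on success
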
20
settings.py

@@ -3,15 +3,15 @@ from os import environ

PORT = 8080
DB_URL = (
    environ.get('DATABASE_URL', '').replace('postgres://', 'postgresql://')
    or environ.get('DB_URL', '').replace('postgres://', 'postgresql://')
    or 'postgresql://postgres@localhost:5432/discoursio'
    environ.get("DATABASE_URL", "").replace("postgres://", "postgresql://")
    or environ.get("DB_URL", "").replace("postgres://", "postgresql://")
    or "postgresql://postgres@localhost:5432/discoursio"
)
REDIS_URL = environ.get('REDIS_URL') or 'redis://127.0.0.1'
API_BASE = environ.get('API_BASE') or ''
AUTH_URL = environ.get('AUTH_URL') or ''
SENTRY_DSN = environ.get('SENTRY_DSN')
DEV_SERVER_PID_FILE_NAME = 'dev-server.pid'
MODE = 'development' if 'dev' in sys.argv else 'production'
REDIS_URL = environ.get("REDIS_URL") or "redis://127.0.0.1"
API_BASE = environ.get("API_BASE") or ""
AUTH_URL = environ.get("AUTH_URL") or ""
SENTRY_DSN = environ.get("SENTRY_DSN")
DEV_SERVER_PID_FILE_NAME = "dev-server.pid"
MODE = "development" if "dev" in sys.argv else "production"

ADMIN_SECRET = environ.get('AUTH_SECRET') or 'nothing'
ADMIN_SECRET = environ.get("AUTH_SECRET") or "nothing"
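A small check of the DB_URL normalisation above: a Heroku-style postgres:// URL is rewritten to the postgresql:// scheme SQLAlchemy expects, and the localhost default applies when neither variable is set. The environment value is illustrative.

import os

os.environ["DATABASE_URL"] = "postgres://user:pass@db.example.com:5432/discoursio"

DB_URL = (
    os.environ.get("DATABASE_URL", "").replace("postgres://", "postgresql://")
    or os.environ.get("DB_URL", "").replace("postgres://", "postgresql://")
    or "postgresql://postgres@localhost:5432/discoursio"
)
print(DB_URL)  # postgresql://user:pass@db.example.com:5432/discoursio
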