Revert "Feature/lint"

Kosta authored 2023-10-27 00:07:35 +03:00, committed by GitHub
parent 05136699ee
commit b142949805
70 changed files with 1465 additions and 1223 deletions


@@ -18,7 +18,12 @@ class Following:
 class FollowingManager:
     lock = asyncio.Lock()
-    data = {"author": [], "topic": [], "shout": [], "chat": []}
+    data = {
+        'author': [],
+        'topic': [],
+        'shout': [],
+        'chat': []
+    }
     @staticmethod
     async def register(kind, uid):
@@ -34,13 +39,13 @@ class FollowingManager:
     async def push(kind, payload):
         try:
             async with FollowingManager.lock:
-                if kind == "chat":
-                    for chat in FollowingManager["chat"]:
+                if kind == 'chat':
+                    for chat in FollowingManager['chat']:
                         if payload.message["chatId"] == chat.uid:
                             chat.queue.put_nowait(payload)
                 else:
                     for entity in FollowingManager[kind]:
-                        if payload.shout["createdBy"] == entity.uid:
+                        if payload.shout['createdBy'] == entity.uid:
                             entity.queue.put_nowait(payload)
         except Exception as e:
             print(Exception(e))


@@ -1,13 +1,13 @@
-from base.orm import local_session
 from services.search import SearchService
 from services.stat.viewed import ViewedStorage
+from base.orm import local_session
 async def storages_init():
     with local_session() as session:
-        print("[main] initialize SearchService")
+        print('[main] initialize SearchService')
         await SearchService.init(session)
-        print("[main] SearchService initialized")
-        print("[main] initialize storages")
+        print('[main] SearchService initialized')
+        print('[main] initialize storages')
         await ViewedStorage.init()
-        print("[main] storages initialized")
+        print('[main] storages initialized')


@@ -1,17 +1,21 @@
from base.orm import local_session
import asyncio
import json
from datetime import datetime, timezone
from orm import Notification, Reaction, Shout, User
from sqlalchemy import and_
from base.orm import local_session
from orm import Reaction, Shout, Notification, User
from orm.notification import NotificationType
from orm.reaction import ReactionKind
from services.notifications.sse import connection_manager
from sqlalchemy import and_
import asyncio
import json
 def shout_to_shout_data(shout):
-    return {"title": shout.title, "slug": shout.slug}
+    return {
+        "title": shout.title,
+        "slug": shout.slug
+    }
 def user_to_user_data(user):
@@ -19,14 +23,14 @@ def user_to_user_data(user):
"id": user.id,
"name": user.name,
"slug": user.slug,
"userpic": user.userpic,
"userpic": user.userpic
}
def update_prev_notification(notification, user, reaction):
notification_data = json.loads(notification.data)
notification_data["users"] = [u for u in notification_data["users"] if u["id"] != user.id]
notification_data["users"] = [u for u in notification_data["users"] if u['id'] != user.id]
notification_data["users"].append(user_to_user_data(user))
if notification_data["reactionIds"] is None:
@@ -53,45 +57,34 @@ class NewReactionNotificator:
             if reaction.kind == ReactionKind.COMMENT:
                 parent_reaction = None
                 if reaction.replyTo:
-                    parent_reaction = (
-                        session.query(Reaction).where(Reaction.id == reaction.replyTo).one()
-                    )
+                    parent_reaction = session.query(Reaction).where(Reaction.id == reaction.replyTo).one()
                     if parent_reaction.createdBy != reaction.createdBy:
-                        prev_new_reply_notification = (
-                            session.query(Notification)
-                            .where(
-                                and_(
-                                    Notification.user == shout.createdBy,
-                                    Notification.type == NotificationType.NEW_REPLY,
-                                    Notification.shout == shout.id,
-                                    Notification.reaction == parent_reaction.id,
-                                    Notification.seen == False, # noqa: E712
-                                )
+                        prev_new_reply_notification = session.query(Notification).where(
+                            and_(
+                                Notification.user == shout.createdBy,
+                                Notification.type == NotificationType.NEW_REPLY,
+                                Notification.shout == shout.id,
+                                Notification.reaction == parent_reaction.id,
+                                Notification.seen == False
                             )
-                            .first()
-                        )
+                        ).first()
                         if prev_new_reply_notification:
                             update_prev_notification(prev_new_reply_notification, user, reaction)
                         else:
-                            reply_notification_data = json.dumps(
-                                {
-                                    "shout": shout_to_shout_data(shout),
-                                    "users": [user_to_user_data(user)],
-                                    "reactionIds": [reaction.id],
-                                },
-                                ensure_ascii=False,
-                            )
+                            reply_notification_data = json.dumps({
+                                "shout": shout_to_shout_data(shout),
+                                "users": [user_to_user_data(user)],
+                                "reactionIds": [reaction.id]
+                            }, ensure_ascii=False)
-                            reply_notification = Notification.create(
-                                **{
-                                    "user": parent_reaction.createdBy,
-                                    "type": NotificationType.NEW_REPLY,
-                                    "shout": shout.id,
-                                    "reaction": parent_reaction.id,
-                                    "data": reply_notification_data,
-                                }
-                            )
+                            reply_notification = Notification.create(**{
+                                "user": parent_reaction.createdBy,
+                                "type": NotificationType.NEW_REPLY,
+                                "shout": shout.id,
+                                "reaction": parent_reaction.id,
+                                "data": reply_notification_data
+                            })
                             session.add(reply_notification)
@@ -100,39 +93,30 @@ class NewReactionNotificator:
                 if reaction.createdBy != shout.createdBy and (
                     parent_reaction is None or parent_reaction.createdBy != shout.createdBy
                 ):
-                    prev_new_comment_notification = (
-                        session.query(Notification)
-                        .where(
-                            and_(
-                                Notification.user == shout.createdBy,
-                                Notification.type == NotificationType.NEW_COMMENT,
-                                Notification.shout == shout.id,
-                                Notification.seen == False, # noqa: E712
-                            )
+                    prev_new_comment_notification = session.query(Notification).where(
+                        and_(
+                            Notification.user == shout.createdBy,
+                            Notification.type == NotificationType.NEW_COMMENT,
+                            Notification.shout == shout.id,
+                            Notification.seen == False
                         )
-                        .first()
-                    )
+                    ).first()
                     if prev_new_comment_notification:
                         update_prev_notification(prev_new_comment_notification, user, reaction)
                     else:
-                        notification_data_string = json.dumps(
-                            {
-                                "shout": shout_to_shout_data(shout),
-                                "users": [user_to_user_data(user)],
-                                "reactionIds": [reaction.id],
-                            },
-                            ensure_ascii=False,
-                        )
+                        notification_data_string = json.dumps({
+                            "shout": shout_to_shout_data(shout),
+                            "users": [user_to_user_data(user)],
+                            "reactionIds": [reaction.id]
+                        }, ensure_ascii=False)
-                        author_notification = Notification.create(
-                            **{
-                                "user": shout.createdBy,
-                                "type": NotificationType.NEW_COMMENT,
-                                "shout": shout.id,
-                                "data": notification_data_string,
-                            }
-                        )
+                        author_notification = Notification.create(**{
+                            "user": shout.createdBy,
+                            "type": NotificationType.NEW_COMMENT,
+                            "shout": shout.id,
+                            "data": notification_data_string
+                        })
                         session.add(author_notification)
@@ -158,7 +142,7 @@ class NotificationService:
             try:
                 await notificator.run()
             except Exception as e:
-                print(f"[NotificationService.worker] error: {str(e)}")
+                print(f'[NotificationService.worker] error: {str(e)}')
 notification_service = NotificationService()


@@ -1,8 +1,8 @@
-import json
 from sse_starlette.sse import EventSourceResponse
 from starlette.requests import Request
+import asyncio
+import json
class ConnectionManager:
@@ -28,7 +28,9 @@ class ConnectionManager:
             return
         for connection in self.connections_by_user_id[user_id]:
-            data = {"type": "newNotifications"}
+            data = {
+                "type": "newNotifications"
+            }
             data_string = json.dumps(data, ensure_ascii=False)
             await connection.put(data_string)


@@ -1,10 +1,9 @@
-import asyncio
-import json
 from base.redis import redis
 from orm.shout import Shout
 from resolvers.zine.load import load_shouts_by
+import asyncio
+import json
 class SearchService:
     lock = asyncio.Lock()
@@ -13,7 +12,7 @@ class SearchService:
     @staticmethod
     async def init(session):
         async with SearchService.lock:
-            print("[search.service] did nothing")
+            print('[search.service] did nothing')
             SearchService.cache = {}
@staticmethod
@@ -25,7 +24,7 @@ class SearchService:
"title": text,
"body": text,
"limit": limit,
"offset": offset,
"offset": offset
}
payload = await load_shouts_by(None, None, options)
await redis.execute("SET", text, json.dumps(payload))


@@ -1,17 +1,18 @@
from base.orm import local_session
from datetime import datetime, timedelta, timezone
from gql import Client, gql
from gql.transport.aiohttp import AIOHTTPTransport
from orm import Topic
from orm.shout import Shout, ShoutTopic
import asyncio
import time
from datetime import timedelta, timezone, datetime
from os import environ, path
from ssl import create_default_context
import asyncio
import time
from gql import Client, gql
from gql.transport.aiohttp import AIOHTTPTransport
from sqlalchemy import func
load_facts = gql(
"""
from base.orm import local_session
from orm import User, Topic
from orm.shout import ShoutTopic, Shout
load_facts = gql("""
query getDomains {
domains {
id
@@ -24,11 +25,9 @@ query getDomains {
}
}
}
"""
)
""")
-load_pages = gql(
-    """
+load_pages = gql("""
query getDomains {
domains {
title
@@ -42,9 +41,8 @@ query getDomains {
}
}
}
"""
)
schema_str = open(path.dirname(__file__) + "/ackee.graphql").read()
""")
schema_str = open(path.dirname(__file__) + '/ackee.graphql').read()
token = environ.get("ACKEE_TOKEN", "")
@@ -54,8 +52,8 @@ def create_client(headers=None, schema=None):
         transport=AIOHTTPTransport(
             url="https://ackee.discours.io/api",
             ssl=create_default_context(),
-            headers=headers,
-        ),
+            headers=headers
+        )
     )
@@ -73,13 +71,13 @@ class ViewedStorage:
     @staticmethod
     async def init():
-        """graphql client connection using permanent token"""
+        """ graphql client connection using permanent token """
         self = ViewedStorage
         async with self.lock:
             if token:
-                self.client = create_client(
-                    {"Authorization": "Bearer %s" % str(token)}, schema=schema_str
-                )
+                self.client = create_client({
+                    "Authorization": "Bearer %s" % str(token)
+                }, schema=schema_str)
                 print("[stat.viewed] * authorized permanentely by ackee.discours.io: %s" % token)
             else:
                 print("[stat.viewed] * please set ACKEE_TOKEN")
@@ -87,7 +85,7 @@ class ViewedStorage:
     @staticmethod
     async def update_pages():
-        """query all the pages from ackee sorted by views count"""
+        """ query all the pages from ackee sorted by views count """
         print("[stat.viewed] ⎧ updating ackee pages data ---")
         start = time.time()
         self = ViewedStorage
@@ -98,7 +96,7 @@ class ViewedStorage:
             try:
                 for page in self.pages:
                     p = page["value"].split("?")[0]
-                    slug = p.split("discours.io/")[-1]
+                    slug = p.split('discours.io/')[-1]
                     shouts[slug] = page["count"]
                 for slug in shouts.keys():
                     await ViewedStorage.increment(slug, shouts[slug])
@@ -120,7 +118,7 @@ class ViewedStorage:
     # unused yet
     @staticmethod
     async def get_shout(shout_slug):
-        """getting shout views metric by slug"""
+        """ getting shout views metric by slug """
         self = ViewedStorage
         async with self.lock:
             shout_views = self.by_shouts.get(shout_slug)
@@ -138,7 +136,7 @@ class ViewedStorage:
     @staticmethod
     async def get_topic(topic_slug):
-        """getting topic views value summed"""
+        """ getting topic views value summed """
         self = ViewedStorage
         topic_views = 0
         async with self.lock:
@@ -148,28 +146,24 @@ class ViewedStorage:
     @staticmethod
     def update_topics(session, shout_slug):
-        """updates topics counters by shout slug"""
+        """ updates topics counters by shout slug """
         self = ViewedStorage
-        for [shout_topic, topic] in (
-            session.query(ShoutTopic, Topic)
-            .join(Topic)
-            .join(Shout)
-            .where(Shout.slug == shout_slug)
-            .all()
-        ):
+        for [shout_topic, topic] in session.query(ShoutTopic, Topic).join(Topic).join(Shout).where(
+            Shout.slug == shout_slug
+        ).all():
             if not self.by_topics.get(topic.slug):
                 self.by_topics[topic.slug] = {}
             self.by_topics[topic.slug][shout_slug] = self.by_shouts[shout_slug]
     @staticmethod
-    async def increment(shout_slug, amount=1, viewer="ackee"):
-        """the only way to change views counter"""
+    async def increment(shout_slug, amount=1, viewer='ackee'):
+        """ the only way to change views counter """
         self = ViewedStorage
         async with self.lock:
             # TODO optimize, currenty we execute 1 DB transaction per shout
             with local_session() as session:
                 shout = session.query(Shout).where(Shout.slug == shout_slug).one()
-                if viewer == "old-discours":
+                if viewer == 'old-discours':
                     # this is needed for old db migration
                     if shout.viewsOld == amount:
                         print(f"viewsOld amount: {amount}")
@@ -191,7 +185,7 @@ class ViewedStorage:
     @staticmethod
     async def worker():
-        """async task worker"""
+        """ async task worker """
         failed = 0
         self = ViewedStorage
         if self.disabled:
@@ -211,10 +205,9 @@ class ViewedStorage:
             if failed == 0:
                 when = datetime.now(timezone.utc) + timedelta(seconds=self.period)
                 t = format(when.astimezone().isoformat())
-                print(
-                    "[stat.viewed] ⎩ next update: %s"
-                    % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0])
-                )
+                print("[stat.viewed] ⎩ next update: %s" % (
+                    t.split("T")[0] + " " + t.split("T")[1].split(".")[0]
+                ))
                 await asyncio.sleep(self.period)
             else:
                 await asyncio.sleep(10)


@@ -1,8 +1,8 @@
-from pathlib import Path
-from settings import SHOUTS_REPO
 import asyncio
 import subprocess
+from pathlib import Path
+from settings import SHOUTS_REPO
 class GitTask: