Igor Lobanov
2023-10-26 22:38:31 +02:00
parent 1c49780cd4
commit c2cc428abe
64 changed files with 631 additions and 626 deletions

View File

@@ -18,7 +18,7 @@ class Following:
class FollowingManager:
lock = asyncio.Lock()
data = {'author': [], 'topic': [], 'shout': [], 'chat': []}
data = {"author": [], "topic": [], "shout": [], "chat": []}
@staticmethod
async def register(kind, uid):
@@ -34,13 +34,13 @@ class FollowingManager:
async def push(kind, payload):
try:
async with FollowingManager.lock:
if kind == 'chat':
for chat in FollowingManager['chat']:
if kind == "chat":
for chat in FollowingManager["chat"]:
if payload.message["chatId"] == chat.uid:
chat.queue.put_nowait(payload)
else:
for entity in FollowingManager[kind]:
if payload.shout['createdBy'] == entity.uid:
if payload.shout["createdBy"] == entity.uid:
entity.queue.put_nowait(payload)
except Exception as e:
print(Exception(e))
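
The hunk above only touches quote style, but the class it edits is the core of a small fan-out mechanism: each follower holds a per-subscription asyncio queue, and push() delivers a payload to every matching queue while holding a class-level lock. Below is a minimal self-contained sketch of that pattern, with the parts the diff does not show (the Following entries and registration) assumed; note the hunk indexes the class itself (FollowingManager[kind]) while the sketch goes through the data dict to stay runnable as-is.

# Illustrative sketch only, not part of the commit above.
import asyncio

class Following:
    def __init__(self, kind, uid):
        self.kind = kind                  # "author" | "topic" | "shout" | "chat"
        self.uid = uid
        self.queue = asyncio.Queue()

class FollowingManager:
    lock = asyncio.Lock()
    data = {"author": [], "topic": [], "shout": [], "chat": []}

    @staticmethod
    async def register(kind, uid):
        async with FollowingManager.lock:
            entry = Following(kind, uid)
            FollowingManager.data[kind].append(entry)
            return entry

    @staticmethod
    async def push(kind, payload):
        # the real code additionally matches payload fields (chatId / createdBy)
        # against entity.uid before delivering; this sketch fans out to all
        async with FollowingManager.lock:
            for entity in FollowingManager.data[kind]:
                entity.queue.put_nowait(payload)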

View File

@@ -5,9 +5,9 @@ from services.stat.viewed import ViewedStorage
async def storages_init():
with local_session() as session:
print('[main] initialize SearchService')
print("[main] initialize SearchService")
await SearchService.init(session)
print('[main] SearchService initialized')
print('[main] initialize storages')
print("[main] SearchService initialized")
print("[main] initialize storages")
await ViewedStorage.init()
print('[main] storages initialized')
print("[main] storages initialized")

View File

@@ -1,14 +1,13 @@
import asyncio
import json
from datetime import datetime, timezone
from sqlalchemy import and_
from base.orm import local_session
from datetime import datetime, timezone
from orm import Notification, Reaction, Shout, User
from orm.notification import NotificationType
from orm.reaction import ReactionKind
from services.notifications.sse import connection_manager
from sqlalchemy import and_
import asyncio
import json
def shout_to_shout_data(shout):
@@ -16,13 +15,18 @@ def shout_to_shout_data(shout):
def user_to_user_data(user):
return {"id": user.id, "name": user.name, "slug": user.slug, "userpic": user.userpic}
return {
"id": user.id,
"name": user.name,
"slug": user.slug,
"userpic": user.userpic,
}
def update_prev_notification(notification, user, reaction):
notification_data = json.loads(notification.data)
notification_data["users"] = [u for u in notification_data["users"] if u['id'] != user.id]
notification_data["users"] = [u for u in notification_data["users"] if u["id"] != user.id]
notification_data["users"].append(user_to_user_data(user))
if notification_data["reactionIds"] is None:
@@ -61,7 +65,7 @@ class NewReactionNotificator:
Notification.type == NotificationType.NEW_REPLY,
Notification.shout == shout.id,
Notification.reaction == parent_reaction.id,
Notification.seen == False,
Notification.seen == False, # noqa: E712
)
)
.first()
@@ -103,7 +107,7 @@ class NewReactionNotificator:
Notification.user == shout.createdBy,
Notification.type == NotificationType.NEW_COMMENT,
Notification.shout == shout.id,
Notification.seen == False,
Notification.seen == False, # noqa: E712
)
)
.first()
@@ -154,7 +158,7 @@ class NotificationService:
try:
await notificator.run()
except Exception as e:
print(f'[NotificationService.worker] error: {str(e)}')
print(f"[NotificationService.worker] error: {str(e)}")
notification_service = NotificationService()
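
The two # noqa: E712 additions in this file exist because flake8 flags == False, yet inside a SQLAlchemy filter the comparison must stay a column expression, so it cannot be rewritten as a plain Python truthiness check. An alternative that needs no suppression is the column operator .is_(); here is a hedged sketch using the column names from the hunk, with the exact query shape assumed.

# Illustrative alternative, not part of the commit above.
from sqlalchemy import and_            # as imported in the hunk
from orm import Notification           # project model, per the hunk

def unseen_reply_notification(session, shout, parent_reaction):
    # .is_(False) renders as "seen IS false" in SQL and satisfies E712 without a noqa
    return (
        session.query(Notification)
        .filter(
            and_(
                Notification.shout == shout.id,
                Notification.reaction == parent_reaction.id,
                Notification.seen.is_(False),
            )
        )
        .first()
    )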

View File

@@ -1,9 +1,9 @@
import asyncio
import json
from sse_starlette.sse import EventSourceResponse
from starlette.requests import Request
import asyncio
import json
class ConnectionManager:
def __init__(self):
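
This hunk only regroups imports, but those imports outline the server-sent-events setup: each connected client is backed by an asyncio queue, and sse_starlette's EventSourceResponse streams whatever gets put into it. The ConnectionManager body is not shown in the diff, so the sketch below is an assumed minimal shape rather than the project's actual implementation.

# Illustrative sketch only, not the project's actual ConnectionManager.
import asyncio
import json

from sse_starlette.sse import EventSourceResponse
from starlette.requests import Request

class ConnectionManager:
    def __init__(self):
        self.connections_by_user_id = {}   # user_id -> list of asyncio.Queue

    def connect(self, user_id):
        queue = asyncio.Queue()
        self.connections_by_user_id.setdefault(user_id, []).append(queue)
        return queue

    async def notify(self, user_id, payload):
        for queue in self.connections_by_user_id.get(user_id, []):
            await queue.put(payload)

connection_manager = ConnectionManager()

async def sse_subscribe(request: Request, user_id: str):
    queue = connection_manager.connect(user_id)

    async def event_stream():
        # stop streaming once the client goes away
        while not await request.is_disconnected():
            payload = await queue.get()
            yield {"data": json.dumps(payload)}

    return EventSourceResponse(event_stream())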

View File

@@ -1,10 +1,10 @@
import asyncio
import json
from base.redis import redis
from orm.shout import Shout
from resolvers.zine.load import load_shouts_by
import asyncio
import json
class SearchService:
lock = asyncio.Lock()
@@ -13,7 +13,7 @@ class SearchService:
@staticmethod
async def init(session):
async with SearchService.lock:
print('[search.service] did nothing')
print("[search.service] did nothing")
SearchService.cache = {}
@staticmethod
@@ -21,7 +21,12 @@ class SearchService:
cached = await redis.execute("GET", text)
if not cached:
async with SearchService.lock:
options = {"title": text, "body": text, "limit": limit, "offset": offset}
options = {
"title": text,
"body": text,
"limit": limit,
"offset": offset,
}
payload = await load_shouts_by(None, None, options)
await redis.execute("SET", text, json.dumps(payload))
return payload
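
The reflowed options dict sits inside a cache-aside lookup: the query text is used as a redis key, a miss computes the payload via load_shouts_by and stores it serialized. The hunk ends before the cache-hit branch, so that part of the sketch below is an assumption.

# Illustrative sketch of the cache-aside pattern above; the cache-hit branch
# is assumed, since the hunk ends before it.
import json

from base.redis import redis                      # project helper, per the hunk
from resolvers.zine.load import load_shouts_by    # project resolver, per the hunk

async def search(text, limit=50, offset=0):
    cached = await redis.execute("GET", text)
    if cached:
        return json.loads(cached)                  # assumed: reuse the stored payload
    options = {"title": text, "body": text, "limit": limit, "offset": offset}
    payload = await load_shouts_by(None, None, options)
    await redis.execute("SET", text, json.dumps(payload))
    return payload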

View File

@@ -1,16 +1,14 @@
import asyncio
import time
from base.orm import local_session
from datetime import datetime, timedelta, timezone
from gql import Client, gql
from gql.transport.aiohttp import AIOHTTPTransport
from orm import Topic
from orm.shout import Shout, ShoutTopic
from os import environ, path
from ssl import create_default_context
from gql import Client, gql
from gql.transport.aiohttp import AIOHTTPTransport
from sqlalchemy import func
from base.orm import local_session
from orm import Topic, User
from orm.shout import Shout, ShoutTopic
import asyncio
import time
load_facts = gql(
"""
@@ -46,7 +44,7 @@ query getDomains {
}
"""
)
schema_str = open(path.dirname(__file__) + '/ackee.graphql').read()
schema_str = open(path.dirname(__file__) + "/ackee.graphql").read()
token = environ.get("ACKEE_TOKEN", "")
@@ -54,7 +52,9 @@ def create_client(headers=None, schema=None):
return Client(
schema=schema,
transport=AIOHTTPTransport(
url="https://ackee.discours.io/api", ssl=create_default_context(), headers=headers
url="https://ackee.discours.io/api",
ssl=create_default_context(),
headers=headers,
),
)
@@ -98,7 +98,7 @@ class ViewedStorage:
try:
for page in self.pages:
p = page["value"].split("?")[0]
slug = p.split('discours.io/')[-1]
slug = p.split("discours.io/")[-1]
shouts[slug] = page["count"]
for slug in shouts.keys():
await ViewedStorage.increment(slug, shouts[slug])
@@ -162,14 +162,14 @@ class ViewedStorage:
self.by_topics[topic.slug][shout_slug] = self.by_shouts[shout_slug]
@staticmethod
async def increment(shout_slug, amount=1, viewer='ackee'):
async def increment(shout_slug, amount=1, viewer="ackee"):
"""the only way to change views counter"""
self = ViewedStorage
async with self.lock:
# TODO optimize, currently we execute 1 DB transaction per shout
with local_session() as session:
shout = session.query(Shout).where(Shout.slug == shout_slug).one()
if viewer == 'old-discours':
if viewer == "old-discours":
# this is needed for old db migration
if shout.viewsOld == amount:
print(f"viewsOld amount: {amount}")

View File

@@ -1,8 +1,8 @@
from pathlib import Path
from settings import SHOUTS_REPO
import asyncio
import subprocess
from pathlib import Path
from settings import SHOUTS_REPO
class GitTask:
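
Only the imports and the class name of GitTask appear in this hunk, so its behaviour cannot be read off the diff. Purely as an assumption-labelled illustration, the imported pieces are enough to run git against SHOUTS_REPO without blocking the event loop.

# Illustrative sketch only; GitTask's real body is not shown in the diff.
import asyncio
import subprocess
from pathlib import Path

from settings import SHOUTS_REPO      # project setting, per the hunk

def run_git(*args):
    # blocking call, executed off the event loop below
    return subprocess.run(
        ["git", "-C", str(Path(SHOUTS_REPO)), *args],
        capture_output=True,
        text=True,
        check=True,
    ).stdout

async def git_status():
    # keep the event loop free while git runs
    return await asyncio.to_thread(run_git, "status", "--short")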