From 2c524279f68d63fa4e07cca95cbace02dd881b9b Mon Sep 17 00:00:00 2001 From: Igor Lobanov Date: Thu, 26 Oct 2023 19:56:42 +0200 Subject: [PATCH] lint wip --- .pre-commit-config.yaml | 10 +- Procfile | 1 - README.md | 8 +- ai/preprocess.py | 11 +- alembic/env.py | 9 +- alembic/versions/fe943b098418_init_alembic.py | 4 +- auth/authenticate.py | 39 ++-- auth/credentials.py | 4 +- auth/email.py | 13 +- auth/identity.py | 25 +-- auth/jwtcodec.py | 8 +- auth/oauth.py | 3 +- auth/tokenstorage.py | 4 +- base/exceptions.py | 2 +- base/orm.py | 10 +- base/redis.py | 4 +- main.py | 18 +- migrate.sh | 1 - migration/__init__.py | 36 ++-- migration/bson2json.py | 5 +- migration/export.py | 38 +--- migration/extract.py | 62 +++--- migration/html2text/__init__.py | 63 ++---- migration/html2text/cli.py | 9 +- migration/html2text/utils.py | 18 +- migration/tables/comments.py | 83 ++++---- migration/tables/content_items.py | 78 ++++---- migration/tables/remarks.py | 26 +-- migration/tables/topics.py | 2 +- migration/tables/users.py | 29 ++- orm/__init__.py | 4 +- orm/community.py | 15 +- orm/notification.py | 5 +- orm/rbac.py | 23 ++- orm/reaction.py | 8 +- orm/shout.py | 13 +- orm/topic.py | 8 +- orm/user.py | 13 +- requirements-dev.txt | 1 + resetdb.sh | 1 - resolvers/__init__.py | 70 ++----- resolvers/auth.py | 31 ++- resolvers/create/editor.py | 72 ++++--- resolvers/create/migrate.py | 3 +- resolvers/inbox/chats.py | 41 ++-- resolvers/inbox/load.py | 82 ++++---- resolvers/inbox/messages.py | 32 ++- resolvers/inbox/search.py | 53 ++--- resolvers/notifications.py | 52 ++--- resolvers/upload.py | 18 +- resolvers/zine/following.py | 13 +- resolvers/zine/load.py | 133 +++++++------ resolvers/zine/profile.py | 65 +++---- resolvers/zine/reactions.py | 184 +++++++++--------- resolvers/zine/topics.py | 35 ++-- server.py | 48 ++--- services/following.py | 7 +- services/main.py | 2 +- .../notifications/notification_service.py | 112 ++++++----- services/notifications/sse.py | 6 +- services/search.py | 8 +- services/stat/viewed.py | 61 +++--- settings.py | 5 +- validations/auth.py | 1 + validations/inbox.py | 3 +- 65 files changed, 802 insertions(+), 1049 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index af489f3a..74bd1516 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,7 +6,7 @@ exclude: | ) default_language_version: - python: python3.8 + python: python3.10 repos: - repo: https://github.com/pre-commit/pre-commit-hooks @@ -23,20 +23,20 @@ repos: - id: trailing-whitespace - repo: https://github.com/timothycrosley/isort - rev: 5.5.3 + rev: 5.12.0 hooks: - id: isort - repo: https://github.com/ambv/black - rev: 20.8b1 + rev: 23.9.1 hooks: - id: black args: - --line-length=100 - --skip-string-normalization - - repo: https://gitlab.com/pycqa/flake8 - rev: 3.8.3 + - repo: https://github.com/PyCQA/flake8 + rev: 6.1.0 hooks: - id: flake8 args: diff --git a/Procfile b/Procfile index c5c1bfa8..ac9d762f 100644 --- a/Procfile +++ b/Procfile @@ -1,2 +1 @@ web: python server.py - diff --git a/README.md b/README.md index 1a1ee0a4..6f57e39c 100644 --- a/README.md +++ b/README.md @@ -35,6 +35,13 @@ pip install -r requirements.txt python3 server.py dev ``` +# pre-commit hook + +``` +pip install -r requirements-dev.txt +pre-commit install +``` + # How to do an authorized request Put the header 'Authorization' with token from signIn query or registerUser mutation. 
@@ -42,4 +49,3 @@ Put the header 'Authorization' with token from signIn query or registerUser muta # How to debug Ackee Set ACKEE_TOKEN var - diff --git a/ai/preprocess.py b/ai/preprocess.py index afd8dbd8..6cc5ae5a 100644 --- a/ai/preprocess.py +++ b/ai/preprocess.py @@ -1,9 +1,10 @@ import re +from string import punctuation + import nltk from bs4 import BeautifulSoup from nltk.corpus import stopwords from pymystem3 import Mystem -from string import punctuation from transformers import BertTokenizer nltk.download("stopwords") @@ -30,9 +31,11 @@ def get_clear_text(text): russian_stopwords = stopwords.words("russian") tokens = mystem.lemmatize(clear_text) - tokens = [token for token in tokens if token not in russian_stopwords \ - and token != " " \ - and token.strip() not in punctuation] + tokens = [ + token + for token in tokens + if token not in russian_stopwords and token != " " and token.strip() not in punctuation + ] clear_text = " ".join(tokens) diff --git a/alembic/env.py b/alembic/env.py index c6d69a97..91012c34 100644 --- a/alembic/env.py +++ b/alembic/env.py @@ -1,10 +1,8 @@ from logging.config import fileConfig -from sqlalchemy import engine_from_config -from sqlalchemy import pool +from sqlalchemy import engine_from_config, pool from alembic import context - from settings import DB_URL # this is the Alembic Config object, which provides @@ -20,6 +18,7 @@ if config.config_file_name is not None: fileConfig(config.config_file_name) from base.orm import Base + target_metadata = [Base.metadata] # other values from the config, defined by the needs of env.py, @@ -66,9 +65,7 @@ def run_migrations_online() -> None: ) with connectable.connect() as connection: - context.configure( - connection=connection, target_metadata=target_metadata - ) + context.configure(connection=connection, target_metadata=target_metadata) with context.begin_transaction(): context.run_migrations() diff --git a/alembic/versions/fe943b098418_init_alembic.py b/alembic/versions/fe943b098418_init_alembic.py index 4ec6d519..6f62301f 100644 --- a/alembic/versions/fe943b098418_init_alembic.py +++ b/alembic/versions/fe943b098418_init_alembic.py @@ -1,15 +1,15 @@ """init alembic Revision ID: fe943b098418 -Revises: +Revises: Create Date: 2023-08-19 01:37:57.031933 """ from typing import Sequence, Union -from alembic import op import sqlalchemy as sa +from alembic import op # revision identifiers, used by Alembic. 
revision: str = 'fe943b098418' diff --git a/auth/authenticate.py b/auth/authenticate.py index be4db2d2..7792766d 100644 --- a/auth/authenticate.py +++ b/auth/authenticate.py @@ -2,24 +2,22 @@ from functools import wraps from typing import Optional, Tuple from graphql.type import GraphQLResolveInfo -from sqlalchemy.orm import joinedload, exc +from sqlalchemy.orm import exc, joinedload from starlette.authentication import AuthenticationBackend from starlette.requests import HTTPConnection from auth.credentials import AuthCredentials, AuthUser -from base.orm import local_session -from orm.user import User, Role - -from settings import SESSION_TOKEN_HEADER from auth.tokenstorage import SessionToken from base.exceptions import OperationNotAllowed +from base.orm import local_session +from orm.user import Role, User +from settings import SESSION_TOKEN_HEADER class JWTAuthenticate(AuthenticationBackend): async def authenticate( self, request: HTTPConnection ) -> Optional[Tuple[AuthCredentials, AuthUser]]: - if SESSION_TOKEN_HEADER not in request.headers: return AuthCredentials(scopes={}), AuthUser(user_id=None, username='') @@ -36,28 +34,27 @@ class JWTAuthenticate(AuthenticationBackend): with local_session() as session: try: user = ( - session.query(User).options( + session.query(User) + .options( joinedload(User.roles).options(joinedload(Role.permissions)), - joinedload(User.ratings) - ).filter( - User.id == payload.user_id - ).one() + joinedload(User.ratings), + ) + .filter(User.id == payload.user_id) + .one() ) scopes = {} # TODO: integrate await user.get_permission() return ( - AuthCredentials( - user_id=payload.user_id, - scopes=scopes, - logged_in=True - ), + AuthCredentials(user_id=payload.user_id, scopes=scopes, logged_in=True), AuthUser(user_id=user.id, username=''), ) except exc.NoResultFound: pass - return AuthCredentials(scopes={}, error_message=str('Invalid token')), AuthUser(user_id=None, username='') + return AuthCredentials(scopes={}, error_message=str('Invalid token')), AuthUser( + user_id=None, username='' + ) def login_required(func): @@ -68,9 +65,7 @@ def login_required(func): # print(auth) if not auth or not auth.logged_in: # raise Unauthorized(auth.error_message or "Please login") - return { - "error": "Please login first" - } + return {"error": "Please login first"} return await func(parent, info, *args, **kwargs) return wrap @@ -79,7 +74,9 @@ def login_required(func): def permission_required(resource, operation, func): @wraps(func) async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs): - print('[auth.authenticate] permission_required for %r with info %r' % (func, info)) # debug only + print( + '[auth.authenticate] permission_required for %r with info %r' % (func, info) + ) # debug only auth: AuthCredentials = info.context["request"].auth if not auth.logged_in: raise OperationNotAllowed(auth.error_message or "Please login") diff --git a/auth/credentials.py b/auth/credentials.py index 9045b7a4..63a1d161 100644 --- a/auth/credentials.py +++ b/auth/credentials.py @@ -23,9 +23,7 @@ class AuthCredentials(BaseModel): async def permissions(self) -> List[Permission]: if self.user_id is None: # raise Unauthorized("Please login first") - return { - "error": "Please login first" - } + return {"error": "Please login first"} else: # TODO: implement permissions logix print(self.user_id) diff --git a/auth/email.py b/auth/email.py index 7ca5d9bf..ca8b2bc4 100644 --- a/auth/email.py +++ b/auth/email.py @@ -4,10 +4,7 @@ from settings import MAILGUN_API_KEY, MAILGUN_DOMAIN api_url 
= "https://api.mailgun.net/v3/%s/messages" % (MAILGUN_DOMAIN or 'discours.io') noreply = "discours.io " % (MAILGUN_DOMAIN or 'discours.io') -lang_subject = { - "ru": "Подтверждение почты", - "en": "Confirm email" -} +lang_subject = {"ru": "Подтверждение почты", "en": "Confirm email"} async def send_auth_email(user, token, lang="ru", template="email_confirmation"): @@ -22,16 +19,12 @@ async def send_auth_email(user, token, lang="ru", template="email_confirmation") "to": to, "subject": subject, "template": template, - "h:X-Mailgun-Variables": "{ \"token\": \"%s\" }" % token + "h:X-Mailgun-Variables": "{ \"token\": \"%s\" }" % token, } print('[auth.email] payload: %r' % payload) # debug # print('http://localhost:3000/?modal=auth&mode=confirm-email&token=%s' % token) - response = requests.post( - api_url, - auth=("api", MAILGUN_API_KEY), - data=payload - ) + response = requests.post(api_url, auth=("api", MAILGUN_API_KEY), data=payload) response.raise_for_status() except Exception as e: print(e) diff --git a/auth/identity.py b/auth/identity.py index e4b78040..cc1bf3c8 100644 --- a/auth/identity.py +++ b/auth/identity.py @@ -7,6 +7,7 @@ from sqlalchemy import or_ from auth.jwtcodec import JWTCodec from auth.tokenstorage import TokenStorage + # from base.exceptions import InvalidPassword, InvalidToken from base.orm import local_session from orm import User @@ -57,14 +58,10 @@ class Identity: user = User(**orm_user.dict()) if not user.password: # raise InvalidPassword("User password is empty") - return { - "error": "User password is empty" - } + return {"error": "User password is empty"} if not Password.verify(password, user.password): # raise InvalidPassword("Wrong user password") - return { - "error": "Wrong user password" - } + return {"error": "Wrong user password"} return user @staticmethod @@ -91,26 +88,18 @@ class Identity: payload = JWTCodec.decode(token) if not await TokenStorage.exist(f"{payload.user_id}-{payload.username}-{token}"): # raise InvalidToken("Login token has expired, please login again") - return { - "error": "Token has expired" - } + return {"error": "Token has expired"} except ExpiredSignatureError: # raise InvalidToken("Login token has expired, please try again") - return { - "error": "Token has expired" - } + return {"error": "Token has expired"} except DecodeError: # raise InvalidToken("token format error") from e - return { - "error": "Token format error" - } + return {"error": "Token format error"} with local_session() as session: user = session.query(User).filter_by(id=payload.user_id).first() if not user: # raise Exception("user not exist") - return { - "error": "User does not exist" - } + return {"error": "User does not exist"} if not user.emailConfirmed: user.emailConfirmed = True session.commit() diff --git a/auth/jwtcodec.py b/auth/jwtcodec.py index ac561adb..8fc12d27 100644 --- a/auth/jwtcodec.py +++ b/auth/jwtcodec.py @@ -1,8 +1,10 @@ from datetime import datetime, timezone + import jwt + from base.exceptions import ExpiredToken, InvalidToken -from validations.auth import TokenPayload, AuthInput from settings import JWT_ALGORITHM, JWT_SECRET_KEY +from validations.auth import AuthInput, TokenPayload class JWTCodec: @@ -13,7 +15,7 @@ class JWTCodec: "username": user.email or user.phone, "exp": exp, "iat": datetime.now(tz=timezone.utc), - "iss": "discours" + "iss": "discours", } try: return jwt.encode(payload, JWT_SECRET_KEY, JWT_ALGORITHM) @@ -33,7 +35,7 @@ class JWTCodec: # "verify_signature": False }, algorithms=[JWT_ALGORITHM], - issuer="discours" + 
issuer="discours", ) r = TokenPayload(**payload) # print('[auth.jwtcodec] debug token %r' % r) diff --git a/auth/oauth.py b/auth/oauth.py index 54b5f11a..02f56ff5 100644 --- a/auth/oauth.py +++ b/auth/oauth.py @@ -1,8 +1,9 @@ from authlib.integrations.starlette_client import OAuth from starlette.responses import RedirectResponse + from auth.identity import Identity from auth.tokenstorage import TokenStorage -from settings import OAUTH_CLIENTS, FRONTEND_URL +from settings import FRONTEND_URL, OAUTH_CLIENTS oauth = OAuth() diff --git a/auth/tokenstorage.py b/auth/tokenstorage.py index c61aa848..b5a5bc39 100644 --- a/auth/tokenstorage.py +++ b/auth/tokenstorage.py @@ -1,9 +1,9 @@ from datetime import datetime, timedelta, timezone from auth.jwtcodec import JWTCodec -from validations.auth import AuthInput from base.redis import redis -from settings import SESSION_TOKEN_LIFE_SPAN, ONETIME_TOKEN_LIFE_SPAN +from settings import ONETIME_TOKEN_LIFE_SPAN, SESSION_TOKEN_LIFE_SPAN +from validations.auth import AuthInput async def save(token_key, life_span, auto_delete=True): diff --git a/base/exceptions.py b/base/exceptions.py index 1f3344e7..2cf7bdeb 100644 --- a/base/exceptions.py +++ b/base/exceptions.py @@ -1,8 +1,8 @@ from graphql.error import GraphQLError - # TODO: remove traceback from logs for defined exceptions + class BaseHttpException(GraphQLError): code = 500 message = "500 Server error" diff --git a/base/orm.py b/base/orm.py index 8d2e65ad..02105f51 100644 --- a/base/orm.py +++ b/base/orm.py @@ -1,15 +1,13 @@ -from typing import TypeVar, Any, Dict, Generic, Callable +from typing import Any, Callable, Dict, Generic, TypeVar -from sqlalchemy import create_engine, Column, Integer +from sqlalchemy import Column, Integer, create_engine from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import Session from sqlalchemy.sql.schema import Table from settings import DB_URL -engine = create_engine( - DB_URL, echo=False, pool_size=10, max_overflow=20 -) +engine = create_engine(DB_URL, echo=False, pool_size=10, max_overflow=20) T = TypeVar("T") @@ -47,7 +45,7 @@ class Base(declarative_base()): def update(self, input): column_names = self.__table__.columns.keys() - for (name, value) in input.items(): + for name, value in input.items(): if name in column_names: setattr(self, name, value) diff --git a/base/redis.py b/base/redis.py index 52a49caa..d5d4babd 100644 --- a/base/redis.py +++ b/base/redis.py @@ -1,5 +1,7 @@ -from aioredis import from_url from asyncio import sleep + +from aioredis import from_url + from settings import REDIS_URL diff --git a/main.py b/main.py index 6bb17a86..3f839ab5 100644 --- a/main.py +++ b/main.py @@ -2,6 +2,7 @@ import asyncio import os from importlib import import_module from os.path import exists + from ariadne import load_schema_from_path, make_executable_schema from ariadne.asgi import GraphQL from starlette.applications import Starlette @@ -9,20 +10,21 @@ from starlette.middleware import Middleware from starlette.middleware.authentication import AuthenticationMiddleware from starlette.middleware.sessions import SessionMiddleware from starlette.routing import Route -from orm import init_tables from auth.authenticate import JWTAuthenticate -from auth.oauth import oauth_login, oauth_authorize +from auth.oauth import oauth_authorize, oauth_login from base.redis import redis from base.resolvers import resolvers +from orm import init_tables from resolvers.auth import confirm_email_handler from resolvers.upload import upload_handler from services.main 
import storages_init from services.notifications.notification_service import notification_service +from services.notifications.sse import sse_subscribe_handler from services.stat.viewed import ViewedStorage + # from services.zine.gittask import GitTask from settings import DEV_SERVER_PID_FILE_NAME, SENTRY_DSN, SESSION_SECRET_KEY -from services.notifications.sse import sse_subscribe_handler import_module("resolvers") schema = make_executable_schema(load_schema_from_path("schema.graphql"), resolvers) # type: ignore @@ -46,6 +48,7 @@ async def start_up(): try: import sentry_sdk + sentry_sdk.init(SENTRY_DSN) except Exception as e: print('[sentry] init error') @@ -82,9 +85,7 @@ app = Starlette( middleware=middleware, routes=routes, ) -app.mount("/", GraphQL( - schema -)) +app.mount("/", GraphQL(schema)) dev_app = Starlette( debug=True, @@ -93,7 +94,4 @@ dev_app = Starlette( middleware=middleware, routes=routes, ) -dev_app.mount("/", GraphQL( - schema, - debug=True -)) +dev_app.mount("/", GraphQL(schema, debug=True)) diff --git a/migrate.sh b/migrate.sh index 2c1189da..f63aac19 100644 --- a/migrate.sh +++ b/migrate.sh @@ -16,4 +16,3 @@ echo "Start migration" python3 server.py migrate if [ $? -ne 0 ]; then { echo "Migration failed, aborting." ; exit 1; } fi echo 'Done!' - diff --git a/migration/__init__.py b/migration/__init__.py index 468fa886..17cc5ffd 100644 --- a/migration/__init__.py +++ b/migration/__init__.py @@ -14,8 +14,9 @@ from migration.tables.content_items import get_shout_slug from migration.tables.content_items import migrate as migrateShout from migration.tables.remarks import migrate as migrateRemark from migration.tables.topics import migrate as migrateTopic -from migration.tables.users import migrate as migrateUser, post_migrate as users_post_migrate +from migration.tables.users import migrate as migrateUser from migration.tables.users import migrate_2stage as migrateUser_2stage +from migration.tables.users import post_migrate as users_post_migrate from orm import init_tables from orm.reaction import Reaction @@ -63,16 +64,8 @@ async def topics_handle(storage): del storage["topics"]["by_slug"][oldslug] storage["topics"]["by_oid"][oid] = storage["topics"]["by_slug"][newslug] print("[migration] " + str(counter) + " topics migrated") - print( - "[migration] " - + str(len(storage["topics"]["by_oid"].values())) - + " topics by oid" - ) - print( - "[migration] " - + str(len(storage["topics"]["by_slug"].values())) - + " topics by slug" - ) + print("[migration] " + str(len(storage["topics"]["by_oid"].values())) + " topics by oid") + print("[migration] " + str(len(storage["topics"]["by_slug"].values())) + " topics by slug") async def shouts_handle(storage, args): @@ -117,9 +110,10 @@ async def shouts_handle(storage, args): # print main counter counter += 1 - print('[migration] shouts_handle %d: %s @%s' % ( - (counter + 1), shout_dict["slug"], author["slug"] - )) + print( + '[migration] shouts_handle %d: %s @%s' + % ((counter + 1), shout_dict["slug"], author["slug"]) + ) b = bs4.BeautifulSoup(shout_dict["body"], "html.parser") texts = [shout_dict["title"].lower().replace(r"[^а-яА-Яa-zA-Z]", "")] @@ -214,9 +208,7 @@ def data_load(): tags_data = json.loads(open("migration/data/tags.json").read()) storage["topics"]["tags"] = tags_data print("[migration.load] " + str(len(tags_data)) + " tags ") - cats_data = json.loads( - open("migration/data/content_item_categories.json").read() - ) + cats_data = json.loads(open("migration/data/content_item_categories.json").read()) 
storage["topics"]["cats"] = cats_data print("[migration.load] " + str(len(cats_data)) + " cats ") comments_data = json.loads(open("migration/data/comments.json").read()) @@ -235,11 +227,7 @@ def data_load(): storage["users"]["by_oid"][x["_id"]] = x # storage['users']['by_slug'][x['slug']] = x # no user.slug yet - print( - "[migration.load] " - + str(len(storage["users"]["by_oid"].keys())) - + " users by oid" - ) + print("[migration.load] " + str(len(storage["users"]["by_oid"].keys())) + " users by oid") for x in tags_data: storage["topics"]["by_oid"][x["_id"]] = x storage["topics"]["by_slug"][x["slug"]] = x @@ -247,9 +235,7 @@ def data_load(): storage["topics"]["by_oid"][x["_id"]] = x storage["topics"]["by_slug"][x["slug"]] = x print( - "[migration.load] " - + str(len(storage["topics"]["by_slug"].keys())) - + " topics by slug" + "[migration.load] " + str(len(storage["topics"]["by_slug"].keys())) + " topics by slug" ) for item in content_data: slug = get_shout_slug(item) diff --git a/migration/bson2json.py b/migration/bson2json.py index 03effe19..cff33b28 100644 --- a/migration/bson2json.py +++ b/migration/bson2json.py @@ -1,8 +1,9 @@ +import gc import json import os import bson -import gc + from .utils import DateTimeEncoder @@ -15,7 +16,7 @@ def json_tables(): "email_subscriptions": [], "users": [], "comments": [], - "remarks": [] + "remarks": [], } for table in data.keys(): print('[migration] bson2json for ' + table) diff --git a/migration/export.py b/migration/export.py index 102cfb14..42004ee3 100644 --- a/migration/export.py +++ b/migration/export.py @@ -71,47 +71,29 @@ def export_slug(slug, storage): def export_email_subscriptions(): - email_subscriptions_data = json.loads( - open("migration/data/email_subscriptions.json").read() - ) + email_subscriptions_data = json.loads(open("migration/data/email_subscriptions.json").read()) for data in email_subscriptions_data: # TODO: migrate to mailgun list manually # migrate_email_subscription(data) pass - print( - "[migration] " - + str(len(email_subscriptions_data)) - + " email subscriptions exported" - ) + print("[migration] " + str(len(email_subscriptions_data)) + " email subscriptions exported") def export_shouts(storage): # update what was just migrated or load json again if len(storage["users"]["by_slugs"].keys()) == 0: - storage["users"]["by_slugs"] = json.loads( - open(EXPORT_DEST + "authors.json").read() - ) - print( - "[migration] " - + str(len(storage["users"]["by_slugs"].keys())) - + " exported authors " - ) + storage["users"]["by_slugs"] = json.loads(open(EXPORT_DEST + "authors.json").read()) + print("[migration] " + str(len(storage["users"]["by_slugs"].keys())) + " exported authors ") if len(storage["shouts"]["by_slugs"].keys()) == 0: - storage["shouts"]["by_slugs"] = json.loads( - open(EXPORT_DEST + "articles.json").read() - ) + storage["shouts"]["by_slugs"] = json.loads(open(EXPORT_DEST + "articles.json").read()) print( - "[migration] " - + str(len(storage["shouts"]["by_slugs"].keys())) - + " exported articles " + "[migration] " + str(len(storage["shouts"]["by_slugs"].keys())) + " exported articles " ) for slug in storage["shouts"]["by_slugs"].keys(): export_slug(slug, storage) -def export_json( - export_articles={}, export_authors={}, export_topics={}, export_comments={} -): +def export_json(export_articles={}, export_authors={}, export_topics={}, export_comments={}): open(EXPORT_DEST + "authors.json", "w").write( json.dumps( export_authors, @@ -152,8 +134,4 @@ def export_json( ensure_ascii=False, ) ) - print( - "[migration] 
" - + str(len(export_comments.items())) - + " exported articles with comments" - ) + print("[migration] " + str(len(export_comments.items())) + " exported articles with comments") diff --git a/migration/extract.py b/migration/extract.py index fcd293e6..511e68ed 100644 --- a/migration/extract.py +++ b/migration/extract.py @@ -5,7 +5,6 @@ import uuid from bs4 import BeautifulSoup - TOOLTIP_REGEX = r"(\/\/\/(.+)\/\/\/)" contentDir = os.path.join( os.path.dirname(os.path.realpath(__file__)), "..", "..", "discoursio-web", "content" @@ -27,7 +26,6 @@ def replace_tooltips(body): return newbody - def extract_footnotes(body, shout_dict): parts = body.split("&&&") lll = len(parts) @@ -47,12 +45,16 @@ def extract_footnotes(body, shout_dict): extracted_body = part.split(fn, 1)[1].split('>', 1)[1].split('', 1)[0] print("[extract] footnote link: " + extracted_link) with local_session() as session: - Reaction.create({ - "shout": shout_dict['id'], - "kind": ReactionKind.FOOTNOTE, - "body": extracted_body, - "range": str(body.index(fn + link) - len('<')) + ':' + str(body.index(extracted_body) + len('')) - }) + Reaction.create( + { + "shout": shout_dict['id'], + "kind": ReactionKind.FOOTNOTE, + "body": extracted_body, + "range": str(body.index(fn + link) - len('<')) + + ':' + + str(body.index(extracted_body) + len('')), + } + ) newparts[i] = "ℹ️" else: newparts[i] = part @@ -76,9 +78,7 @@ def place_tooltips(body): print("[extract] footnote: " + part) fn = 'a class="footnote-url" href="' link = part.split(fn, 1)[1].split('"', 1)[0] - extracted_part = ( - part.split(fn, 1)[0] + " " + part.split("/", 1)[-1] - ) + extracted_part = part.split(fn, 1)[0] + " " + part.split("/", 1)[-1] newparts[i] = ( " (i + 1) else "".join(newparts) ) @@ -271,7 +262,7 @@ def cleanup_md(body): return newbody -def extract_md(body, shout_dict = None): +def extract_md(body, shout_dict=None): newbody = body if newbody: newbody = cleanup_md(newbody) @@ -279,7 +270,6 @@ def extract_md(body, shout_dict = None): raise Exception("cleanup error") if shout_dict: - uid = shout_dict['id'] or uuid.uuid4() newbody = extract_md_images(newbody, uid) if not newbody: @@ -293,7 +283,7 @@ def extract_md(body, shout_dict = None): def extract_media(entry): - ''' normalized media extraction method ''' + '''normalized media extraction method''' # media [ { title pic url body } ]} kind = entry.get("type") if not kind: @@ -323,12 +313,7 @@ def extract_media(entry): url = "https://vimeo.com/" + m["vimeoId"] # body body = m.get("body") or m.get("literatureBody") or "" - media.append({ - "url": url, - "pic": pic, - "title": title, - "body": body - }) + media.append({"url": url, "pic": pic, "title": title, "body": body}) return media @@ -398,9 +383,7 @@ def cleanup_html(body: str) -> str: r"
<p>\s*</p>", r"<br>\s*</br>", ] - regex_replace = { - r"<br>\s*</p>": "</p>" - } + regex_replace = {r"<br>\s*</p>": "</p>
"} changed = True while changed: # we need several iterations to clean nested tags this way @@ -414,7 +397,8 @@ def cleanup_html(body: str) -> str: changed = True return new_body -def extract_html(entry, shout_id = None, cleanup=False): + +def extract_html(entry, shout_id=None, cleanup=False): body_orig = (entry.get("body") or "").replace('\(', '(').replace('\)', ')') if cleanup: # we do that before bs parsing to catch the invalid html diff --git a/migration/html2text/__init__.py b/migration/html2text/__init__.py index 1090025c..6b87f297 100644 --- a/migration/html2text/__init__.py +++ b/migration/html2text/__init__.py @@ -119,9 +119,7 @@ class HTML2Text(html.parser.HTMLParser): self.lastWasList = False self.style = 0 self.style_def = {} # type: Dict[str, Dict[str, str]] - self.tag_stack = ( - [] - ) # type: List[Tuple[str, Dict[str, Optional[str]], Dict[str, str]]] + self.tag_stack = [] # type: List[Tuple[str, Dict[str, Optional[str]], Dict[str, str]]] self.emphasis = 0 self.drop_white_space = 0 self.inheader = False @@ -300,9 +298,7 @@ class HTML2Text(html.parser.HTMLParser): if strikethrough: self.quiet -= 1 - def handle_tag( - self, tag: str, attrs: Dict[str, Optional[str]], start: bool - ) -> None: + def handle_tag(self, tag: str, attrs: Dict[str, Optional[str]], start: bool) -> None: self.current_tag = tag if self.tag_callback is not None: @@ -333,9 +329,7 @@ class HTML2Text(html.parser.HTMLParser): tag_style = element_style(attrs, self.style_def, parent_style) self.tag_stack.append((tag, attrs, tag_style)) else: - dummy, attrs, tag_style = ( - self.tag_stack.pop() if self.tag_stack else (None, {}, {}) - ) + dummy, attrs, tag_style = self.tag_stack.pop() if self.tag_stack else (None, {}, {}) if self.tag_stack: parent_style = self.tag_stack[-1][2] @@ -385,11 +379,7 @@ class HTML2Text(html.parser.HTMLParser): ): self.o("`") # NOTE: same as self.span_highlight = True - elif ( - self.current_class == "lead" - and not self.inheader - and not self.span_highlight - ): + elif self.current_class == "lead" and not self.inheader and not self.span_highlight: # self.o("==") # NOTE: CriticMarkup {== self.span_lead = True else: @@ -479,11 +469,7 @@ class HTML2Text(html.parser.HTMLParser): and not self.span_lead and not self.span_highlight ): - if ( - start - and self.preceding_data - and self.preceding_data[-1] == self.strong_mark[0] - ): + if start and self.preceding_data and self.preceding_data[-1] == self.strong_mark[0]: strong = " " + self.strong_mark self.preceding_data += " " else: @@ -548,13 +534,8 @@ class HTML2Text(html.parser.HTMLParser): "href" in attrs and not attrs["href"].startswith("#_ftn") and attrs["href"] is not None - and not ( - self.skip_internal_links and attrs["href"].startswith("#") - ) - and not ( - self.ignore_mailto_links - and attrs["href"].startswith("mailto:") - ) + and not (self.skip_internal_links and attrs["href"].startswith("#")) + and not (self.ignore_mailto_links and attrs["href"].startswith("mailto:")) ): self.astack.append(attrs) self.maybe_automatic_link = attrs["href"] @@ -638,9 +619,7 @@ class HTML2Text(html.parser.HTMLParser): self.o("![" + escape_md(alt) + "]") if self.inline_links: href = attrs.get("href") or "" - self.o( - "(" + escape_md(urlparse.urljoin(self.baseurl, href)) + ")" - ) + self.o("(" + escape_md(urlparse.urljoin(self.baseurl, href)) + ")") else: i = self.previousIndex(attrs) if i is not None: @@ -696,9 +675,7 @@ class HTML2Text(html.parser.HTMLParser): # WARNING: does not line up
  1. s > 9 correctly. parent_list = None for list in self.list: - self.o( - " " if parent_list == "ol" and list.name == "ul" else " " - ) + self.o(" " if parent_list == "ol" and list.name == "ul" else " ") parent_list = list.name if li.name == "ul": @@ -787,9 +764,7 @@ class HTML2Text(html.parser.HTMLParser): self.pbr() self.br_toggle = " " - def o( - self, data: str, puredata: bool = False, force: Union[bool, str] = False - ) -> None: + def o(self, data: str, puredata: bool = False, force: Union[bool, str] = False) -> None: """ Deal with indentation and whitespace """ @@ -864,9 +839,7 @@ class HTML2Text(html.parser.HTMLParser): self.out(" ") self.space = False - if self.a and ( - (self.p_p == 2 and self.links_each_paragraph) or force == "end" - ): + if self.a and ((self.p_p == 2 and self.links_each_paragraph) or force == "end"): if force == "end": self.out("\n") @@ -925,11 +898,7 @@ class HTML2Text(html.parser.HTMLParser): if self.maybe_automatic_link is not None: href = self.maybe_automatic_link - if ( - href == data - and self.absolute_url_matcher.match(href) - and self.use_automatic_links - ): + if href == data and self.absolute_url_matcher.match(href) and self.use_automatic_links: self.o("<" + data + ">") self.empty_link = False return @@ -1000,9 +969,7 @@ class HTML2Text(html.parser.HTMLParser): self.inline_links = False for para in text.split("\n"): if len(para) > 0: - if not skipwrap( - para, self.wrap_links, self.wrap_list_items, self.wrap_tables - ): + if not skipwrap(para, self.wrap_links, self.wrap_list_items, self.wrap_tables): indent = "" if para.startswith(" " + self.ul_item_mark): # list item continuation: add a double indent to the @@ -1043,9 +1010,7 @@ class HTML2Text(html.parser.HTMLParser): return result -def html2text( - html: str, baseurl: str = "", bodywidth: Optional[int] = config.BODY_WIDTH -) -> str: +def html2text(html: str, baseurl: str = "", bodywidth: Optional[int] = config.BODY_WIDTH) -> str: h = html.strip() or "" if h: h = HTML2Text(baseurl=baseurl, bodywidth=bodywidth) diff --git a/migration/html2text/cli.py b/migration/html2text/cli.py index dbaba28b..62e0738f 100644 --- a/migration/html2text/cli.py +++ b/migration/html2text/cli.py @@ -117,10 +117,7 @@ def main() -> None: dest="images_with_size", action="store_true", default=config.IMAGES_WITH_SIZE, - help=( - "Write image tags with height and width attrs as raw html to retain " - "dimensions" - ), + help=("Write image tags with height and width attrs as raw html to retain " "dimensions"), ) p.add_argument( "-g", @@ -260,9 +257,7 @@ def main() -> None: default=config.CLOSE_QUOTE, help="The character used to close quotes", ) - p.add_argument( - "--version", action="version", version=".".join(map(str, __version__)) - ) + p.add_argument("--version", action="version", version=".".join(map(str, __version__))) p.add_argument("filename", nargs="?") p.add_argument("encoding", nargs="?", default="utf-8") args = p.parse_args() diff --git a/migration/html2text/utils.py b/migration/html2text/utils.py index 1cf22b52..fd6a16c2 100644 --- a/migration/html2text/utils.py +++ b/migration/html2text/utils.py @@ -4,9 +4,7 @@ from typing import Dict, List, Optional from . 
import config unifiable_n = { - html.entities.name2codepoint[k]: v - for k, v in config.UNIFIABLE.items() - if k != "nbsp" + html.entities.name2codepoint[k]: v for k, v in config.UNIFIABLE.items() if k != "nbsp" } @@ -156,9 +154,7 @@ def list_numbering_start(attrs: Dict[str, Optional[str]]) -> int: return 0 -def skipwrap( - para: str, wrap_links: bool, wrap_list_items: bool, wrap_tables: bool -) -> bool: +def skipwrap(para: str, wrap_links: bool, wrap_list_items: bool, wrap_tables: bool) -> bool: # If it appears to contain a link # don't wrap if not wrap_links and config.RE_LINK.search(para): @@ -236,9 +232,7 @@ def reformat_table(lines: List[str], right_margin: int) -> List[str]: max_width += [len(x) + right_margin for x in cols[-(num_cols - max_cols) :]] max_cols = num_cols - max_width = [ - max(len(x) + right_margin, old_len) for x, old_len in zip(cols, max_width) - ] + max_width = [max(len(x) + right_margin, old_len) for x, old_len in zip(cols, max_width)] # reformat new_lines = [] @@ -247,15 +241,13 @@ def reformat_table(lines: List[str], right_margin: int) -> List[str]: if set(line.strip()) == set("-|"): filler = "-" new_cols = [ - x.rstrip() + (filler * (M - len(x.rstrip()))) - for x, M in zip(cols, max_width) + x.rstrip() + (filler * (M - len(x.rstrip()))) for x, M in zip(cols, max_width) ] new_lines.append("|-" + "|".join(new_cols) + "|") else: filler = " " new_cols = [ - x.rstrip() + (filler * (M - len(x.rstrip()))) - for x, M in zip(cols, max_width) + x.rstrip() + (filler * (M - len(x.rstrip()))) for x, M in zip(cols, max_width) ] new_lines.append("| " + "|".join(new_cols) + "|") return new_lines diff --git a/migration/tables/comments.py b/migration/tables/comments.py index 82e32924..092850c8 100644 --- a/migration/tables/comments.py +++ b/migration/tables/comments.py @@ -5,61 +5,48 @@ from dateutil.parser import parse as date_parse from base.orm import local_session from migration.html2text import html2text from orm.reaction import Reaction, ReactionKind -from orm.shout import ShoutReactionsFollower +from orm.shout import Shout, ShoutReactionsFollower from orm.topic import TopicFollower from orm.user import User -from orm.shout import Shout ts = datetime.now(tz=timezone.utc) def auto_followers(session, topics, reaction_dict): # creating shout's reactions following for reaction author - following1 = session.query( - ShoutReactionsFollower - ).where( - ShoutReactionsFollower.follower == reaction_dict["createdBy"] - ).filter( - ShoutReactionsFollower.shout == reaction_dict["shout"] - ).first() + following1 = ( + session.query(ShoutReactionsFollower) + .where(ShoutReactionsFollower.follower == reaction_dict["createdBy"]) + .filter(ShoutReactionsFollower.shout == reaction_dict["shout"]) + .first() + ) if not following1: following1 = ShoutReactionsFollower.create( - follower=reaction_dict["createdBy"], - shout=reaction_dict["shout"], - auto=True + follower=reaction_dict["createdBy"], shout=reaction_dict["shout"], auto=True ) session.add(following1) # creating topics followings for reaction author for t in topics: - tf = session.query( - TopicFollower - ).where( - TopicFollower.follower == reaction_dict["createdBy"] - ).filter( - TopicFollower.topic == t['id'] - ).first() + tf = ( + session.query(TopicFollower) + .where(TopicFollower.follower == reaction_dict["createdBy"]) + .filter(TopicFollower.topic == t['id']) + .first() + ) if not tf: topic_following = TopicFollower.create( - follower=reaction_dict["createdBy"], - topic=t['id'], - auto=True + follower=reaction_dict["createdBy"], 
topic=t['id'], auto=True ) session.add(topic_following) def migrate_ratings(session, entry, reaction_dict): for comment_rating_old in entry.get("ratings", []): - rater = ( - session.query(User) - .filter(User.oid == comment_rating_old["createdBy"]) - .first() - ) + rater = session.query(User).filter(User.oid == comment_rating_old["createdBy"]).first() re_reaction_dict = { "shout": reaction_dict["shout"], "replyTo": reaction_dict["id"], - "kind": ReactionKind.LIKE - if comment_rating_old["value"] > 0 - else ReactionKind.DISLIKE, + "kind": ReactionKind.LIKE if comment_rating_old["value"] > 0 else ReactionKind.DISLIKE, "createdBy": rater.id if rater else 1, } cts = comment_rating_old.get("createdAt") @@ -68,18 +55,15 @@ def migrate_ratings(session, entry, reaction_dict): try: # creating reaction from old rating rr = Reaction.create(**re_reaction_dict) - following2 = session.query( - ShoutReactionsFollower - ).where( - ShoutReactionsFollower.follower == re_reaction_dict['createdBy'] - ).filter( - ShoutReactionsFollower.shout == rr.shout - ).first() + following2 = ( + session.query(ShoutReactionsFollower) + .where(ShoutReactionsFollower.follower == re_reaction_dict['createdBy']) + .filter(ShoutReactionsFollower.shout == rr.shout) + .first() + ) if not following2: following2 = ShoutReactionsFollower.create( - follower=re_reaction_dict['createdBy'], - shout=rr.shout, - auto=True + follower=re_reaction_dict['createdBy'], shout=rr.shout, auto=True ) session.add(following2) session.add(rr) @@ -150,9 +134,7 @@ async def migrate(entry, storage): else: stage = "author and old id found" try: - shout = session.query( - Shout - ).where(Shout.slug == old_shout["slug"]).one() + shout = session.query(Shout).where(Shout.slug == old_shout["slug"]).one() if shout: reaction_dict["shout"] = shout.id reaction_dict["createdBy"] = author.id if author else 1 @@ -190,17 +172,20 @@ def migrate_2stage(old_comment, idmap): comment = session.query(Reaction).where(Reaction.id == new_id).first() try: if new_replyto_id: - new_reply = session.query(Reaction).where(Reaction.id == new_replyto_id).first() + new_reply = ( + session.query(Reaction).where(Reaction.id == new_replyto_id).first() + ) if not new_reply: print(new_replyto_id) raise Exception("cannot find reply by id!") comment.replyTo = new_reply.id session.add(comment) - srf = session.query(ShoutReactionsFollower).where( - ShoutReactionsFollower.shout == comment.shout - ).filter( - ShoutReactionsFollower.follower == comment.createdBy - ).first() + srf = ( + session.query(ShoutReactionsFollower) + .where(ShoutReactionsFollower.shout == comment.shout) + .filter(ShoutReactionsFollower.follower == comment.createdBy) + .first() + ) if not srf: srf = ShoutReactionsFollower.create( shout=comment.shout, follower=comment.createdBy, auto=True diff --git a/migration/tables/content_items.py b/migration/tables/content_items.py index a2297d98..92a97c24 100644 --- a/migration/tables/content_items.py +++ b/migration/tables/content_items.py @@ -1,16 +1,18 @@ -from datetime import datetime, timezone import json +import re +from datetime import datetime, timezone + from dateutil.parser import parse as date_parse from sqlalchemy.exc import IntegrityError from transliterate import translit + from base.orm import local_session from migration.extract import extract_html, extract_media from orm.reaction import Reaction, ReactionKind -from orm.shout import Shout, ShoutTopic, ShoutReactionsFollower +from orm.shout import Shout, ShoutReactionsFollower, ShoutTopic +from orm.topic import Topic, 
TopicFollower from orm.user import User -from orm.topic import TopicFollower, Topic from services.stat.viewed import ViewedStorage -import re OLD_DATE = "2016-03-05 22:22:00.350000" ts = datetime.now(tz=timezone.utc) @@ -91,11 +93,12 @@ async def create_shout(shout_dict): s = Shout.create(**shout_dict) author = s.authors[0] with local_session() as session: - srf = session.query(ShoutReactionsFollower).where( - ShoutReactionsFollower.shout == s.id - ).filter( - ShoutReactionsFollower.follower == author.id - ).first() + srf = ( + session.query(ShoutReactionsFollower) + .where(ShoutReactionsFollower.shout == s.id) + .filter(ShoutReactionsFollower.follower == author.id) + .first() + ) if not srf: srf = ShoutReactionsFollower.create(shout=s.id, follower=author.id, auto=True) session.add(srf) @@ -137,11 +140,14 @@ async def migrate(entry, storage): r = { "layout": type2layout[entry["type"]], "title": entry["title"], - "authors": [author, ], + "authors": [ + author, + ], "slug": get_shout_slug(entry), "cover": ( - "https://images.discours.io/unsafe/" + - entry["thumborId"] if entry.get("thumborId") else entry.get("image", {}).get("url") + "https://images.discours.io/unsafe/" + entry["thumborId"] + if entry.get("thumborId") + else entry.get("image", {}).get("url") ), "visibility": "public" if entry.get("published") else "community", "publishedAt": date_parse(entry.get("publishedAt")) if entry.get("published") else None, @@ -150,7 +156,7 @@ async def migrate(entry, storage): "updatedAt": date_parse(entry["updatedAt"]) if "updatedAt" in entry else ts, "createdBy": author.id, "topics": await add_topics_follower(entry, storage, author), - "body": extract_html(entry, cleanup=True) + "body": extract_html(entry, cleanup=True), } # main topic patch @@ -184,7 +190,9 @@ async def migrate(entry, storage): # udpate data shout_dict = shout.dict() - shout_dict["authors"] = [author.dict(), ] + shout_dict["authors"] = [ + author.dict(), + ] # shout topics aftermath shout_dict["topics"] = await topics_aftermath(r, storage) @@ -193,7 +201,9 @@ async def migrate(entry, storage): await content_ratings_to_reactions(entry, shout_dict["slug"]) # shout views - await ViewedStorage.increment(shout_dict["slug"], amount=entry.get("views", 1), viewer='old-discours') + await ViewedStorage.increment( + shout_dict["slug"], amount=entry.get("views", 1), viewer='old-discours' + ) # del shout_dict['ratings'] storage["shouts"]["by_oid"][entry["_id"]] = shout_dict @@ -205,7 +215,9 @@ async def add_topics_follower(entry, storage, user): topics = set([]) category = entry.get("category") topics_by_oid = storage["topics"]["by_oid"] - oids = [category, ] + entry.get("tags", []) + oids = [ + category, + ] + entry.get("tags", []) for toid in oids: tslug = topics_by_oid.get(toid, {}).get("slug") if tslug: @@ -217,19 +229,14 @@ async def add_topics_follower(entry, storage, user): try: tpc = session.query(Topic).where(Topic.slug == tpcslug).first() if tpc: - tf = session.query( - TopicFollower - ).where( - TopicFollower.follower == user.id - ).filter( - TopicFollower.topic == tpc.id - ).first() + tf = ( + session.query(TopicFollower) + .where(TopicFollower.follower == user.id) + .filter(TopicFollower.topic == tpc.id) + .first() + ) if not tf: - tf = TopicFollower.create( - topic=tpc.id, - follower=user.id, - auto=True - ) + tf = TopicFollower.create(topic=tpc.id, follower=user.id, auto=True) session.add(tf) session.commit() except IntegrityError: @@ -295,10 +302,7 @@ async def resolve_create_shout(shout_dict): for key in shout_dict: if key in 
s.__dict__: if s.__dict__[key] != shout_dict[key]: - print( - "[migration] shout already exists, but differs in %s" - % key - ) + print("[migration] shout already exists, but differs in %s" % key) bump = True else: print("[migration] shout already exists, but lacks %s" % key) @@ -344,9 +348,7 @@ async def topics_aftermath(entry, storage): ) if not shout_topic_new: try: - ShoutTopic.create( - **{"shout": shout.id, "topic": new_topic.id} - ) + ShoutTopic.create(**{"shout": shout.id, "topic": new_topic.id}) except Exception: print("[migration] shout topic error: " + newslug) session.commit() @@ -363,9 +365,7 @@ async def content_ratings_to_reactions(entry, slug): with local_session() as session: for content_rating in entry.get("ratings", []): rater = ( - session.query(User) - .filter(User.oid == content_rating["createdBy"]) - .first() + session.query(User).filter(User.oid == content_rating["createdBy"]).first() ) or User.default_user shout = session.query(Shout).where(Shout.slug == slug).first() cts = content_rating.get("createdAt") @@ -375,7 +375,7 @@ async def content_ratings_to_reactions(entry, slug): if content_rating["value"] > 0 else ReactionKind.DISLIKE, "createdBy": rater.id, - "shout": shout.id + "shout": shout.id, } reaction = ( session.query(Reaction) diff --git a/migration/tables/remarks.py b/migration/tables/remarks.py index 026b95c6..09957ed4 100644 --- a/migration/tables/remarks.py +++ b/migration/tables/remarks.py @@ -12,27 +12,19 @@ def migrate(entry, storage): print(shout_dict['body']) remark = { "shout": shout_dict['id'], - "body": extract_md( - html2text(entry['body']), - shout_dict - ), - "kind": ReactionKind.REMARK + "body": extract_md(html2text(entry['body']), shout_dict), + "kind": ReactionKind.REMARK, } if entry.get('textBefore'): - remark['range'] = str( - shout_dict['body'] - .index( - entry['textBefore'] or '' - ) - ) + ':' + str( - shout_dict['body'] - .index( - entry['textAfter'] or '' - ) + len( - entry['textAfter'] or '' - ) + remark['range'] = ( + str(shout_dict['body'].index(entry['textBefore'] or '')) + + ':' + + str( + shout_dict['body'].index(entry['textAfter'] or '') + + len(entry['textAfter'] or '') ) + ) with local_session() as session: rmrk = Reaction.create(**remark) diff --git a/migration/tables/topics.py b/migration/tables/topics.py index 17804376..ae9ddbda 100644 --- a/migration/tables/topics.py +++ b/migration/tables/topics.py @@ -10,7 +10,7 @@ def migrate(entry): "slug": entry["slug"], "oid": entry["_id"], "title": entry["title"].replace(" ", " "), - "body": extract_md(html2text(body_orig)) + "body": extract_md(html2text(body_orig)), } with local_session() as session: diff --git a/migration/tables/users.py b/migration/tables/users.py index 3ccf9029..46f2e825 100644 --- a/migration/tables/users.py +++ b/migration/tables/users.py @@ -23,7 +23,7 @@ def migrate(entry): "muted": False, # amnesty "links": [], "name": "anonymous", - "password": entry["services"]["password"].get("bcrypt") + "password": entry["services"]["password"].get("bcrypt"), } if "updatedAt" in entry: @@ -35,7 +35,11 @@ def migrate(entry): slug = entry["profile"].get("path").lower() slug = re.sub('[^0-9a-zA-Z]+', '-', slug).strip() user_dict["slug"] = slug - bio = (entry.get("profile", {"bio": ""}).get("bio") or "").replace('\(', '(').replace('\)', ')') + bio = ( + (entry.get("profile", {"bio": ""}).get("bio") or "") + .replace('\(', '(') + .replace('\)', ')') + ) bio_text = BeautifulSoup(bio, features="lxml").text if len(bio_text) > 120: @@ -46,8 +50,7 @@ def migrate(entry): # 
userpic try: user_dict["userpic"] = ( - "https://images.discours.io/unsafe/" - + entry["profile"]["thumborId"] + "https://images.discours.io/unsafe/" + entry["profile"]["thumborId"] ) except KeyError: try: @@ -62,11 +65,7 @@ def migrate(entry): name = (name + " " + ln) if ln else name if not name: name = slug if slug else "anonymous" - name = ( - entry["profile"]["path"].lower().strip().replace(" ", "-") - if len(name) < 2 - else name - ) + name = entry["profile"]["path"].lower().strip().replace(" ", "-") if len(name) < 2 else name user_dict["name"] = name # links @@ -95,9 +94,7 @@ def migrate(entry): except IntegrityError: print("[migration] cannot create user " + user_dict["slug"]) with local_session() as session: - old_user = ( - session.query(User).filter(User.slug == user_dict["slug"]).first() - ) + old_user = session.query(User).filter(User.slug == user_dict["slug"]).first() old_user.oid = oid old_user.password = user_dict["password"] session.commit() @@ -114,7 +111,7 @@ def post_migrate(): "slug": "old-discours", "username": "old-discours", "email": "old@discours.io", - "name": "Просмотры на старой версии сайта" + "name": "Просмотры на старой версии сайта", } with local_session() as session: @@ -148,11 +145,7 @@ def migrate_2stage(entry, id_map): user_rating = UserRating.create(**user_rating_dict) if user_rating_dict['value'] > 0: - af = AuthorFollower.create( - author=user.id, - follower=rater.id, - auto=True - ) + af = AuthorFollower.create(author=user.id, follower=rater.id, auto=True) session.add(af) session.add(user_rating) session.commit() diff --git a/orm/__init__.py b/orm/__init__.py index 53b13951..9f66f85c 100644 --- a/orm/__init__.py +++ b/orm/__init__.py @@ -1,7 +1,7 @@ from base.orm import Base, engine from orm.community import Community from orm.notification import Notification -from orm.rbac import Operation, Resource, Permission, Role +from orm.rbac import Operation, Permission, Resource, Role from orm.reaction import Reaction from orm.shout import Shout from orm.topic import Topic, TopicFollower @@ -32,5 +32,5 @@ __all__ = [ "Notification", "Reaction", "UserRating", - "init_tables" + "init_tables", ] diff --git a/orm/community.py b/orm/community.py index b55b857f..7045e1aa 100644 --- a/orm/community.py +++ b/orm/community.py @@ -1,6 +1,7 @@ from datetime import datetime -from sqlalchemy import Column, String, ForeignKey, DateTime +from sqlalchemy import Column, DateTime, ForeignKey, String + from base.orm import Base, local_session @@ -10,9 +11,7 @@ class CommunityFollower(Base): id = None # type: ignore follower = Column(ForeignKey("user.id"), primary_key=True) community = Column(ForeignKey("community.id"), primary_key=True) - joinedAt = Column( - DateTime, nullable=False, default=datetime.now, comment="Created at" - ) + joinedAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at") # role = Column(ForeignKey(Role.id), nullable=False, comment="Role for member") @@ -23,16 +22,12 @@ class Community(Base): slug = Column(String, nullable=False, unique=True, comment="Slug") desc = Column(String, nullable=False, default="") pic = Column(String, nullable=False, default="") - createdAt = Column( - DateTime, nullable=False, default=datetime.now, comment="Created at" - ) + createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at") @staticmethod def init_table(): with local_session() as session: - d = ( - session.query(Community).filter(Community.slug == "discours").first() - ) + d = 
session.query(Community).filter(Community.slug == "discours").first() if not d: d = Community.create(name="Дискурс", slug="discours") session.add(d) diff --git a/orm/notification.py b/orm/notification.py index 25f4e4f3..a838ce6b 100644 --- a/orm/notification.py +++ b/orm/notification.py @@ -1,9 +1,10 @@ from datetime import datetime -from sqlalchemy import Column, Enum, ForeignKey, DateTime, Boolean, Integer +from enum import Enum as Enumeration + +from sqlalchemy import Boolean, Column, DateTime, Enum, ForeignKey, Integer from sqlalchemy.dialects.postgresql import JSONB from base.orm import Base -from enum import Enum as Enumeration class NotificationType(Enumeration): diff --git a/orm/rbac.py b/orm/rbac.py index 29ade72e..80914949 100644 --- a/orm/rbac.py +++ b/orm/rbac.py @@ -1,9 +1,9 @@ import warnings -from sqlalchemy import String, Column, ForeignKey, UniqueConstraint, TypeDecorator +from sqlalchemy import Column, ForeignKey, String, TypeDecorator, UniqueConstraint from sqlalchemy.orm import relationship -from base.orm import Base, REGISTRY, engine, local_session +from base.orm import REGISTRY, Base, engine, local_session # Role Based Access Control # @@ -121,16 +121,23 @@ class Operation(Base): class Resource(Base): __tablename__ = "resource" - resourceClass = Column( - String, nullable=False, unique=True, comment="Resource class" - ) + resourceClass = Column(String, nullable=False, unique=True, comment="Resource class") name = Column(String, nullable=False, unique=True, comment="Resource name") # TODO: community = Column(ForeignKey()) @staticmethod def init_table(): with local_session() as session: - for res in ["shout", "topic", "reaction", "chat", "message", "invite", "community", "user"]: + for res in [ + "shout", + "topic", + "reaction", + "chat", + "message", + "invite", + "community", + "user", + ]: r = session.query(Resource).filter(Resource.name == res).first() if not r: r = Resource.create(name=res, resourceClass=res) @@ -145,9 +152,7 @@ class Permission(Base): {"extend_existing": True}, ) - role = Column( - ForeignKey("role.id", ondelete="CASCADE"), nullable=False, comment="Role" - ) + role = Column(ForeignKey("role.id", ondelete="CASCADE"), nullable=False, comment="Role") operation = Column( ForeignKey("operation.id", ondelete="CASCADE"), nullable=False, diff --git a/orm/reaction.py b/orm/reaction.py index 1c129e23..f3680b6d 100644 --- a/orm/reaction.py +++ b/orm/reaction.py @@ -27,18 +27,14 @@ class ReactionKind(Enumeration): class Reaction(Base): __tablename__ = "reaction" body = Column(String, nullable=True, comment="Reaction Body") - createdAt = Column( - DateTime, nullable=False, default=datetime.now, comment="Created at" - ) + createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at") createdBy = Column(ForeignKey("user.id"), nullable=False, index=True, comment="Sender") updatedAt = Column(DateTime, nullable=True, comment="Updated at") updatedBy = Column(ForeignKey("user.id"), nullable=True, index=True, comment="Last Editor") deletedAt = Column(DateTime, nullable=True, comment="Deleted at") deletedBy = Column(ForeignKey("user.id"), nullable=True, index=True, comment="Deleted by") shout = Column(ForeignKey("shout.id"), nullable=False, index=True) - replyTo = Column( - ForeignKey("reaction.id"), nullable=True, comment="Reply to reaction ID" - ) + replyTo = Column(ForeignKey("reaction.id"), nullable=True, comment="Reply to reaction ID") range = Column(String, nullable=True, comment="Range in format :") kind = Column(Enum(ReactionKind), 
nullable=False, comment="Reaction kind") oid = Column(String, nullable=True, comment="Old ID") diff --git a/orm/shout.py b/orm/shout.py index 22381d4c..0d980b8a 100644 --- a/orm/shout.py +++ b/orm/shout.py @@ -1,6 +1,6 @@ from datetime import datetime -from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, JSON +from sqlalchemy import JSON, Boolean, Column, DateTime, ForeignKey, Integer, String from sqlalchemy.orm import column_property, relationship from base.orm import Base, local_session @@ -24,9 +24,7 @@ class ShoutReactionsFollower(Base): follower = Column(ForeignKey("user.id"), primary_key=True, index=True) shout = Column(ForeignKey("shout.id"), primary_key=True, index=True) auto = Column(Boolean, nullable=False, default=False) - createdAt = Column( - DateTime, nullable=False, default=datetime.now, comment="Created at" - ) + createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at") deletedAt = Column(DateTime, nullable=True) @@ -83,12 +81,7 @@ class Shout(Base): with local_session() as session: s = session.query(Shout).first() if not s: - entry = { - "slug": "genesis-block", - "body": "", - "title": "Ничего", - "lang": "ru" - } + entry = {"slug": "genesis-block", "body": "", "title": "Ничего", "lang": "ru"} s = Shout.create(**entry) session.add(s) session.commit() diff --git a/orm/topic.py b/orm/topic.py index a37dc69a..b0d7cc01 100644 --- a/orm/topic.py +++ b/orm/topic.py @@ -11,9 +11,7 @@ class TopicFollower(Base): id = None # type: ignore follower = Column(ForeignKey("user.id"), primary_key=True, index=True) topic = Column(ForeignKey("topic.id"), primary_key=True, index=True) - createdAt = Column( - DateTime, nullable=False, default=datetime.now, comment="Created at" - ) + createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at") auto = Column(Boolean, nullable=False, default=False) @@ -24,7 +22,5 @@ class Topic(Base): title = Column(String, nullable=False, comment="Title") body = Column(String, nullable=True, comment="Body") pic = Column(String, nullable=True, comment="Picture") - community = Column( - ForeignKey("community.id"), default=1, comment="Community" - ) + community = Column(ForeignKey("community.id"), default=1, comment="Community") oid = Column(String, nullable=True, comment="Old ID") diff --git a/orm/user.py b/orm/user.py index 5aeab90e..d10be411 100644 --- a/orm/user.py +++ b/orm/user.py @@ -3,6 +3,7 @@ from datetime import datetime from sqlalchemy import JSON as JSONType from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String from sqlalchemy.orm import relationship + from base.orm import Base, local_session from orm.rbac import Role @@ -34,9 +35,7 @@ class AuthorFollower(Base): id = None # type: ignore follower = Column(ForeignKey("user.id"), primary_key=True, index=True) author = Column(ForeignKey("user.id"), primary_key=True, index=True) - createdAt = Column( - DateTime, nullable=False, default=datetime.now, comment="Created at" - ) + createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at") auto = Column(Boolean, nullable=False, default=False) @@ -54,12 +53,8 @@ class User(Base): slug = Column(String, unique=True, comment="User's slug") muted = Column(Boolean, default=False) emailConfirmed = Column(Boolean, default=False) - createdAt = Column( - DateTime, nullable=False, default=datetime.now, comment="Created at" - ) - lastSeen = Column( - DateTime, nullable=False, default=datetime.now, comment="Was online at" - ) + createdAt 
= Column(DateTime, nullable=False, default=datetime.now, comment="Created at") + lastSeen = Column(DateTime, nullable=False, default=datetime.now, comment="Was online at") deletedAt = Column(DateTime, nullable=True, comment="Deleted at") links = Column(JSONType, nullable=True, comment="Links") oauth = Column(String, nullable=True) diff --git a/requirements-dev.txt b/requirements-dev.txt index d221f3b0..b2e99a01 100755 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,3 +2,4 @@ isort brunette flake8 mypy +pre-commit diff --git a/resetdb.sh b/resetdb.sh index 39b3b9b2..40ba2e37 100755 --- a/resetdb.sh +++ b/resetdb.sh @@ -53,4 +53,3 @@ echo "Start migration" python3 server.py migrate if [ $? -ne 0 ]; then { echo "Migration failed, aborting." ; exit 1; } fi echo 'Done!' - diff --git a/resolvers/__init__.py b/resolvers/__init__.py index 5d753ac4..78ae7e22 100644 --- a/resolvers/__init__.py +++ b/resolvers/__init__.py @@ -1,67 +1,35 @@ from resolvers.auth import ( - login, - sign_out, - is_email_used, - register_by_email, - confirm_email, auth_send_link, + confirm_email, get_current_user, + is_email_used, + login, + register_by_email, + sign_out, ) - -from resolvers.create.migrate import markdown_body from resolvers.create.editor import create_shout, delete_shout, update_shout - -from resolvers.zine.profile import ( - load_authors_by, - rate_user, - update_profile, - get_authors_all -) - +from resolvers.create.migrate import markdown_body +from resolvers.inbox.chats import create_chat, delete_chat, update_chat +from resolvers.inbox.load import load_chats, load_messages_by, load_recipients +from resolvers.inbox.messages import create_message, delete_message, mark_as_read, update_message +from resolvers.inbox.search import search_recipients +from resolvers.notifications import load_notifications +from resolvers.zine.following import follow, unfollow +from resolvers.zine.load import load_shout, load_shouts_by +from resolvers.zine.profile import get_authors_all, load_authors_by, rate_user, update_profile from resolvers.zine.reactions import ( create_reaction, delete_reaction, - update_reaction, - reactions_unfollow, + load_reactions_by, reactions_follow, - load_reactions_by + reactions_unfollow, + update_reaction, ) from resolvers.zine.topics import ( + get_topic, topic_follow, topic_unfollow, + topics_all, topics_by_author, topics_by_community, - topics_all, - get_topic ) - -from resolvers.zine.following import ( - follow, - unfollow -) - -from resolvers.zine.load import ( - load_shout, - load_shouts_by -) - -from resolvers.inbox.chats import ( - create_chat, - delete_chat, - update_chat - -) -from resolvers.inbox.messages import ( - create_message, - delete_message, - update_message, - mark_as_read -) -from resolvers.inbox.load import ( - load_chats, - load_messages_by, - load_recipients -) -from resolvers.inbox.search import search_recipients - -from resolvers.notifications import load_notifications diff --git a/resolvers/auth.py b/resolvers/auth.py index 17369b7a..c28898e3 100644 --- a/resolvers/auth.py +++ b/resolvers/auth.py @@ -1,24 +1,30 @@ # -*- coding: utf-8 -*- +import re from datetime import datetime, timezone from urllib.parse import quote_plus from graphql.type import GraphQLResolveInfo from starlette.responses import RedirectResponse from transliterate import translit -import re + from auth.authenticate import login_required from auth.credentials import AuthCredentials from auth.email import send_auth_email from auth.identity import Identity, Password from auth.jwtcodec 
import JWTCodec from auth.tokenstorage import TokenStorage -from base.exceptions import (BaseHttpException, InvalidPassword, InvalidToken, - ObjectNotExist, Unauthorized) +from base.exceptions import ( + BaseHttpException, + InvalidPassword, + InvalidToken, + ObjectNotExist, + Unauthorized, +) from base.orm import local_session from base.resolvers import mutation, query from orm import Role, User -from settings import SESSION_TOKEN_HEADER, FRONTEND_URL +from settings import FRONTEND_URL, SESSION_TOKEN_HEADER @mutation.field("getSession") @@ -32,10 +38,7 @@ async def get_current_user(_, info): user.lastSeen = datetime.now(tz=timezone.utc) session.commit() - return { - "token": token, - "user": user - } + return {"token": token, "user": user} @mutation.field("confirmEmail") @@ -53,10 +56,7 @@ async def confirm_email(_, info, token): user.lastSeen = datetime.now(tz=timezone.utc) session.add(user) session.commit() - return { - "token": session_token, - "user": user - } + return {"token": session_token, "user": user} except InvalidToken as e: raise InvalidToken(e.message) except Exception as e: @@ -122,7 +122,7 @@ async def register_by_email(_, _info, email: str, password: str = "", name: str "email": email, "username": email, # will be used to store phone number or some messenger network id "name": name, - "slug": slug + "slug": slug, } if password: user_dict["password"] = Password.encode(password) @@ -172,10 +172,7 @@ async def login(_, info, email: str, password: str = "", lang: str = "ru"): user = Identity.password(orm_user, password) session_token = await TokenStorage.create_session(user) print(f"[auth] user {email} authorized") - return { - "token": session_token, - "user": user - } + return {"token": session_token, "user": user} except InvalidPassword: print(f"[auth] {email}: invalid password") raise InvalidPassword("invalid password") # contains webserver status diff --git a/resolvers/create/editor.py b/resolvers/create/editor.py index c81ff404..d6db8bf6 100644 --- a/resolvers/create/editor.py +++ b/resolvers/create/editor.py @@ -20,19 +20,21 @@ async def create_shout(_, info, inp): with local_session() as session: topics = session.query(Topic).filter(Topic.slug.in_(inp.get('topics', []))).all() - new_shout = Shout.create(**{ - "title": inp.get("title"), - "subtitle": inp.get('subtitle'), - "lead": inp.get('lead'), - "description": inp.get('description'), - "body": inp.get("body", ''), - "layout": inp.get("layout"), - "authors": inp.get("authors", []), - "slug": inp.get("slug"), - "mainTopic": inp.get("mainTopic"), - "visibility": "owner", - "createdBy": auth.user_id - }) + new_shout = Shout.create( + **{ + "title": inp.get("title"), + "subtitle": inp.get('subtitle'), + "lead": inp.get('lead'), + "description": inp.get('description'), + "body": inp.get("body", ''), + "layout": inp.get("layout"), + "authors": inp.get("authors", []), + "slug": inp.get("slug"), + "mainTopic": inp.get("mainTopic"), + "visibility": "owner", + "createdBy": auth.user_id, + } + ) for topic in topics: t = ShoutTopic.create(topic=topic.id, shout=new_shout.id) @@ -64,10 +66,15 @@ async def update_shout(_, info, shout_id, shout_input=None, publish=False): auth: AuthCredentials = info.context["request"].auth with local_session() as session: - shout = session.query(Shout).options( - joinedload(Shout.authors), - joinedload(Shout.topics), - ).filter(Shout.id == shout_id).first() + shout = ( + session.query(Shout) + .options( + joinedload(Shout.authors), + joinedload(Shout.topics), + ) + .filter(Shout.id == shout_id) + 
.first() + ) if not shout: return {"error": "shout not found"} @@ -94,25 +101,34 @@ async def update_shout(_, info, shout_id, shout_input=None, publish=False): session.commit() for new_topic_to_link in new_topics_to_link: - created_unlinked_topic = ShoutTopic.create(shout=shout.id, topic=new_topic_to_link.id) + created_unlinked_topic = ShoutTopic.create( + shout=shout.id, topic=new_topic_to_link.id + ) session.add(created_unlinked_topic) - existing_topics_input = [topic_input for topic_input in topics_input if topic_input.get("id", 0) > 0] - existing_topic_to_link_ids = [existing_topic_input["id"] for existing_topic_input in existing_topics_input - if existing_topic_input["id"] not in [topic.id for topic in shout.topics]] + existing_topics_input = [ + topic_input for topic_input in topics_input if topic_input.get("id", 0) > 0 + ] + existing_topic_to_link_ids = [ + existing_topic_input["id"] + for existing_topic_input in existing_topics_input + if existing_topic_input["id"] not in [topic.id for topic in shout.topics] + ] for existing_topic_to_link_id in existing_topic_to_link_ids: - created_unlinked_topic = ShoutTopic.create(shout=shout.id, topic=existing_topic_to_link_id) + created_unlinked_topic = ShoutTopic.create( + shout=shout.id, topic=existing_topic_to_link_id + ) session.add(created_unlinked_topic) - topic_to_unlink_ids = [topic.id for topic in shout.topics - if topic.id not in [topic_input["id"] for topic_input in existing_topics_input]] + topic_to_unlink_ids = [ + topic.id + for topic in shout.topics + if topic.id not in [topic_input["id"] for topic_input in existing_topics_input] + ] shout_topics_to_remove = session.query(ShoutTopic).filter( - and_( - ShoutTopic.shout == shout.id, - ShoutTopic.topic.in_(topic_to_unlink_ids) - ) + and_(ShoutTopic.shout == shout.id, ShoutTopic.topic.in_(topic_to_unlink_ids)) ) for shout_topic_to_remove in shout_topics_to_remove: diff --git a/resolvers/create/migrate.py b/resolvers/create/migrate.py index f16341f0..9e849f86 100644 --- a/resolvers/create/migrate.py +++ b/resolvers/create/migrate.py @@ -1,7 +1,6 @@ - from base.resolvers import query -from resolvers.auth import login_required from migration.extract import extract_md +from resolvers.auth import login_required @login_required diff --git a/resolvers/inbox/chats.py b/resolvers/inbox/chats.py index 853defab..a589e870 100644 --- a/resolvers/inbox/chats.py +++ b/resolvers/inbox/chats.py @@ -24,27 +24,24 @@ async def update_chat(_, info, chat_new: Chat): chat_id = chat_new["id"] chat = await redis.execute("GET", f"chats/{chat_id}") if not chat: - return { - "error": "chat not exist" - } + return {"error": "chat not exist"} chat = dict(json.loads(chat)) # TODO if auth.user_id in chat["admins"]: - chat.update({ - "title": chat_new.get("title", chat["title"]), - "description": chat_new.get("description", chat["description"]), - "updatedAt": int(datetime.now(tz=timezone.utc).timestamp()), - "admins": chat_new.get("admins", chat.get("admins") or []), - "users": chat_new.get("users", chat["users"]) - }) + chat.update( + { + "title": chat_new.get("title", chat["title"]), + "description": chat_new.get("description", chat["description"]), + "updatedAt": int(datetime.now(tz=timezone.utc).timestamp()), + "admins": chat_new.get("admins", chat.get("admins") or []), + "users": chat_new.get("users", chat["users"]), + } + ) await redis.execute("SET", f"chats/{chat.id}", json.dumps(chat)) await redis.execute("COMMIT") - return { - "error": None, - "chat": chat - } + return {"error": None, "chat": chat} 
@mutation.field("createChat") @@ -79,10 +76,7 @@ async def create_chat(_, info, title="", members=[]): print(chat) break if chat: - return { - "chat": chat, - "error": "existed" - } + return {"chat": chat, "error": "existed"} chat_id = str(uuid.uuid4()) chat = { @@ -92,7 +86,7 @@ async def create_chat(_, info, title="", members=[]): "createdBy": auth.user_id, "createdAt": int(datetime.now(tz=timezone.utc).timestamp()), "updatedAt": int(datetime.now(tz=timezone.utc).timestamp()), - "admins": members if (len(members) == 2 and title == "") else [] + "admins": members if (len(members) == 2 and title == "") else [], } for m in members: @@ -100,10 +94,7 @@ async def create_chat(_, info, title="", members=[]): await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat)) await redis.execute("SET", f"chats/{chat_id}/next_message_id", str(0)) await redis.execute("COMMIT") - return { - "error": None, - "chat": chat - } + return {"error": None, "chat": chat} @mutation.field("deleteChat") @@ -119,6 +110,4 @@ async def delete_chat(_, info, chat_id: str): await redis.execute("SREM", "chats_by_user/" + str(auth.user_id), chat_id) await redis.execute("COMMIT") else: - return { - "error": "chat not exist" - } + return {"error": "chat not exist"} diff --git a/resolvers/inbox/load.py b/resolvers/inbox/load.py index a0d41721..43f8a07c 100644 --- a/resolvers/inbox/load.py +++ b/resolvers/inbox/load.py @@ -1,28 +1,27 @@ import json -# from datetime import datetime, timedelta, timezone from auth.authenticate import login_required from auth.credentials import AuthCredentials -from base.redis import redis from base.orm import local_session +from base.redis import redis from base.resolvers import query from orm.user import User from resolvers.zine.profile import followed_authors + from .unread import get_unread_counter +# from datetime import datetime, timedelta, timezone + async def load_messages(chat_id: str, limit: int = 5, offset: int = 0, ids=[]): - ''' load :limit messages for :chat_id with :offset ''' + '''load :limit messages for :chat_id with :offset''' messages = [] message_ids = [] if ids: message_ids += ids try: if limit: - mids = await redis.lrange(f"chats/{chat_id}/message_ids", - offset, - offset + limit - ) + mids = await redis.lrange(f"chats/{chat_id}/message_ids", offset, offset + limit) mids = [mid.decode("utf-8") for mid in mids] message_ids += mids except Exception as e: @@ -46,12 +45,12 @@ async def load_messages(chat_id: str, limit: int = 5, offset: int = 0, ids=[]): @query.field("loadChats") @login_required async def load_chats(_, info, limit: int = 50, offset: int = 0): - """ load :limit chats of current user with :offset """ + """load :limit chats of current user with :offset""" auth: AuthCredentials = info.context["request"].auth cids = await redis.execute("SMEMBERS", "chats_by_user/" + str(auth.user_id)) if cids: - cids = list(cids)[offset:offset + limit] + cids = list(cids)[offset : offset + limit] if not cids: print('[inbox.load] no chats were found') cids = [] @@ -71,25 +70,24 @@ async def load_chats(_, info, limit: int = 50, offset: int = 0): for uid in c["users"]: a = session.query(User).where(User.id == uid).first() if a: - c['members'].append({ - "id": a.id, - "slug": a.slug, - "userpic": a.userpic, - "name": a.name, - "lastSeen": a.lastSeen, - "online": a.id in onliners - }) + c['members'].append( + { + "id": a.id, + "slug": a.slug, + "userpic": a.userpic, + "name": a.name, + "lastSeen": a.lastSeen, + "online": a.id in onliners, + } + ) chats.append(c) - return { - "chats": 
chats, - "error": None - } + return {"chats": chats, "error": None} @query.field("loadMessagesBy") @login_required async def load_messages_by(_, info, by, limit: int = 10, offset: int = 0): - ''' load :limit messages of :chat_id with :offset ''' + '''load :limit messages of :chat_id with :offset''' auth: AuthCredentials = info.context["request"].auth userchats = await redis.execute("SMEMBERS", "chats_by_user/" + str(auth.user_id)) @@ -103,23 +101,12 @@ async def load_messages_by(_, info, by, limit: int = 10, offset: int = 0): chat = await redis.execute("GET", f"chats/{by_chat}") # print(chat) if not chat: - return { - "messages": [], - "error": "chat not exist" - } + return {"messages": [], "error": "chat not exist"} # everyone's messages in filtered chat messages = await load_messages(by_chat, limit, offset) - return { - "messages": sorted( - list(messages), - key=lambda m: m['createdAt'] - ), - "error": None - } + return {"messages": sorted(list(messages), key=lambda m: m['createdAt']), "error": None} else: - return { - "error": "Cannot access messages of this chat" - } + return {"error": "Cannot access messages of this chat"} @query.field("loadRecipients") @@ -138,15 +125,14 @@ async def load_recipients(_, info, limit=50, offset=0): chat_users += session.query(User).where(User.emailConfirmed).limit(limit).offset(offset) members = [] for a in chat_users: - members.append({ - "id": a.id, - "slug": a.slug, - "userpic": a.userpic, - "name": a.name, - "lastSeen": a.lastSeen, - "online": a.id in onliners - }) - return { - "members": members, - "error": None - } + members.append( + { + "id": a.id, + "slug": a.slug, + "userpic": a.userpic, + "name": a.name, + "lastSeen": a.lastSeen, + "online": a.id in onliners, + } + ) + return {"members": members, "error": None} diff --git a/resolvers/inbox/messages.py b/resolvers/inbox/messages.py index 56187edf..3d35105a 100644 --- a/resolvers/inbox/messages.py +++ b/resolvers/inbox/messages.py @@ -1,27 +1,27 @@ import asyncio import json -from typing import Any from datetime import datetime, timezone +from typing import Any + from graphql.type import GraphQLResolveInfo + from auth.authenticate import login_required from auth.credentials import AuthCredentials from base.redis import redis from base.resolvers import mutation -from services.following import FollowingManager, FollowingResult, Following +from services.following import Following, FollowingManager, FollowingResult from validations.inbox import Message @mutation.field("createMessage") @login_required async def create_message(_, info, chat: str, body: str, replyTo=None): - """ create message with :body for :chat_id replying to :replyTo optionally """ + """create message with :body for :chat_id replying to :replyTo optionally""" auth: AuthCredentials = info.context["request"].auth chat = await redis.execute("GET", f"chats/{chat}") if not chat: - return { - "error": "chat is not exist" - } + return {"error": "chat is not exist"} else: chat = dict(json.loads(chat)) message_id = await redis.execute("GET", f"chats/{chat['id']}/next_message_id") @@ -31,7 +31,7 @@ async def create_message(_, info, chat: str, body: str, replyTo=None): "id": message_id, "author": auth.user_id, "body": body, - "createdAt": int(datetime.now(tz=timezone.utc).timestamp()) + "createdAt": int(datetime.now(tz=timezone.utc).timestamp()), } if replyTo: new_message['replyTo'] = replyTo @@ -46,17 +46,12 @@ async def create_message(_, info, chat: str, body: str, replyTo=None): users = chat["users"] for user_slug in users: - await 
redis.execute( - "LPUSH", f"chats/{chat['id']}/unread/{user_slug}", str(message_id) - ) + await redis.execute("LPUSH", f"chats/{chat['id']}/unread/{user_slug}", str(message_id)) result = FollowingResult("NEW", 'chat', new_message) await FollowingManager.push('chat', result) - return { - "message": new_message, - "error": None - } + return {"message": new_message, "error": None} @mutation.field("updateMessage") @@ -84,10 +79,7 @@ async def update_message(_, info, chat_id: str, message_id: int, body: str): result = FollowingResult("UPDATED", 'chat', message) await FollowingManager.push('chat', result) - return { - "message": message, - "error": None - } + return {"message": message, "error": None} @mutation.field("deleteMessage") @@ -137,6 +129,4 @@ async def mark_as_read(_, info, chat_id: str, messages: [int]): for message_id in messages: await redis.execute("LREM", f"chats/{chat_id}/unread/{auth.user_id}", 0, str(message_id)) - return { - "error": None - } + return {"error": None} diff --git a/resolvers/inbox/search.py b/resolvers/inbox/search.py index 1ca340e5..8a3f0c2d 100644 --- a/resolvers/inbox/search.py +++ b/resolvers/inbox/search.py @@ -1,10 +1,11 @@ import json -from datetime import datetime, timezone, timedelta +from datetime import datetime, timedelta, timezone + from auth.authenticate import login_required from auth.credentials import AuthCredentials +from base.orm import local_session from base.redis import redis from base.resolvers import query -from base.orm import local_session from orm.user import AuthorFollower, User from resolvers.inbox.load import load_messages @@ -17,7 +18,7 @@ async def search_recipients(_, info, query: str, limit: int = 50, offset: int = auth: AuthCredentials = info.context["request"].auth talk_before = await redis.execute("GET", f"/chats_by_user/{auth.user_id}") if talk_before: - talk_before = list(json.loads(talk_before))[offset:offset + limit] + talk_before = list(json.loads(talk_before))[offset : offset + limit] for chat_id in talk_before: members = await redis.execute("GET", f"/chats/{chat_id}/users") if members: @@ -31,23 +32,24 @@ async def search_recipients(_, info, query: str, limit: int = 50, offset: int = with local_session() as session: # followings - result += session.query(AuthorFollower.author).join( - User, User.id == AuthorFollower.follower - ).where( - User.slug.startswith(query) - ).offset(offset + len(result)).limit(more_amount) + result += ( + session.query(AuthorFollower.author) + .join(User, User.id == AuthorFollower.follower) + .where(User.slug.startswith(query)) + .offset(offset + len(result)) + .limit(more_amount) + ) more_amount = limit # followers - result += session.query(AuthorFollower.follower).join( - User, User.id == AuthorFollower.author - ).where( - User.slug.startswith(query) - ).offset(offset + len(result)).limit(offset + len(result) + limit) - return { - "members": list(result), - "error": None - } + result += ( + session.query(AuthorFollower.follower) + .join(User, User.id == AuthorFollower.author) + .where(User.slug.startswith(query)) + .offset(offset + len(result)) + .limit(offset + len(result) + limit) + ) + return {"members": list(result), "error": None} @query.field("searchMessages") @@ -83,13 +85,12 @@ async def search_user_chats(by, messages, user_id: int, limit, offset): days = by.get("days") if days: - messages.extend(filter( - list(messages), - key=lambda m: ( - datetime.now(tz=timezone.utc) - int(m["createdAt"]) < timedelta(days=by["days"]) + messages.extend( + filter( + list(messages), + key=lambda 
m: ( + datetime.now(tz=timezone.utc) - int(m["createdAt"]) < timedelta(days=by["days"]) + ), ) - )) - return { - "messages": messages, - "error": None - } + ) + return {"messages": messages, "error": None} diff --git a/resolvers/notifications.py b/resolvers/notifications.py index 0cfc2244..98314d2e 100644 --- a/resolvers/notifications.py +++ b/resolvers/notifications.py @@ -1,9 +1,9 @@ -from sqlalchemy import select, desc, and_, update +from sqlalchemy import and_, desc, select, update -from auth.credentials import AuthCredentials -from base.resolvers import query, mutation from auth.authenticate import login_required +from auth.credentials import AuthCredentials from base.orm import local_session +from base.resolvers import mutation, query from orm import Notification @@ -19,22 +19,23 @@ async def load_notifications(_, info, params=None): limit = params.get('limit', 50) offset = params.get('offset', 0) - q = select(Notification).where( - Notification.user == user_id - ).order_by(desc(Notification.createdAt)).limit(limit).offset(offset) + q = ( + select(Notification) + .where(Notification.user == user_id) + .order_by(desc(Notification.createdAt)) + .limit(limit) + .offset(offset) + ) notifications = [] with local_session() as session: - total_count = session.query(Notification).where( - Notification.user == user_id - ).count() + total_count = session.query(Notification).where(Notification.user == user_id).count() - total_unread_count = session.query(Notification).where( - and_( - Notification.user == user_id, - Notification.seen == False - ) - ).count() + total_unread_count = ( + session.query(Notification) + .where(and_(Notification.user == user_id, Notification.seen == False)) + .count() + ) for [notification] in session.execute(q): notification.type = notification.type.name @@ -43,7 +44,7 @@ async def load_notifications(_, info, params=None): return { "notifications": notifications, "totalCount": total_count, - "totalUnreadCount": total_unread_count + "totalUnreadCount": total_unread_count, } @@ -54,9 +55,11 @@ async def mark_notification_as_read(_, info, notification_id: int): user_id = auth.user_id with local_session() as session: - notification = session.query(Notification).where( - and_(Notification.id == notification_id, Notification.user == user_id) - ).one() + notification = ( + session.query(Notification) + .where(and_(Notification.id == notification_id, Notification.user == user_id)) + .one() + ) notification.seen = True session.commit() @@ -69,12 +72,11 @@ async def mark_all_notifications_as_read(_, info): auth: AuthCredentials = info.context["request"].auth user_id = auth.user_id - statement = update(Notification).where( - and_( - Notification.user == user_id, - Notification.seen == False - ) - ).values(seen=True) + statement = ( + update(Notification) + .where(and_(Notification.user == user_id, Notification.seen == False)) + .values(seen=True) + ) with local_session() as session: try: diff --git a/resolvers/upload.py b/resolvers/upload.py index 44c7b81c..3eee3358 100644 --- a/resolvers/upload.py +++ b/resolvers/upload.py @@ -2,6 +2,7 @@ import os import shutil import tempfile import uuid + import boto3 from botocore.exceptions import BotoCoreError, ClientError from starlette.responses import JSONResponse @@ -25,10 +26,12 @@ async def upload_handler(request): key = 'files/' + str(uuid.uuid4()) + file_extension # Create an S3 client with Storj configuration - s3 = boto3.client('s3', - aws_access_key_id=STORJ_ACCESS_KEY, - aws_secret_access_key=STORJ_SECRET_KEY, - 
endpoint_url=STORJ_END_POINT) + s3 = boto3.client( + 's3', + aws_access_key_id=STORJ_ACCESS_KEY, + aws_secret_access_key=STORJ_SECRET_KEY, + endpoint_url=STORJ_END_POINT, + ) try: # Save the uploaded file to a temporary file @@ -39,9 +42,7 @@ async def upload_handler(request): Filename=tmp_file.name, Bucket=STORJ_BUCKET_NAME, Key=key, - ExtraArgs={ - "ContentType": file.content_type - } + ExtraArgs={"ContentType": file.content_type}, ) url = 'https://' + CDN_DOMAIN + '/' + key @@ -51,6 +52,3 @@ async def upload_handler(request): except (BotoCoreError, ClientError) as e: print(e) return JSONResponse({'error': 'Failed to upload file'}, status_code=500) - - - diff --git a/resolvers/zine/following.py b/resolvers/zine/following.py index 99481571..24935d5e 100644 --- a/resolvers/zine/following.py +++ b/resolvers/zine/following.py @@ -1,17 +1,20 @@ import asyncio -from base.orm import local_session -from base.resolvers import mutation + +from graphql.type import GraphQLResolveInfo + from auth.authenticate import login_required from auth.credentials import AuthCredentials +from base.orm import local_session +from base.resolvers import mutation +from orm.shout import ShoutReactionsFollower +from orm.topic import TopicFollower + # from resolvers.community import community_follow, community_unfollow from orm.user import AuthorFollower -from orm.topic import TopicFollower -from orm.shout import ShoutReactionsFollower from resolvers.zine.profile import author_follow, author_unfollow from resolvers.zine.reactions import reactions_follow, reactions_unfollow from resolvers.zine.topics import topic_follow, topic_unfollow from services.following import Following, FollowingManager, FollowingResult -from graphql.type import GraphQLResolveInfo @mutation.field("follow") diff --git a/resolvers/zine/load.py b/resolvers/zine/load.py index 4619efa6..06f400fc 100644 --- a/resolvers/zine/load.py +++ b/resolvers/zine/load.py @@ -1,7 +1,7 @@ from datetime import datetime, timedelta, timezone -from sqlalchemy.orm import joinedload, aliased -from sqlalchemy.sql.expression import desc, asc, select, func, case, and_, text, nulls_last +from sqlalchemy.orm import aliased, joinedload +from sqlalchemy.sql.expression import and_, asc, case, desc, func, nulls_last, select, text from auth.authenticate import login_required from auth.credentials import AuthCredentials @@ -18,32 +18,32 @@ def add_stat_columns(q): aliased_reaction = aliased(Reaction) q = q.outerjoin(aliased_reaction).add_columns( - func.sum( - aliased_reaction.id - ).label('reacted_stat'), + func.sum(aliased_reaction.id).label('reacted_stat'), + func.sum(case((aliased_reaction.kind == ReactionKind.COMMENT, 1), else_=0)).label( + 'commented_stat' + ), func.sum( case( - (aliased_reaction.kind == ReactionKind.COMMENT, 1), - else_=0 + # do not count comments' reactions + (aliased_reaction.replyTo.is_not(None), 0), + (aliased_reaction.kind == ReactionKind.AGREE, 1), + (aliased_reaction.kind == ReactionKind.DISAGREE, -1), + (aliased_reaction.kind == ReactionKind.PROOF, 1), + (aliased_reaction.kind == ReactionKind.DISPROOF, -1), + (aliased_reaction.kind == ReactionKind.ACCEPT, 1), + (aliased_reaction.kind == ReactionKind.REJECT, -1), + (aliased_reaction.kind == ReactionKind.LIKE, 1), + (aliased_reaction.kind == ReactionKind.DISLIKE, -1), + else_=0, ) - ).label('commented_stat'), - func.sum(case( - # do not count comments' reactions - (aliased_reaction.replyTo.is_not(None), 0), - (aliased_reaction.kind == ReactionKind.AGREE, 1), - (aliased_reaction.kind == 
ReactionKind.DISAGREE, -1), - (aliased_reaction.kind == ReactionKind.PROOF, 1), - (aliased_reaction.kind == ReactionKind.DISPROOF, -1), - (aliased_reaction.kind == ReactionKind.ACCEPT, 1), - (aliased_reaction.kind == ReactionKind.REJECT, -1), - (aliased_reaction.kind == ReactionKind.LIKE, 1), - (aliased_reaction.kind == ReactionKind.DISLIKE, -1), - else_=0) ).label('rating_stat'), - func.max(case( - (aliased_reaction.kind != ReactionKind.COMMENT, None), - else_=aliased_reaction.createdAt - )).label('last_comment')) + func.max( + case( + (aliased_reaction.kind != ReactionKind.COMMENT, None), + else_=aliased_reaction.createdAt, + ) + ).label('last_comment'), + ) return q @@ -87,27 +87,23 @@ async def load_shout(_, info, slug=None, shout_id=None): q = add_stat_columns(q) if slug is not None: - q = q.filter( - Shout.slug == slug - ) + q = q.filter(Shout.slug == slug) if shout_id is not None: - q = q.filter( - Shout.id == shout_id - ) + q = q.filter(Shout.id == shout_id) - q = q.filter( - Shout.deletedAt.is_(None) - ).group_by(Shout.id) + q = q.filter(Shout.deletedAt.is_(None)).group_by(Shout.id) try: - [shout, reacted_stat, commented_stat, rating_stat, last_comment] = session.execute(q).first() + [shout, reacted_stat, commented_stat, rating_stat, last_comment] = session.execute( + q + ).first() shout.stat = { "viewed": shout.views, "reacted": reacted_stat, "commented": commented_stat, - "rating": rating_stat + "rating": rating_stat, } for author_caption in session.query(ShoutAuthor).join(Shout).where(Shout.slug == slug): @@ -142,14 +138,13 @@ async def load_shouts_by(_, info, options): :return: Shout[] """ - q = select(Shout).options( - joinedload(Shout.authors), - joinedload(Shout.topics), - ).where( - and_( - Shout.deletedAt.is_(None), - Shout.layout.is_not(None) + q = ( + select(Shout) + .options( + joinedload(Shout.authors), + joinedload(Shout.topics), ) + .where(and_(Shout.deletedAt.is_(None), Shout.layout.is_not(None))) ) q = add_stat_columns(q) @@ -169,13 +164,15 @@ async def load_shouts_by(_, info, options): with local_session() as session: shouts_map = {} - for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(q).unique(): + for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute( + q + ).unique(): shouts.append(shout) shout.stat = { "viewed": shout.views, "reacted": reacted_stat, "commented": commented_stat, - "rating": rating_stat + "rating": rating_stat, } shouts_map[shout.id] = shout @@ -188,11 +185,13 @@ async def get_drafts(_, info): auth: AuthCredentials = info.context["request"].auth user_id = auth.user_id - q = select(Shout).options( - joinedload(Shout.authors), - joinedload(Shout.topics), - ).where( - and_(Shout.deletedAt.is_(None), Shout.createdBy == user_id) + q = ( + select(Shout) + .options( + joinedload(Shout.authors), + joinedload(Shout.topics), + ) + .where(and_(Shout.deletedAt.is_(None), Shout.createdBy == user_id)) ) q = q.group_by(Shout.id) @@ -211,24 +210,22 @@ async def get_my_feed(_, info, options): auth: AuthCredentials = info.context["request"].auth user_id = auth.user_id - subquery = select(Shout.id).join( - ShoutAuthor - ).join( - AuthorFollower, AuthorFollower.follower == user_id - ).join( - ShoutTopic - ).join( - TopicFollower, TopicFollower.follower == user_id + subquery = ( + select(Shout.id) + .join(ShoutAuthor) + .join(AuthorFollower, AuthorFollower.follower == user_id) + .join(ShoutTopic) + .join(TopicFollower, TopicFollower.follower == user_id) ) - q = select(Shout).options( - 
joinedload(Shout.authors), - joinedload(Shout.topics), - ).where( - and_( - Shout.publishedAt.is_not(None), - Shout.deletedAt.is_(None), - Shout.id.in_(subquery) + q = ( + select(Shout) + .options( + joinedload(Shout.authors), + joinedload(Shout.topics), + ) + .where( + and_(Shout.publishedAt.is_not(None), Shout.deletedAt.is_(None), Shout.id.in_(subquery)) ) ) @@ -246,13 +243,15 @@ async def get_my_feed(_, info, options): shouts = [] with local_session() as session: shouts_map = {} - for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(q).unique(): + for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute( + q + ).unique(): shouts.append(shout) shout.stat = { "viewed": shout.views, "reacted": reacted_stat, "commented": commented_stat, - "rating": rating_stat + "rating": rating_stat, } shouts_map[shout.id] = shout diff --git a/resolvers/zine/profile.py b/resolvers/zine/profile.py index 552af43f..ecdc26c7 100644 --- a/resolvers/zine/profile.py +++ b/resolvers/zine/profile.py @@ -1,6 +1,7 @@ -from typing import List from datetime import datetime, timedelta, timezone -from sqlalchemy import and_, func, distinct, select, literal +from typing import List + +from sqlalchemy import and_, distinct, func, literal, select from sqlalchemy.orm import aliased, joinedload from auth.authenticate import login_required @@ -55,7 +56,7 @@ def add_stat(author, stat_columns): "followers": followers_stat, "followings": followings_stat, "rating": rating_stat, - "commented": commented_stat + "commented": commented_stat, } return author @@ -119,10 +120,10 @@ async def user_followers(_, _info, slug) -> List[User]: q = add_author_stat_columns(q) aliased_user = aliased(User) - q = q.join(AuthorFollower, AuthorFollower.follower == User.id).join( - aliased_user, aliased_user.id == AuthorFollower.author - ).where( - aliased_user.slug == slug + q = ( + q.join(AuthorFollower, AuthorFollower.follower == User.id) + .join(aliased_user, aliased_user.id == AuthorFollower.author) + .where(aliased_user.slug == slug) ) return get_authors_from_query(q) @@ -150,15 +151,10 @@ async def update_profile(_, info, profile): with local_session() as session: user = session.query(User).filter(User.id == user_id).one() if not user: - return { - "error": "canoot find user" - } + return {"error": "canoot find user"} user.update(profile) session.commit() - return { - "error": None, - "author": user - } + return {"error": None, "author": user} @mutation.field("rateUser") @@ -200,13 +196,10 @@ def author_follow(user_id, slug): def author_unfollow(user_id, slug): with local_session() as session: flw = ( - session.query( - AuthorFollower - ).join(User, User.id == AuthorFollower.author).filter( - and_( - AuthorFollower.follower == user_id, User.slug == slug - ) - ).first() + session.query(AuthorFollower) + .join(User, User.id == AuthorFollower.author) + .filter(and_(AuthorFollower.follower == user_id, User.slug == slug)) + .first() ) if flw: session.delete(flw) @@ -232,12 +225,11 @@ async def get_author(_, _info, slug): [author] = get_authors_from_query(q) with local_session() as session: - comments_count = session.query(Reaction).where( - and_( - Reaction.createdBy == author.id, - Reaction.kind == ReactionKind.COMMENT - ) - ).count() + comments_count = ( + session.query(Reaction) + .where(and_(Reaction.createdBy == author.id, Reaction.kind == ReactionKind.COMMENT)) + .count() + ) author.stat["commented"] = comments_count return author @@ -260,9 +252,7 @@ async def load_authors_by(_, 
info, by, limit, offset): days_before = datetime.now(tz=timezone.utc) - timedelta(days=by["createdAt"]) q = q.filter(User.createdAt > days_before) - q = q.order_by( - by.get("order", User.createdAt) - ).limit(limit).offset(offset) + q = q.order_by(by.get("order", User.createdAt)).limit(limit).offset(offset) return get_authors_from_query(q) @@ -273,13 +263,13 @@ async def load_my_subscriptions(_, info): auth = info.context["request"].auth user_id = auth.user_id - authors_query = select(User).join(AuthorFollower, AuthorFollower.author == User.id).where( - AuthorFollower.follower == user_id + authors_query = ( + select(User) + .join(AuthorFollower, AuthorFollower.author == User.id) + .where(AuthorFollower.follower == user_id) ) - topics_query = select(Topic).join(TopicFollower).where( - TopicFollower.follower == user_id - ) + topics_query = select(Topic).join(TopicFollower).where(TopicFollower.follower == user_id) topics = [] authors = [] @@ -291,7 +281,4 @@ async def load_my_subscriptions(_, info): for [topic] in session.execute(topics_query): topics.append(topic) - return { - "topics": topics, - "authors": authors - } + return {"topics": topics, "authors": authors} diff --git a/resolvers/zine/reactions.py b/resolvers/zine/reactions.py index 1c132b69..0a37f6c3 100644 --- a/resolvers/zine/reactions.py +++ b/resolvers/zine/reactions.py @@ -1,5 +1,6 @@ from datetime import datetime, timedelta, timezone -from sqlalchemy import and_, asc, desc, select, text, func, case + +from sqlalchemy import and_, asc, case, desc, func, select, text from sqlalchemy.orm import aliased from auth.authenticate import login_required @@ -17,26 +18,22 @@ def add_reaction_stat_columns(q): aliased_reaction = aliased(Reaction) q = q.outerjoin(aliased_reaction, Reaction.id == aliased_reaction.replyTo).add_columns( - func.sum( - aliased_reaction.id - ).label('reacted_stat'), + func.sum(aliased_reaction.id).label('reacted_stat'), + func.sum(case((aliased_reaction.body.is_not(None), 1), else_=0)).label('commented_stat'), func.sum( case( - (aliased_reaction.body.is_not(None), 1), - else_=0 + (aliased_reaction.kind == ReactionKind.AGREE, 1), + (aliased_reaction.kind == ReactionKind.DISAGREE, -1), + (aliased_reaction.kind == ReactionKind.PROOF, 1), + (aliased_reaction.kind == ReactionKind.DISPROOF, -1), + (aliased_reaction.kind == ReactionKind.ACCEPT, 1), + (aliased_reaction.kind == ReactionKind.REJECT, -1), + (aliased_reaction.kind == ReactionKind.LIKE, 1), + (aliased_reaction.kind == ReactionKind.DISLIKE, -1), + else_=0, ) - ).label('commented_stat'), - func.sum(case( - (aliased_reaction.kind == ReactionKind.AGREE, 1), - (aliased_reaction.kind == ReactionKind.DISAGREE, -1), - (aliased_reaction.kind == ReactionKind.PROOF, 1), - (aliased_reaction.kind == ReactionKind.DISPROOF, -1), - (aliased_reaction.kind == ReactionKind.ACCEPT, 1), - (aliased_reaction.kind == ReactionKind.REJECT, -1), - (aliased_reaction.kind == ReactionKind.LIKE, 1), - (aliased_reaction.kind == ReactionKind.DISLIKE, -1), - else_=0) - ).label('rating_stat')) + ).label('rating_stat'), + ) return q @@ -47,17 +44,19 @@ def reactions_follow(user_id, shout_id: int, auto=False): shout = session.query(Shout).where(Shout.id == shout_id).one() following = ( - session.query(ShoutReactionsFollower).where(and_( - ShoutReactionsFollower.follower == user_id, - ShoutReactionsFollower.shout == shout.id, - )).first() + session.query(ShoutReactionsFollower) + .where( + and_( + ShoutReactionsFollower.follower == user_id, + ShoutReactionsFollower.shout == shout.id, + ) + ) + 
.first() ) if not following: following = ShoutReactionsFollower.create( - follower=user_id, - shout=shout.id, - auto=auto + follower=user_id, shout=shout.id, auto=auto ) session.add(following) session.commit() @@ -72,10 +71,14 @@ def reactions_unfollow(user_id: int, shout_id: int): shout = session.query(Shout).where(Shout.id == shout_id).one() following = ( - session.query(ShoutReactionsFollower).where(and_( - ShoutReactionsFollower.follower == user_id, - ShoutReactionsFollower.shout == shout.id - )).first() + session.query(ShoutReactionsFollower) + .where( + and_( + ShoutReactionsFollower.follower == user_id, + ShoutReactionsFollower.shout == shout.id, + ) + ) + .first() ) if following: @@ -88,30 +91,31 @@ def reactions_unfollow(user_id: int, shout_id: int): def is_published_author(session, user_id): - ''' checks if user has at least one publication ''' - return session.query( - Shout - ).where( - Shout.authors.contains(user_id) - ).filter( - and_( - Shout.publishedAt.is_not(None), - Shout.deletedAt.is_(None) - ) - ).count() > 0 + '''checks if user has at least one publication''' + return ( + session.query(Shout) + .where(Shout.authors.contains(user_id)) + .filter(and_(Shout.publishedAt.is_not(None), Shout.deletedAt.is_(None))) + .count() + > 0 + ) def check_to_publish(session, user_id, reaction): - ''' set shout to public if publicated approvers amount > 4 ''' + '''set shout to public if publicated approvers amount > 4''' if not reaction.replyTo and reaction.kind in [ ReactionKind.ACCEPT, ReactionKind.LIKE, - ReactionKind.PROOF + ReactionKind.PROOF, ]: if is_published_author(user_id): # now count how many approvers are voted already - approvers_reactions = session.query(Reaction).where(Reaction.shout == reaction.shout).all() - approvers = [user_id, ] + approvers_reactions = ( + session.query(Reaction).where(Reaction.shout == reaction.shout).all() + ) + approvers = [ + user_id, + ] for ar in approvers_reactions: a = ar.createdBy if is_published_author(session, a): @@ -122,21 +126,17 @@ def check_to_publish(session, user_id, reaction): def check_to_hide(session, user_id, reaction): - ''' hides any shout if 20% of reactions are negative ''' + '''hides any shout if 20% of reactions are negative''' if not reaction.replyTo and reaction.kind in [ ReactionKind.REJECT, ReactionKind.DISLIKE, - ReactionKind.DISPROOF + ReactionKind.DISPROOF, ]: # if is_published_author(user): approvers_reactions = session.query(Reaction).where(Reaction.shout == reaction.shout).all() rejects = 0 for r in approvers_reactions: - if r.kind in [ - ReactionKind.REJECT, - ReactionKind.DISLIKE, - ReactionKind.DISPROOF - ]: + if r.kind in [ReactionKind.REJECT, ReactionKind.DISLIKE, ReactionKind.DISPROOF]: rejects += 1 if len(approvers_reactions) / rejects < 5: return True @@ -168,31 +168,40 @@ async def create_reaction(_, info, reaction): shout = session.query(Shout).where(Shout.id == reaction["shout"]).one() author = session.query(User).where(User.id == auth.user_id).one() - if reaction["kind"] in [ - ReactionKind.DISLIKE.name, - ReactionKind.LIKE.name - ]: - existing_reaction = session.query(Reaction).where( - and_( - Reaction.shout == reaction["shout"], - Reaction.createdBy == auth.user_id, - Reaction.kind == reaction["kind"], - Reaction.replyTo == reaction.get("replyTo") + if reaction["kind"] in [ReactionKind.DISLIKE.name, ReactionKind.LIKE.name]: + existing_reaction = ( + session.query(Reaction) + .where( + and_( + Reaction.shout == reaction["shout"], + Reaction.createdBy == auth.user_id, + Reaction.kind == 
reaction["kind"], + Reaction.replyTo == reaction.get("replyTo"), + ) ) - ).first() + .first() + ) if existing_reaction is not None: raise OperationNotAllowed("You can't vote twice") - opposite_reaction_kind = ReactionKind.DISLIKE if reaction["kind"] == ReactionKind.LIKE.name else ReactionKind.LIKE - opposite_reaction = session.query(Reaction).where( + opposite_reaction_kind = ( + ReactionKind.DISLIKE + if reaction["kind"] == ReactionKind.LIKE.name + else ReactionKind.LIKE + ) + opposite_reaction = ( + session.query(Reaction) + .where( and_( Reaction.shout == reaction["shout"], Reaction.createdBy == auth.user_id, Reaction.kind == opposite_reaction_kind, - Reaction.replyTo == reaction.get("replyTo") + Reaction.replyTo == reaction.get("replyTo"), ) - ).first() + ) + .first() + ) if opposite_reaction is not None: session.delete(opposite_reaction) @@ -235,11 +244,7 @@ async def create_reaction(_, info, reaction): except Exception as e: print(f"[resolvers.reactions] error on reactions autofollowing: {e}") - rdict['stat'] = { - "commented": 0, - "reacted": 0, - "rating": 0 - } + rdict['stat'] = {"commented": 0, "reacted": 0, "rating": 0} return {"reaction": rdict} @@ -269,11 +274,7 @@ async def update_reaction(_, info, id, reaction={}): if reaction.get("range"): r.range = reaction.get("range") session.commit() - r.stat = { - "commented": commented_stat, - "reacted": reacted_stat, - "rating": rating_stat - } + r.stat = {"commented": commented_stat, "reacted": reacted_stat, "rating": rating_stat} return {"reaction": r} @@ -290,17 +291,12 @@ async def delete_reaction(_, info, id): if r.createdBy != auth.user_id: return {"error": "access denied"} - if r.kind in [ - ReactionKind.LIKE, - ReactionKind.DISLIKE - ]: + if r.kind in [ReactionKind.LIKE, ReactionKind.DISLIKE]: session.delete(r) else: r.deletedAt = datetime.now(tz=timezone.utc) session.commit() - return { - "reaction": r - } + return {"reaction": r} @query.field("loadReactionsBy") @@ -321,12 +317,10 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0): :return: Reaction[] """ - q = select( - Reaction, User, Shout - ).join( - User, Reaction.createdBy == User.id - ).join( - Shout, Reaction.shout == Shout.id + q = ( + select(Reaction, User, Shout) + .join(User, Reaction.createdBy == User.id) + .join(Shout, Reaction.shout == Shout.id) ) if by.get("shout"): @@ -354,11 +348,7 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0): order_way = asc if by.get("sort", "").startswith("-") else desc order_field = by.get("sort", "").replace('-', '') or Reaction.createdAt - q = q.group_by( - Reaction.id, User.id, Shout.id - ).order_by( - order_way(order_field) - ) + q = q.group_by(Reaction.id, User.id, Shout.id).order_by(order_way(order_field)) q = add_reaction_stat_columns(q) @@ -367,13 +357,15 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0): reactions = [] with local_session() as session: - for [reaction, user, shout, reacted_stat, commented_stat, rating_stat] in session.execute(q): + for [reaction, user, shout, reacted_stat, commented_stat, rating_stat] in session.execute( + q + ): reaction.createdBy = user reaction.shout = shout reaction.stat = { "rating": rating_stat, "commented": commented_stat, - "reacted": reacted_stat + "reacted": reacted_stat, } reaction.kind = reaction.kind.name diff --git a/resolvers/zine/topics.py b/resolvers/zine/topics.py index f354a7b4..72ecf9ac 100644 --- a/resolvers/zine/topics.py +++ b/resolvers/zine/topics.py @@ -1,24 +1,25 @@ -from sqlalchemy import and_, select, distinct, func 
+from sqlalchemy import and_, distinct, func, select from sqlalchemy.orm import aliased from auth.authenticate import login_required from base.orm import local_session from base.resolvers import mutation, query -from orm.shout import ShoutTopic, ShoutAuthor -from orm.topic import Topic, TopicFollower from orm import User +from orm.shout import ShoutAuthor, ShoutTopic +from orm.topic import Topic, TopicFollower def add_topic_stat_columns(q): aliased_shout_author = aliased(ShoutAuthor) aliased_topic_follower = aliased(TopicFollower) - q = q.outerjoin(ShoutTopic, Topic.id == ShoutTopic.topic).add_columns( - func.count(distinct(ShoutTopic.shout)).label('shouts_stat') - ).outerjoin(aliased_shout_author, ShoutTopic.shout == aliased_shout_author.shout).add_columns( - func.count(distinct(aliased_shout_author.user)).label('authors_stat') - ).outerjoin(aliased_topic_follower).add_columns( - func.count(distinct(aliased_topic_follower.follower)).label('followers_stat') + q = ( + q.outerjoin(ShoutTopic, Topic.id == ShoutTopic.topic) + .add_columns(func.count(distinct(ShoutTopic.shout)).label('shouts_stat')) + .outerjoin(aliased_shout_author, ShoutTopic.shout == aliased_shout_author.shout) + .add_columns(func.count(distinct(aliased_shout_author.user)).label('authors_stat')) + .outerjoin(aliased_topic_follower) + .add_columns(func.count(distinct(aliased_topic_follower.follower)).label('followers_stat')) ) q = q.group_by(Topic.id) @@ -28,11 +29,7 @@ def add_topic_stat_columns(q): def add_stat(topic, stat_columns): [shouts_stat, authors_stat, followers_stat] = stat_columns - topic.stat = { - "shouts": shouts_stat, - "authors": authors_stat, - "followers": followers_stat - } + topic.stat = {"shouts": shouts_stat, "authors": authors_stat, "followers": followers_stat} return topic @@ -133,12 +130,10 @@ def topic_unfollow(user_id, slug): try: with local_session() as session: sub = ( - session.query(TopicFollower).join(Topic).filter( - and_( - TopicFollower.follower == user_id, - Topic.slug == slug - ) - ).first() + session.query(TopicFollower) + .join(Topic) + .filter(and_(TopicFollower.follower == user_id, Topic.slug == slug)) + .first() ) if sub: session.delete(sub) diff --git a/server.py b/server.py index 753c60ae..48186da0 100644 --- a/server.py +++ b/server.py @@ -1,8 +1,9 @@ -import sys import os +import sys + import uvicorn -from settings import PORT, DEV_SERVER_PID_FILE_NAME +from settings import DEV_SERVER_PID_FILE_NAME, PORT def exception_handler(exception_type, exception, traceback, debug_hook=sys.excepthook): @@ -16,41 +17,30 @@ log_settings = { 'default': { '()': 'uvicorn.logging.DefaultFormatter', 'fmt': '%(levelprefix)s %(message)s', - 'use_colors': None + 'use_colors': None, }, 'access': { '()': 'uvicorn.logging.AccessFormatter', - 'fmt': '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s' - } + 'fmt': '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s', + }, }, 'handlers': { 'default': { 'formatter': 'default', 'class': 'logging.StreamHandler', - 'stream': 'ext://sys.stderr' + 'stream': 'ext://sys.stderr', }, 'access': { 'formatter': 'access', 'class': 'logging.StreamHandler', - 'stream': 'ext://sys.stdout' - } + 'stream': 'ext://sys.stdout', + }, }, 'loggers': { - 'uvicorn': { - 'handlers': ['default'], - 'level': 'INFO' - }, - 'uvicorn.error': { - 'level': 'INFO', - 'handlers': ['default'], - 'propagate': True - }, - 'uvicorn.access': { - 'handlers': ['access'], - 'level': 'INFO', - 'propagate': False - } - } + 'uvicorn': {'handlers': ['default'], 
'level': 'INFO'}, + 'uvicorn.error': {'level': 'INFO', 'handlers': ['default'], 'propagate': True}, + 'uvicorn.access': {'handlers': ['access'], 'level': 'INFO', 'propagate': False}, + }, } local_headers = [ @@ -86,24 +76,20 @@ if __name__ == "__main__": # log_config=log_settings, log_level=None, access_log=True, - reload=want_reload + reload=want_reload, ) # , ssl_keyfile="discours.key", ssl_certfile="discours.crt") elif x == "migrate": from migration import process + print("MODE: MIGRATE") process() elif x == "bson": from migration.bson2json import json_tables + print("MODE: BSON") json_tables() else: sys.excepthook = exception_handler - uvicorn.run( - "main:app", - host="0.0.0.0", - port=PORT, - proxy_headers=True, - server_header=True - ) + uvicorn.run("main:app", host="0.0.0.0", port=PORT, proxy_headers=True, server_header=True) diff --git a/services/following.py b/services/following.py index 8410eb2d..8261d696 100644 --- a/services/following.py +++ b/services/following.py @@ -18,12 +18,7 @@ class Following: class FollowingManager: lock = asyncio.Lock() - data = { - 'author': [], - 'topic': [], - 'shout': [], - 'chat': [] - } + data = {'author': [], 'topic': [], 'shout': [], 'chat': []} @staticmethod async def register(kind, uid): diff --git a/services/main.py b/services/main.py index 10301b86..98fddcc1 100644 --- a/services/main.py +++ b/services/main.py @@ -1,6 +1,6 @@ +from base.orm import local_session from services.search import SearchService from services.stat.viewed import ViewedStorage -from base.orm import local_session async def storages_init(): diff --git a/services/notifications/notification_service.py b/services/notifications/notification_service.py index 7e92aa95..8467e836 100644 --- a/services/notifications/notification_service.py +++ b/services/notifications/notification_service.py @@ -5,26 +5,18 @@ from datetime import datetime, timezone from sqlalchemy import and_ from base.orm import local_session -from orm import Reaction, Shout, Notification, User +from orm import Notification, Reaction, Shout, User from orm.notification import NotificationType from orm.reaction import ReactionKind from services.notifications.sse import connection_manager def shout_to_shout_data(shout): - return { - "title": shout.title, - "slug": shout.slug - } + return {"title": shout.title, "slug": shout.slug} def user_to_user_data(user): - return { - "id": user.id, - "name": user.name, - "slug": user.slug, - "userpic": user.userpic - } + return {"id": user.id, "name": user.name, "slug": user.slug, "userpic": user.userpic} def update_prev_notification(notification, user, reaction): @@ -57,34 +49,45 @@ class NewReactionNotificator: if reaction.kind == ReactionKind.COMMENT: parent_reaction = None if reaction.replyTo: - parent_reaction = session.query(Reaction).where(Reaction.id == reaction.replyTo).one() + parent_reaction = ( + session.query(Reaction).where(Reaction.id == reaction.replyTo).one() + ) if parent_reaction.createdBy != reaction.createdBy: - prev_new_reply_notification = session.query(Notification).where( - and_( - Notification.user == shout.createdBy, - Notification.type == NotificationType.NEW_REPLY, - Notification.shout == shout.id, - Notification.reaction == parent_reaction.id, - Notification.seen == False + prev_new_reply_notification = ( + session.query(Notification) + .where( + and_( + Notification.user == shout.createdBy, + Notification.type == NotificationType.NEW_REPLY, + Notification.shout == shout.id, + Notification.reaction == parent_reaction.id, + Notification.seen == 
False, + ) ) - ).first() + .first() + ) if prev_new_reply_notification: update_prev_notification(prev_new_reply_notification, user, reaction) else: - reply_notification_data = json.dumps({ - "shout": shout_to_shout_data(shout), - "users": [user_to_user_data(user)], - "reactionIds": [reaction.id] - }, ensure_ascii=False) + reply_notification_data = json.dumps( + { + "shout": shout_to_shout_data(shout), + "users": [user_to_user_data(user)], + "reactionIds": [reaction.id], + }, + ensure_ascii=False, + ) - reply_notification = Notification.create(**{ - "user": parent_reaction.createdBy, - "type": NotificationType.NEW_REPLY, - "shout": shout.id, - "reaction": parent_reaction.id, - "data": reply_notification_data - }) + reply_notification = Notification.create( + **{ + "user": parent_reaction.createdBy, + "type": NotificationType.NEW_REPLY, + "shout": shout.id, + "reaction": parent_reaction.id, + "data": reply_notification_data, + } + ) session.add(reply_notification) @@ -93,30 +96,39 @@ class NewReactionNotificator: if reaction.createdBy != shout.createdBy and ( parent_reaction is None or parent_reaction.createdBy != shout.createdBy ): - prev_new_comment_notification = session.query(Notification).where( - and_( - Notification.user == shout.createdBy, - Notification.type == NotificationType.NEW_COMMENT, - Notification.shout == shout.id, - Notification.seen == False + prev_new_comment_notification = ( + session.query(Notification) + .where( + and_( + Notification.user == shout.createdBy, + Notification.type == NotificationType.NEW_COMMENT, + Notification.shout == shout.id, + Notification.seen == False, + ) ) - ).first() + .first() + ) if prev_new_comment_notification: update_prev_notification(prev_new_comment_notification, user, reaction) else: - notification_data_string = json.dumps({ - "shout": shout_to_shout_data(shout), - "users": [user_to_user_data(user)], - "reactionIds": [reaction.id] - }, ensure_ascii=False) + notification_data_string = json.dumps( + { + "shout": shout_to_shout_data(shout), + "users": [user_to_user_data(user)], + "reactionIds": [reaction.id], + }, + ensure_ascii=False, + ) - author_notification = Notification.create(**{ - "user": shout.createdBy, - "type": NotificationType.NEW_COMMENT, - "shout": shout.id, - "data": notification_data_string - }) + author_notification = Notification.create( + **{ + "user": shout.createdBy, + "type": NotificationType.NEW_COMMENT, + "shout": shout.id, + "data": notification_data_string, + } + ) session.add(author_notification) diff --git a/services/notifications/sse.py b/services/notifications/sse.py index 085dbde0..55cae575 100644 --- a/services/notifications/sse.py +++ b/services/notifications/sse.py @@ -1,8 +1,8 @@ +import asyncio import json from sse_starlette.sse import EventSourceResponse from starlette.requests import Request -import asyncio class ConnectionManager: @@ -28,9 +28,7 @@ class ConnectionManager: return for connection in self.connections_by_user_id[user_id]: - data = { - "type": "newNotifications" - } + data = {"type": "newNotifications"} data_string = json.dumps(data, ensure_ascii=False) await connection.put(data_string) diff --git a/services/search.py b/services/search.py index 834e5bf7..d1748cdd 100644 --- a/services/search.py +++ b/services/search.py @@ -1,5 +1,6 @@ import asyncio import json + from base.redis import redis from orm.shout import Shout from resolvers.zine.load import load_shouts_by @@ -20,12 +21,7 @@ class SearchService: cached = await redis.execute("GET", text) if not cached: async with 
SearchService.lock: - options = { - "title": text, - "body": text, - "limit": limit, - "offset": offset - } + options = {"title": text, "body": text, "limit": limit, "offset": offset} payload = await load_shouts_by(None, None, options) await redis.execute("SET", text, json.dumps(payload)) return payload diff --git a/services/stat/viewed.py b/services/stat/viewed.py index 905ade43..c9f9a6db 100644 --- a/services/stat/viewed.py +++ b/services/stat/viewed.py @@ -1,6 +1,6 @@ import asyncio import time -from datetime import timedelta, timezone, datetime +from datetime import datetime, timedelta, timezone from os import environ, path from ssl import create_default_context @@ -9,10 +9,11 @@ from gql.transport.aiohttp import AIOHTTPTransport from sqlalchemy import func from base.orm import local_session -from orm import User, Topic -from orm.shout import ShoutTopic, Shout +from orm import Topic, User +from orm.shout import Shout, ShoutTopic -load_facts = gql(""" +load_facts = gql( + """ query getDomains { domains { id @@ -25,9 +26,11 @@ query getDomains { } } } -""") +""" +) -load_pages = gql(""" +load_pages = gql( + """ query getDomains { domains { title @@ -41,7 +44,8 @@ query getDomains { } } } -""") +""" +) schema_str = open(path.dirname(__file__) + '/ackee.graphql').read() token = environ.get("ACKEE_TOKEN", "") @@ -50,10 +54,8 @@ def create_client(headers=None, schema=None): return Client( schema=schema, transport=AIOHTTPTransport( - url="https://ackee.discours.io/api", - ssl=create_default_context(), - headers=headers - ) + url="https://ackee.discours.io/api", ssl=create_default_context(), headers=headers + ), ) @@ -71,13 +73,13 @@ class ViewedStorage: @staticmethod async def init(): - """ graphql client connection using permanent token """ + """graphql client connection using permanent token""" self = ViewedStorage async with self.lock: if token: - self.client = create_client({ - "Authorization": "Bearer %s" % str(token) - }, schema=schema_str) + self.client = create_client( + {"Authorization": "Bearer %s" % str(token)}, schema=schema_str + ) print("[stat.viewed] * authorized permanentely by ackee.discours.io: %s" % token) else: print("[stat.viewed] * please set ACKEE_TOKEN") @@ -85,7 +87,7 @@ class ViewedStorage: @staticmethod async def update_pages(): - """ query all the pages from ackee sorted by views count """ + """query all the pages from ackee sorted by views count""" print("[stat.viewed] ⎧ updating ackee pages data ---") start = time.time() self = ViewedStorage @@ -118,7 +120,7 @@ class ViewedStorage: # unused yet @staticmethod async def get_shout(shout_slug): - """ getting shout views metric by slug """ + """getting shout views metric by slug""" self = ViewedStorage async with self.lock: shout_views = self.by_shouts.get(shout_slug) @@ -136,7 +138,7 @@ class ViewedStorage: @staticmethod async def get_topic(topic_slug): - """ getting topic views value summed """ + """getting topic views value summed""" self = ViewedStorage topic_views = 0 async with self.lock: @@ -146,18 +148,22 @@ class ViewedStorage: @staticmethod def update_topics(session, shout_slug): - """ updates topics counters by shout slug """ + """updates topics counters by shout slug""" self = ViewedStorage - for [shout_topic, topic] in session.query(ShoutTopic, Topic).join(Topic).join(Shout).where( - Shout.slug == shout_slug - ).all(): + for [shout_topic, topic] in ( + session.query(ShoutTopic, Topic) + .join(Topic) + .join(Shout) + .where(Shout.slug == shout_slug) + .all() + ): if not self.by_topics.get(topic.slug): 
self.by_topics[topic.slug] = {} self.by_topics[topic.slug][shout_slug] = self.by_shouts[shout_slug] @staticmethod async def increment(shout_slug, amount=1, viewer='ackee'): - """ the only way to change views counter """ + """the only way to change views counter""" self = ViewedStorage async with self.lock: # TODO optimize, currenty we execute 1 DB transaction per shout @@ -185,7 +191,7 @@ class ViewedStorage: @staticmethod async def worker(): - """ async task worker """ + """async task worker""" failed = 0 self = ViewedStorage if self.disabled: @@ -205,9 +211,10 @@ class ViewedStorage: if failed == 0: when = datetime.now(timezone.utc) + timedelta(seconds=self.period) t = format(when.astimezone().isoformat()) - print("[stat.viewed] ⎩ next update: %s" % ( - t.split("T")[0] + " " + t.split("T")[1].split(".")[0] - )) + print( + "[stat.viewed] ⎩ next update: %s" + % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0]) + ) await asyncio.sleep(self.period) else: await asyncio.sleep(10) diff --git a/settings.py b/settings.py index 270b4551..bd096081 100644 --- a/settings.py +++ b/settings.py @@ -3,8 +3,9 @@ from os import environ PORT = 8080 DB_URL = ( - environ.get("DATABASE_URL") or environ.get("DB_URL") or - "postgresql://postgres@localhost:5432/discoursio" + environ.get("DATABASE_URL") + or environ.get("DB_URL") + or "postgresql://postgres@localhost:5432/discoursio" ) JWT_ALGORITHM = "HS256" JWT_SECRET_KEY = environ.get("JWT_SECRET_KEY") or "8f1bd7696ffb482d8486dfbc6e7d16dd-secret-key" diff --git a/validations/auth.py b/validations/auth.py index 216d7dcb..73b83079 100644 --- a/validations/auth.py +++ b/validations/auth.py @@ -1,4 +1,5 @@ from typing import Optional, Text + from pydantic import BaseModel diff --git a/validations/inbox.py b/validations/inbox.py index d03cca05..58645dd9 100644 --- a/validations/inbox.py +++ b/validations/inbox.py @@ -1,4 +1,5 @@ -from typing import Optional, Text, List +from typing import List, Optional, Text + from pydantic import BaseModel