diff --git a/.flake8 b/.flake8
index 523cb30f..e82de95a 100644
--- a/.flake8
+++ b/.flake8
@@ -1,6 +1,6 @@
[flake8]
-ignore = E203
+ignore = E203,W504,W191,W503
exclude = .git,__pycache__,orm/rbac.py
-max-complexity = 15
-max-line-length = 100
+max-complexity = 10
+max-line-length = 108
indent-string = ' '
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 42569413..af489f3a 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -6,7 +6,7 @@ exclude: |
)
default_language_version:
- python: python3.10
+ python: python3.8
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
@@ -17,23 +17,28 @@ repos:
- id: check-docstring-first
- id: check-json
- id: check-merge-conflict
+ - id: check-toml
- id: check-yaml
- id: end-of-file-fixer
- id: trailing-whitespace
-# - repo: https://github.com/timothycrosley/isort
-# rev: 5.12.0
-# hooks:
-# - id: isort
+ - repo: https://github.com/timothycrosley/isort
+ rev: 5.5.3
+ hooks:
+ - id: isort
- repo: https://github.com/ambv/black
- rev: 23.9.1
+ rev: 20.8b1
hooks:
- id: black
args:
- --line-length=100
+ - --skip-string-normalization
- - repo: https://github.com/PyCQA/flake8
- rev: 6.1.0
+ - repo: https://gitlab.com/pycqa/flake8
+ rev: 3.8.3
hooks:
- id: flake8
+ args:
+ - --max-line-length=100
+ - --disable=protected-access
diff --git a/Procfile b/Procfile
index ac9d762f..c5c1bfa8 100644
--- a/Procfile
+++ b/Procfile
@@ -1 +1,2 @@
web: python server.py
+
diff --git a/README.md b/README.md
index 6f57e39c..1a1ee0a4 100644
--- a/README.md
+++ b/README.md
@@ -35,13 +35,6 @@ pip install -r requirements.txt
python3 server.py dev
```
-# pre-commit hook
-
-```
-pip install -r requirements-dev.txt
-pre-commit install
-```
-
# How to do an authorized request
Put the header 'Authorization' with token from signIn query or registerUser mutation.
@@ -49,3 +42,4 @@ Put the header 'Authorization' with token from signIn query or registerUser muta
# How to debug Ackee
Set ACKEE_TOKEN var
+
diff --git a/ai/preprocess.py b/ai/preprocess.py
index 82d06f71..afd8dbd8 100644
--- a/ai/preprocess.py
+++ b/ai/preprocess.py
@@ -1,28 +1,28 @@
+import re
+import nltk
from bs4 import BeautifulSoup
from nltk.corpus import stopwords
from pymystem3 import Mystem
from string import punctuation
-
-import nltk
-import re
+from transformers import BertTokenizer
nltk.download("stopwords")
def get_clear_text(text):
- soup = BeautifulSoup(text, "html.parser")
+ soup = BeautifulSoup(text, 'html.parser')
# extract the plain text from the HTML document without tags
- clear_text = ""
+ clear_text = ''
for tag in soup.find_all():
- clear_text += tag.string or ""
+ clear_text += tag.string or ''
- clear_text = re.sub(pattern="[\u202F\u00A0\n]+", repl=" ", string=clear_text)
+ clear_text = re.sub(pattern='[\u202F\u00A0\n]+', repl=' ', string=clear_text)
# only words
- clear_text = re.sub(pattern="[^A-ZА-ЯЁ -]", repl="", string=clear_text, flags=re.IGNORECASE)
+ clear_text = re.sub(pattern='[^A-ZА-ЯЁ -]', repl='', string=clear_text, flags=re.IGNORECASE)
- clear_text = re.sub(pattern=r"\s+", repl=" ", string=clear_text)
+ clear_text = re.sub(pattern='\s+', repl=' ', string=clear_text)
clear_text = clear_text.lower()
@@ -30,11 +30,9 @@ def get_clear_text(text):
russian_stopwords = stopwords.words("russian")
tokens = mystem.lemmatize(clear_text)
- tokens = [
- token
- for token in tokens
- if token not in russian_stopwords and token != " " and token.strip() not in punctuation
- ]
+ tokens = [token for token in tokens if token not in russian_stopwords \
+ and token != " " \
+ and token.strip() not in punctuation]
clear_text = " ".join(tokens)
diff --git a/alembic/env.py b/alembic/env.py
index 58e3e200..c6d69a97 100644
--- a/alembic/env.py
+++ b/alembic/env.py
@@ -1,8 +1,11 @@
-from alembic import context
-from base.orm import Base
from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+
+from alembic import context
+
from settings import DB_URL
-from sqlalchemy import engine_from_config, pool
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
@@ -16,6 +19,7 @@ config.set_section_option(config.config_ini_section, "DB_URL", DB_URL)
if config.config_file_name is not None:
fileConfig(config.config_file_name)
+from base.orm import Base
target_metadata = [Base.metadata]
# other values from the config, defined by the needs of env.py,
@@ -62,7 +66,9 @@ def run_migrations_online() -> None:
)
with connectable.connect() as connection:
- context.configure(connection=connection, target_metadata=target_metadata)
+ context.configure(
+ connection=connection, target_metadata=target_metadata
+ )
with context.begin_transaction():
context.run_migrations()
diff --git a/alembic/versions/fe943b098418_init_alembic.py b/alembic/versions/fe943b098418_init_alembic.py
index 52796fea..4ec6d519 100644
--- a/alembic/versions/fe943b098418_init_alembic.py
+++ b/alembic/versions/fe943b098418_init_alembic.py
@@ -1,18 +1,18 @@
"""init alembic
Revision ID: fe943b098418
-Revises:
+Revises:
Create Date: 2023-08-19 01:37:57.031933
"""
from typing import Sequence, Union
-# import sqlalchemy as sa
+from alembic import op
+import sqlalchemy as sa
-# from alembic import op
# revision identifiers, used by Alembic.
-revision: str = "fe943b098418"
+revision: str = 'fe943b098418'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
diff --git a/auth/authenticate.py b/auth/authenticate.py
index 9e4c93fc..be4db2d2 100644
--- a/auth/authenticate.py
+++ b/auth/authenticate.py
@@ -1,70 +1,76 @@
-from auth.credentials import AuthCredentials, AuthUser
-from auth.tokenstorage import SessionToken
-from base.exceptions import OperationNotAllowed
-from base.orm import local_session
from functools import wraps
+from typing import Optional, Tuple
+
from graphql.type import GraphQLResolveInfo
-from orm.user import Role, User
-from settings import SESSION_TOKEN_HEADER
-from sqlalchemy.orm import exc, joinedload
+from sqlalchemy.orm import joinedload, exc
from starlette.authentication import AuthenticationBackend
from starlette.requests import HTTPConnection
-from typing import Optional, Tuple
+
+from auth.credentials import AuthCredentials, AuthUser
+from base.orm import local_session
+from orm.user import User, Role
+
+from settings import SESSION_TOKEN_HEADER
+from auth.tokenstorage import SessionToken
+from base.exceptions import OperationNotAllowed
class JWTAuthenticate(AuthenticationBackend):
async def authenticate(
self, request: HTTPConnection
) -> Optional[Tuple[AuthCredentials, AuthUser]]:
+
if SESSION_TOKEN_HEADER not in request.headers:
- return AuthCredentials(scopes={}), AuthUser(user_id=None, username="")
+ return AuthCredentials(scopes={}), AuthUser(user_id=None, username='')
token = request.headers.get(SESSION_TOKEN_HEADER)
if not token:
print("[auth.authenticate] no token in header %s" % SESSION_TOKEN_HEADER)
return AuthCredentials(scopes={}, error_message=str("no token")), AuthUser(
- user_id=None, username=""
+ user_id=None, username=''
)
- if len(token.split(".")) > 1:
+ if len(token.split('.')) > 1:
payload = await SessionToken.verify(token)
with local_session() as session:
try:
user = (
- session.query(User)
- .options(
+ session.query(User).options(
joinedload(User.roles).options(joinedload(Role.permissions)),
- joinedload(User.ratings),
- )
- .filter(User.id == payload.user_id)
- .one()
+ joinedload(User.ratings)
+ ).filter(
+ User.id == payload.user_id
+ ).one()
)
scopes = {} # TODO: integrate await user.get_permission()
return (
- AuthCredentials(user_id=payload.user_id, scopes=scopes, logged_in=True),
- AuthUser(user_id=user.id, username=""),
+ AuthCredentials(
+ user_id=payload.user_id,
+ scopes=scopes,
+ logged_in=True
+ ),
+ AuthUser(user_id=user.id, username=''),
)
except exc.NoResultFound:
pass
- return AuthCredentials(scopes={}, error_message=str("Invalid token")), AuthUser(
- user_id=None, username=""
- )
+ return AuthCredentials(scopes={}, error_message=str('Invalid token')), AuthUser(user_id=None, username='')
def login_required(func):
@wraps(func)
async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
- # debug only
- # print('[auth.authenticate] login required for %r with info %r' % (func, info))
+ # print('[auth.authenticate] login required for %r with info %r' % (func, info)) # debug only
auth: AuthCredentials = info.context["request"].auth
# print(auth)
if not auth or not auth.logged_in:
# raise Unauthorized(auth.error_message or "Please login")
- return {"error": "Please login first"}
+ return {
+ "error": "Please login first"
+ }
return await func(parent, info, *args, **kwargs)
return wrap
@@ -73,9 +79,7 @@ def login_required(func):
def permission_required(resource, operation, func):
@wraps(func)
async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
- print(
- "[auth.authenticate] permission_required for %r with info %r" % (func, info)
- ) # debug only
+ print('[auth.authenticate] permission_required for %r with info %r' % (func, info)) # debug only
auth: AuthCredentials = info.context["request"].auth
if not auth.logged_in:
raise OperationNotAllowed(auth.error_message or "Please login")
diff --git a/auth/credentials.py b/auth/credentials.py
index 856c2374..9045b7a4 100644
--- a/auth/credentials.py
+++ b/auth/credentials.py
@@ -1,6 +1,7 @@
-from pydantic import BaseModel
from typing import List, Optional, Text
+from pydantic import BaseModel
+
# from base.exceptions import Unauthorized
@@ -22,7 +23,9 @@ class AuthCredentials(BaseModel):
async def permissions(self) -> List[Permission]:
if self.user_id is None:
# raise Unauthorized("Please login first")
- return {"error": "Please login first"}
+ return {
+ "error": "Please login first"
+ }
else:
# TODO: implement permissions logix
print(self.user_id)
diff --git a/auth/email.py b/auth/email.py
index faa64725..7ca5d9bf 100644
--- a/auth/email.py
+++ b/auth/email.py
@@ -1,17 +1,20 @@
-from settings import MAILGUN_API_KEY, MAILGUN_DOMAIN
-
import requests
-api_url = "https://api.mailgun.net/v3/%s/messages" % (MAILGUN_DOMAIN or "discours.io")
-noreply = "discours.io \s*
",
r"
\s*
self.span_highlight = True
- elif self.current_class == "lead" and not self.inheader and not self.span_highlight:
+ elif (
+ self.current_class == "lead"
+ and not self.inheader
+ and not self.span_highlight
+ ):
# self.o("==") # NOTE: CriticMarkup {==
self.span_lead = True
else:
@@ -469,7 +479,11 @@ class HTML2Text(html.parser.HTMLParser):
and not self.span_lead
and not self.span_highlight
):
- if start and self.preceding_data and self.preceding_data[-1] == self.strong_mark[0]:
+ if (
+ start
+ and self.preceding_data
+ and self.preceding_data[-1] == self.strong_mark[0]
+ ):
strong = " " + self.strong_mark
self.preceding_data += " "
else:
@@ -534,8 +548,13 @@ class HTML2Text(html.parser.HTMLParser):
"href" in attrs
and not attrs["href"].startswith("#_ftn")
and attrs["href"] is not None
- and not (self.skip_internal_links and attrs["href"].startswith("#"))
- and not (self.ignore_mailto_links and attrs["href"].startswith("mailto:"))
+ and not (
+ self.skip_internal_links and attrs["href"].startswith("#")
+ )
+ and not (
+ self.ignore_mailto_links
+ and attrs["href"].startswith("mailto:")
+ )
):
self.astack.append(attrs)
self.maybe_automatic_link = attrs["href"]
@@ -619,7 +638,9 @@ class HTML2Text(html.parser.HTMLParser):
self.o("![" + escape_md(alt) + "]")
if self.inline_links:
href = attrs.get("href") or ""
- self.o("(" + escape_md(urlparse.urljoin(self.baseurl, href)) + ")")
+ self.o(
+ "(" + escape_md(urlparse.urljoin(self.baseurl, href)) + ")"
+ )
else:
i = self.previousIndex(attrs)
if i is not None:
@@ -675,7 +696,9 @@ class HTML2Text(html.parser.HTMLParser):
# WARNING: does not line up - s > 9 correctly.
parent_list = None
for list in self.list:
- self.o(" " if parent_list == "ol" and list.name == "ul" else " ")
+ self.o(
+ " " if parent_list == "ol" and list.name == "ul" else " "
+ )
parent_list = list.name
if li.name == "ul":
@@ -764,7 +787,9 @@ class HTML2Text(html.parser.HTMLParser):
self.pbr()
self.br_toggle = " "
- def o(self, data: str, puredata: bool = False, force: Union[bool, str] = False) -> None:
+ def o(
+ self, data: str, puredata: bool = False, force: Union[bool, str] = False
+ ) -> None:
"""
Deal with indentation and whitespace
"""
@@ -839,7 +864,9 @@ class HTML2Text(html.parser.HTMLParser):
self.out(" ")
self.space = False
- if self.a and ((self.p_p == 2 and self.links_each_paragraph) or force == "end"):
+ if self.a and (
+ (self.p_p == 2 and self.links_each_paragraph) or force == "end"
+ ):
if force == "end":
self.out("\n")
@@ -898,7 +925,11 @@ class HTML2Text(html.parser.HTMLParser):
if self.maybe_automatic_link is not None:
href = self.maybe_automatic_link
- if href == data and self.absolute_url_matcher.match(href) and self.use_automatic_links:
+ if (
+ href == data
+ and self.absolute_url_matcher.match(href)
+ and self.use_automatic_links
+ ):
self.o("<" + data + ">")
self.empty_link = False
return
@@ -969,7 +1000,9 @@ class HTML2Text(html.parser.HTMLParser):
self.inline_links = False
for para in text.split("\n"):
if len(para) > 0:
- if not skipwrap(para, self.wrap_links, self.wrap_list_items, self.wrap_tables):
+ if not skipwrap(
+ para, self.wrap_links, self.wrap_list_items, self.wrap_tables
+ ):
indent = ""
if para.startswith(" " + self.ul_item_mark):
# list item continuation: add a double indent to the
@@ -1010,7 +1043,9 @@ class HTML2Text(html.parser.HTMLParser):
return result
-def html2text(html: str, baseurl: str = "", bodywidth: Optional[int] = config.BODY_WIDTH) -> str:
+def html2text(
+ html: str, baseurl: str = "", bodywidth: Optional[int] = config.BODY_WIDTH
+) -> str:
h = html.strip() or ""
if h:
h = HTML2Text(baseurl=baseurl, bodywidth=bodywidth)
diff --git a/migration/html2text/cli.py b/migration/html2text/cli.py
index f6cf3c57..dbaba28b 100644
--- a/migration/html2text/cli.py
+++ b/migration/html2text/cli.py
@@ -1,8 +1,8 @@
-from . import __version__, config, HTML2Text
-
import argparse
import sys
+from . import HTML2Text, __version__, config
+
# noinspection DuplicatedCode
def main() -> None:
@@ -117,7 +117,10 @@ def main() -> None:
dest="images_with_size",
action="store_true",
default=config.IMAGES_WITH_SIZE,
- help=("Write image tags with height and width attrs as raw html to retain " "dimensions"),
+ help=(
+ "Write image tags with height and width attrs as raw html to retain "
+ "dimensions"
+ ),
)
p.add_argument(
"-g",
@@ -257,7 +260,9 @@ def main() -> None:
default=config.CLOSE_QUOTE,
help="The character used to close quotes",
)
- p.add_argument("--version", action="version", version=".".join(map(str, __version__)))
+ p.add_argument(
+ "--version", action="version", version=".".join(map(str, __version__))
+ )
p.add_argument("filename", nargs="?")
p.add_argument("encoding", nargs="?", default="utf-8")
args = p.parse_args()
diff --git a/migration/html2text/utils.py b/migration/html2text/utils.py
index 545bbd17..1cf22b52 100644
--- a/migration/html2text/utils.py
+++ b/migration/html2text/utils.py
@@ -1,10 +1,12 @@
-from . import config
+import html.entities
from typing import Dict, List, Optional
-import html.entities
+from . import config
unifiable_n = {
- html.entities.name2codepoint[k]: v for k, v in config.UNIFIABLE.items() if k != "nbsp"
+ html.entities.name2codepoint[k]: v
+ for k, v in config.UNIFIABLE.items()
+ if k != "nbsp"
}
@@ -154,7 +156,9 @@ def list_numbering_start(attrs: Dict[str, Optional[str]]) -> int:
return 0
-def skipwrap(para: str, wrap_links: bool, wrap_list_items: bool, wrap_tables: bool) -> bool:
+def skipwrap(
+ para: str, wrap_links: bool, wrap_list_items: bool, wrap_tables: bool
+) -> bool:
# If it appears to contain a link
# don't wrap
if not wrap_links and config.RE_LINK.search(para):
@@ -232,7 +236,9 @@ def reformat_table(lines: List[str], right_margin: int) -> List[str]:
max_width += [len(x) + right_margin for x in cols[-(num_cols - max_cols) :]]
max_cols = num_cols
- max_width = [max(len(x) + right_margin, old_len) for x, old_len in zip(cols, max_width)]
+ max_width = [
+ max(len(x) + right_margin, old_len) for x, old_len in zip(cols, max_width)
+ ]
# reformat
new_lines = []
@@ -241,13 +247,15 @@ def reformat_table(lines: List[str], right_margin: int) -> List[str]:
if set(line.strip()) == set("-|"):
filler = "-"
new_cols = [
- x.rstrip() + (filler * (M - len(x.rstrip()))) for x, M in zip(cols, max_width)
+ x.rstrip() + (filler * (M - len(x.rstrip())))
+ for x, M in zip(cols, max_width)
]
new_lines.append("|-" + "|".join(new_cols) + "|")
else:
filler = " "
new_cols = [
- x.rstrip() + (filler * (M - len(x.rstrip()))) for x, M in zip(cols, max_width)
+ x.rstrip() + (filler * (M - len(x.rstrip())))
+ for x, M in zip(cols, max_width)
]
new_lines.append("| " + "|".join(new_cols) + "|")
return new_lines
diff --git a/migration/tables/comments.py b/migration/tables/comments.py
index 13d2809d..82e32924 100644
--- a/migration/tables/comments.py
+++ b/migration/tables/comments.py
@@ -1,50 +1,65 @@
-from base.orm import local_session
from datetime import datetime, timezone
+
from dateutil.parser import parse as date_parse
+
+from base.orm import local_session
from migration.html2text import html2text
from orm.reaction import Reaction, ReactionKind
-from orm.shout import Shout, ShoutReactionsFollower
+from orm.shout import ShoutReactionsFollower
from orm.topic import TopicFollower
from orm.user import User
+from orm.shout import Shout
ts = datetime.now(tz=timezone.utc)
def auto_followers(session, topics, reaction_dict):
# creating shout's reactions following for reaction author
- following1 = (
- session.query(ShoutReactionsFollower)
- .where(ShoutReactionsFollower.follower == reaction_dict["createdBy"])
- .filter(ShoutReactionsFollower.shout == reaction_dict["shout"])
- .first()
- )
+ following1 = session.query(
+ ShoutReactionsFollower
+ ).where(
+ ShoutReactionsFollower.follower == reaction_dict["createdBy"]
+ ).filter(
+ ShoutReactionsFollower.shout == reaction_dict["shout"]
+ ).first()
if not following1:
following1 = ShoutReactionsFollower.create(
- follower=reaction_dict["createdBy"], shout=reaction_dict["shout"], auto=True
+ follower=reaction_dict["createdBy"],
+ shout=reaction_dict["shout"],
+ auto=True
)
session.add(following1)
# creating topics followings for reaction author
for t in topics:
- tf = (
- session.query(TopicFollower)
- .where(TopicFollower.follower == reaction_dict["createdBy"])
- .filter(TopicFollower.topic == t["id"])
- .first()
- )
+ tf = session.query(
+ TopicFollower
+ ).where(
+ TopicFollower.follower == reaction_dict["createdBy"]
+ ).filter(
+ TopicFollower.topic == t['id']
+ ).first()
if not tf:
topic_following = TopicFollower.create(
- follower=reaction_dict["createdBy"], topic=t["id"], auto=True
+ follower=reaction_dict["createdBy"],
+ topic=t['id'],
+ auto=True
)
session.add(topic_following)
def migrate_ratings(session, entry, reaction_dict):
for comment_rating_old in entry.get("ratings", []):
- rater = session.query(User).filter(User.oid == comment_rating_old["createdBy"]).first()
+ rater = (
+ session.query(User)
+ .filter(User.oid == comment_rating_old["createdBy"])
+ .first()
+ )
re_reaction_dict = {
"shout": reaction_dict["shout"],
"replyTo": reaction_dict["id"],
- "kind": ReactionKind.LIKE if comment_rating_old["value"] > 0 else ReactionKind.DISLIKE,
+ "kind": ReactionKind.LIKE
+ if comment_rating_old["value"] > 0
+ else ReactionKind.DISLIKE,
"createdBy": rater.id if rater else 1,
}
cts = comment_rating_old.get("createdAt")
@@ -53,15 +68,18 @@ def migrate_ratings(session, entry, reaction_dict):
try:
# creating reaction from old rating
rr = Reaction.create(**re_reaction_dict)
- following2 = (
- session.query(ShoutReactionsFollower)
- .where(ShoutReactionsFollower.follower == re_reaction_dict["createdBy"])
- .filter(ShoutReactionsFollower.shout == rr.shout)
- .first()
- )
+ following2 = session.query(
+ ShoutReactionsFollower
+ ).where(
+ ShoutReactionsFollower.follower == re_reaction_dict['createdBy']
+ ).filter(
+ ShoutReactionsFollower.shout == rr.shout
+ ).first()
if not following2:
following2 = ShoutReactionsFollower.create(
- follower=re_reaction_dict["createdBy"], shout=rr.shout, auto=True
+ follower=re_reaction_dict['createdBy'],
+ shout=rr.shout,
+ auto=True
)
session.add(following2)
session.add(rr)
@@ -132,7 +150,9 @@ async def migrate(entry, storage):
else:
stage = "author and old id found"
try:
- shout = session.query(Shout).where(Shout.slug == old_shout["slug"]).one()
+ shout = session.query(
+ Shout
+ ).where(Shout.slug == old_shout["slug"]).one()
if shout:
reaction_dict["shout"] = shout.id
reaction_dict["createdBy"] = author.id if author else 1
@@ -158,9 +178,9 @@ async def migrate(entry, storage):
def migrate_2stage(old_comment, idmap):
- if old_comment.get("body"):
- new_id = idmap.get(old_comment.get("oid"))
- new_id = idmap.get(old_comment.get("_id"))
+ if old_comment.get('body'):
+ new_id = idmap.get(old_comment.get('oid'))
+ new_id = idmap.get(old_comment.get('_id'))
if new_id:
new_replyto_id = None
old_replyto_id = old_comment.get("replyTo")
@@ -170,20 +190,17 @@ def migrate_2stage(old_comment, idmap):
comment = session.query(Reaction).where(Reaction.id == new_id).first()
try:
if new_replyto_id:
- new_reply = (
- session.query(Reaction).where(Reaction.id == new_replyto_id).first()
- )
+ new_reply = session.query(Reaction).where(Reaction.id == new_replyto_id).first()
if not new_reply:
print(new_replyto_id)
raise Exception("cannot find reply by id!")
comment.replyTo = new_reply.id
session.add(comment)
- srf = (
- session.query(ShoutReactionsFollower)
- .where(ShoutReactionsFollower.shout == comment.shout)
- .filter(ShoutReactionsFollower.follower == comment.createdBy)
- .first()
- )
+ srf = session.query(ShoutReactionsFollower).where(
+ ShoutReactionsFollower.shout == comment.shout
+ ).filter(
+ ShoutReactionsFollower.follower == comment.createdBy
+ ).first()
if not srf:
srf = ShoutReactionsFollower.create(
shout=comment.shout, follower=comment.createdBy, auto=True
diff --git a/migration/tables/content_items.py b/migration/tables/content_items.py
index 053a8a97..a2297d98 100644
--- a/migration/tables/content_items.py
+++ b/migration/tables/content_items.py
@@ -1,16 +1,15 @@
-from base.orm import local_session
from datetime import datetime, timezone
+import json
from dateutil.parser import parse as date_parse
-from migration.extract import extract_html, extract_media
-from orm.reaction import Reaction, ReactionKind
-from orm.shout import Shout, ShoutReactionsFollower, ShoutTopic
-from orm.topic import Topic, TopicFollower
-from orm.user import User
-from services.stat.viewed import ViewedStorage
from sqlalchemy.exc import IntegrityError
from transliterate import translit
-
-import json
+from base.orm import local_session
+from migration.extract import extract_html, extract_media
+from orm.reaction import Reaction, ReactionKind
+from orm.shout import Shout, ShoutTopic, ShoutReactionsFollower
+from orm.user import User
+from orm.topic import TopicFollower, Topic
+from services.stat.viewed import ViewedStorage
import re
OLD_DATE = "2016-03-05 22:22:00.350000"
@@ -34,7 +33,7 @@ def get_shout_slug(entry):
slug = friend.get("slug", "")
if slug:
break
- slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
+ slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
return slug
@@ -42,27 +41,27 @@ def create_author_from_app(app):
user = None
userdata = None
# check if email is used
- if app["email"]:
+ if app['email']:
with local_session() as session:
- user = session.query(User).where(User.email == app["email"]).first()
+ user = session.query(User).where(User.email == app['email']).first()
if not user:
# print('[migration] app %r' % app)
- name = app.get("name")
+ name = app.get('name')
if name:
slug = translit(name, "ru", reversed=True).lower()
- slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
- print("[migration] created slug %s" % slug)
+ slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
+ print('[migration] created slug %s' % slug)
# check if slug is used
if slug:
user = session.query(User).where(User.slug == slug).first()
# get slug from email
if user:
- slug = app["email"].split("@")[0]
+ slug = app['email'].split('@')[0]
user = session.query(User).where(User.slug == slug).first()
# one more try
if user:
- slug += "-author"
+ slug += '-author'
user = session.query(User).where(User.slug == slug).first()
# create user with application data
@@ -80,7 +79,7 @@ def create_author_from_app(app):
user = User.create(**userdata)
session.add(user)
session.commit()
- userdata["id"] = user.id
+ userdata['id'] = user.id
userdata = user.dict()
return userdata
@@ -92,12 +91,11 @@ async def create_shout(shout_dict):
s = Shout.create(**shout_dict)
author = s.authors[0]
with local_session() as session:
- srf = (
- session.query(ShoutReactionsFollower)
- .where(ShoutReactionsFollower.shout == s.id)
- .filter(ShoutReactionsFollower.follower == author.id)
- .first()
- )
+ srf = session.query(ShoutReactionsFollower).where(
+ ShoutReactionsFollower.shout == s.id
+ ).filter(
+ ShoutReactionsFollower.follower == author.id
+ ).first()
if not srf:
srf = ShoutReactionsFollower.create(shout=s.id, follower=author.id, auto=True)
session.add(srf)
@@ -118,14 +116,14 @@ async def get_user(entry, storage):
elif user_oid:
userdata = storage["users"]["by_oid"].get(user_oid)
if not userdata:
- print("no userdata by oid, anonymous")
+ print('no userdata by oid, anonymous')
userdata = anondict
print(app)
# cleanup slug
if userdata:
slug = userdata.get("slug", "")
if slug:
- slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
+ slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
userdata["slug"] = slug
else:
userdata = anondict
@@ -139,14 +137,11 @@ async def migrate(entry, storage):
r = {
"layout": type2layout[entry["type"]],
"title": entry["title"],
- "authors": [
- author,
- ],
+ "authors": [author, ],
"slug": get_shout_slug(entry),
"cover": (
- "https://images.discours.io/unsafe/" + entry["thumborId"]
- if entry.get("thumborId")
- else entry.get("image", {}).get("url")
+ "https://images.discours.io/unsafe/" +
+ entry["thumborId"] if entry.get("thumborId") else entry.get("image", {}).get("url")
),
"visibility": "public" if entry.get("published") else "community",
"publishedAt": date_parse(entry.get("publishedAt")) if entry.get("published") else None,
@@ -155,11 +150,11 @@ async def migrate(entry, storage):
"updatedAt": date_parse(entry["updatedAt"]) if "updatedAt" in entry else ts,
"createdBy": author.id,
"topics": await add_topics_follower(entry, storage, author),
- "body": extract_html(entry, cleanup=True),
+ "body": extract_html(entry, cleanup=True)
}
# main topic patch
- r["mainTopic"] = r["topics"][0]
+ r['mainTopic'] = r['topics'][0]
# published author auto-confirm
if entry.get("published"):
@@ -182,16 +177,14 @@ async def migrate(entry, storage):
shout_dict["oid"] = entry.get("_id", "")
shout = await create_shout(shout_dict)
except IntegrityError as e:
- print("[migration] create_shout integrity error", e)
+ print('[migration] create_shout integrity error', e)
shout = await resolve_create_shout(shout_dict)
except Exception as e:
raise Exception(e)
# udpate data
shout_dict = shout.dict()
- shout_dict["authors"] = [
- author.dict(),
- ]
+ shout_dict["authors"] = [author.dict(), ]
# shout topics aftermath
shout_dict["topics"] = await topics_aftermath(r, storage)
@@ -200,9 +193,7 @@ async def migrate(entry, storage):
await content_ratings_to_reactions(entry, shout_dict["slug"])
# shout views
- await ViewedStorage.increment(
- shout_dict["slug"], amount=entry.get("views", 1), viewer="old-discours"
- )
+ await ViewedStorage.increment(shout_dict["slug"], amount=entry.get("views", 1), viewer='old-discours')
# del shout_dict['ratings']
storage["shouts"]["by_oid"][entry["_id"]] = shout_dict
@@ -214,9 +205,7 @@ async def add_topics_follower(entry, storage, user):
topics = set([])
category = entry.get("category")
topics_by_oid = storage["topics"]["by_oid"]
- oids = [
- category,
- ] + entry.get("tags", [])
+ oids = [category, ] + entry.get("tags", [])
for toid in oids:
tslug = topics_by_oid.get(toid, {}).get("slug")
if tslug:
@@ -228,18 +217,23 @@ async def add_topics_follower(entry, storage, user):
try:
tpc = session.query(Topic).where(Topic.slug == tpcslug).first()
if tpc:
- tf = (
- session.query(TopicFollower)
- .where(TopicFollower.follower == user.id)
- .filter(TopicFollower.topic == tpc.id)
- .first()
- )
+ tf = session.query(
+ TopicFollower
+ ).where(
+ TopicFollower.follower == user.id
+ ).filter(
+ TopicFollower.topic == tpc.id
+ ).first()
if not tf:
- tf = TopicFollower.create(topic=tpc.id, follower=user.id, auto=True)
+ tf = TopicFollower.create(
+ topic=tpc.id,
+ follower=user.id,
+ auto=True
+ )
session.add(tf)
session.commit()
except IntegrityError:
- print("[migration.shout] hidden by topic " + tpc.slug)
+ print('[migration.shout] hidden by topic ' + tpc.slug)
# main topic
maintopic = storage["replacements"].get(topics_by_oid.get(category, {}).get("slug"))
if maintopic in ttt:
@@ -260,7 +254,7 @@ async def process_user(userdata, storage, oid):
if not user:
try:
slug = userdata["slug"].lower().strip()
- slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
+ slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
userdata["slug"] = slug
user = User.create(**userdata)
session.add(user)
@@ -288,9 +282,9 @@ async def resolve_create_shout(shout_dict):
s = session.query(Shout).filter(Shout.slug == shout_dict["slug"]).first()
bump = False
if s:
- if s.createdAt != shout_dict["createdAt"]:
+ if s.createdAt != shout_dict['createdAt']:
# create new with different slug
- shout_dict["slug"] += "-" + shout_dict["layout"]
+ shout_dict["slug"] += '-' + shout_dict["layout"]
try:
await create_shout(shout_dict)
except IntegrityError as e:
@@ -301,7 +295,10 @@ async def resolve_create_shout(shout_dict):
for key in shout_dict:
if key in s.__dict__:
if s.__dict__[key] != shout_dict[key]:
- print("[migration] shout already exists, but differs in %s" % key)
+ print(
+ "[migration] shout already exists, but differs in %s"
+ % key
+ )
bump = True
else:
print("[migration] shout already exists, but lacks %s" % key)
@@ -347,7 +344,9 @@ async def topics_aftermath(entry, storage):
)
if not shout_topic_new:
try:
- ShoutTopic.create(**{"shout": shout.id, "topic": new_topic.id})
+ ShoutTopic.create(
+ **{"shout": shout.id, "topic": new_topic.id}
+ )
except Exception:
print("[migration] shout topic error: " + newslug)
session.commit()
@@ -364,7 +363,9 @@ async def content_ratings_to_reactions(entry, slug):
with local_session() as session:
for content_rating in entry.get("ratings", []):
rater = (
- session.query(User).filter(User.oid == content_rating["createdBy"]).first()
+ session.query(User)
+ .filter(User.oid == content_rating["createdBy"])
+ .first()
) or User.default_user
shout = session.query(Shout).where(Shout.slug == slug).first()
cts = content_rating.get("createdAt")
@@ -374,7 +375,7 @@ async def content_ratings_to_reactions(entry, slug):
if content_rating["value"] > 0
else ReactionKind.DISLIKE,
"createdBy": rater.id,
- "shout": shout.id,
+ "shout": shout.id
}
reaction = (
session.query(Reaction)
diff --git a/migration/tables/remarks.py b/migration/tables/remarks.py
index e133050f..026b95c6 100644
--- a/migration/tables/remarks.py
+++ b/migration/tables/remarks.py
@@ -5,26 +5,34 @@ from orm.reaction import Reaction, ReactionKind
def migrate(entry, storage):
- post_oid = entry["contentItem"]
+ post_oid = entry['contentItem']
print(post_oid)
- shout_dict = storage["shouts"]["by_oid"].get(post_oid)
+ shout_dict = storage['shouts']['by_oid'].get(post_oid)
if shout_dict:
- print(shout_dict["body"])
+ print(shout_dict['body'])
remark = {
- "shout": shout_dict["id"],
- "body": extract_md(html2text(entry["body"]), shout_dict),
- "kind": ReactionKind.REMARK,
+ "shout": shout_dict['id'],
+ "body": extract_md(
+ html2text(entry['body']),
+ shout_dict
+ ),
+ "kind": ReactionKind.REMARK
}
- if entry.get("textBefore"):
- remark["range"] = (
- str(shout_dict["body"].index(entry["textBefore"] or ""))
- + ":"
- + str(
- shout_dict["body"].index(entry["textAfter"] or "")
- + len(entry["textAfter"] or "")
+ if entry.get('textBefore'):
+ remark['range'] = str(
+ shout_dict['body']
+ .index(
+ entry['textBefore'] or ''
+ )
+ ) + ':' + str(
+ shout_dict['body']
+ .index(
+ entry['textAfter'] or ''
+ ) + len(
+ entry['textAfter'] or ''
+ )
)
- )
with local_session() as session:
rmrk = Reaction.create(**remark)
diff --git a/migration/tables/topics.py b/migration/tables/topics.py
index ae9ddbda..17804376 100644
--- a/migration/tables/topics.py
+++ b/migration/tables/topics.py
@@ -10,7 +10,7 @@ def migrate(entry):
"slug": entry["slug"],
"oid": entry["_id"],
"title": entry["title"].replace(" ", " "),
- "body": extract_md(html2text(body_orig)),
+ "body": extract_md(html2text(body_orig))
}
with local_session() as session:
diff --git a/migration/tables/users.py b/migration/tables/users.py
index 40c80f21..3ccf9029 100644
--- a/migration/tables/users.py
+++ b/migration/tables/users.py
@@ -1,10 +1,11 @@
-from base.orm import local_session
+import re
+
from bs4 import BeautifulSoup
from dateutil.parser import parse
-from orm.user import AuthorFollower, User, UserRating
from sqlalchemy.exc import IntegrityError
-import re
+from base.orm import local_session
+from orm.user import AuthorFollower, User, UserRating
def migrate(entry):
@@ -22,7 +23,7 @@ def migrate(entry):
"muted": False, # amnesty
"links": [],
"name": "anonymous",
- "password": entry["services"]["password"].get("bcrypt"),
+ "password": entry["services"]["password"].get("bcrypt")
}
if "updatedAt" in entry:
@@ -32,13 +33,9 @@ def migrate(entry):
if entry.get("profile"):
# slug
slug = entry["profile"].get("path").lower()
- slug = re.sub("[^0-9a-zA-Z]+", "-", slug).strip()
+ slug = re.sub('[^0-9a-zA-Z]+', '-', slug).strip()
user_dict["slug"] = slug
- bio = (
- (entry.get("profile", {"bio": ""}).get("bio") or "")
- .replace(r"\(", "(")
- .replace(r"\)", ")")
- )
+ bio = (entry.get("profile", {"bio": ""}).get("bio") or "").replace('\(', '(').replace('\)', ')')
bio_text = BeautifulSoup(bio, features="lxml").text
if len(bio_text) > 120:
@@ -49,7 +46,8 @@ def migrate(entry):
# userpic
try:
user_dict["userpic"] = (
- "https://images.discours.io/unsafe/" + entry["profile"]["thumborId"]
+ "https://images.discours.io/unsafe/"
+ + entry["profile"]["thumborId"]
)
except KeyError:
try:
@@ -64,7 +62,11 @@ def migrate(entry):
name = (name + " " + ln) if ln else name
if not name:
name = slug if slug else "anonymous"
- name = entry["profile"]["path"].lower().strip().replace(" ", "-") if len(name) < 2 else name
+ name = (
+ entry["profile"]["path"].lower().strip().replace(" ", "-")
+ if len(name) < 2
+ else name
+ )
user_dict["name"] = name
# links
@@ -93,7 +95,9 @@ def migrate(entry):
except IntegrityError:
print("[migration] cannot create user " + user_dict["slug"])
with local_session() as session:
- old_user = session.query(User).filter(User.slug == user_dict["slug"]).first()
+ old_user = (
+ session.query(User).filter(User.slug == user_dict["slug"]).first()
+ )
old_user.oid = oid
old_user.password = user_dict["password"]
session.commit()
@@ -110,7 +114,7 @@ def post_migrate():
"slug": "old-discours",
"username": "old-discours",
"email": "old@discours.io",
- "name": "Просмотры на старой версии сайта",
+ "name": "Просмотры на старой версии сайта"
}
with local_session() as session:
@@ -143,8 +147,12 @@ def migrate_2stage(entry, id_map):
}
user_rating = UserRating.create(**user_rating_dict)
- if user_rating_dict["value"] > 0:
- af = AuthorFollower.create(author=user.id, follower=rater.id, auto=True)
+ if user_rating_dict['value'] > 0:
+ af = AuthorFollower.create(
+ author=user.id,
+ follower=rater.id,
+ auto=True
+ )
session.add(af)
session.add(user_rating)
session.commit()
diff --git a/orm/__init__.py b/orm/__init__.py
index 9f66f85c..53b13951 100644
--- a/orm/__init__.py
+++ b/orm/__init__.py
@@ -1,7 +1,7 @@
from base.orm import Base, engine
from orm.community import Community
from orm.notification import Notification
-from orm.rbac import Operation, Permission, Resource, Role
+from orm.rbac import Operation, Resource, Permission, Role
from orm.reaction import Reaction
from orm.shout import Shout
from orm.topic import Topic, TopicFollower
@@ -32,5 +32,5 @@ __all__ = [
"Notification",
"Reaction",
"UserRating",
- "init_tables",
+ "init_tables"
]
diff --git a/orm/collection.py b/orm/collection.py
index 1c432727..c9975b62 100644
--- a/orm/collection.py
+++ b/orm/collection.py
@@ -1,7 +1,9 @@
-from base.orm import Base
from datetime import datetime
+
from sqlalchemy import Column, DateTime, ForeignKey, String
+from base.orm import Base
+
class ShoutCollection(Base):
__tablename__ = "shout_collection"
diff --git a/orm/community.py b/orm/community.py
index c31732a0..b55b857f 100644
--- a/orm/community.py
+++ b/orm/community.py
@@ -1,6 +1,7 @@
-from base.orm import Base, local_session
from datetime import datetime
-from sqlalchemy import Column, DateTime, ForeignKey, String
+
+from sqlalchemy import Column, String, ForeignKey, DateTime
+from base.orm import Base, local_session
class CommunityFollower(Base):
@@ -9,7 +10,9 @@ class CommunityFollower(Base):
id = None # type: ignore
follower = Column(ForeignKey("user.id"), primary_key=True)
community = Column(ForeignKey("community.id"), primary_key=True)
- joinedAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ joinedAt = Column(
+ DateTime, nullable=False, default=datetime.now, comment="Created at"
+ )
# role = Column(ForeignKey(Role.id), nullable=False, comment="Role for member")
@@ -20,15 +23,19 @@ class Community(Base):
slug = Column(String, nullable=False, unique=True, comment="Slug")
desc = Column(String, nullable=False, default="")
pic = Column(String, nullable=False, default="")
- createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ createdAt = Column(
+ DateTime, nullable=False, default=datetime.now, comment="Created at"
+ )
@staticmethod
def init_table():
with local_session() as session:
- d = session.query(Community).filter(Community.slug == "discours").first()
+ d = (
+ session.query(Community).filter(Community.slug == "discours").first()
+ )
if not d:
d = Community.create(name="Дискурс", slug="discours")
session.add(d)
session.commit()
Community.default_community = d
- print("[orm] default community id: %s" % d.id)
+ print('[orm] default community id: %s' % d.id)
diff --git a/orm/notification.py b/orm/notification.py
index 2fdc9d5d..25f4e4f3 100644
--- a/orm/notification.py
+++ b/orm/notification.py
@@ -1,9 +1,10 @@
-from base.orm import Base
from datetime import datetime
-from enum import Enum as Enumeration
-from sqlalchemy import Boolean, Column, DateTime, Enum, ForeignKey, Integer
+from sqlalchemy import Column, Enum, ForeignKey, DateTime, Boolean, Integer
from sqlalchemy.dialects.postgresql import JSONB
+from base.orm import Base
+from enum import Enum as Enumeration
+
class NotificationType(Enumeration):
NEW_COMMENT = 1
diff --git a/orm/rbac.py b/orm/rbac.py
index bb7eb34b..29ade72e 100644
--- a/orm/rbac.py
+++ b/orm/rbac.py
@@ -1,8 +1,9 @@
-from base.orm import Base, local_session, REGISTRY
-from sqlalchemy import Column, ForeignKey, String, TypeDecorator, UniqueConstraint
+import warnings
+
+from sqlalchemy import String, Column, ForeignKey, UniqueConstraint, TypeDecorator
from sqlalchemy.orm import relationship
-import warnings
+from base.orm import Base, REGISTRY, engine, local_session
# Role Based Access Control #
@@ -120,23 +121,16 @@ class Operation(Base):
class Resource(Base):
__tablename__ = "resource"
- resourceClass = Column(String, nullable=False, unique=True, comment="Resource class")
+ resourceClass = Column(
+ String, nullable=False, unique=True, comment="Resource class"
+ )
name = Column(String, nullable=False, unique=True, comment="Resource name")
# TODO: community = Column(ForeignKey())
@staticmethod
def init_table():
with local_session() as session:
- for res in [
- "shout",
- "topic",
- "reaction",
- "chat",
- "message",
- "invite",
- "community",
- "user",
- ]:
+ for res in ["shout", "topic", "reaction", "chat", "message", "invite", "community", "user"]:
r = session.query(Resource).filter(Resource.name == res).first()
if not r:
r = Resource.create(name=res, resourceClass=res)
@@ -151,7 +145,9 @@ class Permission(Base):
{"extend_existing": True},
)
- role = Column(ForeignKey("role.id", ondelete="CASCADE"), nullable=False, comment="Role")
+ role = Column(
+ ForeignKey("role.id", ondelete="CASCADE"), nullable=False, comment="Role"
+ )
operation = Column(
ForeignKey("operation.id", ondelete="CASCADE"),
nullable=False,
@@ -164,14 +160,14 @@ class Permission(Base):
)
-# if __name__ == "__main__":
-# Base.metadata.create_all(engine)
-# ops = [
-# Permission(role=1, operation=1, resource=1),
-# Permission(role=1, operation=2, resource=1),
-# Permission(role=1, operation=3, resource=1),
-# Permission(role=1, operation=4, resource=1),
-# Permission(role=2, operation=4, resource=1),
-# ]
-# global_session.add_all(ops)
-# global_session.commit()
+if __name__ == "__main__":
+ Base.metadata.create_all(engine)
+ ops = [
+ Permission(role=1, operation=1, resource=1),
+ Permission(role=1, operation=2, resource=1),
+ Permission(role=1, operation=3, resource=1),
+ Permission(role=1, operation=4, resource=1),
+ Permission(role=2, operation=4, resource=1),
+ ]
+ global_session.add_all(ops)
+ global_session.commit()
diff --git a/orm/reaction.py b/orm/reaction.py
index 89fed9eb..1c129e23 100644
--- a/orm/reaction.py
+++ b/orm/reaction.py
@@ -1,8 +1,10 @@
-from base.orm import Base
from datetime import datetime
from enum import Enum as Enumeration
+
from sqlalchemy import Column, DateTime, Enum, ForeignKey, String
+from base.orm import Base
+
class ReactionKind(Enumeration):
AGREE = 1 # +1
@@ -25,14 +27,18 @@ class ReactionKind(Enumeration):
class Reaction(Base):
__tablename__ = "reaction"
body = Column(String, nullable=True, comment="Reaction Body")
- createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ createdAt = Column(
+ DateTime, nullable=False, default=datetime.now, comment="Created at"
+ )
createdBy = Column(ForeignKey("user.id"), nullable=False, index=True, comment="Sender")
updatedAt = Column(DateTime, nullable=True, comment="Updated at")
updatedBy = Column(ForeignKey("user.id"), nullable=True, index=True, comment="Last Editor")
deletedAt = Column(DateTime, nullable=True, comment="Deleted at")
deletedBy = Column(ForeignKey("user.id"), nullable=True, index=True, comment="Deleted by")
shout = Column(ForeignKey("shout.id"), nullable=False, index=True)
- replyTo = Column(ForeignKey("reaction.id"), nullable=True, comment="Reply to reaction ID")
+ replyTo = Column(
+ ForeignKey("reaction.id"), nullable=True, comment="Reply to reaction ID"
+ )
    range = Column(String, nullable=True, comment="Range in format <start>:<end>")
kind = Column(Enum(ReactionKind), nullable=False, comment="Reaction kind")
oid = Column(String, nullable=True, comment="Old ID")
diff --git a/orm/shout.py b/orm/shout.py
index 7a77b66c..22381d4c 100644
--- a/orm/shout.py
+++ b/orm/shout.py
@@ -1,10 +1,12 @@
-from base.orm import Base, local_session
from datetime import datetime
+
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String, JSON
+from sqlalchemy.orm import column_property, relationship
+
+from base.orm import Base, local_session
from orm.reaction import Reaction
from orm.topic import Topic
from orm.user import User
-from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, JSON, String
-from sqlalchemy.orm import column_property, relationship
class ShoutTopic(Base):
@@ -22,7 +24,9 @@ class ShoutReactionsFollower(Base):
follower = Column(ForeignKey("user.id"), primary_key=True, index=True)
shout = Column(ForeignKey("shout.id"), primary_key=True, index=True)
auto = Column(Boolean, nullable=False, default=False)
- createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ createdAt = Column(
+ DateTime, nullable=False, default=datetime.now, comment="Created at"
+ )
deletedAt = Column(DateTime, nullable=True)
@@ -68,7 +72,7 @@ class Shout(Base):
# TODO: these field should be used or modified
community = Column(ForeignKey("community.id"), default=1)
- lang = Column(String, nullable=False, default="ru", comment="Language")
+ lang = Column(String, nullable=False, default='ru', comment="Language")
mainTopic = Column(ForeignKey("topic.slug"), nullable=True)
visibility = Column(String, nullable=True) # owner authors community public
versionOf = Column(ForeignKey("shout.id"), nullable=True)
@@ -83,7 +87,7 @@ class Shout(Base):
"slug": "genesis-block",
"body": "",
"title": "Ничего",
- "lang": "ru",
+ "lang": "ru"
}
s = Shout.create(**entry)
session.add(s)
diff --git a/orm/topic.py b/orm/topic.py
index 6da93732..a37dc69a 100644
--- a/orm/topic.py
+++ b/orm/topic.py
@@ -1,7 +1,9 @@
-from base.orm import Base
from datetime import datetime
+
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, String
+from base.orm import Base
+
class TopicFollower(Base):
__tablename__ = "topic_followers"
@@ -9,7 +11,9 @@ class TopicFollower(Base):
id = None # type: ignore
follower = Column(ForeignKey("user.id"), primary_key=True, index=True)
topic = Column(ForeignKey("topic.id"), primary_key=True, index=True)
- createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ createdAt = Column(
+ DateTime, nullable=False, default=datetime.now, comment="Created at"
+ )
auto = Column(Boolean, nullable=False, default=False)
@@ -20,5 +24,7 @@ class Topic(Base):
title = Column(String, nullable=False, comment="Title")
body = Column(String, nullable=True, comment="Body")
pic = Column(String, nullable=True, comment="Picture")
- community = Column(ForeignKey("community.id"), default=1, comment="Community")
+ community = Column(
+ ForeignKey("community.id"), default=1, comment="Community"
+ )
oid = Column(String, nullable=True, comment="Old ID")
diff --git a/orm/user.py b/orm/user.py
index d76c4627..5aeab90e 100644
--- a/orm/user.py
+++ b/orm/user.py
@@ -1,10 +1,10 @@
-from base.orm import Base, local_session
from datetime import datetime
-from orm.rbac import Role
-from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer
+
from sqlalchemy import JSON as JSONType
-from sqlalchemy import String
+from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
+from base.orm import Base, local_session
+from orm.rbac import Role
class UserRating(Base):
@@ -34,7 +34,9 @@ class AuthorFollower(Base):
id = None # type: ignore
follower = Column(ForeignKey("user.id"), primary_key=True, index=True)
author = Column(ForeignKey("user.id"), primary_key=True, index=True)
- createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
+ createdAt = Column(
+ DateTime, nullable=False, default=datetime.now, comment="Created at"
+ )
auto = Column(Boolean, nullable=False, default=False)
@@ -52,8 +54,12 @@ class User(Base):
slug = Column(String, unique=True, comment="User's slug")
muted = Column(Boolean, default=False)
emailConfirmed = Column(Boolean, default=False)
- createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
- lastSeen = Column(DateTime, nullable=False, default=datetime.now, comment="Was online at")
+ createdAt = Column(
+ DateTime, nullable=False, default=datetime.now, comment="Created at"
+ )
+ lastSeen = Column(
+ DateTime, nullable=False, default=datetime.now, comment="Was online at"
+ )
deletedAt = Column(DateTime, nullable=True, comment="Deleted at")
links = Column(JSONType, nullable=True, comment="Links")
oauth = Column(String, nullable=True)
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 31fbe456..d221f3b0 100755
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -2,5 +2,3 @@ isort
brunette
flake8
mypy
-pre-commit
-black
diff --git a/requirements.txt b/requirements.txt
index a919e623..edbf46ff 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -18,12 +18,15 @@ transliterate~=1.10.2
requests~=2.28.1
bcrypt>=4.0.0
bson~=0.5.10
+flake8
DateTime~=4.7
asyncio~=3.4.3
python-dateutil~=2.8.2
beautifulsoup4~=4.11.1
lxml
sentry-sdk>=1.14.0
+# sse_starlette
+graphql-ws
nltk~=3.8.1
pymystem3~=0.2.0
transformers~=4.28.1
diff --git a/resetdb.sh b/resetdb.sh
index 40ba2e37..39b3b9b2 100755
--- a/resetdb.sh
+++ b/resetdb.sh
@@ -53,3 +53,4 @@ echo "Start migration"
python3 server.py migrate
if [ $? -ne 0 ]; then { echo "Migration failed, aborting." ; exit 1; } fi
echo 'Done!'
+
diff --git a/resolvers/__init__.py b/resolvers/__init__.py
new file mode 100644
index 00000000..5d753ac4
--- /dev/null
+++ b/resolvers/__init__.py
@@ -0,0 +1,67 @@
+from resolvers.auth import (
+ login,
+ sign_out,
+ is_email_used,
+ register_by_email,
+ confirm_email,
+ auth_send_link,
+ get_current_user,
+)
+
+from resolvers.create.migrate import markdown_body
+from resolvers.create.editor import create_shout, delete_shout, update_shout
+
+from resolvers.zine.profile import (
+ load_authors_by,
+ rate_user,
+ update_profile,
+ get_authors_all
+)
+
+from resolvers.zine.reactions import (
+ create_reaction,
+ delete_reaction,
+ update_reaction,
+ reactions_unfollow,
+ reactions_follow,
+ load_reactions_by
+)
+from resolvers.zine.topics import (
+ topic_follow,
+ topic_unfollow,
+ topics_by_author,
+ topics_by_community,
+ topics_all,
+ get_topic
+)
+
+from resolvers.zine.following import (
+ follow,
+ unfollow
+)
+
+from resolvers.zine.load import (
+ load_shout,
+ load_shouts_by
+)
+
+from resolvers.inbox.chats import (
+ create_chat,
+ delete_chat,
+ update_chat
+
+)
+from resolvers.inbox.messages import (
+ create_message,
+ delete_message,
+ update_message,
+ mark_as_read
+)
+from resolvers.inbox.load import (
+ load_chats,
+ load_messages_by,
+ load_recipients
+)
+from resolvers.inbox.search import search_recipients
+
+from resolvers.notifications import load_notifications
diff --git a/resolvers/auth.py b/resolvers/auth.py
index 3ba15d9d..17369b7a 100644
--- a/resolvers/auth.py
+++ b/resolvers/auth.py
@@ -1,29 +1,24 @@
# -*- coding: utf-8 -*-
+from datetime import datetime, timezone
+from urllib.parse import quote_plus
+
+from graphql.type import GraphQLResolveInfo
+from starlette.responses import RedirectResponse
+from transliterate import translit
+import re
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from auth.email import send_auth_email
from auth.identity import Identity, Password
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
-from base.exceptions import (
- BaseHttpException,
- InvalidPassword,
- InvalidToken,
- ObjectNotExist,
- Unauthorized,
-)
+from base.exceptions import (BaseHttpException, InvalidPassword, InvalidToken,
+ ObjectNotExist, Unauthorized)
from base.orm import local_session
from base.resolvers import mutation, query
-from datetime import datetime, timezone
-from graphql.type import GraphQLResolveInfo
from orm import Role, User
-from settings import FRONTEND_URL, SESSION_TOKEN_HEADER
-from starlette.responses import RedirectResponse
-from transliterate import translit
-from urllib.parse import quote_plus
-
-import re
+from settings import SESSION_TOKEN_HEADER, FRONTEND_URL
@mutation.field("getSession")
@@ -37,14 +32,17 @@ async def get_current_user(_, info):
user.lastSeen = datetime.now(tz=timezone.utc)
session.commit()
- return {"token": token, "user": user}
+ return {
+ "token": token,
+ "user": user
+ }
@mutation.field("confirmEmail")
async def confirm_email(_, info, token):
"""confirm owning email address"""
try:
- print("[resolvers.auth] confirm email by token")
+ print('[resolvers.auth] confirm email by token')
payload = JWTCodec.decode(token)
user_id = payload.user_id
await TokenStorage.get(f"{user_id}-{payload.username}-{token}")
@@ -55,7 +53,10 @@ async def confirm_email(_, info, token):
user.lastSeen = datetime.now(tz=timezone.utc)
session.add(user)
session.commit()
- return {"token": session_token, "user": user}
+ return {
+ "token": session_token,
+ "user": user
+ }
except InvalidToken as e:
raise InvalidToken(e.message)
except Exception as e:
@@ -67,9 +68,9 @@ async def confirm_email_handler(request):
token = request.path_params["token"] # one time
request.session["token"] = token
res = await confirm_email(None, {}, token)
- print("[resolvers.auth] confirm_email request: %r" % request)
+ print('[resolvers.auth] confirm_email request: %r' % request)
if "error" in res:
- raise BaseHttpException(res["error"])
+ raise BaseHttpException(res['error'])
else:
response = RedirectResponse(url=FRONTEND_URL)
response.set_cookie("token", res["token"]) # session token
@@ -86,22 +87,22 @@ def create_user(user_dict):
def generate_unique_slug(src):
- print("[resolvers.auth] generating slug from: " + src)
+ print('[resolvers.auth] generating slug from: ' + src)
slug = translit(src, "ru", reversed=True).replace(".", "-").lower()
- slug = re.sub("[^0-9a-zA-Z]+", "-", slug)
+ slug = re.sub('[^0-9a-zA-Z]+', '-', slug)
if slug != src:
- print("[resolvers.auth] translited name: " + slug)
+ print('[resolvers.auth] translited name: ' + slug)
c = 1
with local_session() as session:
user = session.query(User).where(User.slug == slug).first()
while user:
user = session.query(User).where(User.slug == slug).first()
- slug = slug + "-" + str(c)
+ slug = slug + '-' + str(c)
c += 1
if not user:
unique_slug = slug
- print("[resolvers.auth] " + unique_slug)
- return quote_plus(unique_slug.replace("'", "")).replace("+", "-")
+ print('[resolvers.auth] ' + unique_slug)
+ return quote_plus(unique_slug.replace('\'', '')).replace('+', '-')
@mutation.field("registerUser")
@@ -116,12 +117,12 @@ async def register_by_email(_, _info, email: str, password: str = "", name: str
slug = generate_unique_slug(name)
user = session.query(User).where(User.slug == slug).first()
if user:
- slug = generate_unique_slug(email.split("@")[0])
+ slug = generate_unique_slug(email.split('@')[0])
user_dict = {
"email": email,
"username": email, # will be used to store phone number or some messenger network id
"name": name,
- "slug": slug,
+ "slug": slug
}
if password:
user_dict["password"] = Password.encode(password)
@@ -171,7 +172,10 @@ async def login(_, info, email: str, password: str = "", lang: str = "ru"):
user = Identity.password(orm_user, password)
session_token = await TokenStorage.create_session(user)
print(f"[auth] user {email} authorized")
- return {"token": session_token, "user": user}
+ return {
+ "token": session_token,
+ "user": user
+ }
except InvalidPassword:
print(f"[auth] {email}: invalid password")
raise InvalidPassword("invalid password") # contains webserver status
diff --git a/resolvers/create/editor.py b/resolvers/create/editor.py
index 6ec690f7..c81ff404 100644
--- a/resolvers/create/editor.py
+++ b/resolvers/create/editor.py
@@ -1,13 +1,15 @@
+from datetime import datetime, timezone
+
+from sqlalchemy import and_
+from sqlalchemy.orm import joinedload
+
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.orm import local_session
from base.resolvers import mutation
-from datetime import datetime, timezone
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.topic import Topic
from resolvers.zine.reactions import reactions_follow, reactions_unfollow
-from sqlalchemy import and_
-from sqlalchemy.orm import joinedload
@mutation.field("createShout")
@@ -16,23 +18,21 @@ async def create_shout(_, info, inp):
auth: AuthCredentials = info.context["request"].auth
with local_session() as session:
- topics = session.query(Topic).filter(Topic.slug.in_(inp.get("topics", []))).all()
+ topics = session.query(Topic).filter(Topic.slug.in_(inp.get('topics', []))).all()
- new_shout = Shout.create(
- **{
- "title": inp.get("title"),
- "subtitle": inp.get("subtitle"),
- "lead": inp.get("lead"),
- "description": inp.get("description"),
- "body": inp.get("body", ""),
- "layout": inp.get("layout"),
- "authors": inp.get("authors", []),
- "slug": inp.get("slug"),
- "mainTopic": inp.get("mainTopic"),
- "visibility": "owner",
- "createdBy": auth.user_id,
- }
- )
+ new_shout = Shout.create(**{
+ "title": inp.get("title"),
+ "subtitle": inp.get('subtitle'),
+ "lead": inp.get('lead'),
+ "description": inp.get('description'),
+ "body": inp.get("body", ''),
+ "layout": inp.get("layout"),
+ "authors": inp.get("authors", []),
+ "slug": inp.get("slug"),
+ "mainTopic": inp.get("mainTopic"),
+ "visibility": "owner",
+ "createdBy": auth.user_id
+ })
for topic in topics:
t = ShoutTopic.create(topic=topic.id, shout=new_shout.id)
@@ -64,15 +64,10 @@ async def update_shout(_, info, shout_id, shout_input=None, publish=False):
auth: AuthCredentials = info.context["request"].auth
with local_session() as session:
- shout = (
- session.query(Shout)
- .options(
- joinedload(Shout.authors),
- joinedload(Shout.topics),
- )
- .filter(Shout.id == shout_id)
- .first()
- )
+ shout = session.query(Shout).options(
+ joinedload(Shout.authors),
+ joinedload(Shout.topics),
+ ).filter(Shout.id == shout_id).first()
if not shout:
return {"error": "shout not found"}
@@ -99,36 +94,24 @@ async def update_shout(_, info, shout_id, shout_input=None, publish=False):
session.commit()
for new_topic_to_link in new_topics_to_link:
- created_unlinked_topic = ShoutTopic.create(
- shout=shout.id, topic=new_topic_to_link.id
- )
+ created_unlinked_topic = ShoutTopic.create(shout=shout.id, topic=new_topic_to_link.id)
session.add(created_unlinked_topic)
- existing_topics_input = [
- topic_input for topic_input in topics_input if topic_input.get("id", 0) > 0
- ]
- existing_topic_to_link_ids = [
- existing_topic_input["id"]
- for existing_topic_input in existing_topics_input
- if existing_topic_input["id"] not in [topic.id for topic in shout.topics]
- ]
+ existing_topics_input = [topic_input for topic_input in topics_input if topic_input.get("id", 0) > 0]
+ existing_topic_to_link_ids = [existing_topic_input["id"] for existing_topic_input in existing_topics_input
+ if existing_topic_input["id"] not in [topic.id for topic in shout.topics]]
for existing_topic_to_link_id in existing_topic_to_link_ids:
- created_unlinked_topic = ShoutTopic.create(
- shout=shout.id, topic=existing_topic_to_link_id
- )
+ created_unlinked_topic = ShoutTopic.create(shout=shout.id, topic=existing_topic_to_link_id)
session.add(created_unlinked_topic)
- topic_to_unlink_ids = [
- topic.id
- for topic in shout.topics
- if topic.id not in [topic_input["id"] for topic_input in existing_topics_input]
- ]
+ topic_to_unlink_ids = [topic.id for topic in shout.topics
+ if topic.id not in [topic_input["id"] for topic_input in existing_topics_input]]
shout_topics_to_remove = session.query(ShoutTopic).filter(
and_(
ShoutTopic.shout == shout.id,
- ShoutTopic.topic.in_(topic_to_unlink_ids),
+ ShoutTopic.topic.in_(topic_to_unlink_ids)
)
)
@@ -137,13 +120,13 @@ async def update_shout(_, info, shout_id, shout_input=None, publish=False):
shout_input["mainTopic"] = shout_input["mainTopic"]["slug"]
- if shout_input["mainTopic"] == "":
+ if shout_input["mainTopic"] == '':
del shout_input["mainTopic"]
shout.update(shout_input)
updated = True
- if publish and shout.visibility == "owner":
+ if publish and shout.visibility == 'owner':
shout.visibility = "community"
shout.publishedAt = datetime.now(tz=timezone.utc)
updated = True
diff --git a/resolvers/create/migrate.py b/resolvers/create/migrate.py
index 028808b1..f16341f0 100644
--- a/resolvers/create/migrate.py
+++ b/resolvers/create/migrate.py
@@ -1,10 +1,11 @@
-# from base.resolvers import query
-# from migration.extract import extract_md
-# from resolvers.auth import login_required
-#
-#
-# @login_required
-# @query.field("markdownBody")
-# def markdown_body(_, info, body: str):
-# body = extract_md(body)
-# return body
+from auth.authenticate import login_required
+from base.resolvers import query
+from migration.extract import extract_md
+
+
+@query.field("markdownBody")
+@login_required
+def markdown_body(_, info, body: str):
+ body = extract_md(body)
+ return body
diff --git a/resolvers/inbox/chats.py b/resolvers/inbox/chats.py
index 95a31f69..853defab 100644
--- a/resolvers/inbox/chats.py
+++ b/resolvers/inbox/chats.py
@@ -1,13 +1,13 @@
+import json
+import uuid
+from datetime import datetime, timezone
+
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.redis import redis
from base.resolvers import mutation
-from datetime import datetime, timezone
from validations.inbox import Chat
-import json
-import uuid
-
@mutation.field("updateChat")
@login_required
@@ -24,24 +24,27 @@ async def update_chat(_, info, chat_new: Chat):
chat_id = chat_new["id"]
chat = await redis.execute("GET", f"chats/{chat_id}")
if not chat:
- return {"error": "chat not exist"}
+        return {
+            "error": "chat does not exist"
+        }
chat = dict(json.loads(chat))
# TODO
if auth.user_id in chat["admins"]:
- chat.update(
- {
- "title": chat_new.get("title", chat["title"]),
- "description": chat_new.get("description", chat["description"]),
- "updatedAt": int(datetime.now(tz=timezone.utc).timestamp()),
- "admins": chat_new.get("admins", chat.get("admins") or []),
- "users": chat_new.get("users", chat["users"]),
- }
- )
+ chat.update({
+ "title": chat_new.get("title", chat["title"]),
+ "description": chat_new.get("description", chat["description"]),
+ "updatedAt": int(datetime.now(tz=timezone.utc).timestamp()),
+ "admins": chat_new.get("admins", chat.get("admins") or []),
+ "users": chat_new.get("users", chat["users"])
+ })
await redis.execute("SET", f"chats/{chat.id}", json.dumps(chat))
await redis.execute("COMMIT")
- return {"error": None, "chat": chat}
+ return {
+ "error": None,
+ "chat": chat
+ }
@mutation.field("createChat")
@@ -49,7 +52,7 @@ async def update_chat(_, info, chat_new: Chat):
async def create_chat(_, info, title="", members=[]):
auth: AuthCredentials = info.context["request"].auth
chat = {}
- print("create_chat members: %r" % members)
+ print('create_chat members: %r' % members)
if auth.user_id not in members:
members.append(int(auth.user_id))
@@ -71,12 +74,15 @@ async def create_chat(_, info, title="", members=[]):
chat = await redis.execute("GET", f"chats/{c.decode('utf-8')}")
if chat:
chat = json.loads(chat)
- if chat["title"] == "":
- print("[inbox] createChat found old chat")
+ if chat['title'] == "":
+ print('[inbox] createChat found old chat')
print(chat)
break
if chat:
- return {"chat": chat, "error": "existed"}
+ return {
+ "chat": chat,
+ "error": "existed"
+ }
chat_id = str(uuid.uuid4())
chat = {
@@ -86,7 +92,7 @@ async def create_chat(_, info, title="", members=[]):
"createdBy": auth.user_id,
"createdAt": int(datetime.now(tz=timezone.utc).timestamp()),
"updatedAt": int(datetime.now(tz=timezone.utc).timestamp()),
- "admins": members if (len(members) == 2 and title == "") else [],
+ "admins": members if (len(members) == 2 and title == "") else []
}
for m in members:
@@ -94,7 +100,10 @@ async def create_chat(_, info, title="", members=[]):
await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
await redis.execute("SET", f"chats/{chat_id}/next_message_id", str(0))
await redis.execute("COMMIT")
- return {"error": None, "chat": chat}
+ return {
+ "error": None,
+ "chat": chat
+ }
@mutation.field("deleteChat")
@@ -105,9 +114,11 @@ async def delete_chat(_, info, chat_id: str):
chat = await redis.execute("GET", f"/chats/{chat_id}")
if chat:
chat = dict(json.loads(chat))
- if auth.user_id in chat["admins"]:
+ if auth.user_id in chat['admins']:
await redis.execute("DEL", f"chats/{chat_id}")
await redis.execute("SREM", "chats_by_user/" + str(auth.user_id), chat_id)
await redis.execute("COMMIT")
else:
- return {"error": "chat not exist"}
+        return {
+            "error": "chat does not exist"
+        }
diff --git a/resolvers/inbox/load.py b/resolvers/inbox/load.py
index 54ae75d5..a0d41721 100644
--- a/resolvers/inbox/load.py
+++ b/resolvers/inbox/load.py
@@ -1,26 +1,28 @@
-from .unread import get_unread_counter
+import json
+# from datetime import datetime, timedelta, timezone
+
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
-from base.orm import local_session
from base.redis import redis
+from base.orm import local_session
from base.resolvers import query
from orm.user import User
from resolvers.zine.profile import followed_authors
-
-import json
-
-# from datetime import datetime, timedelta, timezone
+from .unread import get_unread_counter
async def load_messages(chat_id: str, limit: int = 5, offset: int = 0, ids=[]):
- """load :limit messages for :chat_id with :offset"""
+ ''' load :limit messages for :chat_id with :offset '''
messages = []
message_ids = []
if ids:
message_ids += ids
try:
if limit:
- mids = await redis.lrange(f"chats/{chat_id}/message_ids", offset, offset + limit)
+ mids = await redis.lrange(f"chats/{chat_id}/message_ids",
+ offset,
+ offset + limit
+ )
mids = [mid.decode("utf-8") for mid in mids]
message_ids += mids
except Exception as e:
@@ -28,10 +30,10 @@ async def load_messages(chat_id: str, limit: int = 5, offset: int = 0, ids=[]):
if message_ids:
message_keys = [f"chats/{chat_id}/messages/{mid}" for mid in message_ids]
messages = await redis.mget(*message_keys)
- messages = [json.loads(msg.decode("utf-8")) for msg in messages]
+ messages = [json.loads(msg.decode('utf-8')) for msg in messages]
replies = []
for m in messages:
- rt = m.get("replyTo")
+ rt = m.get('replyTo')
if rt:
rt = int(rt)
if rt not in message_ids:
@@ -44,14 +46,14 @@ async def load_messages(chat_id: str, limit: int = 5, offset: int = 0, ids=[]):
@query.field("loadChats")
@login_required
async def load_chats(_, info, limit: int = 50, offset: int = 0):
- """load :limit chats of current user with :offset"""
+ """ load :limit chats of current user with :offset """
auth: AuthCredentials = info.context["request"].auth
cids = await redis.execute("SMEMBERS", "chats_by_user/" + str(auth.user_id))
if cids:
- cids = list(cids)[offset : offset + limit]
+ cids = list(cids)[offset:offset + limit]
if not cids:
- print("[inbox.load] no chats were found")
+ print('[inbox.load] no chats were found')
cids = []
onliners = await redis.execute("SMEMBERS", "users-online")
if not onliners:
@@ -62,53 +64,62 @@ async def load_chats(_, info, limit: int = 50, offset: int = 0):
c = await redis.execute("GET", "chats/" + cid)
if c:
c = dict(json.loads(c))
- c["messages"] = await load_messages(cid, 5, 0)
- c["unread"] = await get_unread_counter(cid, auth.user_id)
+ c['messages'] = await load_messages(cid, 5, 0)
+ c['unread'] = await get_unread_counter(cid, auth.user_id)
with local_session() as session:
- c["members"] = []
+ c['members'] = []
for uid in c["users"]:
a = session.query(User).where(User.id == uid).first()
if a:
- c["members"].append(
- {
- "id": a.id,
- "slug": a.slug,
- "userpic": a.userpic,
- "name": a.name,
- "lastSeen": a.lastSeen,
- "online": a.id in onliners,
- }
- )
+ c['members'].append({
+ "id": a.id,
+ "slug": a.slug,
+ "userpic": a.userpic,
+ "name": a.name,
+ "lastSeen": a.lastSeen,
+ "online": a.id in onliners
+ })
chats.append(c)
- return {"chats": chats, "error": None}
+ return {
+ "chats": chats,
+ "error": None
+ }
@query.field("loadMessagesBy")
@login_required
async def load_messages_by(_, info, by, limit: int = 10, offset: int = 0):
- """load :limit messages of :chat_id with :offset"""
+ ''' load :limit messages of :chat_id with :offset '''
auth: AuthCredentials = info.context["request"].auth
userchats = await redis.execute("SMEMBERS", "chats_by_user/" + str(auth.user_id))
- userchats = [c.decode("utf-8") for c in userchats]
+ userchats = [c.decode('utf-8') for c in userchats]
# print('[inbox] userchats: %r' % userchats)
if userchats:
# print('[inbox] loading messages by...')
messages = []
- by_chat = by.get("chat")
+ by_chat = by.get('chat')
if by_chat in userchats:
chat = await redis.execute("GET", f"chats/{by_chat}")
# print(chat)
if not chat:
- return {"messages": [], "error": "chat not exist"}
+                return {
+                    "messages": [],
+                    "error": "chat does not exist"
+                }
# everyone's messages in filtered chat
messages = await load_messages(by_chat, limit, offset)
return {
- "messages": sorted(list(messages), key=lambda m: m["createdAt"]),
- "error": None,
+ "messages": sorted(
+ list(messages),
+ key=lambda m: m['createdAt']
+ ),
+ "error": None
}
else:
- return {"error": "Cannot access messages of this chat"}
+ return {
+ "error": "Cannot access messages of this chat"
+ }
@query.field("loadRecipients")
@@ -127,14 +138,15 @@ async def load_recipients(_, info, limit=50, offset=0):
chat_users += session.query(User).where(User.emailConfirmed).limit(limit).offset(offset)
members = []
for a in chat_users:
- members.append(
- {
- "id": a.id,
- "slug": a.slug,
- "userpic": a.userpic,
- "name": a.name,
- "lastSeen": a.lastSeen,
- "online": a.id in onliners,
- }
- )
- return {"members": members, "error": None}
+ members.append({
+ "id": a.id,
+ "slug": a.slug,
+ "userpic": a.userpic,
+ "name": a.name,
+ "lastSeen": a.lastSeen,
+ "online": a.id in onliners
+ })
+ return {
+ "members": members,
+ "error": None
+ }
diff --git a/resolvers/inbox/messages.py b/resolvers/inbox/messages.py
index b3d2689f..56187edf 100644
--- a/resolvers/inbox/messages.py
+++ b/resolvers/inbox/messages.py
@@ -1,36 +1,41 @@
+import asyncio
+import json
+from typing import Any
+from datetime import datetime, timezone
+from graphql.type import GraphQLResolveInfo
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.redis import redis
from base.resolvers import mutation
-from datetime import datetime, timezone
-from services.following import FollowingManager, FollowingResult
-
-import json
+from services.following import FollowingManager, FollowingResult, Following
+from validations.inbox import Message
@mutation.field("createMessage")
@login_required
async def create_message(_, info, chat: str, body: str, replyTo=None):
- """create message with :body for :chat_id replying to :replyTo optionally"""
+ """ create message with :body for :chat_id replying to :replyTo optionally """
auth: AuthCredentials = info.context["request"].auth
chat = await redis.execute("GET", f"chats/{chat}")
if not chat:
- return {"error": "chat is not exist"}
+        return {
+            "error": "chat does not exist"
+        }
else:
chat = dict(json.loads(chat))
message_id = await redis.execute("GET", f"chats/{chat['id']}/next_message_id")
message_id = int(message_id)
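+        # per-chat message id counter, kept in redis at chats/<chat id>/next_message_id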
new_message = {
- "chatId": chat["id"],
+ "chatId": chat['id'],
"id": message_id,
"author": auth.user_id,
"body": body,
- "createdAt": int(datetime.now(tz=timezone.utc).timestamp()),
+ "createdAt": int(datetime.now(tz=timezone.utc).timestamp())
}
if replyTo:
- new_message["replyTo"] = replyTo
- chat["updatedAt"] = new_message["createdAt"]
+ new_message['replyTo'] = replyTo
+ chat['updatedAt'] = new_message['createdAt']
await redis.execute("SET", f"chats/{chat['id']}", json.dumps(chat))
print(f"[inbox] creating message {new_message}")
await redis.execute(
@@ -41,12 +46,17 @@ async def create_message(_, info, chat: str, body: str, replyTo=None):
users = chat["users"]
for user_slug in users:
- await redis.execute("LPUSH", f"chats/{chat['id']}/unread/{user_slug}", str(message_id))
+ await redis.execute(
+ "LPUSH", f"chats/{chat['id']}/unread/{user_slug}", str(message_id)
+ )
- result = FollowingResult("NEW", "chat", new_message)
- await FollowingManager.push("chat", result)
+ result = FollowingResult("NEW", 'chat', new_message)
+ await FollowingManager.push('chat', result)
- return {"message": new_message, "error": None}
+ return {
+ "message": new_message,
+ "error": None
+ }
@mutation.field("updateMessage")
@@ -71,10 +81,13 @@ async def update_message(_, info, chat_id: str, message_id: int, body: str):
await redis.execute("SET", f"chats/{chat_id}/messages/{message_id}", json.dumps(message))
- result = FollowingResult("UPDATED", "chat", message)
- await FollowingManager.push("chat", result)
+ result = FollowingResult("UPDATED", 'chat', message)
+ await FollowingManager.push('chat', result)
- return {"message": message, "error": None}
+ return {
+ "message": message,
+ "error": None
+ }
@mutation.field("deleteMessage")
@@ -101,7 +114,7 @@ async def delete_message(_, info, chat_id: str, message_id: int):
for user_id in users:
await redis.execute("LREM", f"chats/{chat_id}/unread/{user_id}", 0, str(message_id))
- result = FollowingResult("DELETED", "chat", message)
+ result = FollowingResult("DELETED", 'chat', message)
-        await FollowingManager.push(result)
+        await FollowingManager.push('chat', result)
return {}
@@ -124,4 +137,6 @@ async def mark_as_read(_, info, chat_id: str, messages: [int]):
for message_id in messages:
await redis.execute("LREM", f"chats/{chat_id}/unread/{auth.user_id}", 0, str(message_id))
- return {"error": None}
+ return {
+ "error": None
+ }
diff --git a/resolvers/inbox/search.py b/resolvers/inbox/search.py
index 510ce52c..1ca340e5 100644
--- a/resolvers/inbox/search.py
+++ b/resolvers/inbox/search.py
@@ -1,14 +1,13 @@
+import json
+from datetime import datetime, timezone, timedelta
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
-from base.orm import local_session
from base.redis import redis
from base.resolvers import query
-from datetime import datetime, timedelta, timezone
+from base.orm import local_session
from orm.user import AuthorFollower, User
from resolvers.inbox.load import load_messages
-import json
-
@query.field("searchRecipients")
@login_required
@@ -18,7 +17,7 @@ async def search_recipients(_, info, query: str, limit: int = 50, offset: int =
auth: AuthCredentials = info.context["request"].auth
talk_before = await redis.execute("GET", f"/chats_by_user/{auth.user_id}")
if talk_before:
- talk_before = list(json.loads(talk_before))[offset : offset + limit]
+ talk_before = list(json.loads(talk_before))[offset:offset + limit]
for chat_id in talk_before:
members = await redis.execute("GET", f"/chats/{chat_id}/users")
if members:
@@ -32,24 +31,23 @@ async def search_recipients(_, info, query: str, limit: int = 50, offset: int =
with local_session() as session:
# followings
- result += (
- session.query(AuthorFollower.author)
- .join(User, User.id == AuthorFollower.follower)
- .where(User.slug.startswith(query))
- .offset(offset + len(result))
- .limit(more_amount)
- )
+ result += session.query(AuthorFollower.author).join(
+ User, User.id == AuthorFollower.follower
+ ).where(
+ User.slug.startswith(query)
+ ).offset(offset + len(result)).limit(more_amount)
more_amount = limit
# followers
- result += (
- session.query(AuthorFollower.follower)
- .join(User, User.id == AuthorFollower.author)
- .where(User.slug.startswith(query))
- .offset(offset + len(result))
- .limit(offset + len(result) + limit)
- )
- return {"members": list(result), "error": None}
+ result += session.query(AuthorFollower.follower).join(
+ User, User.id == AuthorFollower.author
+ ).where(
+ User.slug.startswith(query)
+ ).offset(offset + len(result)).limit(offset + len(result) + limit)
+ return {
+ "members": list(result),
+ "error": None
+ }
@query.field("searchMessages")
@@ -59,22 +57,22 @@ async def search_user_chats(by, messages, user_id: int, limit, offset):
cids.union(set(await redis.execute("SMEMBERS", "chats_by_user/" + str(user_id))))
messages = []
- by_author = by.get("author")
+ by_author = by.get('author')
if by_author:
# all author's messages
cids.union(set(await redis.execute("SMEMBERS", f"chats_by_user/{by_author}")))
# author's messages in filtered chat
messages.union(set(filter(lambda m: m["author"] == by_author, list(messages))))
for c in cids:
- c = c.decode("utf-8")
+ c = c.decode('utf-8')
messages = await load_messages(c, limit, offset)
- body_like = by.get("body")
+ body_like = by.get('body')
if body_like:
# search in all messages in all user's chats
for c in cids:
# FIXME: use redis scan here
- c = c.decode("utf-8")
+ c = c.decode('utf-8')
mmm = await load_messages(c, limit, offset)
for m in mmm:
if body_like in m["body"]:
@@ -85,12 +83,13 @@ async def search_user_chats(by, messages, user_id: int, limit, offset):
days = by.get("days")
if days:
-        messages.extend(
-            filter(
-                list(messages),
-                key=lambda m: (
-                    datetime.now(tz=timezone.utc) - int(m["createdAt"]) < timedelta(days=by["days"])
-                ),
-            )
-        )
-        return {"messages": messages, "error": None}
+        # NOTE: builtins.filter() takes (function, iterable) and has no `key` argument,
+        # and createdAt is stored as a unix timestamp, so compare datetimes explicitly
+        since = datetime.now(tz=timezone.utc) - timedelta(days=by["days"])
+        messages = [
+            m for m in messages
+            if datetime.fromtimestamp(int(m["createdAt"]), tz=timezone.utc) > since
+        ]
+ return {
+ "messages": messages,
+ "error": None
+ }
diff --git a/resolvers/notifications.py b/resolvers/notifications.py
index 3ece629e..0cfc2244 100644
--- a/resolvers/notifications.py
+++ b/resolvers/notifications.py
@@ -1,9 +1,10 @@
-from auth.authenticate import login_required
+from sqlalchemy import select, desc, and_, update
+
from auth.credentials import AuthCredentials
+from base.resolvers import query, mutation
+from auth.authenticate import login_required
from base.orm import local_session
-from base.resolvers import mutation, query
from orm import Notification
-from sqlalchemy import and_, desc, select, update
@query.field("loadNotifications")
@@ -15,26 +16,25 @@ async def load_notifications(_, info, params=None):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
- limit = params.get("limit", 50)
- offset = params.get("offset", 0)
+ limit = params.get('limit', 50)
+ offset = params.get('offset', 0)
- q = (
- select(Notification)
- .where(Notification.user == user_id)
- .order_by(desc(Notification.createdAt))
- .limit(limit)
- .offset(offset)
- )
+ q = select(Notification).where(
+ Notification.user == user_id
+ ).order_by(desc(Notification.createdAt)).limit(limit).offset(offset)
notifications = []
with local_session() as session:
- total_count = session.query(Notification).where(Notification.user == user_id).count()
+ total_count = session.query(Notification).where(
+ Notification.user == user_id
+ ).count()
- total_unread_count = (
- session.query(Notification)
- .where(and_(Notification.user == user_id, Notification.seen == False)) # noqa: E712
- .count()
- )
+ total_unread_count = session.query(Notification).where(
+ and_(
+ Notification.user == user_id,
+                Notification.seen == False  # noqa: E712
+ )
+ ).count()
for [notification] in session.execute(q):
notification.type = notification.type.name
@@ -43,7 +43,7 @@ async def load_notifications(_, info, params=None):
return {
"notifications": notifications,
"totalCount": total_count,
- "totalUnreadCount": total_unread_count,
+ "totalUnreadCount": total_unread_count
}
@@ -54,11 +54,9 @@ async def mark_notification_as_read(_, info, notification_id: int):
user_id = auth.user_id
with local_session() as session:
- notification = (
- session.query(Notification)
- .where(and_(Notification.id == notification_id, Notification.user == user_id))
- .one()
- )
+ notification = session.query(Notification).where(
+ and_(Notification.id == notification_id, Notification.user == user_id)
+ ).one()
notification.seen = True
session.commit()
@@ -71,11 +69,12 @@ async def mark_all_notifications_as_read(_, info):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
- statement = (
- update(Notification)
- .where(and_(Notification.user == user_id, Notification.seen == False)) # noqa: E712
- .values(seen=True)
- )
+ statement = update(Notification).where(
+ and_(
+ Notification.user == user_id,
+            Notification.seen == False  # noqa: E712
+ )
+ ).values(seen=True)
with local_session() as session:
try:
diff --git a/resolvers/upload.py b/resolvers/upload.py
index 9649222c..44c7b81c 100644
--- a/resolvers/upload.py
+++ b/resolvers/upload.py
@@ -1,37 +1,34 @@
-from botocore.exceptions import BotoCoreError, ClientError
-from starlette.responses import JSONResponse
-
-import boto3
import os
import shutil
import tempfile
import uuid
+import boto3
+from botocore.exceptions import BotoCoreError, ClientError
+from starlette.responses import JSONResponse
-STORJ_ACCESS_KEY = os.environ.get("STORJ_ACCESS_KEY")
-STORJ_SECRET_KEY = os.environ.get("STORJ_SECRET_KEY")
-STORJ_END_POINT = os.environ.get("STORJ_END_POINT")
-STORJ_BUCKET_NAME = os.environ.get("STORJ_BUCKET_NAME")
-CDN_DOMAIN = os.environ.get("CDN_DOMAIN")
+STORJ_ACCESS_KEY = os.environ.get('STORJ_ACCESS_KEY')
+STORJ_SECRET_KEY = os.environ.get('STORJ_SECRET_KEY')
+STORJ_END_POINT = os.environ.get('STORJ_END_POINT')
+STORJ_BUCKET_NAME = os.environ.get('STORJ_BUCKET_NAME')
+CDN_DOMAIN = os.environ.get('CDN_DOMAIN')
async def upload_handler(request):
form = await request.form()
- file = form.get("file")
+ file = form.get('file')
if file is None:
- return JSONResponse({"error": "No file uploaded"}, status_code=400)
+ return JSONResponse({'error': 'No file uploaded'}, status_code=400)
file_name, file_extension = os.path.splitext(file.filename)
- key = "files/" + str(uuid.uuid4()) + file_extension
+ key = 'files/' + str(uuid.uuid4()) + file_extension
# Create an S3 client with Storj configuration
- s3 = boto3.client(
- "s3",
- aws_access_key_id=STORJ_ACCESS_KEY,
- aws_secret_access_key=STORJ_SECRET_KEY,
- endpoint_url=STORJ_END_POINT,
- )
+ s3 = boto3.client('s3',
+ aws_access_key_id=STORJ_ACCESS_KEY,
+ aws_secret_access_key=STORJ_SECRET_KEY,
+ endpoint_url=STORJ_END_POINT)
try:
# Save the uploaded file to a temporary file
@@ -42,13 +39,18 @@ async def upload_handler(request):
Filename=tmp_file.name,
Bucket=STORJ_BUCKET_NAME,
Key=key,
- ExtraArgs={"ContentType": file.content_type},
+ ExtraArgs={
+ "ContentType": file.content_type
+ }
)
- url = "https://" + CDN_DOMAIN + "/" + key
+ url = 'https://' + CDN_DOMAIN + '/' + key
- return JSONResponse({"url": url, "originalFilename": file.filename})
+ return JSONResponse({'url': url, 'originalFilename': file.filename})
except (BotoCoreError, ClientError) as e:
print(e)
- return JSONResponse({"error": "Failed to upload file"}, status_code=500)
+ return JSONResponse({'error': 'Failed to upload file'}, status_code=500)
diff --git a/resolvers/zine/following.py b/resolvers/zine/following.py
index bc92371a..99481571 100644
--- a/resolvers/zine/following.py
+++ b/resolvers/zine/following.py
@@ -1,12 +1,17 @@
+import asyncio
+from base.orm import local_session
+from base.resolvers import mutation
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
-from base.resolvers import mutation
-
# from resolvers.community import community_follow, community_unfollow
+from orm.user import AuthorFollower
+from orm.topic import TopicFollower
+from orm.shout import ShoutReactionsFollower
from resolvers.zine.profile import author_follow, author_unfollow
from resolvers.zine.reactions import reactions_follow, reactions_unfollow
from resolvers.zine.topics import topic_follow, topic_unfollow
-from services.following import FollowingManager, FollowingResult
+from services.following import Following, FollowingManager, FollowingResult
+from graphql.type import GraphQLResolveInfo
@mutation.field("follow")
@@ -17,20 +22,20 @@ async def follow(_, info, what, slug):
try:
if what == "AUTHOR":
if author_follow(auth.user_id, slug):
- result = FollowingResult("NEW", "author", slug)
- await FollowingManager.push("author", result)
+ result = FollowingResult("NEW", 'author', slug)
+ await FollowingManager.push('author', result)
elif what == "TOPIC":
if topic_follow(auth.user_id, slug):
- result = FollowingResult("NEW", "topic", slug)
- await FollowingManager.push("topic", result)
+ result = FollowingResult("NEW", 'topic', slug)
+ await FollowingManager.push('topic', result)
elif what == "COMMUNITY":
if False: # TODO: use community_follow(auth.user_id, slug):
- result = FollowingResult("NEW", "community", slug)
- await FollowingManager.push("community", result)
+ result = FollowingResult("NEW", 'community', slug)
+ await FollowingManager.push('community', result)
elif what == "REACTIONS":
if reactions_follow(auth.user_id, slug):
- result = FollowingResult("NEW", "shout", slug)
- await FollowingManager.push("shout", result)
+ result = FollowingResult("NEW", 'shout', slug)
+ await FollowingManager.push('shout', result)
except Exception as e:
print(Exception(e))
return {"error": str(e)}
@@ -46,20 +51,20 @@ async def unfollow(_, info, what, slug):
try:
if what == "AUTHOR":
if author_unfollow(auth.user_id, slug):
- result = FollowingResult("DELETED", "author", slug)
- await FollowingManager.push("author", result)
+ result = FollowingResult("DELETED", 'author', slug)
+ await FollowingManager.push('author', result)
elif what == "TOPIC":
if topic_unfollow(auth.user_id, slug):
- result = FollowingResult("DELETED", "topic", slug)
- await FollowingManager.push("topic", result)
+ result = FollowingResult("DELETED", 'topic', slug)
+ await FollowingManager.push('topic', result)
elif what == "COMMUNITY":
if False: # TODO: use community_unfollow(auth.user_id, slug):
- result = FollowingResult("DELETED", "community", slug)
- await FollowingManager.push("community", result)
+ result = FollowingResult("DELETED", 'community', slug)
+ await FollowingManager.push('community', result)
elif what == "REACTIONS":
if reactions_unfollow(auth.user_id, slug):
- result = FollowingResult("DELETED", "shout", slug)
- await FollowingManager.push("shout", result)
+ result = FollowingResult("DELETED", 'shout', slug)
+ await FollowingManager.push('shout', result)
except Exception as e:
return {"error": str(e)}
diff --git a/resolvers/zine/load.py b/resolvers/zine/load.py
index 90d790ac..4619efa6 100644
--- a/resolvers/zine/load.py
+++ b/resolvers/zine/load.py
@@ -1,47 +1,49 @@
+from datetime import datetime, timedelta, timezone
+
+from sqlalchemy.orm import joinedload, aliased
+from sqlalchemy.sql.expression import desc, asc, select, func, case, and_, text, nulls_last
+
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
-from base.exceptions import ObjectNotExist
+from base.exceptions import ObjectNotExist, OperationNotAllowed
from base.orm import local_session
from base.resolvers import query
-from datetime import datetime, timedelta, timezone
from orm import TopicFollower
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutAuthor, ShoutTopic
from orm.user import AuthorFollower
-from sqlalchemy.orm import aliased, joinedload
-from sqlalchemy.sql.expression import and_, asc, case, desc, func, nulls_last, select
def add_stat_columns(q):
aliased_reaction = aliased(Reaction)
q = q.outerjoin(aliased_reaction).add_columns(
- func.sum(aliased_reaction.id).label("reacted_stat"),
- func.sum(case((aliased_reaction.kind == ReactionKind.COMMENT, 1), else_=0)).label(
- "commented_stat"
- ),
+ func.sum(
+ aliased_reaction.id
+ ).label('reacted_stat'),
func.sum(
case(
- # do not count comments' reactions
- (aliased_reaction.replyTo.is_not(None), 0),
- (aliased_reaction.kind == ReactionKind.AGREE, 1),
- (aliased_reaction.kind == ReactionKind.DISAGREE, -1),
- (aliased_reaction.kind == ReactionKind.PROOF, 1),
- (aliased_reaction.kind == ReactionKind.DISPROOF, -1),
- (aliased_reaction.kind == ReactionKind.ACCEPT, 1),
- (aliased_reaction.kind == ReactionKind.REJECT, -1),
- (aliased_reaction.kind == ReactionKind.LIKE, 1),
- (aliased_reaction.kind == ReactionKind.DISLIKE, -1),
- else_=0,
+ (aliased_reaction.kind == ReactionKind.COMMENT, 1),
+ else_=0
)
- ).label("rating_stat"),
- func.max(
- case(
- (aliased_reaction.kind != ReactionKind.COMMENT, None),
- else_=aliased_reaction.createdAt,
- )
- ).label("last_comment"),
- )
+ ).label('commented_stat'),
+ func.sum(case(
+ # do not count comments' reactions
+ (aliased_reaction.replyTo.is_not(None), 0),
+ (aliased_reaction.kind == ReactionKind.AGREE, 1),
+ (aliased_reaction.kind == ReactionKind.DISAGREE, -1),
+ (aliased_reaction.kind == ReactionKind.PROOF, 1),
+ (aliased_reaction.kind == ReactionKind.DISPROOF, -1),
+ (aliased_reaction.kind == ReactionKind.ACCEPT, 1),
+ (aliased_reaction.kind == ReactionKind.REJECT, -1),
+ (aliased_reaction.kind == ReactionKind.LIKE, 1),
+ (aliased_reaction.kind == ReactionKind.DISLIKE, -1),
+ else_=0)
+ ).label('rating_stat'),
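+        # last_comment: newest createdAt among COMMENT reactions (other kinds count as NULL)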
+ func.max(case(
+ (aliased_reaction.kind != ReactionKind.COMMENT, None),
+ else_=aliased_reaction.createdAt
+ )).label('last_comment'))
return q
@@ -58,7 +60,7 @@ def apply_filters(q, filters, user_id=None):
if filters.get("layout"):
q = q.filter(Shout.layout == filters.get("layout"))
- if filters.get("excludeLayout"):
+ if filters.get('excludeLayout'):
q = q.filter(Shout.layout != filters.get("excludeLayout"))
if filters.get("author"):
q = q.filter(Shout.authors.any(slug=filters.get("author")))
@@ -85,27 +87,27 @@ async def load_shout(_, info, slug=None, shout_id=None):
q = add_stat_columns(q)
if slug is not None:
- q = q.filter(Shout.slug == slug)
+ q = q.filter(
+ Shout.slug == slug
+ )
if shout_id is not None:
- q = q.filter(Shout.id == shout_id)
+ q = q.filter(
+ Shout.id == shout_id
+ )
- q = q.filter(Shout.deletedAt.is_(None)).group_by(Shout.id)
+ q = q.filter(
+ Shout.deletedAt.is_(None)
+ ).group_by(Shout.id)
try:
- [
- shout,
- reacted_stat,
- commented_stat,
- rating_stat,
- last_comment,
- ] = session.execute(q).first()
+ [shout, reacted_stat, commented_stat, rating_stat, last_comment] = session.execute(q).first()
shout.stat = {
"viewed": shout.views,
"reacted": reacted_stat,
"commented": commented_stat,
- "rating": rating_stat,
+ "rating": rating_stat
}
for author_caption in session.query(ShoutAuthor).join(Shout).where(Shout.slug == slug):
@@ -140,13 +142,14 @@ async def load_shouts_by(_, info, options):
:return: Shout[]
"""
- q = (
- select(Shout)
- .options(
- joinedload(Shout.authors),
- joinedload(Shout.topics),
+ q = select(Shout).options(
+ joinedload(Shout.authors),
+ joinedload(Shout.topics),
+ ).where(
+ and_(
+ Shout.deletedAt.is_(None),
+ Shout.layout.is_not(None)
)
- .where(and_(Shout.deletedAt.is_(None), Shout.layout.is_not(None)))
)
q = add_stat_columns(q)
@@ -156,7 +159,7 @@ async def load_shouts_by(_, info, options):
order_by = options.get("order_by", Shout.publishedAt)
- query_order_by = desc(order_by) if options.get("order_by_desc", True) else asc(order_by)
+ query_order_by = desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
offset = options.get("offset", 0)
limit = options.get("limit", 10)
@@ -166,19 +169,13 @@ async def load_shouts_by(_, info, options):
with local_session() as session:
shouts_map = {}
- for [
- shout,
- reacted_stat,
- commented_stat,
- rating_stat,
- last_comment,
- ] in session.execute(q).unique():
+ for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(q).unique():
shouts.append(shout)
shout.stat = {
"viewed": shout.views,
"reacted": reacted_stat,
"commented": commented_stat,
- "rating": rating_stat,
+ "rating": rating_stat
}
shouts_map[shout.id] = shout
@@ -191,13 +188,11 @@ async def get_drafts(_, info):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
- q = (
- select(Shout)
- .options(
- joinedload(Shout.authors),
- joinedload(Shout.topics),
- )
- .where(and_(Shout.deletedAt.is_(None), Shout.createdBy == user_id))
+ q = select(Shout).options(
+ joinedload(Shout.authors),
+ joinedload(Shout.topics),
+ ).where(
+ and_(Shout.deletedAt.is_(None), Shout.createdBy == user_id)
)
q = q.group_by(Shout.id)
@@ -216,26 +211,24 @@ async def get_my_feed(_, info, options):
auth: AuthCredentials = info.context["request"].auth
user_id = auth.user_id
- subquery = (
- select(Shout.id)
- .join(ShoutAuthor)
- .join(AuthorFollower, AuthorFollower.follower == user_id)
- .join(ShoutTopic)
- .join(TopicFollower, TopicFollower.follower == user_id)
+ subquery = select(Shout.id).join(
+ ShoutAuthor
+ ).join(
+ AuthorFollower, AuthorFollower.follower == user_id
+ ).join(
+ ShoutTopic
+ ).join(
+ TopicFollower, TopicFollower.follower == user_id
)
- q = (
- select(Shout)
- .options(
- joinedload(Shout.authors),
- joinedload(Shout.topics),
- )
- .where(
- and_(
- Shout.publishedAt.is_not(None),
- Shout.deletedAt.is_(None),
- Shout.id.in_(subquery),
- )
+ q = select(Shout).options(
+ joinedload(Shout.authors),
+ joinedload(Shout.topics),
+ ).where(
+ and_(
+ Shout.publishedAt.is_not(None),
+ Shout.deletedAt.is_(None),
+ Shout.id.in_(subquery)
)
)
@@ -244,7 +237,7 @@ async def get_my_feed(_, info, options):
order_by = options.get("order_by", Shout.publishedAt)
- query_order_by = desc(order_by) if options.get("order_by_desc", True) else asc(order_by)
+ query_order_by = desc(order_by) if options.get('order_by_desc', True) else asc(order_by)
offset = options.get("offset", 0)
limit = options.get("limit", 10)
@@ -253,19 +246,13 @@ async def get_my_feed(_, info, options):
shouts = []
with local_session() as session:
shouts_map = {}
- for [
- shout,
- reacted_stat,
- commented_stat,
- rating_stat,
- last_comment,
- ] in session.execute(q).unique():
+ for [shout, reacted_stat, commented_stat, rating_stat, last_comment] in session.execute(q).unique():
shouts.append(shout)
shout.stat = {
"viewed": shout.views,
"reacted": reacted_stat,
"commented": commented_stat,
- "rating": rating_stat,
+ "rating": rating_stat
}
shouts_map[shout.id] = shout
diff --git a/resolvers/zine/profile.py b/resolvers/zine/profile.py
index 7275226d..552af43f 100644
--- a/resolvers/zine/profile.py
+++ b/resolvers/zine/profile.py
@@ -1,16 +1,17 @@
+from typing import List
+from datetime import datetime, timedelta, timezone
+from sqlalchemy import and_, func, distinct, select, literal
+from sqlalchemy.orm import aliased, joinedload
+
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.orm import local_session
from base.resolvers import mutation, query
-from datetime import datetime, timedelta, timezone
from orm.reaction import Reaction, ReactionKind
from orm.shout import ShoutAuthor, ShoutTopic
from orm.topic import Topic, TopicFollower
from orm.user import AuthorFollower, Role, User, UserRating, UserRole
from resolvers.zine.topics import followed_by_user
-from sqlalchemy import and_, distinct, func, literal, select
-from sqlalchemy.orm import aliased, joinedload
-from typing import List
def add_author_stat_columns(q):
@@ -20,24 +21,24 @@ def add_author_stat_columns(q):
# user_rating_aliased = aliased(UserRating)
q = q.outerjoin(shout_author_aliased).add_columns(
- func.count(distinct(shout_author_aliased.shout)).label("shouts_stat")
+ func.count(distinct(shout_author_aliased.shout)).label('shouts_stat')
)
q = q.outerjoin(author_followers, author_followers.author == User.id).add_columns(
- func.count(distinct(author_followers.follower)).label("followers_stat")
+ func.count(distinct(author_followers.follower)).label('followers_stat')
)
q = q.outerjoin(author_following, author_following.follower == User.id).add_columns(
- func.count(distinct(author_following.author)).label("followings_stat")
+ func.count(distinct(author_following.author)).label('followings_stat')
)
- q = q.add_columns(literal(0).label("rating_stat"))
+ q = q.add_columns(literal(0).label('rating_stat'))
# FIXME
# q = q.outerjoin(user_rating_aliased, user_rating_aliased.user == User.id).add_columns(
# # TODO: check
# func.sum(user_rating_aliased.value).label('rating_stat')
# )
- q = q.add_columns(literal(0).label("commented_stat"))
+ q = q.add_columns(literal(0).label('commented_stat'))
# q = q.outerjoin(Reaction, and_(Reaction.createdBy == User.id, Reaction.body.is_not(None))).add_columns(
# func.count(distinct(Reaction.id)).label('commented_stat')
# )
@@ -48,19 +49,13 @@ def add_author_stat_columns(q):
def add_stat(author, stat_columns):
- [
- shouts_stat,
- followers_stat,
- followings_stat,
- rating_stat,
- commented_stat,
- ] = stat_columns
+ [shouts_stat, followers_stat, followings_stat, rating_stat, commented_stat] = stat_columns
author.stat = {
"shouts": shouts_stat,
"followers": followers_stat,
"followings": followings_stat,
"rating": rating_stat,
- "commented": commented_stat,
+ "commented": commented_stat
}
return author
@@ -124,10 +119,10 @@ async def user_followers(_, _info, slug) -> List[User]:
q = add_author_stat_columns(q)
aliased_user = aliased(User)
- q = (
- q.join(AuthorFollower, AuthorFollower.follower == User.id)
- .join(aliased_user, aliased_user.id == AuthorFollower.author)
- .where(aliased_user.slug == slug)
+ q = q.join(AuthorFollower, AuthorFollower.follower == User.id).join(
+ aliased_user, aliased_user.id == AuthorFollower.author
+ ).where(
+ aliased_user.slug == slug
)
return get_authors_from_query(q)
@@ -155,10 +150,15 @@ async def update_profile(_, info, profile):
with local_session() as session:
user = session.query(User).filter(User.id == user_id).one()
if not user:
- return {"error": "canoot find user"}
+            return {
+                "error": "cannot find user"
+            }
user.update(profile)
session.commit()
- return {"error": None, "author": user}
+ return {
+ "error": None,
+ "author": user
+ }
@mutation.field("rateUser")
@@ -200,10 +200,13 @@ def author_follow(user_id, slug):
def author_unfollow(user_id, slug):
with local_session() as session:
flw = (
- session.query(AuthorFollower)
- .join(User, User.id == AuthorFollower.author)
- .filter(and_(AuthorFollower.follower == user_id, User.slug == slug))
- .first()
+ session.query(
+ AuthorFollower
+ ).join(User, User.id == AuthorFollower.author).filter(
+ and_(
+ AuthorFollower.follower == user_id, User.slug == slug
+ )
+ ).first()
)
if flw:
session.delete(flw)
@@ -229,16 +232,12 @@ async def get_author(_, _info, slug):
[author] = get_authors_from_query(q)
with local_session() as session:
- comments_count = (
- session.query(Reaction)
- .where(
- and_(
- Reaction.createdBy == author.id,
- Reaction.kind == ReactionKind.COMMENT,
- )
+ comments_count = session.query(Reaction).where(
+ and_(
+ Reaction.createdBy == author.id,
+ Reaction.kind == ReactionKind.COMMENT
)
- .count()
- )
+ ).count()
author.stat["commented"] = comments_count
return author
@@ -261,7 +260,9 @@ async def load_authors_by(_, info, by, limit, offset):
days_before = datetime.now(tz=timezone.utc) - timedelta(days=by["createdAt"])
q = q.filter(User.createdAt > days_before)
- q = q.order_by(by.get("order", User.createdAt)).limit(limit).offset(offset)
+ q = q.order_by(
+ by.get("order", User.createdAt)
+ ).limit(limit).offset(offset)
return get_authors_from_query(q)
@@ -272,13 +273,13 @@ async def load_my_subscriptions(_, info):
auth = info.context["request"].auth
user_id = auth.user_id
- authors_query = (
- select(User)
- .join(AuthorFollower, AuthorFollower.author == User.id)
- .where(AuthorFollower.follower == user_id)
+ authors_query = select(User).join(AuthorFollower, AuthorFollower.author == User.id).where(
+ AuthorFollower.follower == user_id
)
- topics_query = select(Topic).join(TopicFollower).where(TopicFollower.follower == user_id)
+ topics_query = select(Topic).join(TopicFollower).where(
+ TopicFollower.follower == user_id
+ )
topics = []
authors = []
@@ -290,4 +291,7 @@ async def load_my_subscriptions(_, info):
for [topic] in session.execute(topics_query):
topics.append(topic)
- return {"topics": topics, "authors": authors}
+ return {
+ "topics": topics,
+ "authors": authors
+ }
diff --git a/resolvers/zine/reactions.py b/resolvers/zine/reactions.py
index 680cac52..1c132b69 100644
--- a/resolvers/zine/reactions.py
+++ b/resolvers/zine/reactions.py
@@ -1,37 +1,42 @@
+from datetime import datetime, timedelta, timezone
+from sqlalchemy import and_, asc, desc, select, text, func, case
+from sqlalchemy.orm import aliased
+
from auth.authenticate import login_required
from auth.credentials import AuthCredentials
from base.exceptions import OperationNotAllowed
from base.orm import local_session
from base.resolvers import mutation, query
-from datetime import datetime, timedelta, timezone
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutReactionsFollower
from orm.user import User
from services.notifications.notification_service import notification_service
-from sqlalchemy import and_, asc, case, desc, func, select, text
-from sqlalchemy.orm import aliased
def add_reaction_stat_columns(q):
aliased_reaction = aliased(Reaction)
q = q.outerjoin(aliased_reaction, Reaction.id == aliased_reaction.replyTo).add_columns(
- func.sum(aliased_reaction.id).label("reacted_stat"),
- func.sum(case((aliased_reaction.body.is_not(None), 1), else_=0)).label("commented_stat"),
+ func.sum(
+ aliased_reaction.id
+ ).label('reacted_stat'),
func.sum(
case(
- (aliased_reaction.kind == ReactionKind.AGREE, 1),
- (aliased_reaction.kind == ReactionKind.DISAGREE, -1),
- (aliased_reaction.kind == ReactionKind.PROOF, 1),
- (aliased_reaction.kind == ReactionKind.DISPROOF, -1),
- (aliased_reaction.kind == ReactionKind.ACCEPT, 1),
- (aliased_reaction.kind == ReactionKind.REJECT, -1),
- (aliased_reaction.kind == ReactionKind.LIKE, 1),
- (aliased_reaction.kind == ReactionKind.DISLIKE, -1),
- else_=0,
+ (aliased_reaction.body.is_not(None), 1),
+ else_=0
)
- ).label("rating_stat"),
- )
+ ).label('commented_stat'),
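+        # rating_stat: net score, +1 for approving reaction kinds and -1 for disapproving ones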
+ func.sum(case(
+ (aliased_reaction.kind == ReactionKind.AGREE, 1),
+ (aliased_reaction.kind == ReactionKind.DISAGREE, -1),
+ (aliased_reaction.kind == ReactionKind.PROOF, 1),
+ (aliased_reaction.kind == ReactionKind.DISPROOF, -1),
+ (aliased_reaction.kind == ReactionKind.ACCEPT, 1),
+ (aliased_reaction.kind == ReactionKind.REJECT, -1),
+ (aliased_reaction.kind == ReactionKind.LIKE, 1),
+ (aliased_reaction.kind == ReactionKind.DISLIKE, -1),
+ else_=0)
+ ).label('rating_stat'))
return q
@@ -42,19 +47,17 @@ def reactions_follow(user_id, shout_id: int, auto=False):
shout = session.query(Shout).where(Shout.id == shout_id).one()
following = (
- session.query(ShoutReactionsFollower)
- .where(
- and_(
- ShoutReactionsFollower.follower == user_id,
- ShoutReactionsFollower.shout == shout.id,
- )
- )
- .first()
+ session.query(ShoutReactionsFollower).where(and_(
+ ShoutReactionsFollower.follower == user_id,
+ ShoutReactionsFollower.shout == shout.id,
+ )).first()
)
if not following:
following = ShoutReactionsFollower.create(
- follower=user_id, shout=shout.id, auto=auto
+ follower=user_id,
+ shout=shout.id,
+ auto=auto
)
session.add(following)
session.commit()
@@ -69,14 +72,10 @@ def reactions_unfollow(user_id: int, shout_id: int):
shout = session.query(Shout).where(Shout.id == shout_id).one()
following = (
- session.query(ShoutReactionsFollower)
- .where(
- and_(
- ShoutReactionsFollower.follower == user_id,
- ShoutReactionsFollower.shout == shout.id,
- )
- )
- .first()
+ session.query(ShoutReactionsFollower).where(and_(
+ ShoutReactionsFollower.follower == user_id,
+ ShoutReactionsFollower.shout == shout.id
+ )).first()
)
if following:
@@ -89,31 +88,30 @@ def reactions_unfollow(user_id: int, shout_id: int):
def is_published_author(session, user_id):
- """checks if user has at least one publication"""
- return (
- session.query(Shout)
- .where(Shout.authors.contains(user_id))
- .filter(and_(Shout.publishedAt.is_not(None), Shout.deletedAt.is_(None)))
- .count()
- > 0
- )
+ ''' checks if user has at least one publication '''
+ return session.query(
+ Shout
+ ).where(
+ Shout.authors.contains(user_id)
+ ).filter(
+ and_(
+ Shout.publishedAt.is_not(None),
+ Shout.deletedAt.is_(None)
+ )
+ ).count() > 0
def check_to_publish(session, user_id, reaction):
- """set shout to public if publicated approvers amount > 4"""
+    ''' make the shout public if more than 4 published authors have approved it '''
if not reaction.replyTo and reaction.kind in [
ReactionKind.ACCEPT,
ReactionKind.LIKE,
- ReactionKind.PROOF,
+ ReactionKind.PROOF
]:
-        if is_published_author(user_id):
+        if is_published_author(session, user_id):
# now count how many approvers are voted already
- approvers_reactions = (
- session.query(Reaction).where(Reaction.shout == reaction.shout).all()
- )
- approvers = [
- user_id,
- ]
+ approvers_reactions = session.query(Reaction).where(Reaction.shout == reaction.shout).all()
+ approvers = [user_id, ]
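+            # every reactor who already has a published shout counts toward the approvers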
for ar in approvers_reactions:
a = ar.createdBy
if is_published_author(session, a):
@@ -124,11 +122,11 @@ def check_to_publish(session, user_id, reaction):
def check_to_hide(session, user_id, reaction):
- """hides any shout if 20% of reactions are negative"""
+ ''' hides any shout if 20% of reactions are negative '''
if not reaction.replyTo and reaction.kind in [
ReactionKind.REJECT,
ReactionKind.DISLIKE,
- ReactionKind.DISPROOF,
+ ReactionKind.DISPROOF
]:
# if is_published_author(user):
approvers_reactions = session.query(Reaction).where(Reaction.shout == reaction.shout).all()
@@ -137,7 +135,7 @@ def check_to_hide(session, user_id, reaction):
if r.kind in [
ReactionKind.REJECT,
ReactionKind.DISLIKE,
- ReactionKind.DISPROOF,
+ ReactionKind.DISPROOF
]:
rejects += 1
if len(approvers_reactions) / rejects < 5:
@@ -148,14 +146,14 @@ def check_to_hide(session, user_id, reaction):
def set_published(session, shout_id):
s = session.query(Shout).where(Shout.id == shout_id).first()
s.publishedAt = datetime.now(tz=timezone.utc)
- s.visibility = text("public")
+ s.visibility = text('public')
session.add(s)
session.commit()
def set_hidden(session, shout_id):
s = session.query(Shout).where(Shout.id == shout_id).first()
- s.visibility = text("community")
+ s.visibility = text('community')
session.add(s)
session.commit()
@@ -164,46 +162,37 @@ def set_hidden(session, shout_id):
@login_required
async def create_reaction(_, info, reaction):
auth: AuthCredentials = info.context["request"].auth
- reaction["createdBy"] = auth.user_id
+ reaction['createdBy'] = auth.user_id
rdict = {}
with local_session() as session:
shout = session.query(Shout).where(Shout.id == reaction["shout"]).one()
author = session.query(User).where(User.id == auth.user_id).one()
- if reaction["kind"] in [ReactionKind.DISLIKE.name, ReactionKind.LIKE.name]:
- existing_reaction = (
- session.query(Reaction)
- .where(
- and_(
- Reaction.shout == reaction["shout"],
- Reaction.createdBy == auth.user_id,
- Reaction.kind == reaction["kind"],
- Reaction.replyTo == reaction.get("replyTo"),
- )
+ if reaction["kind"] in [
+ ReactionKind.DISLIKE.name,
+ ReactionKind.LIKE.name
+ ]:
+ existing_reaction = session.query(Reaction).where(
+ and_(
+ Reaction.shout == reaction["shout"],
+ Reaction.createdBy == auth.user_id,
+ Reaction.kind == reaction["kind"],
+ Reaction.replyTo == reaction.get("replyTo")
)
- .first()
- )
+ ).first()
if existing_reaction is not None:
raise OperationNotAllowed("You can't vote twice")
- opposite_reaction_kind = (
- ReactionKind.DISLIKE
- if reaction["kind"] == ReactionKind.LIKE.name
- else ReactionKind.LIKE
- )
- opposite_reaction = (
- session.query(Reaction)
- .where(
+ opposite_reaction_kind = ReactionKind.DISLIKE if reaction["kind"] == ReactionKind.LIKE.name else ReactionKind.LIKE
+ opposite_reaction = session.query(Reaction).where(
and_(
Reaction.shout == reaction["shout"],
Reaction.createdBy == auth.user_id,
Reaction.kind == opposite_reaction_kind,
- Reaction.replyTo == reaction.get("replyTo"),
+ Reaction.replyTo == reaction.get("replyTo")
)
- )
- .first()
- )
+ ).first()
if opposite_reaction is not None:
session.delete(opposite_reaction)
@@ -232,8 +221,8 @@ async def create_reaction(_, info, reaction):
await notification_service.handle_new_reaction(r.id)
rdict = r.dict()
- rdict["shout"] = shout.dict()
- rdict["createdBy"] = author.dict()
+ rdict['shout'] = shout.dict()
+ rdict['createdBy'] = author.dict()
# self-regulation mechanics
if check_to_hide(session, auth.user_id, r):
@@ -246,7 +235,11 @@ async def create_reaction(_, info, reaction):
except Exception as e:
print(f"[resolvers.reactions] error on reactions autofollowing: {e}")
- rdict["stat"] = {"commented": 0, "reacted": 0, "rating": 0}
+ rdict['stat'] = {
+ "commented": 0,
+ "reacted": 0,
+ "rating": 0
+ }
return {"reaction": rdict}
@@ -279,7 +272,7 @@ async def update_reaction(_, info, id, reaction={}):
r.stat = {
"commented": commented_stat,
"reacted": reacted_stat,
- "rating": rating_stat,
+ "rating": rating_stat
}
return {"reaction": r}
@@ -297,12 +290,17 @@ async def delete_reaction(_, info, id):
if r.createdBy != auth.user_id:
return {"error": "access denied"}
- if r.kind in [ReactionKind.LIKE, ReactionKind.DISLIKE]:
+ if r.kind in [
+ ReactionKind.LIKE,
+ ReactionKind.DISLIKE
+ ]:
session.delete(r)
else:
r.deletedAt = datetime.now(tz=timezone.utc)
session.commit()
- return {"reaction": r}
+ return {
+ "reaction": r
+ }
@query.field("loadReactionsBy")
@@ -323,10 +321,12 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
:return: Reaction[]
"""
- q = (
- select(Reaction, User, Shout)
- .join(User, Reaction.createdBy == User.id)
- .join(Shout, Reaction.shout == Shout.id)
+ q = select(
+ Reaction, User, Shout
+ ).join(
+ User, Reaction.createdBy == User.id
+ ).join(
+ Shout, Reaction.shout == Shout.id
)
if by.get("shout"):
@@ -344,7 +344,7 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
if by.get("comment"):
q = q.filter(func.length(Reaction.body) > 0)
- if len(by.get("search", "")) > 2:
+ if len(by.get('search', '')) > 2:
q = q.filter(Reaction.body.ilike(f'%{by["body"]}%'))
if by.get("days"):
@@ -352,9 +352,13 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
q = q.filter(Reaction.createdAt > after)
order_way = asc if by.get("sort", "").startswith("-") else desc
- order_field = by.get("sort", "").replace("-", "") or Reaction.createdAt
+ order_field = by.get("sort", "").replace('-', '') or Reaction.createdAt
- q = q.group_by(Reaction.id, User.id, Shout.id).order_by(order_way(order_field))
+ q = q.group_by(
+ Reaction.id, User.id, Shout.id
+ ).order_by(
+ order_way(order_field)
+ )
q = add_reaction_stat_columns(q)
@@ -363,20 +367,13 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
reactions = []
with local_session() as session:
- for [
- reaction,
- user,
- shout,
- reacted_stat,
- commented_stat,
- rating_stat,
- ] in session.execute(q):
+ for [reaction, user, shout, reacted_stat, commented_stat, rating_stat] in session.execute(q):
reaction.createdBy = user
reaction.shout = shout
reaction.stat = {
"rating": rating_stat,
"commented": commented_stat,
- "reacted": reacted_stat,
+ "reacted": reacted_stat
}
reaction.kind = reaction.kind.name
diff --git a/resolvers/zine/topics.py b/resolvers/zine/topics.py
index f24065cd..f354a7b4 100644
--- a/resolvers/zine/topics.py
+++ b/resolvers/zine/topics.py
@@ -1,24 +1,24 @@
+from sqlalchemy import and_, select, distinct, func
+from sqlalchemy.orm import aliased
+
from auth.authenticate import login_required
from base.orm import local_session
from base.resolvers import mutation, query
-from orm import User
-from orm.shout import ShoutAuthor, ShoutTopic
+from orm.shout import ShoutTopic, ShoutAuthor
from orm.topic import Topic, TopicFollower
-from sqlalchemy import and_, distinct, func, select
-from sqlalchemy.orm import aliased
+from orm import User
def add_topic_stat_columns(q):
aliased_shout_author = aliased(ShoutAuthor)
aliased_topic_follower = aliased(TopicFollower)
- q = (
- q.outerjoin(ShoutTopic, Topic.id == ShoutTopic.topic)
- .add_columns(func.count(distinct(ShoutTopic.shout)).label("shouts_stat"))
- .outerjoin(aliased_shout_author, ShoutTopic.shout == aliased_shout_author.shout)
- .add_columns(func.count(distinct(aliased_shout_author.user)).label("authors_stat"))
- .outerjoin(aliased_topic_follower)
- .add_columns(func.count(distinct(aliased_topic_follower.follower)).label("followers_stat"))
+ q = q.outerjoin(ShoutTopic, Topic.id == ShoutTopic.topic).add_columns(
+ func.count(distinct(ShoutTopic.shout)).label('shouts_stat')
+ ).outerjoin(aliased_shout_author, ShoutTopic.shout == aliased_shout_author.shout).add_columns(
+ func.count(distinct(aliased_shout_author.user)).label('authors_stat')
+ ).outerjoin(aliased_topic_follower).add_columns(
+ func.count(distinct(aliased_topic_follower.follower)).label('followers_stat')
)
q = q.group_by(Topic.id)
@@ -31,7 +31,7 @@ def add_stat(topic, stat_columns):
topic.stat = {
"shouts": shouts_stat,
"authors": authors_stat,
- "followers": followers_stat,
+ "followers": followers_stat
}
return topic
@@ -133,10 +133,12 @@ def topic_unfollow(user_id, slug):
try:
with local_session() as session:
sub = (
- session.query(TopicFollower)
- .join(Topic)
- .filter(and_(TopicFollower.follower == user_id, Topic.slug == slug))
- .first()
+ session.query(TopicFollower).join(Topic).filter(
+ and_(
+ TopicFollower.follower == user_id,
+ Topic.slug == slug
+ )
+ ).first()
)
if sub:
session.delete(sub)
diff --git a/server.py b/server.py
index a491c30d..753c60ae 100644
--- a/server.py
+++ b/server.py
@@ -1,45 +1,56 @@
-from settings import DEV_SERVER_PID_FILE_NAME, PORT
-
-import os
import sys
+import os
import uvicorn
+from settings import PORT, DEV_SERVER_PID_FILE_NAME
+
def exception_handler(exception_type, exception, traceback, debug_hook=sys.excepthook):
print("%s: %s" % (exception_type.__name__, exception))
log_settings = {
- "version": 1,
- "disable_existing_loggers": True,
- "formatters": {
- "default": {
- "()": "uvicorn.logging.DefaultFormatter",
- "fmt": "%(levelprefix)s %(message)s",
- "use_colors": None,
- },
- "access": {
- "()": "uvicorn.logging.AccessFormatter",
- "fmt": '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s',
+ 'version': 1,
+ 'disable_existing_loggers': True,
+ 'formatters': {
+ 'default': {
+ '()': 'uvicorn.logging.DefaultFormatter',
+ 'fmt': '%(levelprefix)s %(message)s',
+ 'use_colors': None
},
+ 'access': {
+ '()': 'uvicorn.logging.AccessFormatter',
+ 'fmt': '%(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s'
+ }
},
- "handlers": {
- "default": {
- "formatter": "default",
- "class": "logging.StreamHandler",
- "stream": "ext://sys.stderr",
+ 'handlers': {
+ 'default': {
+ 'formatter': 'default',
+ 'class': 'logging.StreamHandler',
+ 'stream': 'ext://sys.stderr'
},
- "access": {
- "formatter": "access",
- "class": "logging.StreamHandler",
- "stream": "ext://sys.stdout",
+ 'access': {
+ 'formatter': 'access',
+ 'class': 'logging.StreamHandler',
+ 'stream': 'ext://sys.stdout'
+ }
+ },
+ 'loggers': {
+ 'uvicorn': {
+ 'handlers': ['default'],
+ 'level': 'INFO'
},
- },
- "loggers": {
- "uvicorn": {"handlers": ["default"], "level": "INFO"},
- "uvicorn.error": {"level": "INFO", "handlers": ["default"], "propagate": True},
- "uvicorn.access": {"handlers": ["access"], "level": "INFO", "propagate": False},
- },
+ 'uvicorn.error': {
+ 'level': 'INFO',
+ 'handlers': ['default'],
+ 'propagate': True
+ },
+ 'uvicorn.access': {
+ 'handlers': ['access'],
+ 'level': 'INFO',
+ 'propagate': False
+ }
+ }
}
local_headers = [
@@ -47,8 +58,7 @@ local_headers = [
("Access-Control-Allow-Origin", "https://localhost:3000"),
(
"Access-Control-Allow-Headers",
- "DNT,User-Agent,X-Requested-With,If-Modified-Since,"
- + "Cache-Control,Content-Type,Range,Authorization",
+ "DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization",
),
("Access-Control-Expose-Headers", "Content-Length,Content-Range"),
("Access-Control-Allow-Credentials", "true"),
@@ -76,17 +86,15 @@ if __name__ == "__main__":
# log_config=log_settings,
log_level=None,
access_log=True,
- reload=want_reload,
+ reload=want_reload
) # , ssl_keyfile="discours.key", ssl_certfile="discours.crt")
elif x == "migrate":
from migration import process
-
print("MODE: MIGRATE")
process()
elif x == "bson":
from migration.bson2json import json_tables
-
print("MODE: BSON")
json_tables()
@@ -97,5 +105,5 @@ if __name__ == "__main__":
host="0.0.0.0",
port=PORT,
proxy_headers=True,
- server_header=True,
+ server_header=True
)
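The `log_settings` mapping stays a standard `dictConfig`-style structure that uvicorn accepts through its `log_config` parameter, even though it is commented out in the dev branch above. A minimal sketch of wiring such a dict into `uvicorn.run`, with a throwaway ASGI app standing in for the project's application:

```python
# Sketch: a dictConfig-style mapping handed to uvicorn via log_config.
# The ASGI `app` below is a throwaway placeholder, not the project's application.
import uvicorn

log_settings = {
    'version': 1,
    'disable_existing_loggers': True,
    'formatters': {
        'default': {
            '()': 'uvicorn.logging.DefaultFormatter',
            'fmt': '%(levelprefix)s %(message)s',
            'use_colors': None
        }
    },
    'handlers': {
        'default': {
            'formatter': 'default',
            'class': 'logging.StreamHandler',
            'stream': 'ext://sys.stderr'
        }
    },
    'loggers': {
        'uvicorn': {'handlers': ['default'], 'level': 'INFO'}
    }
}


async def app(scope, receive, send):
    if scope['type'] == 'http':
        await send({'type': 'http.response.start', 'status': 200, 'headers': []})
        await send({'type': 'http.response.body', 'body': b'ok'})


if __name__ == '__main__':
    # uvicorn runs the dict through logging.config.dictConfig before serving
    uvicorn.run(app, host='0.0.0.0', port=8080, log_config=log_settings)
```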
diff --git a/services/following.py b/services/following.py
index a2be6af4..8410eb2d 100644
--- a/services/following.py
+++ b/services/following.py
@@ -18,7 +18,12 @@ class Following:
class FollowingManager:
lock = asyncio.Lock()
- data = {"author": [], "topic": [], "shout": [], "chat": []}
+ data = {
+ 'author': [],
+ 'topic': [],
+ 'shout': [],
+ 'chat': []
+ }
@staticmethod
async def register(kind, uid):
@@ -34,13 +39,13 @@ class FollowingManager:
async def push(kind, payload):
try:
async with FollowingManager.lock:
- if kind == "chat":
- for chat in FollowingManager["chat"]:
+ if kind == 'chat':
+ for chat in FollowingManager['chat']:
if payload.message["chatId"] == chat.uid:
chat.queue.put_nowait(payload)
else:
for entity in FollowingManager[kind]:
- if payload.shout["createdBy"] == entity.uid:
+ if payload.shout['createdBy'] == entity.uid:
entity.queue.put_nowait(payload)
except Exception as e:
print(Exception(e))
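`FollowingManager.push` is a lock-guarded fan-out: every registered follower owns an `asyncio.Queue`, and matching payloads are dropped onto each queue without blocking. A self-contained sketch of that pattern with simplified stand-ins (the real classes live in services/following.py; the registry dict is indexed directly here rather than through the manager class itself):

```python
# Sketch of the lock + per-follower queue fan-out behind FollowingManager.
# Simplified stand-ins; the real classes live in services/following.py.
import asyncio


class Following:
    def __init__(self, kind, uid):
        self.kind = kind
        self.uid = uid
        self.queue = asyncio.Queue()


class Manager:
    lock = asyncio.Lock()
    data = {'author': [], 'topic': [], 'shout': [], 'chat': []}

    @staticmethod
    async def register(kind, uid):
        async with Manager.lock:
            entry = Following(kind, uid)
            Manager.data[kind].append(entry)
            return entry

    @staticmethod
    async def push(kind, payload):
        # every follower registered for this uid gets the payload on its own queue
        async with Manager.lock:
            for entity in Manager.data[kind]:
                if payload['createdBy'] == entity.uid:
                    entity.queue.put_nowait(payload)


async def main():
    follower = await Manager.register('shout', 'user-1')
    await Manager.push('shout', {'createdBy': 'user-1', 'slug': 'new-shout'})
    print(await follower.queue.get())  # {'createdBy': 'user-1', 'slug': 'new-shout'}


asyncio.run(main())
```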
diff --git a/services/main.py b/services/main.py
index 6397a5e5..10301b86 100644
--- a/services/main.py
+++ b/services/main.py
@@ -1,13 +1,13 @@
-from base.orm import local_session
from services.search import SearchService
from services.stat.viewed import ViewedStorage
+from base.orm import local_session
async def storages_init():
with local_session() as session:
- print("[main] initialize SearchService")
+ print('[main] initialize SearchService')
await SearchService.init(session)
- print("[main] SearchService initialized")
- print("[main] initialize storages")
+ print('[main] SearchService initialized')
+ print('[main] initialize storages')
await ViewedStorage.init()
- print("[main] storages initialized")
+ print('[main] storages initialized')
diff --git a/services/notifications/notification_service.py b/services/notifications/notification_service.py
index ade98763..7e92aa95 100644
--- a/services/notifications/notification_service.py
+++ b/services/notifications/notification_service.py
@@ -1,17 +1,21 @@
-from base.orm import local_session
+import asyncio
+import json
from datetime import datetime, timezone
-from orm import Notification, Reaction, Shout, User
+
+from sqlalchemy import and_
+
+from base.orm import local_session
+from orm import Reaction, Shout, Notification, User
from orm.notification import NotificationType
from orm.reaction import ReactionKind
from services.notifications.sse import connection_manager
-from sqlalchemy import and_
-
-import asyncio
-import json
def shout_to_shout_data(shout):
- return {"title": shout.title, "slug": shout.slug}
+ return {
+ "title": shout.title,
+ "slug": shout.slug
+ }
def user_to_user_data(user):
@@ -19,14 +23,14 @@ def user_to_user_data(user):
"id": user.id,
"name": user.name,
"slug": user.slug,
- "userpic": user.userpic,
+ "userpic": user.userpic
}
def update_prev_notification(notification, user, reaction):
notification_data = json.loads(notification.data)
- notification_data["users"] = [u for u in notification_data["users"] if u["id"] != user.id]
+ notification_data["users"] = [u for u in notification_data["users"] if u['id'] != user.id]
notification_data["users"].append(user_to_user_data(user))
if notification_data["reactionIds"] is None:
@@ -53,45 +57,34 @@ class NewReactionNotificator:
if reaction.kind == ReactionKind.COMMENT:
parent_reaction = None
if reaction.replyTo:
- parent_reaction = (
- session.query(Reaction).where(Reaction.id == reaction.replyTo).one()
- )
+ parent_reaction = session.query(Reaction).where(Reaction.id == reaction.replyTo).one()
if parent_reaction.createdBy != reaction.createdBy:
- prev_new_reply_notification = (
- session.query(Notification)
- .where(
- and_(
- Notification.user == shout.createdBy,
- Notification.type == NotificationType.NEW_REPLY,
- Notification.shout == shout.id,
- Notification.reaction == parent_reaction.id,
- Notification.seen == False, # noqa: E712
- )
+ prev_new_reply_notification = session.query(Notification).where(
+ and_(
+ Notification.user == shout.createdBy,
+ Notification.type == NotificationType.NEW_REPLY,
+ Notification.shout == shout.id,
+ Notification.reaction == parent_reaction.id,
+ Notification.seen == False
)
- .first()
- )
+ ).first()
if prev_new_reply_notification:
update_prev_notification(prev_new_reply_notification, user, reaction)
else:
- reply_notification_data = json.dumps(
- {
- "shout": shout_to_shout_data(shout),
- "users": [user_to_user_data(user)],
- "reactionIds": [reaction.id],
- },
- ensure_ascii=False,
- )
+ reply_notification_data = json.dumps({
+ "shout": shout_to_shout_data(shout),
+ "users": [user_to_user_data(user)],
+ "reactionIds": [reaction.id]
+ }, ensure_ascii=False)
- reply_notification = Notification.create(
- **{
- "user": parent_reaction.createdBy,
- "type": NotificationType.NEW_REPLY,
- "shout": shout.id,
- "reaction": parent_reaction.id,
- "data": reply_notification_data,
- }
- )
+ reply_notification = Notification.create(**{
+ "user": parent_reaction.createdBy,
+ "type": NotificationType.NEW_REPLY,
+ "shout": shout.id,
+ "reaction": parent_reaction.id,
+ "data": reply_notification_data
+ })
session.add(reply_notification)
@@ -100,39 +93,30 @@ class NewReactionNotificator:
if reaction.createdBy != shout.createdBy and (
parent_reaction is None or parent_reaction.createdBy != shout.createdBy
):
- prev_new_comment_notification = (
- session.query(Notification)
- .where(
- and_(
- Notification.user == shout.createdBy,
- Notification.type == NotificationType.NEW_COMMENT,
- Notification.shout == shout.id,
- Notification.seen == False, # noqa: E712
- )
+ prev_new_comment_notification = session.query(Notification).where(
+ and_(
+ Notification.user == shout.createdBy,
+ Notification.type == NotificationType.NEW_COMMENT,
+ Notification.shout == shout.id,
+ Notification.seen == False
)
- .first()
- )
+ ).first()
if prev_new_comment_notification:
update_prev_notification(prev_new_comment_notification, user, reaction)
else:
- notification_data_string = json.dumps(
- {
- "shout": shout_to_shout_data(shout),
- "users": [user_to_user_data(user)],
- "reactionIds": [reaction.id],
- },
- ensure_ascii=False,
- )
+ notification_data_string = json.dumps({
+ "shout": shout_to_shout_data(shout),
+ "users": [user_to_user_data(user)],
+ "reactionIds": [reaction.id]
+ }, ensure_ascii=False)
- author_notification = Notification.create(
- **{
- "user": shout.createdBy,
- "type": NotificationType.NEW_COMMENT,
- "shout": shout.id,
- "data": notification_data_string,
- }
- )
+ author_notification = Notification.create(**{
+ "user": shout.createdBy,
+ "type": NotificationType.NEW_COMMENT,
+ "shout": shout.id,
+ "data": notification_data_string
+ })
session.add(author_notification)
@@ -158,7 +142,7 @@ class NotificationService:
try:
await notificator.run()
except Exception as e:
- print(f"[NotificationService.worker] error: {str(e)}")
+ print(f'[NotificationService.worker] error: {str(e)}')
notification_service = NotificationService()
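Both notificator branches share one update-or-create step: if an unseen notification for the same shout already exists, its JSON `data` payload is merged instead of creating a new row. A sketch of just the merge done by `update_prev_notification`, using plain dicts instead of ORM objects:

```python
# Sketch of the JSON merge performed by update_prev_notification:
# re-insert the acting user once and append the new reaction id.
import json


def merge_notification_data(raw_data, user, reaction_id):
    data = json.loads(raw_data)
    # drop any stale entry for this user before appending the fresh one
    data["users"] = [u for u in data["users"] if u["id"] != user["id"]]
    data["users"].append(user)
    if data.get("reactionIds") is None:
        data["reactionIds"] = []
    data["reactionIds"].append(reaction_id)
    return json.dumps(data, ensure_ascii=False)


prev = json.dumps({"users": [{"id": 1, "name": "old"}], "reactionIds": [7]})
print(merge_notification_data(prev, {"id": 1, "name": "new"}, 8))
# {"users": [{"id": 1, "name": "new"}], "reactionIds": [7, 8]}
```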
diff --git a/services/notifications/sse.py b/services/notifications/sse.py
index 23352344..085dbde0 100644
--- a/services/notifications/sse.py
+++ b/services/notifications/sse.py
@@ -1,8 +1,8 @@
+import json
+
from sse_starlette.sse import EventSourceResponse
from starlette.requests import Request
-
import asyncio
-import json
class ConnectionManager:
@@ -28,7 +28,9 @@ class ConnectionManager:
return
for connection in self.connections_by_user_id[user_id]:
- data = {"type": "newNotifications"}
+ data = {
+ "type": "newNotifications"
+ }
data_string = json.dumps(data, ensure_ascii=False)
await connection.put(data_string)
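On the transport side, each SSE subscriber gets its own queue and `EventSourceResponse` streams whatever JSON strings the connection manager puts there. A minimal sketch of that wiring under Starlette; the route path and module-level registry are illustrative, not the project's `ConnectionManager`:

```python
# Sketch: one asyncio.Queue per SSE subscriber, drained by EventSourceResponse.
# The route path and module-level registry are illustrative placeholders.
import asyncio
import json

from sse_starlette.sse import EventSourceResponse
from starlette.applications import Starlette
from starlette.requests import Request
from starlette.routing import Route

subscribers = []  # one asyncio.Queue per open SSE connection


async def notify_all():
    data_string = json.dumps({"type": "newNotifications"}, ensure_ascii=False)
    for queue in subscribers:
        await queue.put(data_string)


async def sse_endpoint(request: Request):
    queue = asyncio.Queue()
    subscribers.append(queue)

    async def event_stream():
        try:
            while True:
                yield await queue.get()  # each string becomes one SSE data frame
        finally:
            subscribers.remove(queue)

    return EventSourceResponse(event_stream())


app = Starlette(routes=[Route("/subscribe", sse_endpoint)])
```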
diff --git a/services/search.py b/services/search.py
index ffcd32b5..834e5bf7 100644
--- a/services/search.py
+++ b/services/search.py
@@ -1,10 +1,9 @@
+import asyncio
+import json
from base.redis import redis
from orm.shout import Shout
from resolvers.zine.load import load_shouts_by
-import asyncio
-import json
-
class SearchService:
lock = asyncio.Lock()
@@ -13,7 +12,7 @@ class SearchService:
@staticmethod
async def init(session):
async with SearchService.lock:
- print("[search.service] did nothing")
+ print('[search.service] did nothing')
SearchService.cache = {}
@staticmethod
@@ -25,7 +24,7 @@ class SearchService:
"title": text,
"body": text,
"limit": limit,
- "offset": offset,
+ "offset": offset
}
payload = await load_shouts_by(None, None, options)
await redis.execute("SET", text, json.dumps(payload))
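`SearchService.search` builds a `load_shouts_by` options dict and writes the result into Redis keyed by the raw query text. A sketch of that step as a generic cache helper; only the `SET` write is visible in this hunk, so the read path and the `redis_client`/`fetch_results` names are assumptions standing in for `base.redis` and `resolvers.zine.load.load_shouts_by`:

```python
# Sketch of the caching step behind SearchService.search. Only the SET write
# appears in the hunk above; the read path here is an assumption, and
# redis_client / fetch_results stand in for base.redis and load_shouts_by.
import json


async def cached_search(redis_client, fetch_results, text, limit=50, offset=0):
    cached = await redis_client.get(text)
    if cached:
        return json.loads(cached)
    payload = await fetch_results({
        "title": text,
        "body": text,
        "limit": limit,
        "offset": offset
    })
    await redis_client.set(text, json.dumps(payload))
    return payload
```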
diff --git a/services/stat/viewed.py b/services/stat/viewed.py
index ce5070b2..905ade43 100644
--- a/services/stat/viewed.py
+++ b/services/stat/viewed.py
@@ -1,17 +1,18 @@
-from base.orm import local_session
-from datetime import datetime, timedelta, timezone
-from gql import Client, gql
-from gql.transport.aiohttp import AIOHTTPTransport
-from orm import Topic
-from orm.shout import Shout, ShoutTopic
+import asyncio
+import time
+from datetime import timedelta, timezone, datetime
from os import environ, path
from ssl import create_default_context
-import asyncio
-import time
+from gql import Client, gql
+from gql.transport.aiohttp import AIOHTTPTransport
+from sqlalchemy import func
-load_facts = gql(
- """
+from base.orm import local_session
+from orm import User, Topic
+from orm.shout import ShoutTopic, Shout
+
+load_facts = gql("""
query getDomains {
domains {
id
@@ -24,11 +25,9 @@ query getDomains {
}
}
}
-"""
-)
+""")
-load_pages = gql(
- """
+load_pages = gql("""
query getDomains {
domains {
title
@@ -42,9 +41,8 @@ query getDomains {
}
}
}
-"""
-)
-schema_str = open(path.dirname(__file__) + "/ackee.graphql").read()
+""")
+schema_str = open(path.dirname(__file__) + '/ackee.graphql').read()
token = environ.get("ACKEE_TOKEN", "")
@@ -54,8 +52,8 @@ def create_client(headers=None, schema=None):
transport=AIOHTTPTransport(
url="https://ackee.discours.io/api",
ssl=create_default_context(),
- headers=headers,
- ),
+ headers=headers
+ )
)
@@ -73,13 +71,13 @@ class ViewedStorage:
@staticmethod
async def init():
- """graphql client connection using permanent token"""
+ """ graphql client connection using permanent token """
self = ViewedStorage
async with self.lock:
if token:
- self.client = create_client(
- {"Authorization": "Bearer %s" % str(token)}, schema=schema_str
- )
+ self.client = create_client({
+ "Authorization": "Bearer %s" % str(token)
+ }, schema=schema_str)
print("[stat.viewed] * authorized permanentely by ackee.discours.io: %s" % token)
else:
print("[stat.viewed] * please set ACKEE_TOKEN")
@@ -87,7 +85,7 @@ class ViewedStorage:
@staticmethod
async def update_pages():
- """query all the pages from ackee sorted by views count"""
+ """ query all the pages from ackee sorted by views count """
print("[stat.viewed] ⎧ updating ackee pages data ---")
start = time.time()
self = ViewedStorage
@@ -98,7 +96,7 @@ class ViewedStorage:
try:
for page in self.pages:
p = page["value"].split("?")[0]
- slug = p.split("discours.io/")[-1]
+ slug = p.split('discours.io/')[-1]
shouts[slug] = page["count"]
for slug in shouts.keys():
await ViewedStorage.increment(slug, shouts[slug])
@@ -120,7 +118,7 @@ class ViewedStorage:
# unused yet
@staticmethod
async def get_shout(shout_slug):
- """getting shout views metric by slug"""
+ """ getting shout views metric by slug """
self = ViewedStorage
async with self.lock:
shout_views = self.by_shouts.get(shout_slug)
@@ -138,7 +136,7 @@ class ViewedStorage:
@staticmethod
async def get_topic(topic_slug):
- """getting topic views value summed"""
+ """ getting topic views value summed """
self = ViewedStorage
topic_views = 0
async with self.lock:
@@ -148,28 +146,24 @@ class ViewedStorage:
@staticmethod
def update_topics(session, shout_slug):
- """updates topics counters by shout slug"""
+ """ updates topics counters by shout slug """
self = ViewedStorage
- for [shout_topic, topic] in (
- session.query(ShoutTopic, Topic)
- .join(Topic)
- .join(Shout)
- .where(Shout.slug == shout_slug)
- .all()
- ):
+ for [shout_topic, topic] in session.query(ShoutTopic, Topic).join(Topic).join(Shout).where(
+ Shout.slug == shout_slug
+ ).all():
if not self.by_topics.get(topic.slug):
self.by_topics[topic.slug] = {}
self.by_topics[topic.slug][shout_slug] = self.by_shouts[shout_slug]
@staticmethod
- async def increment(shout_slug, amount=1, viewer="ackee"):
- """the only way to change views counter"""
+ async def increment(shout_slug, amount=1, viewer='ackee'):
+ """ the only way to change views counter """
self = ViewedStorage
async with self.lock:
            # TODO optimize, currently we execute 1 DB transaction per shout
with local_session() as session:
shout = session.query(Shout).where(Shout.slug == shout_slug).one()
- if viewer == "old-discours":
+ if viewer == 'old-discours':
# this is needed for old db migration
if shout.viewsOld == amount:
print(f"viewsOld amount: {amount}")
@@ -191,7 +185,7 @@ class ViewedStorage:
@staticmethod
async def worker():
- """async task worker"""
+ """ async task worker """
failed = 0
self = ViewedStorage
if self.disabled:
@@ -211,10 +205,9 @@ class ViewedStorage:
if failed == 0:
when = datetime.now(timezone.utc) + timedelta(seconds=self.period)
t = format(when.astimezone().isoformat())
- print(
- "[stat.viewed] ⎩ next update: %s"
- % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0])
- )
+ print("[stat.viewed] ⎩ next update: %s" % (
+ t.split("T")[0] + " " + t.split("T")[1].split(".")[0]
+ ))
await asyncio.sleep(self.period)
else:
await asyncio.sleep(10)
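`create_client` wraps a gql `Client` around `AIOHTTPTransport` with the Ackee endpoint and a bearer token from `ACKEE_TOKEN`; the project additionally passes its local `ackee.graphql` schema string for validation. A trimmed sketch of that flow with a reduced selection set (the full query lives in the unchanged part of the file):

```python
# Sketch: gql Client over AIOHTTPTransport with an Ackee bearer token, as in
# services/stat/viewed.py. The selection set is trimmed to fields visible in
# the diff; the project also passes its local ackee.graphql schema string.
import asyncio
from os import environ

from gql import Client, gql
from gql.transport.aiohttp import AIOHTTPTransport

load_domains = gql("""
query getDomains {
  domains {
    id
    title
  }
}
""")


def create_client(headers=None):
    return Client(transport=AIOHTTPTransport(
        url="https://ackee.discours.io/api",
        headers=headers
    ))


async def main():
    token = environ.get("ACKEE_TOKEN", "")
    client = create_client({"Authorization": "Bearer %s" % str(token)})
    print(await client.execute_async(load_domains))


if __name__ == "__main__":
    asyncio.run(main())
```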
diff --git a/services/zine/gittask.py b/services/zine/gittask.py
index 6c6ce440..31e55025 100644
--- a/services/zine/gittask.py
+++ b/services/zine/gittask.py
@@ -1,8 +1,8 @@
-from pathlib import Path
-from settings import SHOUTS_REPO
-
import asyncio
import subprocess
+from pathlib import Path
+
+from settings import SHOUTS_REPO
class GitTask:
diff --git a/settings.py b/settings.py
index f3da9952..270b4551 100644
--- a/settings.py
+++ b/settings.py
@@ -3,9 +3,8 @@ from os import environ
PORT = 8080
DB_URL = (
- environ.get("DATABASE_URL")
- or environ.get("DB_URL")
- or "postgresql://postgres@localhost:5432/discoursio"
+ environ.get("DATABASE_URL") or environ.get("DB_URL") or
+ "postgresql://postgres@localhost:5432/discoursio"
)
JWT_ALGORITHM = "HS256"
JWT_SECRET_KEY = environ.get("JWT_SECRET_KEY") or "8f1bd7696ffb482d8486dfbc6e7d16dd-secret-key"
@@ -31,4 +30,4 @@ SENTRY_DSN = environ.get("SENTRY_DSN")
SESSION_SECRET_KEY = environ.get("SESSION_SECRET_KEY") or "!secret"
# for local development
-DEV_SERVER_PID_FILE_NAME = "dev-server.pid"
+DEV_SERVER_PID_FILE_NAME = 'dev-server.pid'
diff --git a/setup.cfg b/setup.cfg
old mode 100644
new mode 100755
index e3db2ef9..588918a1
--- a/setup.cfg
+++ b/setup.cfg
@@ -9,16 +9,15 @@ force_alphabetical_sort = false
[tool:brunette]
# https://github.com/odwyersoftware/brunette
-line-length = 100
+line-length = 120
single-quotes = false
[flake8]
# https://github.com/PyCQA/flake8
exclude = .git,__pycache__,.mypy_cache,.vercel
-max-line-length = 100
-max-complexity = 10
+max-line-length = 120
+max-complexity = 15
select = B,C,E,F,W,T4,B9
-# FIXME
# E203: Whitespace before ':'
# E266: Too many leading '#' for block comment
# E501: Line too long (82 > 79 characters)
diff --git a/setup.cfg.bak b/setup.cfg.bak
deleted file mode 100644
index 588918a1..00000000
--- a/setup.cfg.bak
+++ /dev/null
@@ -1,39 +0,0 @@
-[isort]
-# https://github.com/PyCQA/isort
-line_length = 120
-multi_line_output = 3
-include_trailing_comma = true
-force_grid_wrap = 0
-use_parentheses = true
-force_alphabetical_sort = false
-
-[tool:brunette]
-# https://github.com/odwyersoftware/brunette
-line-length = 120
-single-quotes = false
-
-[flake8]
-# https://github.com/PyCQA/flake8
-exclude = .git,__pycache__,.mypy_cache,.vercel
-max-line-length = 120
-max-complexity = 15
-select = B,C,E,F,W,T4,B9
-# E203: Whitespace before ':'
-# E266: Too many leading '#' for block comment
-# E501: Line too long (82 > 79 characters)
-# E722: Do not use bare except, specify exception instead
-# W503: Line break occurred before a binary operator
-# F403: 'from module import *' used; unable to detect undefined names
-# C901: Function is too complex
-ignore = E203,E266,E501,E722,W503,F403,C901
-
-[mypy]
-# https://github.com/python/mypy
-ignore_missing_imports = true
-warn_return_any = false
-warn_unused_configs = true
-disallow_untyped_calls = true
-disallow_untyped_defs = true
-disallow_incomplete_defs = true
-[mypy-api.*]
-ignore_errors = true
diff --git a/validations/auth.py b/validations/auth.py
index 59c49bd4..216d7dcb 100644
--- a/validations/auth.py
+++ b/validations/auth.py
@@ -1,5 +1,5 @@
-from pydantic import BaseModel
from typing import Optional, Text
+from pydantic import BaseModel
class AuthInput(BaseModel):
diff --git a/validations/inbox.py b/validations/inbox.py
index d864ed67..d03cca05 100644
--- a/validations/inbox.py
+++ b/validations/inbox.py
@@ -1,5 +1,5 @@
+from typing import Optional, Text, List
from pydantic import BaseModel
-from typing import List, Optional, Text
class Message(BaseModel):
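Both validations modules are thin pydantic models whose imports are being reordered here. A minimal sketch of how such a model validates resolver input; the field names are illustrative, not the project's:

```python
# Sketch of the validations/* pattern: a thin pydantic model with optional
# fields. The field names below are illustrative, not the project's.
from typing import Optional, Text

from pydantic import BaseModel, ValidationError


class Message(BaseModel):
    body: Text
    chatId: Text
    replyTo: Optional[Text] = None


try:
    print(Message(body="hello", chatId="chat-1").dict())
except ValidationError as e:
    print(e.json())
```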