format and lint orm

This commit is contained in:
tonyrewin 2022-09-03 13:50:14 +03:00
parent 85892a88bc
commit a89a44f660
55 changed files with 4811 additions and 4174 deletions

View File

@ -2,7 +2,7 @@ root = true
[*]
indent_style = tabs
indent_size = 1
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace=true

.flake8 Normal file (5 lines)
View File

@ -0,0 +1,5 @@
[flake8]
ignore = D203
exclude = .git,__pycache__
max-complexity = 10
max-line-length = 108
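
Usage sketch, not part of this commit: these limits are picked up automatically when flake8 runs from the project root; the snippet below drives the same check through flake8's legacy Python API, and the checked path is only an example.

# hypothetical check via flake8's legacy API; reads the .flake8 above from the project root
from flake8.api import legacy as flake8

style_guide = flake8.get_style_guide()                  # max-line-length=108, max-complexity=10
report = style_guide.check_files(["auth/jwtcodec.py"])  # example path
print(report.total_errors)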

View File

@ -16,119 +16,126 @@ from settings import JWT_AUTH_HEADER, EMAIL_TOKEN_LIFE_SPAN
class _Authenticate:
@classmethod
async def verify(cls, token: str):
"""
Rules for a token to be valid.
1. token format is legal &&
token exists in redis database &&
token is not expired
2. token format is legal &&
token exists in redis database &&
token is expired &&
token is of specified type
"""
try:
payload = JWTCodec.decode(token)
except ExpiredSignatureError:
payload = JWTCodec.decode(token, verify_exp=False)
if not await cls.exists(payload.user_id, token):
raise InvalidToken("Login expired, please login again")
if payload.device == "mobile": # noqa
"we cat set mobile token to be valid forever"
return payload
except DecodeError as e:
raise InvalidToken("token format error") from e
else:
if not await cls.exists(payload.user_id, token):
raise InvalidToken("Login expired, please login again")
return payload
@classmethod
async def verify(cls, token: str):
"""
Rules for a token to be valid.
1. token format is legal &&
token exists in redis database &&
token is not expired
2. token format is legal &&
token exists in redis database &&
token is expired &&
token is of specified type
"""
try:
payload = JWTCodec.decode(token)
except ExpiredSignatureError:
payload = JWTCodec.decode(token, verify_exp=False)
if not await cls.exists(payload.user_id, token):
raise InvalidToken("Login expired, please login again")
if payload.device == "mobile": # noqa
"we cat set mobile token to be valid forever"
return payload
except DecodeError as e:
raise InvalidToken("token format error") from e
else:
if not await cls.exists(payload.user_id, token):
raise InvalidToken("Login expired, please login again")
return payload
@classmethod
async def exists(cls, user_id, token):
return await TokenStorage.exist(f"{user_id}-{token}")
@classmethod
async def exists(cls, user_id, token):
return await TokenStorage.exist(f"{user_id}-{token}")
class JWTAuthenticate(AuthenticationBackend):
async def authenticate(
self, request: HTTPConnection
) -> Optional[Tuple[AuthCredentials, AuthUser]]:
if JWT_AUTH_HEADER not in request.headers:
return AuthCredentials(scopes=[]), AuthUser(user_id=None)
async def authenticate(
self, request: HTTPConnection
) -> Optional[Tuple[AuthCredentials, AuthUser]]:
if JWT_AUTH_HEADER not in request.headers:
return AuthCredentials(scopes=[]), AuthUser(user_id=None)
token = request.headers[JWT_AUTH_HEADER]
try:
payload = await _Authenticate.verify(token)
except Exception as exc:
return AuthCredentials(scopes=[], error_message=str(exc)), AuthUser(user_id=None)
if payload is None:
return AuthCredentials(scopes=[]), AuthUser(user_id=None)
token = request.headers[JWT_AUTH_HEADER]
try:
payload = await _Authenticate.verify(token)
except Exception as exc:
return AuthCredentials(scopes=[], error_message=str(exc)), AuthUser(
user_id=None
)
if not payload.device in ("pc", "mobile"):
return AuthCredentials(scopes=[]), AuthUser(user_id=None)
if payload is None:
return AuthCredentials(scopes=[]), AuthUser(user_id=None)
user = await UserStorage.get_user(payload.user_id)
if not user:
return AuthCredentials(scopes=[]), AuthUser(user_id=None)
if not payload.device in ("pc", "mobile"):
return AuthCredentials(scopes=[]), AuthUser(user_id=None)
user = await UserStorage.get_user(payload.user_id)
if not user:
return AuthCredentials(scopes=[]), AuthUser(user_id=None)
scopes = await user.get_permission()
return (
AuthCredentials(user_id=payload.user_id, scopes=scopes, logged_in=True),
user,
)
scopes = await user.get_permission()
return AuthCredentials(user_id=payload.user_id, scopes=scopes, logged_in=True), user
class EmailAuthenticate:
@staticmethod
async def get_email_token(user):
token = await Authorize.authorize(
user,
device="email",
life_span=EMAIL_TOKEN_LIFE_SPAN
)
return token
@staticmethod
async def get_email_token(user):
token = await Authorize.authorize(
user, device="email", life_span=EMAIL_TOKEN_LIFE_SPAN
)
return token
@staticmethod
async def authenticate(token):
payload = await _Authenticate.verify(token)
if payload is None:
raise InvalidToken("invalid token")
if payload.device != "email":
raise InvalidToken("invalid token")
with local_session() as session:
user = session.query(User).filter_by(id=payload.user_id).first()
if not user:
raise Exception("user does not exist")
if not user.emailConfirmed:
user.emailConfirmed = True
session.commit()
auth_token = await Authorize.authorize(user)
return (auth_token, user)
@staticmethod
async def authenticate(token):
payload = await _Authenticate.verify(token)
if payload is None:
raise InvalidToken("invalid token")
if payload.device != "email":
raise InvalidToken("invalid token")
with local_session() as session:
user = session.query(User).filter_by(id=payload.user_id).first()
if not user:
raise Exception("user does not exist")
if not user.emailConfirmed:
user.emailConfirmed = True
session.commit()
auth_token = await Authorize.authorize(user)
return (auth_token, user)
class ResetPassword:
@staticmethod
async def get_reset_token(user):
exp = datetime.utcnow() + timedelta(seconds=EMAIL_TOKEN_LIFE_SPAN)
token = JWTCodec.encode(user, exp=exp, device="pc")
await TokenStorage.save(f"{user.id}-reset-{token}", EMAIL_TOKEN_LIFE_SPAN, True)
return token
@staticmethod
async def get_reset_token(user):
exp = datetime.utcnow() + timedelta(seconds=EMAIL_TOKEN_LIFE_SPAN)
token = JWTCodec.encode(user, exp=exp, device="pc")
await TokenStorage.save(f"{user.id}-reset-{token}", EMAIL_TOKEN_LIFE_SPAN, True)
return token
@staticmethod
async def verify(token):
try:
payload = JWTCodec.decode(token)
except ExpiredSignatureError:
raise InvalidToken("Login expired, please login again")
except DecodeError as e:
raise InvalidToken("token format error") from e
else:
if not await TokenStorage.exist(f"{payload.user_id}-reset-{token}"):
raise InvalidToken("Login expired, please login again")
@staticmethod
async def verify(token):
try:
payload = JWTCodec.decode(token)
except ExpiredSignatureError:
raise InvalidToken("Login expired, please login again")
except DecodeError as e:
raise InvalidToken("token format error") from e
else:
if not await TokenStorage.exist(f"{payload.user_id}-reset-{token}"):
raise InvalidToken("Login expired, please login again")
return payload.user_id
return payload.user_id
def login_required(func):
@wraps(func)
async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
auth: AuthCredentials = info.context["request"].auth
if not auth.logged_in:
return {"error" : auth.error_message or "Please login"}
return await func(parent, info, *args, **kwargs)
return wrap
@wraps(func)
async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
auth: AuthCredentials = info.context["request"].auth
if not auth.logged_in:
return {"error": auth.error_message or "Please login"}
return await func(parent, info, *args, **kwargs)
return wrap
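
Usage sketch, not part of this diff: the login_required decorator above wraps a GraphQL resolver and short-circuits with an error dict when the request is not authenticated. The mutation field name and resolver body are hypothetical.

# hypothetical resolver guarded by login_required
from base.resolvers import mutation
from auth.authenticate import login_required

@mutation.field("createShout")          # hypothetical schema field
@login_required
async def create_shout(_, info, input):
    user_id = info.context["request"].auth.user_id   # set by JWTAuthenticate above
    return {"shout": {"createdBy": user_id, **input}}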

View File

@ -5,38 +5,41 @@ from base.redis import redis
from settings import JWT_LIFE_SPAN
from auth.validations import User
class TokenStorage:
@staticmethod
async def save(token_key, life_span, auto_delete=True):
await redis.execute("SET", token_key, "True")
if auto_delete:
expire_at = (datetime.now() + timedelta(seconds=life_span)).timestamp()
await redis.execute("EXPIREAT", token_key, int(expire_at))
@staticmethod
async def exist(token_key):
return await redis.execute("GET", token_key)
class TokenStorage:
@staticmethod
async def save(token_key, life_span, auto_delete=True):
await redis.execute("SET", token_key, "True")
if auto_delete:
expire_at = (datetime.now() + timedelta(seconds=life_span)).timestamp()
await redis.execute("EXPIREAT", token_key, int(expire_at))
@staticmethod
async def exist(token_key):
return await redis.execute("GET", token_key)
class Authorize:
@staticmethod
async def authorize(user: User, device: str = "pc", life_span = JWT_LIFE_SPAN, auto_delete=True) -> str:
exp = datetime.utcnow() + timedelta(seconds=life_span)
token = JWTCodec.encode(user, exp=exp, device=device)
await TokenStorage.save(f"{user.id}-{token}", life_span, auto_delete)
return token
@staticmethod
async def authorize(
user: User, device: str = "pc", life_span=JWT_LIFE_SPAN, auto_delete=True
) -> str:
exp = datetime.utcnow() + timedelta(seconds=life_span)
token = JWTCodec.encode(user, exp=exp, device=device)
await TokenStorage.save(f"{user.id}-{token}", life_span, auto_delete)
return token
@staticmethod
async def revoke(token: str) -> bool:
try:
payload = JWTCodec.decode(token)
except: # noqa
pass
else:
await redis.execute("DEL", f"{payload.user_id}-{token}")
return True
@staticmethod
async def revoke(token: str) -> bool:
try:
payload = JWTCodec.decode(token)
except: # noqa
pass
else:
await redis.execute("DEL", f"{payload.user_id}-{token}")
return True
@staticmethod
async def revoke_all(user: User):
tokens = await redis.execute("KEYS", f"{user.id}-*")
await redis.execute("DEL", *tokens)
@staticmethod
async def revoke_all(user: User):
tokens = await redis.execute("KEYS", f"{user.id}-*")
await redis.execute("DEL", *tokens)
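
For illustration only (module path assumed to be auth.authorize): issuing a session token and revoking it again with the class above.

# hypothetical login/logout flow on top of Authorize
from auth.authorize import Authorize    # assumed module path

async def login_logout(user):
    token = await Authorize.authorize(user, device="pc")   # stored in redis as "<user.id>-<token>"
    # ... later ...
    await Authorize.revoke(token)      # drop this session only
    await Authorize.revoke_all(user)   # or drop every session of the user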

View File

@ -2,71 +2,83 @@ import requests
from starlette.responses import RedirectResponse
from auth.authenticate import EmailAuthenticate, ResetPassword
from base.orm import local_session
from settings import BACKEND_URL, MAILGUN_API_KEY, MAILGUN_DOMAIN, RESET_PWD_URL, \
CONFIRM_EMAIL_URL, ERROR_URL_ON_FRONTEND
from settings import (
BACKEND_URL,
MAILGUN_API_KEY,
MAILGUN_DOMAIN,
RESET_PWD_URL,
CONFIRM_EMAIL_URL,
ERROR_URL_ON_FRONTEND,
)
MAILGUN_API_URL = "https://api.mailgun.net/v3/%s/messages" % (MAILGUN_DOMAIN)
MAILGUN_FROM = "discours.io <noreply@%s>" % (MAILGUN_DOMAIN)
AUTH_URL = "%s/email_authorize" % (BACKEND_URL)
email_templates = {"confirm_email" : "", "auth_email" : "", "reset_password_email" : ""}
email_templates = {"confirm_email": "", "auth_email": "", "reset_password_email": ""}
def load_email_templates():
for name in email_templates:
filename = "auth/templates/%s.tmpl" % name
with open(filename) as f:
email_templates[name] = f.read()
print("[auth.email] templates loaded")
for name in email_templates:
filename = "auth/templates/%s.tmpl" % name
with open(filename) as f:
email_templates[name] = f.read()
print("[auth.email] templates loaded")
async def send_confirm_email(user):
text = email_templates["confirm_email"]
token = await EmailAuthenticate.get_email_token(user)
await send_email(user, AUTH_URL, text, token)
text = email_templates["confirm_email"]
token = await EmailAuthenticate.get_email_token(user)
await send_email(user, AUTH_URL, text, token)
async def send_auth_email(user):
text = email_templates["auth_email"]
token = await EmailAuthenticate.get_email_token(user)
await send_email(user, AUTH_URL, text, token)
text = email_templates["auth_email"]
token = await EmailAuthenticate.get_email_token(user)
await send_email(user, AUTH_URL, text, token)
async def send_reset_password_email(user):
text = email_templates["reset_password_email"]
token = await ResetPassword.get_reset_token(user)
await send_email(user, RESET_PWD_URL, text, token)
text = email_templates["reset_password_email"]
token = await ResetPassword.get_reset_token(user)
await send_email(user, RESET_PWD_URL, text, token)
async def send_email(user, url, text, token):
to = "%s <%s>" % (user.username, user.email)
url_with_token = "%s?token=%s" % (url, token)
text = text % (url_with_token)
response = requests.post(
MAILGUN_API_URL,
auth = ("api", MAILGUN_API_KEY),
data = {
"from": MAILGUN_FROM,
"to": to,
"subject": "authorize log in",
"html": text
}
)
response.raise_for_status()
to = "%s <%s>" % (user.username, user.email)
url_with_token = "%s?token=%s" % (url, token)
text = text % (url_with_token)
response = requests.post(
MAILGUN_API_URL,
auth=("api", MAILGUN_API_KEY),
data={
"from": MAILGUN_FROM,
"to": to,
"subject": "authorize log in",
"html": text,
},
)
response.raise_for_status()
async def email_authorize(request):
token = request.query_params.get('token')
if not token:
url_with_error = "%s?error=%s" % (ERROR_URL_ON_FRONTEND, "INVALID_TOKEN")
return RedirectResponse(url = url_with_error)
token = request.query_params.get("token")
if not token:
url_with_error = "%s?error=%s" % (ERROR_URL_ON_FRONTEND, "INVALID_TOKEN")
return RedirectResponse(url=url_with_error)
try:
auth_token, user = await EmailAuthenticate.authenticate(token)
except:
url_with_error = "%s?error=%s" % (ERROR_URL_ON_FRONTEND, "INVALID_TOKEN")
return RedirectResponse(url = url_with_error)
if not user.emailConfirmed:
with local_session() as session:
user.emailConfirmed = True
session.commit()
try:
auth_token, user = await EmailAuthenticate.authenticate(token)
except:
url_with_error = "%s?error=%s" % (ERROR_URL_ON_FRONTEND, "INVALID_TOKEN")
return RedirectResponse(url=url_with_error)
response = RedirectResponse(url = CONFIRM_EMAIL_URL)
response.set_cookie("token", auth_token)
return response
if not user.emailConfirmed:
with local_session() as session:
user.emailConfirmed = True
session.commit()
response = RedirectResponse(url=CONFIRM_EMAIL_URL)
response.set_cookie("token", auth_token)
return response
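
Flow sketch, not part of this diff: after registration the helpers above send a tokenized link that lands on /email_authorize, which sets the token cookie and redirects. The surrounding handlers are hypothetical.

# hypothetical call sites for the mail helpers above
from auth.email import send_confirm_email, send_auth_email

async def after_signup(user):
    await send_confirm_email(user)   # confirmation link -> AUTH_URL (/email_authorize)

async def passwordless_login(user):
    await send_auth_email(user)      # one-time login link, same endpoint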

View File

@ -8,26 +8,32 @@ from sqlalchemy import or_
class Identity:
@staticmethod
def identity(orm_user: OrmUser, password: str) -> User:
user = User(**orm_user.dict())
if user.password is None:
raise InvalidPassword("Wrong user password")
if not Password.verify(password, user.password):
raise InvalidPassword("Wrong user password")
return user
@staticmethod
def identity_oauth(input) -> User:
with local_session() as session:
user = session.query(OrmUser).filter(
or_(OrmUser.oauth == input["oauth"], OrmUser.email == input["email"])
).first()
if not user:
user = OrmUser.create(**input)
if not user.oauth:
user.oauth = input["oauth"]
session.commit()
@staticmethod
def identity(orm_user: OrmUser, password: str) -> User:
user = User(**orm_user.dict())
if user.password is None:
raise InvalidPassword("Wrong user password")
if not Password.verify(password, user.password):
raise InvalidPassword("Wrong user password")
return user
user = User(**user.dict())
return user
@staticmethod
def identity_oauth(input) -> User:
with local_session() as session:
user = (
session.query(OrmUser)
.filter(
or_(
OrmUser.oauth == input["oauth"], OrmUser.email == input["email"]
)
)
.first()
)
if not user:
user = OrmUser.create(**input)
if not user.oauth:
user.oauth = input["oauth"]
session.commit()
user = User(**user.dict())
return user
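
Sketch of a password login built on Identity.identity; the import paths for the ORM user and the Authorize class are assumptions.

# hypothetical password login using Identity
from auth.identity import Identity       # assumed module path
from auth.authorize import Authorize     # assumed module path
from base.orm import local_session
from orm import User as OrmUser          # assumed ORM import

async def password_login(email: str, password: str) -> str:
    with local_session() as session:
        orm_user = session.query(OrmUser).filter(OrmUser.email == email).first()
    if not orm_user:
        raise Exception("user does not exist")
    user = Identity.identity(orm_user, password)        # raises InvalidPassword on mismatch
    return await Authorize.authorize(user, device="pc")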

View File

@ -5,17 +5,22 @@ from auth.validations import PayLoad, User
class JWTCodec:
@staticmethod
def encode(user: User, exp: datetime, device: str = "pc") -> str:
payload = {"user_id": user.id, "device": device, "exp": exp, "iat": datetime.utcnow()}
return jwt.encode(payload, JWT_SECRET_KEY, JWT_ALGORITHM)
@staticmethod
def encode(user: User, exp: datetime, device: str = "pc") -> str:
payload = {
"user_id": user.id,
"device": device,
"exp": exp,
"iat": datetime.utcnow(),
}
return jwt.encode(payload, JWT_SECRET_KEY, JWT_ALGORITHM)
@staticmethod
def decode(token: str, verify_exp: bool = True) -> PayLoad:
payload = jwt.decode(
token,
key=JWT_SECRET_KEY,
options={"verify_exp": verify_exp},
algorithms=[JWT_ALGORITHM],
)
return PayLoad(**payload)
@staticmethod
def decode(token: str, verify_exp: bool = True) -> PayLoad:
payload = jwt.decode(
token,
key=JWT_SECRET_KEY,
options={"verify_exp": verify_exp},
algorithms=[JWT_ALGORITHM],
)
return PayLoad(**payload)
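
Round-trip sketch for the codec above; the user stand-in only needs an id attribute, and the module path is assumed.

# hypothetical encode/decode round trip through JWTCodec
from datetime import datetime, timedelta
from types import SimpleNamespace
from auth.jwtcodec import JWTCodec       # assumed module path

user = SimpleNamespace(id=1)             # stand-in for an ORM user
token = JWTCodec.encode(user, exp=datetime.utcnow() + timedelta(hours=1), device="pc")
payload = JWTCodec.decode(token)         # -> PayLoad with user_id, device, exp, iat
assert payload.user_id == 1 and payload.device == "pc"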

View File

@ -8,79 +8,84 @@ from settings import OAUTH_CLIENTS, BACKEND_URL, OAUTH_CALLBACK_URL
oauth = OAuth()
oauth.register(
name='facebook',
client_id=OAUTH_CLIENTS["FACEBOOK"]["id"],
client_secret=OAUTH_CLIENTS["FACEBOOK"]["key"],
access_token_url='https://graph.facebook.com/v11.0/oauth/access_token',
access_token_params=None,
authorize_url='https://www.facebook.com/v11.0/dialog/oauth',
authorize_params=None,
api_base_url='https://graph.facebook.com/',
client_kwargs={'scope': 'public_profile email'},
name="facebook",
client_id=OAUTH_CLIENTS["FACEBOOK"]["id"],
client_secret=OAUTH_CLIENTS["FACEBOOK"]["key"],
access_token_url="https://graph.facebook.com/v11.0/oauth/access_token",
access_token_params=None,
authorize_url="https://www.facebook.com/v11.0/dialog/oauth",
authorize_params=None,
api_base_url="https://graph.facebook.com/",
client_kwargs={"scope": "public_profile email"},
)
oauth.register(
name='github',
client_id=OAUTH_CLIENTS["GITHUB"]["id"],
client_secret=OAUTH_CLIENTS["GITHUB"]["key"],
access_token_url='https://github.com/login/oauth/access_token',
access_token_params=None,
authorize_url='https://github.com/login/oauth/authorize',
authorize_params=None,
api_base_url='https://api.github.com/',
client_kwargs={'scope': 'user:email'},
name="github",
client_id=OAUTH_CLIENTS["GITHUB"]["id"],
client_secret=OAUTH_CLIENTS["GITHUB"]["key"],
access_token_url="https://github.com/login/oauth/access_token",
access_token_params=None,
authorize_url="https://github.com/login/oauth/authorize",
authorize_params=None,
api_base_url="https://api.github.com/",
client_kwargs={"scope": "user:email"},
)
oauth.register(
name='google',
client_id=OAUTH_CLIENTS["GOOGLE"]["id"],
client_secret=OAUTH_CLIENTS["GOOGLE"]["key"],
server_metadata_url="https://accounts.google.com/.well-known/openid-configuration",
client_kwargs={'scope': 'openid email profile'}
name="google",
client_id=OAUTH_CLIENTS["GOOGLE"]["id"],
client_secret=OAUTH_CLIENTS["GOOGLE"]["key"],
server_metadata_url="https://accounts.google.com/.well-known/openid-configuration",
client_kwargs={"scope": "openid email profile"},
)
async def google_profile(client, request, token):
profile = await client.parse_id_token(request, token)
profile["id"] = profile["sub"]
return profile
profile = await client.parse_id_token(request, token)
profile["id"] = profile["sub"]
return profile
async def facebook_profile(client, request, token):
profile = await client.get('me?fields=name,id,email', token=token)
return profile.json()
profile = await client.get("me?fields=name,id,email", token=token)
return profile.json()
async def github_profile(client, request, token):
profile = await client.get('user', token=token)
return profile.json()
profile = await client.get("user", token=token)
return profile.json()
profile_callbacks = {
"google" : google_profile,
"facebook" : facebook_profile,
"github" : github_profile
"google": google_profile,
"facebook": facebook_profile,
"github": github_profile,
}
async def oauth_login(request):
provider = request.path_params['provider']
request.session['provider'] = provider
client = oauth.create_client(provider)
redirect_uri = "%s/%s" % (BACKEND_URL, 'oauth_authorize')
return await client.authorize_redirect(request, redirect_uri)
provider = request.path_params["provider"]
request.session["provider"] = provider
client = oauth.create_client(provider)
redirect_uri = "%s/%s" % (BACKEND_URL, "oauth_authorize")
return await client.authorize_redirect(request, redirect_uri)
async def oauth_authorize(request):
provider = request.session['provider']
client = oauth.create_client(provider)
token = await client.authorize_access_token(request)
get_profile = profile_callbacks[provider]
profile = await get_profile(client, request, token)
user_oauth_info = "%s:%s" % (provider, profile["id"])
user_input = {
"oauth" : user_oauth_info,
"email" : profile["email"],
"username" : profile["name"]
}
user = Identity.identity_oauth(user_input)
token = await Authorize.authorize(user, device="pc")
provider = request.session["provider"]
client = oauth.create_client(provider)
token = await client.authorize_access_token(request)
get_profile = profile_callbacks[provider]
profile = await get_profile(client, request, token)
user_oauth_info = "%s:%s" % (provider, profile["id"])
user_input = {
"oauth": user_oauth_info,
"email": profile["email"],
"username": profile["name"],
}
user = Identity.identity_oauth(user_input)
token = await Authorize.authorize(user, device="pc")
response = RedirectResponse(url = OAUTH_CALLBACK_URL)
response.set_cookie("token", token)
return response
response = RedirectResponse(url=OAUTH_CALLBACK_URL)
response.set_cookie("token", token)
return response

View File

@ -5,50 +5,52 @@ from sqlalchemy.orm import Session
from sqlalchemy.sql.schema import Table
from settings import DB_URL
if DB_URL.startswith('sqlite'):
engine = create_engine(DB_URL)
if DB_URL.startswith("sqlite"):
engine = create_engine(DB_URL)
else:
engine = create_engine(DB_URL, convert_unicode=True, echo=False, \
pool_size=10, max_overflow=20)
engine = create_engine(
DB_URL, convert_unicode=True, echo=False, pool_size=10, max_overflow=20
)
T = TypeVar("T")
REGISTRY: Dict[str, type] = {}
def local_session():
return Session(bind=engine, expire_on_commit=False)
return Session(bind=engine, expire_on_commit=False)
class Base(declarative_base()):
__table__: Table
__tablename__: str
__new__: Callable
__init__: Callable
__table__: Table
__tablename__: str
__new__: Callable
__init__: Callable
__abstract__: bool = True
__table_args__ = {"extend_existing": True}
id: int = Column(Integer, primary_key=True)
__abstract__: bool = True
__table_args__ = {"extend_existing": True}
id: int = Column(Integer, primary_key=True)
def __init_subclass__(cls, **kwargs):
REGISTRY[cls.__name__] = cls
def __init_subclass__(cls, **kwargs):
REGISTRY[cls.__name__] = cls
@classmethod
def create(cls: Generic[T], **kwargs) -> Generic[T]:
instance = cls(**kwargs)
return instance.save()
@classmethod
def create(cls: Generic[T], **kwargs) -> Generic[T]:
instance = cls(**kwargs)
return instance.save()
def save(self) -> Generic[T]:
with local_session() as session:
session.add(self)
session.commit()
return self
def save(self) -> Generic[T]:
with local_session() as session:
session.add(self)
session.commit()
return self
def update(self, input):
column_names = self.__table__.columns.keys()
for (name, value) in input.items():
if name in column_names:
setattr(self, name, value)
def update(self, input):
column_names = self.__table__.columns.keys()
for (name, value) in input.items():
if name in column_names:
setattr(self, name, value)
def dict(self) -> Dict[str, Any]:
column_names = self.__table__.columns.keys()
return {c: getattr(self, c) for c in column_names}
def dict(self) -> Dict[str, Any]:
column_names = self.__table__.columns.keys()
return {c: getattr(self, c) for c in column_names}
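
Model sketch for the Base above; the table name and column are hypothetical, and it assumes the table already exists (or metadata has been created).

# hypothetical model built on base.orm.Base
from sqlalchemy import Column, String
from base.orm import Base

class Tag(Base):
    __tablename__ = "tag"                       # hypothetical table
    title: str = Column(String, nullable=False)

tag = Tag.create(title="linting")               # save() commits in its own session
print(tag.dict())                               # {"id": ..., "title": "linting"}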

View File

@ -1,34 +1,34 @@
import aioredis
from settings import REDIS_URL
class Redis:
def __init__(self, uri=REDIS_URL):
self._uri: str = uri
self._instance = None
def __init__(self, uri=REDIS_URL):
self._uri: str = uri
self._instance = None
async def connect(self):
if self._instance is not None:
return
self._instance = aioredis.from_url(self._uri, encoding="utf-8")
async def connect(self):
if self._instance is not None:
return
self._instance = aioredis.from_url(self._uri, encoding="utf-8")
async def disconnect(self):
if self._instance is None:
return
self._instance.close()
await self._instance.wait_closed()
self._instance = None
async def disconnect(self):
if self._instance is None:
return
self._instance.close()
await self._instance.wait_closed()
self._instance = None
async def execute(self, command, *args, **kwargs):
return await self._instance.execute_command(command, *args, **kwargs)
async def execute(self, command, *args, **kwargs):
return await self._instance.execute_command(command, *args, **kwargs)
async def lrange(self, key, start, stop):
return await self._instance.lrange(key, start, stop)
async def lrange(self, key, start, stop):
return await self._instance.lrange(key, start, stop)
async def mget(self, key, *keys):
return await self._instance.mget(key, *keys)
async def mget(self, key, *keys):
return await self._instance.mget(key, *keys)
redis = Redis()
__all__ = ['redis']
__all__ = ["redis"]
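
Usage sketch for the wrapper above, mirroring how TokenStorage calls it; key, value and TTL are examples.

# hypothetical use of the shared Redis wrapper
from base.redis import redis

async def remember(key: str, value: str, ttl: int):
    await redis.connect()                      # no-op if already connected
    await redis.execute("SET", key, value)
    await redis.execute("EXPIRE", key, ttl)
    return await redis.execute("GET", key)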

View File

@ -3,9 +3,11 @@ from ariadne import MutationType, QueryType, SubscriptionType, ScalarType
datetime_scalar = ScalarType("DateTime")
@datetime_scalar.serializer
def serialize_datetime(value):
return value.isoformat()
return value.isoformat()
query = QueryType()
mutation = MutationType()
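
Registration sketch against the shared QueryType above; the field name is hypothetical, and the return value is rendered by the DateTime scalar only if the schema types the field as DateTime.

# hypothetical query resolver registered on base.resolvers.query
from datetime import datetime
from base.resolvers import query

@query.field("serverTime")       # hypothetical schema field
def resolve_server_time(_, info):
    return datetime.now()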

main.py (107 lines changed)
View File

@ -1,49 +1,58 @@
from importlib import import_module
from ariadne import load_schema_from_path, make_executable_schema
from ariadne.asgi import GraphQL
from starlette.applications import Starlette
from starlette.middleware import Middleware
from starlette.middleware.authentication import AuthenticationMiddleware
from starlette.middleware.sessions import SessionMiddleware
from starlette.routing import Route
from auth.authenticate import JWTAuthenticate
from auth.oauth import oauth_login, oauth_authorize
from auth.email import email_authorize
from base.redis import redis
from base.resolvers import resolvers
from resolvers.zine import ShoutsCache
from services.stat.reacted import ReactedStorage
from services.stat.viewed import ViewedStorage
from services.zine.gittask import GitTask
from services.stat.topicstat import TopicStat
from services.zine.shoutauthor import ShoutAuthorStorage
import asyncio
import_module('resolvers')
schema = make_executable_schema(load_schema_from_path("schema.graphql"), resolvers)
middleware = [
Middleware(AuthenticationMiddleware, backend=JWTAuthenticate()),
Middleware(SessionMiddleware, secret_key="!secret")
]
async def start_up():
await redis.connect()
viewed_storage_task = asyncio.create_task(ViewedStorage.worker())
# reacted_storage_task = asyncio.create_task(ReactedStorage.worker())
shouts_cache_task = asyncio.create_task(ShoutsCache.worker())
shout_author_task = asyncio.create_task(ShoutAuthorStorage.worker())
topic_stat_task = asyncio.create_task(TopicStat.worker())
git_task = asyncio.create_task(GitTask.git_task_worker())
async def shutdown():
await redis.disconnect()
routes = [
Route("/oauth/{provider}", endpoint=oauth_login),
Route("/oauth_authorize", endpoint=oauth_authorize),
Route("/email_authorize", endpoint=email_authorize)
]
app = Starlette(debug=True, on_startup=[start_up], on_shutdown=[shutdown], middleware=middleware, routes=routes)
app.mount("/", GraphQL(schema, debug=True))
from importlib import import_module
from ariadne import load_schema_from_path, make_executable_schema
from ariadne.asgi import GraphQL
from starlette.applications import Starlette
from starlette.middleware import Middleware
from starlette.middleware.authentication import AuthenticationMiddleware
from starlette.middleware.sessions import SessionMiddleware
from starlette.routing import Route
from auth.authenticate import JWTAuthenticate
from auth.oauth import oauth_login, oauth_authorize
from auth.email import email_authorize
from base.redis import redis
from base.resolvers import resolvers
from resolvers.zine import ShoutsCache
from services.stat.reacted import ReactedStorage
from services.stat.viewed import ViewedStorage
from services.zine.gittask import GitTask
from services.stat.topicstat import TopicStat
from services.zine.shoutauthor import ShoutAuthorStorage
import asyncio
import_module("resolvers")
schema = make_executable_schema(load_schema_from_path("schema.graphql"), resolvers) # type: ignore
middleware = [
Middleware(AuthenticationMiddleware, backend=JWTAuthenticate()),
Middleware(SessionMiddleware, secret_key="!secret"),
]
async def start_up():
await redis.connect()
viewed_storage_task = asyncio.create_task(ViewedStorage.worker())
# reacted_storage_task = asyncio.create_task(ReactedStorage.worker())
shouts_cache_task = asyncio.create_task(ShoutsCache.worker())
shout_author_task = asyncio.create_task(ShoutAuthorStorage.worker())
topic_stat_task = asyncio.create_task(TopicStat.worker())
git_task = asyncio.create_task(GitTask.git_task_worker())
async def shutdown():
await redis.disconnect()
routes = [
Route("/oauth/{provider}", endpoint=oauth_login),
Route("/oauth_authorize", endpoint=oauth_authorize),
Route("/email_authorize", endpoint=email_authorize),
]
app = Starlette(
debug=True,
on_startup=[start_up],
on_shutdown=[shutdown],
middleware=middleware,
routes=routes,
)
app.mount("/", GraphQL(schema, debug=True))
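
Not part of the diff: the composed Starlette app is typically served by an ASGI server such as uvicorn; host and port below are examples.

# hypothetical local entry point for the app assembled above
import uvicorn

if __name__ == "__main__":
    uvicorn.run("main:app", host="0.0.0.0", port=8080, reload=True)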

View File

@ -1,4 +1,4 @@
''' cmd managed migration '''
""" cmd managed migration """
import csv
import asyncio
from datetime import datetime
@ -8,6 +8,7 @@ import sys
import os
import bs4
import numpy as np
# from export import export_email_subscriptions
from .export import export_mdx, export_slug
from orm.reaction import Reaction
@ -21,293 +22,308 @@ from .tables.comments import migrate_2stage as migrateComment_2stage
from settings import DB_URL
TODAY = datetime.strftime(datetime.now(), '%Y%m%d')
TODAY = datetime.strftime(datetime.now(), "%Y%m%d")
OLD_DATE = '2016-03-05 22:22:00.350000'
OLD_DATE = "2016-03-05 22:22:00.350000"
def users_handle(storage):
''' migrating users first '''
counter = 0
id_map = {}
print('[migration] migrating %d users' % (len(storage['users']['data'])))
for entry in storage['users']['data']:
oid = entry['_id']
user = migrateUser(entry)
storage['users']['by_oid'][oid] = user # full
del user['password']
del user['notifications']
del user['emailConfirmed']
del user['username']
del user['email']
storage['users']['by_slug'][user['slug']] = user # public
id_map[user['oid']] = user['slug']
counter += 1
ce = 0
for entry in storage['users']['data']:
ce += migrateUser_2stage(entry, id_map)
return storage
"""migrating users first"""
counter = 0
id_map = {}
print("[migration] migrating %d users" % (len(storage["users"]["data"])))
for entry in storage["users"]["data"]:
oid = entry["_id"]
user = migrateUser(entry)
storage["users"]["by_oid"][oid] = user # full
del user["password"]
del user["notifications"]
del user["emailConfirmed"]
del user["username"]
del user["email"]
storage["users"]["by_slug"][user["slug"]] = user # public
id_map[user["oid"]] = user["slug"]
counter += 1
ce = 0
for entry in storage["users"]["data"]:
ce += migrateUser_2stage(entry, id_map)
return storage
def topics_handle(storage):
''' topics from categories and tags '''
counter = 0
for t in (storage['topics']['tags'] + storage['topics']['cats']):
if t['slug'] in storage['replacements']:
t['slug'] = storage['replacements'][t['slug']]
topic = migrateTopic(t)
storage['topics']['by_oid'][t['_id']] = topic
storage['topics']['by_slug'][t['slug']] = topic
counter += 1
else:
print('[migration] topic ' + t['slug'] + ' ignored')
for oldslug, newslug in storage['replacements'].items():
if oldslug != newslug and oldslug in storage['topics']['by_slug']:
oid = storage['topics']['by_slug'][oldslug]['_id']
del storage['topics']['by_slug'][oldslug]
storage['topics']['by_oid'][oid] = storage['topics']['by_slug'][newslug]
print('[migration] ' + str(counter) + ' topics migrated')
print('[migration] ' + str(len(storage['topics']
['by_oid'].values())) + ' topics by oid')
print('[migration] ' + str(len(storage['topics']
['by_slug'].values())) + ' topics by slug')
# raise Exception
return storage
"""topics from categories and tags"""
counter = 0
for t in storage["topics"]["tags"] + storage["topics"]["cats"]:
if t["slug"] in storage["replacements"]:
t["slug"] = storage["replacements"][t["slug"]]
topic = migrateTopic(t)
storage["topics"]["by_oid"][t["_id"]] = topic
storage["topics"]["by_slug"][t["slug"]] = topic
counter += 1
else:
print("[migration] topic " + t["slug"] + " ignored")
for oldslug, newslug in storage["replacements"].items():
if oldslug != newslug and oldslug in storage["topics"]["by_slug"]:
oid = storage["topics"]["by_slug"][oldslug]["_id"]
del storage["topics"]["by_slug"][oldslug]
storage["topics"]["by_oid"][oid] = storage["topics"]["by_slug"][newslug]
print("[migration] " + str(counter) + " topics migrated")
print(
"[migration] "
+ str(len(storage["topics"]["by_oid"].values()))
+ " topics by oid"
)
print(
"[migration] "
+ str(len(storage["topics"]["by_slug"].values()))
+ " topics by slug"
)
# raise Exception
return storage
async def shouts_handle(storage, args):
''' migrating content items one by one '''
counter = 0
discours_author = 0
pub_counter = 0
topics_dataset_bodies = []
topics_dataset_tlist = []
for entry in storage['shouts']['data']:
# slug
slug = get_shout_slug(entry)
"""migrating content items one by one"""
counter = 0
discours_author = 0
pub_counter = 0
topics_dataset_bodies = []
topics_dataset_tlist = []
for entry in storage["shouts"]["data"]:
# slug
slug = get_shout_slug(entry)
# single slug mode
if '-' in args and slug not in args: continue
# single slug mode
if "-" in args and slug not in args:
continue
# migrate
shout = await migrateShout(entry, storage)
storage['shouts']['by_oid'][entry['_id']] = shout
storage['shouts']['by_slug'][shout['slug']] = shout
# shouts.topics
if not shout['topics']: print('[migration] no topics!')
# migrate
shout = await migrateShout(entry, storage)
storage["shouts"]["by_oid"][entry["_id"]] = shout
storage["shouts"]["by_slug"][shout["slug"]] = shout
# shouts.topics
if not shout["topics"]:
print("[migration] no topics!")
# with author
author = shout['authors'][0].slug
if author == 'discours': discours_author += 1
# print('[migration] ' + shout['slug'] + ' with author ' + author)
# with author
author = shout["authors"][0].slug
if author == "discours":
discours_author += 1
# print('[migration] ' + shout['slug'] + ' with author ' + author)
if entry.get('published'):
if 'mdx' in args: export_mdx(shout)
pub_counter += 1
if entry.get("published"):
if "mdx" in args:
export_mdx(shout)
pub_counter += 1
# print main counter
counter += 1
line = str(counter+1) + ': ' + shout['slug'] + " @" + author
print(line)
b = bs4.BeautifulSoup(shout['body'], 'html.parser')
texts = []
texts.append(shout['title'].lower().replace(r'[^а-яА-Яa-zA-Z]', ''))
texts = b.findAll(text=True)
topics_dataset_bodies.append(u" ".join([x.strip().lower() for x in texts]))
topics_dataset_tlist.append(shout['topics'])
# np.savetxt('topics_dataset.csv', (topics_dataset_bodies, topics_dataset_tlist), delimiter=',', fmt='%s')
# print main counter
counter += 1
line = str(counter + 1) + ": " + shout["slug"] + " @" + author
print(line)
b = bs4.BeautifulSoup(shout["body"], "html.parser")
texts = []
texts.append(shout["title"].lower().replace(r"[^а-яА-Яa-zA-Z]", ""))
texts = b.findAll(text=True)
topics_dataset_bodies.append(" ".join([x.strip().lower() for x in texts]))
topics_dataset_tlist.append(shout["topics"])
print('[migration] ' + str(counter) + ' content items were migrated')
print('[migration] ' + str(pub_counter) + ' have been published')
print('[migration] ' + str(discours_author) + ' authored by @discours')
return storage
# np.savetxt('topics_dataset.csv', (topics_dataset_bodies, topics_dataset_tlist), delimiter=',', fmt='%s')
print("[migration] " + str(counter) + " content items were migrated")
print("[migration] " + str(pub_counter) + " have been published")
print("[migration] " + str(discours_author) + " authored by @discours")
return storage
async def comments_handle(storage):
id_map = {}
ignored_counter = 0
missed_shouts = {}
for oldcomment in storage['reactions']['data']:
if not oldcomment.get('deleted'):
reaction = await migrateComment(oldcomment, storage)
if type(reaction) == str:
missed_shouts[reaction] = oldcomment
elif type(reaction) == Reaction:
reaction = reaction.dict()
id = reaction['id']
oid = reaction['oid']
id_map[oid] = id
else:
ignored_counter += 1
id_map = {}
ignored_counter = 0
missed_shouts = {}
for oldcomment in storage["reactions"]["data"]:
if not oldcomment.get("deleted"):
reaction = await migrateComment(oldcomment, storage)
if type(reaction) == str:
missed_shouts[reaction] = oldcomment
elif type(reaction) == Reaction:
reaction = reaction.dict()
id = reaction["id"]
oid = reaction["oid"]
id_map[oid] = id
else:
ignored_counter += 1
for reaction in storage['reactions']['data']: migrateComment_2stage(
reaction, id_map)
print('[migration] ' + str(len(id_map)) + ' comments migrated')
print('[migration] ' + str(ignored_counter) + ' comments ignored')
print('[migration] ' + str(len(missed_shouts.keys())) +
' commented shouts missed')
missed_counter = 0
for missed in missed_shouts.values():
missed_counter += len(missed)
print('[migration] ' + str(missed_counter) + ' comments dropped')
return storage
for reaction in storage["reactions"]["data"]:
migrateComment_2stage(reaction, id_map)
print("[migration] " + str(len(id_map)) + " comments migrated")
print("[migration] " + str(ignored_counter) + " comments ignored")
print("[migration] " + str(len(missed_shouts.keys())) + " commented shouts missed")
missed_counter = 0
for missed in missed_shouts.values():
missed_counter += len(missed)
print("[migration] " + str(missed_counter) + " comments dropped")
return storage
def bson_handle():
# decode bson # preparing data
from migration import bson2json
bson2json.json_tables()
# decode bson # preparing data
from migration import bson2json
bson2json.json_tables()
def export_one(slug, storage, args = None):
topics_handle(storage)
users_handle(storage)
shouts_handle(storage, args)
export_slug(slug, storage)
def export_one(slug, storage, args=None):
topics_handle(storage)
users_handle(storage)
shouts_handle(storage, args)
export_slug(slug, storage)
async def all_handle(storage, args):
print('[migration] handle everything')
users_handle(storage)
topics_handle(storage)
await shouts_handle(storage, args)
await comments_handle(storage)
# export_email_subscriptions()
print('[migration] done!')
print("[migration] handle everything")
users_handle(storage)
topics_handle(storage)
await shouts_handle(storage, args)
await comments_handle(storage)
# export_email_subscriptions()
print("[migration] done!")
def data_load():
storage = {
'content_items': {
'by_oid': {},
'by_slug': {},
},
'shouts': {
'by_oid': {},
'by_slug': {},
'data': []
},
'reactions': {
'by_oid': {},
'by_slug': {},
'by_content': {},
'data': []
},
'topics': {
'by_oid': {},
'by_slug': {},
'cats': [],
'tags': [],
},
'users': {
'by_oid': {},
'by_slug': {},
'data': []
},
'replacements': json.loads(open('migration/tables/replacements.json').read())
}
users_data = []
tags_data = []
cats_data = []
comments_data = []
content_data = []
try:
users_data = json.loads(open('migration/data/users.json').read())
print('[migration.load] ' + str(len(users_data)) + ' users ')
tags_data = json.loads(open('migration/data/tags.json').read())
storage['topics']['tags'] = tags_data
print('[migration.load] ' + str(len(tags_data)) + ' tags ')
cats_data = json.loads(
open('migration/data/content_item_categories.json').read())
storage['topics']['cats'] = cats_data
print('[migration.load] ' + str(len(cats_data)) + ' cats ')
comments_data = json.loads(open('migration/data/comments.json').read())
storage['reactions']['data'] = comments_data
print('[migration.load] ' + str(len(comments_data)) + ' comments ')
content_data = json.loads(open('migration/data/content_items.json').read())
storage['shouts']['data'] = content_data
print('[migration.load] ' + str(len(content_data)) + ' content items ')
# fill out storage
for x in users_data:
storage['users']['by_oid'][x['_id']] = x
# storage['users']['by_slug'][x['slug']] = x
# no user.slug yet
print('[migration.load] ' + str(len(storage['users']
['by_oid'].keys())) + ' users by oid')
for x in tags_data:
storage['topics']['by_oid'][x['_id']] = x
storage['topics']['by_slug'][x['slug']] = x
for x in cats_data:
storage['topics']['by_oid'][x['_id']] = x
storage['topics']['by_slug'][x['slug']] = x
print('[migration.load] ' + str(len(storage['topics']
['by_slug'].keys())) + ' topics by slug')
for item in content_data:
slug = get_shout_slug(item)
storage['content_items']['by_slug'][slug] = item
storage['content_items']['by_oid'][item['_id']] = item
print('[migration.load] ' + str(len(content_data)) + ' content items')
for x in comments_data:
storage['reactions']['by_oid'][x['_id']] = x
cid = x['contentItem']
storage['reactions']['by_content'][cid] = x
ci = storage['content_items']['by_oid'].get(cid, {})
if 'slug' in ci: storage['reactions']['by_slug'][ci['slug']] = x
print('[migration.load] ' + str(len(storage['reactions']
['by_content'].keys())) + ' with comments')
except Exception as e: raise e
storage['users']['data'] = users_data
storage['topics']['tags'] = tags_data
storage['topics']['cats'] = cats_data
storage['shouts']['data'] = content_data
storage['reactions']['data'] = comments_data
return storage
storage = {
"content_items": {
"by_oid": {},
"by_slug": {},
},
"shouts": {"by_oid": {}, "by_slug": {}, "data": []},
"reactions": {"by_oid": {}, "by_slug": {}, "by_content": {}, "data": []},
"topics": {
"by_oid": {},
"by_slug": {},
"cats": [],
"tags": [],
},
"users": {"by_oid": {}, "by_slug": {}, "data": []},
"replacements": json.loads(open("migration/tables/replacements.json").read()),
}
users_data = []
tags_data = []
cats_data = []
comments_data = []
content_data = []
try:
users_data = json.loads(open("migration/data/users.json").read())
print("[migration.load] " + str(len(users_data)) + " users ")
tags_data = json.loads(open("migration/data/tags.json").read())
storage["topics"]["tags"] = tags_data
print("[migration.load] " + str(len(tags_data)) + " tags ")
cats_data = json.loads(
open("migration/data/content_item_categories.json").read()
)
storage["topics"]["cats"] = cats_data
print("[migration.load] " + str(len(cats_data)) + " cats ")
comments_data = json.loads(open("migration/data/comments.json").read())
storage["reactions"]["data"] = comments_data
print("[migration.load] " + str(len(comments_data)) + " comments ")
content_data = json.loads(open("migration/data/content_items.json").read())
storage["shouts"]["data"] = content_data
print("[migration.load] " + str(len(content_data)) + " content items ")
# fill out storage
for x in users_data:
storage["users"]["by_oid"][x["_id"]] = x
# storage['users']['by_slug'][x['slug']] = x
# no user.slug yet
print(
"[migration.load] "
+ str(len(storage["users"]["by_oid"].keys()))
+ " users by oid"
)
for x in tags_data:
storage["topics"]["by_oid"][x["_id"]] = x
storage["topics"]["by_slug"][x["slug"]] = x
for x in cats_data:
storage["topics"]["by_oid"][x["_id"]] = x
storage["topics"]["by_slug"][x["slug"]] = x
print(
"[migration.load] "
+ str(len(storage["topics"]["by_slug"].keys()))
+ " topics by slug"
)
for item in content_data:
slug = get_shout_slug(item)
storage["content_items"]["by_slug"][slug] = item
storage["content_items"]["by_oid"][item["_id"]] = item
print("[migration.load] " + str(len(content_data)) + " content items")
for x in comments_data:
storage["reactions"]["by_oid"][x["_id"]] = x
cid = x["contentItem"]
storage["reactions"]["by_content"][cid] = x
ci = storage["content_items"]["by_oid"].get(cid, {})
if "slug" in ci:
storage["reactions"]["by_slug"][ci["slug"]] = x
print(
"[migration.load] "
+ str(len(storage["reactions"]["by_content"].keys()))
+ " with comments"
)
except Exception as e:
raise e
storage["users"]["data"] = users_data
storage["topics"]["tags"] = tags_data
storage["topics"]["cats"] = cats_data
storage["shouts"]["data"] = content_data
storage["reactions"]["data"] = comments_data
return storage
def mongo_download(url):
if not url: raise Exception('\n\nYou should set MONGODB_URL environment variable\n')
print('[migration] mongodump ' + url)
subprocess.call([
'mongodump',
'--uri', url + '/?authSource=admin',
'--forceTableScan',
], stderr = subprocess.STDOUT)
if not url:
raise Exception("\n\nYou should set MONGODB_URL environment variable\n")
print("[migration] mongodump " + url)
subprocess.call(
[
"mongodump",
"--uri",
url + "/?authSource=admin",
"--forceTableScan",
],
stderr=subprocess.STDOUT,
)
def create_pgdump():
pgurl = DB_URL
if not pgurl: raise Exception('\n\nYou should set DATABASE_URL environment variable\n')
subprocess.call(
[ 'pg_dump', pgurl, '-f', TODAY + '-pgdump.sql'],
stderr = subprocess.STDOUT
)
subprocess.call([
'scp',
TODAY + '-pgdump.sql',
'root@build.discours.io:/root/.'
])
pgurl = DB_URL
if not pgurl:
raise Exception("\n\nYou should set DATABASE_URL environment variable\n")
subprocess.call(
["pg_dump", pgurl, "-f", TODAY + "-pgdump.sql"], stderr=subprocess.STDOUT
)
subprocess.call(["scp", TODAY + "-pgdump.sql", "root@build.discours.io:/root/."])
async def handle_auto():
print('[migration] no command given, auto mode')
url = os.getenv('MONGODB_URL')
if url: mongo_download(url)
bson_handle()
await all_handle(data_load(), sys.argv)
create_pgdump()
print("[migration] no command given, auto mode")
url = os.getenv("MONGODB_URL")
if url:
mongo_download(url)
bson_handle()
await all_handle(data_load(), sys.argv)
create_pgdump()
async def main():
if len(sys.argv) > 1:
cmd=sys.argv[1]
if type(cmd) == str: print('[migration] command: ' + cmd)
await handle_auto()
else:
print('[migration] usage: python server.py migrate')
if len(sys.argv) > 1:
cmd = sys.argv[1]
if type(cmd) == str:
print("[migration] command: " + cmd)
await handle_auto()
else:
print("[migration] usage: python server.py migrate")
def migrate():
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
if __name__ == '__main__':
migrate()
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
if __name__ == "__main__":
migrate()

View File

@ -4,25 +4,27 @@ import json
from .utils import DateTimeEncoder
def json_tables():
print('[migration] unpack dump/discours/*.bson to migration/data/*.json')
data = {
"content_items": [],
"content_item_categories": [],
"tags": [],
"email_subscriptions": [],
"users": [],
"comments": []
}
for table in data.keys():
lc = []
with open('dump/discours/'+table+'.bson', 'rb') as f:
bs = f.read()
f.close()
base = 0
while base < len(bs):
base, d = bson.decode_document(bs, base)
lc.append(d)
data[table] = lc
open(os.getcwd() + '/migration/data/'+table+'.json', 'w').write(json.dumps(lc,cls=DateTimeEncoder))
def json_tables():
print("[migration] unpack dump/discours/*.bson to migration/data/*.json")
data = {
"content_items": [],
"content_item_categories": [],
"tags": [],
"email_subscriptions": [],
"users": [],
"comments": [],
}
for table in data.keys():
lc = []
with open("dump/discours/" + table + ".bson", "rb") as f:
bs = f.read()
f.close()
base = 0
while base < len(bs):
base, d = bson.decode_document(bs, base)
lc.append(d)
data[table] = lc
open(os.getcwd() + "/migration/data/" + table + ".json", "w").write(
json.dumps(lc, cls=DateTimeEncoder)
)

View File

@ -1,4 +1,3 @@
from datetime import datetime
import json
import os
@ -6,100 +5,150 @@ import frontmatter
from .extract import extract_html, prepare_html_body
from .utils import DateTimeEncoder
OLD_DATE = '2016-03-05 22:22:00.350000'
EXPORT_DEST = '../discoursio-web/data/'
parentDir = '/'.join(os.getcwd().split('/')[:-1])
contentDir = parentDir + '/discoursio-web/content/'
OLD_DATE = "2016-03-05 22:22:00.350000"
EXPORT_DEST = "../discoursio-web/data/"
parentDir = "/".join(os.getcwd().split("/")[:-1])
contentDir = parentDir + "/discoursio-web/content/"
ts = datetime.now()
def get_metadata(r):
authors = []
for a in r['authors']:
authors.append({ # a short version for public listings
'slug': a.slug or 'discours',
'name': a.name or 'Дискурс',
'userpic': a.userpic or 'https://discours.io/static/img/discours.png'
})
metadata = {}
metadata['title'] = r.get('title', '').replace('{', '(').replace('}', ')')
metadata['authors'] = authors
metadata['createdAt'] = r.get('createdAt', ts)
metadata['layout'] = r['layout']
metadata['topics'] = [topic for topic in r['topics']]
metadata['topics'].sort()
if r.get('cover', False): metadata['cover'] = r.get('cover')
return metadata
authors = []
for a in r["authors"]:
authors.append(
{ # a short version for public listings
"slug": a.slug or "discours",
"name": a.name or "Дискурс",
"userpic": a.userpic or "https://discours.io/static/img/discours.png",
}
)
metadata = {}
metadata["title"] = r.get("title", "").replace("{", "(").replace("}", ")")
metadata["authors"] = authors
metadata["createdAt"] = r.get("createdAt", ts)
metadata["layout"] = r["layout"]
metadata["topics"] = [topic for topic in r["topics"]]
metadata["topics"].sort()
if r.get("cover", False):
metadata["cover"] = r.get("cover")
return metadata
def export_mdx(r):
# print('[export] mdx %s' % r['slug'])
content = ''
metadata = get_metadata(r)
content = frontmatter.dumps(frontmatter.Post(r['body'], **metadata))
ext = 'mdx'
filepath = contentDir + r['slug']
bc = bytes(content,'utf-8').decode('utf-8','ignore')
open(filepath + '.' + ext, 'w').write(bc)
# print('[export] mdx %s' % r['slug'])
content = ""
metadata = get_metadata(r)
content = frontmatter.dumps(frontmatter.Post(r["body"], **metadata))
ext = "mdx"
filepath = contentDir + r["slug"]
bc = bytes(content, "utf-8").decode("utf-8", "ignore")
open(filepath + "." + ext, "w").write(bc)
def export_body(shout, storage):
entry = storage['content_items']['by_oid'][shout['oid']]
if entry:
shout['body'] = prepare_html_body(entry) # prepare_md_body(entry)
export_mdx(shout)
print('[export] html for %s' % shout['slug'])
body = extract_html(entry)
open(contentDir + shout['slug'] + '.html', 'w').write(body)
else:
raise Exception('no content_items entry found')
entry = storage["content_items"]["by_oid"][shout["oid"]]
if entry:
shout["body"] = prepare_html_body(entry) # prepare_md_body(entry)
export_mdx(shout)
print("[export] html for %s" % shout["slug"])
body = extract_html(entry)
open(contentDir + shout["slug"] + ".html", "w").write(body)
else:
raise Exception("no content_items entry found")
def export_slug(slug, storage):
shout = storage['shouts']['by_slug'][slug]
shout = storage['shouts']['by_slug'].get(slug)
assert shout, '[export] no shout found by slug: %s ' % slug
author = shout['authors'][0]
assert author, '[export] no author error'
export_body(shout, storage)
shout = storage["shouts"]["by_slug"][slug]
shout = storage["shouts"]["by_slug"].get(slug)
assert shout, "[export] no shout found by slug: %s " % slug
author = shout["authors"][0]
assert author, "[export] no author error"
export_body(shout, storage)
def export_email_subscriptions():
email_subscriptions_data = json.loads(open('migration/data/email_subscriptions.json').read())
for data in email_subscriptions_data:
# migrate_email_subscription(data)
pass
print('[migration] ' + str(len(email_subscriptions_data)) + ' email subscriptions exported')
email_subscriptions_data = json.loads(
open("migration/data/email_subscriptions.json").read()
)
for data in email_subscriptions_data:
# migrate_email_subscription(data)
pass
print(
"[migration] "
+ str(len(email_subscriptions_data))
+ " email subscriptions exported"
)
def export_shouts(storage):
# update what was just migrated or load json again
if len(storage['users']['by_slugs'].keys()) == 0:
storage['users']['by_slugs'] = json.loads(open(EXPORT_DEST + 'authors.json').read())
print('[migration] ' + str(len(storage['users']['by_slugs'].keys())) + ' exported authors ')
if len(storage['shouts']['by_slugs'].keys()) == 0:
storage['shouts']['by_slugs'] = json.loads(open(EXPORT_DEST + 'articles.json').read())
print('[migration] ' + str(len(storage['shouts']['by_slugs'].keys())) + ' exported articles ')
for slug in storage['shouts']['by_slugs'].keys(): export_slug(slug, storage)
# update what was just migrated or load json again
if len(storage["users"]["by_slugs"].keys()) == 0:
storage["users"]["by_slugs"] = json.loads(
open(EXPORT_DEST + "authors.json").read()
)
print(
"[migration] "
+ str(len(storage["users"]["by_slugs"].keys()))
+ " exported authors "
)
if len(storage["shouts"]["by_slugs"].keys()) == 0:
storage["shouts"]["by_slugs"] = json.loads(
open(EXPORT_DEST + "articles.json").read()
)
print(
"[migration] "
+ str(len(storage["shouts"]["by_slugs"].keys()))
+ " exported articles "
)
for slug in storage["shouts"]["by_slugs"].keys():
export_slug(slug, storage)
def export_json(export_articles = {}, export_authors = {}, export_topics = {}, export_comments = {}):
open(EXPORT_DEST + 'authors.json', 'w').write(json.dumps(export_authors,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False))
print('[migration] ' + str(len(export_authors.items())) + ' authors exported')
open(EXPORT_DEST + 'topics.json', 'w').write(json.dumps(export_topics,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False))
print('[migration] ' + str(len(export_topics.keys())) + ' topics exported')
open(EXPORT_DEST + 'articles.json', 'w').write(json.dumps(export_articles,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False))
print('[migration] ' + str(len(export_articles.items())) + ' articles exported')
open(EXPORT_DEST + 'comments.json', 'w').write(json.dumps(export_comments,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False))
print('[migration] ' + str(len(export_comments.items())) + ' exported articles with comments')
def export_json(
export_articles={}, export_authors={}, export_topics={}, export_comments={}
):
open(EXPORT_DEST + "authors.json", "w").write(
json.dumps(
export_authors,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False,
)
)
print("[migration] " + str(len(export_authors.items())) + " authors exported")
open(EXPORT_DEST + "topics.json", "w").write(
json.dumps(
export_topics,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False,
)
)
print("[migration] " + str(len(export_topics.keys())) + " topics exported")
open(EXPORT_DEST + "articles.json", "w").write(
json.dumps(
export_articles,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False,
)
)
print("[migration] " + str(len(export_articles.items())) + " articles exported")
open(EXPORT_DEST + "comments.json", "w").write(
json.dumps(
export_comments,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False,
)
)
print(
"[migration] "
+ str(len(export_comments.items()))
+ " exported articles with comments"
)

View File

@ -3,322 +3,397 @@ import re
import base64
from .html2text import html2text
TOOLTIP_REGEX = r'(\/\/\/(.+)\/\/\/)'
contentDir = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'discoursio-web', 'content')
s3 = 'https://discours-io.s3.amazonaws.com/'
cdn = 'https://assets.discours.io'
TOOLTIP_REGEX = r"(\/\/\/(.+)\/\/\/)"
contentDir = os.path.join(
os.path.dirname(os.path.realpath(__file__)), "..", "..", "discoursio-web", "content"
)
s3 = "https://discours-io.s3.amazonaws.com/"
cdn = "https://assets.discours.io"
def replace_tooltips(body):
# change if you prefer regexp
newbody = body
matches = list(re.finditer(TOOLTIP_REGEX, body, re.IGNORECASE | re.MULTILINE))[1:]
for match in matches:
newbody = body.replace(match.group(1), '<Tooltip text="' + match.group(2) + '" />') # NOTE: doesn't work
if len(matches) > 0:
print('[extract] found %d tooltips' % len(matches))
return newbody
def replace_tooltips(body):
# change if you prefer regexp
newbody = body
matches = list(re.finditer(TOOLTIP_REGEX, body, re.IGNORECASE | re.MULTILINE))[1:]
for match in matches:
newbody = body.replace(
match.group(1), '<Tooltip text="' + match.group(2) + '" />'
) # NOTE: doesn't work
if len(matches) > 0:
print("[extract] found %d tooltips" % len(matches))
return newbody
def place_tooltips(body):
parts = body.split('&&&')
l = len(parts)
newparts = list(parts)
placed = False
if l & 1:
if l > 1:
i = 1
print('[extract] found %d tooltips' % (l-1))
for part in parts[1:]:
if i & 1:
placed = True
if 'a class="footnote-url" href=' in part:
print('[extract] footnote: ' + part)
fn = 'a class="footnote-url" href="'
link = part.split(fn,1)[1].split('"', 1)[0]
extracted_part = part.split(fn,1)[0] + ' ' + part.split('/', 1)[-1]
newparts[i] = '<Tooltip' + (' link="' + link + '" ' if link else '') + '>' + extracted_part + '</Tooltip>'
else:
newparts[i] = '<Tooltip>%s</Tooltip>' % part
# print('[extract] ' + newparts[i])
else:
# print('[extract] ' + part[:10] + '..')
newparts[i] = part
i += 1
return (''.join(newparts), placed)
parts = body.split("&&&")
l = len(parts)
newparts = list(parts)
placed = False
if l & 1:
if l > 1:
i = 1
print("[extract] found %d tooltips" % (l - 1))
for part in parts[1:]:
if i & 1:
placed = True
if 'a class="footnote-url" href=' in part:
print("[extract] footnote: " + part)
fn = 'a class="footnote-url" href="'
link = part.split(fn, 1)[1].split('"', 1)[0]
extracted_part = (
part.split(fn, 1)[0] + " " + part.split("/", 1)[-1]
)
newparts[i] = (
"<Tooltip"
+ (' link="' + link + '" ' if link else "")
+ ">"
+ extracted_part
+ "</Tooltip>"
)
else:
newparts[i] = "<Tooltip>%s</Tooltip>" % part
# print('[extract] ' + newparts[i])
else:
# print('[extract] ' + part[:10] + '..')
newparts[i] = part
i += 1
return ("".join(newparts), placed)
IMG_REGEX = r"\!\[(.*?)\]\((data\:image\/(png|jpeg|jpg);base64\,((?:[A-Za-z\d+\/]{4})*(?:[A-Za-z\d+\/]{3}=|[A-Za-z\d+\/]{2}==)))\)"
parentDir = '/'.join(os.getcwd().split('/')[:-1])
public = parentDir + '/discoursio-web/public'
parentDir = "/".join(os.getcwd().split("/")[:-1])
public = parentDir + "/discoursio-web/public"
cache = {}
def reextract_images(body, oid):
# change if you prefer regexp
matches = list(re.finditer(IMG_REGEX, body, re.IGNORECASE | re.MULTILINE))[1:]
i = 0
for match in matches:
print('[extract] image ' + match.group(1))
ext = match.group(3)
name = oid + str(i)
link = public + '/upload/image-' + name + '.' + ext
img = match.group(4)
title = match.group(1) # NOTE: this is not the title
if img not in cache:
content = base64.b64decode(img + '==')
print(str(len(img)) + ' image bytes been written')
open('../' + link, 'wb').write(content)
cache[img] = name
i += 1
else:
print('[extract] image cached ' + cache[img])
body.replace(str(match), '![' + title + '](' + cdn + link + ')') # WARNING: this does not work
return body
def reextract_images(body, oid):
# change if you prefer regexp
matches = list(re.finditer(IMG_REGEX, body, re.IGNORECASE | re.MULTILINE))[1:]
i = 0
for match in matches:
print("[extract] image " + match.group(1))
ext = match.group(3)
name = oid + str(i)
link = public + "/upload/image-" + name + "." + ext
img = match.group(4)
title = match.group(1) # NOTE: this is not the title
if img not in cache:
content = base64.b64decode(img + "==")
print(str(len(img)) + " image bytes written")
open("../" + link, "wb").write(content)
cache[img] = name
i += 1
else:
print("[extract] image cached " + cache[img])
body.replace(
str(match), "![" + title + "](" + cdn + link + ")"
) # WARNING: this does not work
return body
IMAGES = {
'data:image/png': 'png',
'data:image/jpg': 'jpg',
'data:image/jpeg': 'jpg',
"data:image/png": "png",
"data:image/jpg": "jpg",
"data:image/jpeg": "jpg",
}
b64 = ';base64,'
b64 = ";base64,"
def extract_imageparts(bodyparts, prefix):
# recursive loop
newparts = list(bodyparts)
for current in bodyparts:
i = bodyparts.index(current)
for mime in IMAGES.keys():
if mime == current[-len(mime):] and (i + 1 < len(bodyparts)):
print('[extract] ' + mime)
next = bodyparts[i+1]
ext = IMAGES[mime]
b64end = next.index(')')
b64encoded = next[:b64end]
name = prefix + '-' + str(len(cache))
link = '/upload/image-' + name + '.' + ext
print('[extract] name: ' + name)
print('[extract] link: ' + link)
print('[extract] %d bytes' % len(b64encoded))
if b64encoded not in cache:
try:
content = base64.b64decode(b64encoded + '==')
open(public + link, 'wb').write(content)
print('[extract] ' +str(len(content)) + ' image bytes been written')
cache[b64encoded] = name
except:
raise Exception
# raise Exception('[extract] error decoding image %r' %b64encoded)
else:
print('[extract] cached link ' + cache[b64encoded])
name = cache[b64encoded]
link = cdn + '/upload/image-' + name + '.' + ext
newparts[i] = current[:-len(mime)] + current[-len(mime):] + link + next[-b64end:]
newparts[i+1] = next[:-b64end]
break
return extract_imageparts(newparts[i] + newparts[i+1] + b64.join(bodyparts[i+2:]), prefix) \
if len(bodyparts) > (i + 1) else ''.join(newparts)
# recursive loop
newparts = list(bodyparts)
for current in bodyparts:
i = bodyparts.index(current)
for mime in IMAGES.keys():
if mime == current[-len(mime) :] and (i + 1 < len(bodyparts)):
print("[extract] " + mime)
next = bodyparts[i + 1]
ext = IMAGES[mime]
b64end = next.index(")")
b64encoded = next[:b64end]
name = prefix + "-" + str(len(cache))
link = "/upload/image-" + name + "." + ext
print("[extract] name: " + name)
print("[extract] link: " + link)
print("[extract] %d bytes" % len(b64encoded))
if b64encoded not in cache:
try:
content = base64.b64decode(b64encoded + "==")
open(public + link, "wb").write(content)
print(
"[extract] "
+ str(len(content))
+ " image bytes been written"
)
cache[b64encoded] = name
except:
raise Exception
# raise Exception('[extract] error decoding image %r' %b64encoded)
else:
print("[extract] cached link " + cache[b64encoded])
name = cache[b64encoded]
link = cdn + "/upload/image-" + name + "." + ext
newparts[i] = (
current[: -len(mime)]
+ current[-len(mime) :]
+ link
+ next[-b64end:]
)
newparts[i + 1] = next[:-b64end]
break
return (
extract_imageparts(
newparts[i] + newparts[i + 1] + b64.join(bodyparts[i + 2 :]), prefix
)
if len(bodyparts) > (i + 1)
else "".join(newparts)
)
def extract_dataimages(parts, prefix):
newparts = list(parts)
for part in parts:
i = parts.index(part)
if part.endswith(']('):
[ext, rest] = parts[i+1].split(b64)
name = prefix + '-' + str(len(cache))
if ext == '/jpeg': ext = 'jpg'
else: ext = ext.replace('/', '')
link = '/upload/image-' + name + '.' + ext
print('[extract] filename: ' + link)
b64end = rest.find(')')
if b64end !=-1:
b64encoded = rest[:b64end]
print('[extract] %d text bytes' % len(b64encoded))
# write if not cached
if b64encoded not in cache:
try:
content = base64.b64decode(b64encoded + '==')
open(public + link, 'wb').write(content)
print('[extract] ' +str(len(content)) + ' image bytes')
cache[b64encoded] = name
except:
raise Exception
# raise Exception('[extract] error decoding image %r' %b64encoded)
else:
print('[extract] 0 image bytes, cached for ' + cache[b64encoded])
name = cache[b64encoded]
newparts = list(parts)
for part in parts:
i = parts.index(part)
if part.endswith("]("):
[ext, rest] = parts[i + 1].split(b64)
name = prefix + "-" + str(len(cache))
if ext == "/jpeg":
ext = "jpg"
else:
ext = ext.replace("/", "")
link = "/upload/image-" + name + "." + ext
print("[extract] filename: " + link)
b64end = rest.find(")")
if b64end != -1:
b64encoded = rest[:b64end]
print("[extract] %d text bytes" % len(b64encoded))
# write if not cached
if b64encoded not in cache:
try:
content = base64.b64decode(b64encoded + "==")
open(public + link, "wb").write(content)
print("[extract] " + str(len(content)) + " image bytes")
cache[b64encoded] = name
except:
raise Exception
# raise Exception('[extract] error decoding image %r' %b64encoded)
else:
print("[extract] 0 image bytes, cached for " + cache[b64encoded])
name = cache[b64encoded]
# update link with CDN
link = cdn + '/upload/image-' + name + '.' + ext
# patch newparts
newparts[i+1] = link + rest[b64end:]
else:
raise Exception('cannot find the end of base64 encoded string')
else:
print('[extract] dataimage skipping part ' + str(i))
continue
return ''.join(newparts)
# update link with CDN
link = cdn + "/upload/image-" + name + "." + ext
# patch newparts
newparts[i + 1] = link + rest[b64end:]
else:
raise Exception("cannot find the end of base64 encoded string")
else:
print("[extract] dataimage skipping part " + str(i))
continue
return "".join(newparts)
di = "data:image"
di = 'data:image'
def extract_md_images(body, oid):
newbody = ''
body = body\
.replace('\n! []('+di, '\n ![]('+di)\
.replace('\n[]('+di, '\n![]('+di)\
.replace(' []('+di, ' ![]('+di)
parts = body.split(di)
i = 0
if len(parts) > 1: newbody = extract_dataimages(parts, oid)
else: newbody = body
return newbody
newbody = ""
body = (
body.replace("\n! [](" + di, "\n ![](" + di)
.replace("\n[](" + di, "\n![](" + di)
.replace(" [](" + di, " ![](" + di)
)
parts = body.split(di)
i = 0
if len(parts) > 1:
newbody = extract_dataimages(parts, oid)
else:
newbody = body
return newbody
def cleanup(body):
newbody = body\
.replace('<', '').replace('>', '')\
.replace('{', '(').replace('}', ')')\
.replace('…', '...')\
.replace(' __ ', ' ')\
.replace('_ _', ' ')\
.replace('****', '')\
.replace('\u00a0', ' ')\
.replace('\u02c6', '^')\
.replace('\u00a0',' ')\
.replace('\ufeff', '')\
.replace('\u200b', '')\
.replace('\u200c', '')\
# .replace('\u2212', '-')
return newbody
newbody = (
body.replace("<", "")
.replace(">", "")
.replace("{", "(")
.replace("}", ")")
.replace("", "...")
.replace(" __ ", " ")
.replace("_ _", " ")
.replace("****", "")
.replace("\u00a0", " ")
.replace("\u02c6", "^")
.replace("\u00a0", " ")
.replace("\ufeff", "")
.replace("\u200b", "")
.replace("\u200c", "")
) # .replace('\u2212', '-')
return newbody
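For illustration only (not in the commit): cleanup is a pure string pass, so its effect can be checked in isolation, again assuming the migration package is importable.

from migration.extract import cleanup

print(cleanup("<b>{aside}</b>\u00a0rest"))
# angle brackets are stripped, braces become parentheses, the non-breaking space becomes a plain space:
# b(aside)/b rest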
def extract_md(body, oid):
newbody = body
if newbody:
newbody = extract_md_images(newbody, oid)
if not newbody: raise Exception('extract_images error')
newbody = cleanup(newbody)
if not newbody: raise Exception('cleanup error')
newbody, placed = place_tooltips(newbody)
if not newbody: raise Exception('place_tooltips error')
if placed:
newbody = 'import Tooltip from \'$/components/Article/Tooltip\'\n\n' + newbody
return newbody
newbody = body
if newbody:
newbody = extract_md_images(newbody, oid)
if not newbody:
raise Exception("extract_images error")
newbody = cleanup(newbody)
if not newbody:
raise Exception("cleanup error")
newbody, placed = place_tooltips(newbody)
if not newbody:
raise Exception("place_tooltips error")
if placed:
newbody = "import Tooltip from '$/components/Article/Tooltip'\n\n" + newbody
return newbody
def prepare_md_body(entry):
# body modifications
body = ''
kind = entry.get('type')
addon = ''
if kind == 'Video':
addon = ''
for m in entry.get('media', []):
if 'youtubeId' in m: addon += '<VideoPlayer youtubeId=\'' + m['youtubeId'] + '\' />\n'
elif 'vimeoId' in m: addon += '<VideoPlayer vimeoId=\'' + m['vimeoId'] + '\' />\n'
else:
print('[extract] media is not supported')
print(m)
body = 'import VideoPlayer from \'$/components/Article/VideoPlayer\'\n\n' + addon
elif kind == 'Music':
addon = ''
for m in entry.get('media', []):
artist = m.get('performer')
trackname = ''
if artist: trackname += artist + ' - '
if 'title' in m: trackname += m.get('title','')
addon += '<MusicPlayer src=\"' + m.get('fileUrl','') + '\" title=\"' + trackname + '\" />\n'
body = 'import MusicPlayer from \'$/components/Article/MusicPlayer\'\n\n' + addon
# body modifications
body = ""
kind = entry.get("type")
addon = ""
if kind == "Video":
addon = ""
for m in entry.get("media", []):
if "youtubeId" in m:
addon += "<VideoPlayer youtubeId='" + m["youtubeId"] + "' />\n"
elif "vimeoId" in m:
addon += "<VideoPlayer vimeoId='" + m["vimeoId"] + "' />\n"
else:
print("[extract] media is not supported")
print(m)
body = "import VideoPlayer from '$/components/Article/VideoPlayer'\n\n" + addon
elif kind == "Music":
addon = ""
for m in entry.get("media", []):
artist = m.get("performer")
trackname = ""
if artist:
trackname += artist + " - "
if "title" in m:
trackname += m.get("title", "")
addon += (
'<MusicPlayer src="'
+ m.get("fileUrl", "")
+ '" title="'
+ trackname
+ '" />\n'
)
body = "import MusicPlayer from '$/components/Article/MusicPlayer'\n\n" + addon
body_orig = extract_html(entry)
if body_orig:
body += extract_md(html2text(body_orig), entry["_id"])
if not body:
print("[extract] empty MDX body")
return body
body_orig = extract_html(entry)
if body_orig: body += extract_md(html2text(body_orig), entry['_id'])
if not body: print('[extract] empty MDX body')
return body
def prepare_html_body(entry):
# body modifications
body = ''
kind = entry.get('type')
addon = ''
if kind == 'Video':
addon = ''
for m in entry.get('media', []):
if 'youtubeId' in m:
addon += '<iframe width="420" height="345" src="http://www.youtube.com/embed/'
addon += m['youtubeId']
addon += '?autoplay=1" frameborder="0" allowfullscreen></iframe>\n'
elif 'vimeoId' in m:
addon += '<iframe src="https://player.vimeo.com/video/'
addon += m['vimeoId']
addon += ' width="420" height="345" frameborder="0" allow="autoplay; fullscreen" allowfullscreen></iframe>'
else:
print('[extract] media is not supported')
print(m)
body += addon
elif kind == 'Music':
addon = ''
for m in entry.get('media', []):
artist = m.get('performer')
trackname = ''
if artist: trackname += artist + ' - '
if 'title' in m: trackname += m.get('title','')
addon += '<figure><figcaption>'
addon += trackname
addon += '</figcaption><audio controls src="'
addon += m.get('fileUrl','')
addon += '"></audio></figure>'
body += addon
# body modifications
body = ""
kind = entry.get("type")
addon = ""
if kind == "Video":
addon = ""
for m in entry.get("media", []):
if "youtubeId" in m:
addon += '<iframe width="420" height="345" src="http://www.youtube.com/embed/'
addon += m["youtubeId"]
addon += '?autoplay=1" frameborder="0" allowfullscreen></iframe>\n'
elif "vimeoId" in m:
addon += '<iframe src="https://player.vimeo.com/video/'
addon += m["vimeoId"]
addon += '" width="420" height="345" frameborder="0" allow="autoplay; fullscreen" allowfullscreen></iframe>'  # closing quote added to terminate the src attribute
else:
print("[extract] media is not supported")
print(m)
body += addon
elif kind == "Music":
addon = ""
for m in entry.get("media", []):
artist = m.get("performer")
trackname = ""
if artist:
trackname += artist + " - "
if "title" in m:
trackname += m.get("title", "")
addon += "<figure><figcaption>"
addon += trackname
addon += '</figcaption><audio controls src="'
addon += m.get("fileUrl", "")
addon += '"></audio></figure>'
body += addon
body = extract_html(entry)
# if body_orig: body += extract_md(html2text(body_orig), entry['_id'])
if not body:
print("[extract] empty HTML body")
return body
body = extract_html(entry)
# if body_orig: body += extract_md(html2text(body_orig), entry['_id'])
if not body: print('[extract] empty HTML body')
return body
def extract_html(entry):
body_orig = entry.get('body') or ''
media = entry.get('media', [])
kind = entry.get('type') or ''
print('[extract] kind: ' + kind)
mbodies = set([])
if media:
# print('[extract] media is found')
for m in media:
mbody = m.get('body', '')
addon = ''
if kind == 'Literature':
mbody = m.get('literatureBody') or m.get('body', '')
elif kind == 'Image':
cover = ''
if 'thumborId' in entry: cover = cdn + '/unsafe/1600x/' + entry['thumborId']
if not cover:
if 'image' in entry: cover = entry['image'].get('url', '')
if 'cloudinary' in cover: cover = ''
# else: print('[extract] cover: ' + cover)
title = m.get('title','').replace('\n', ' ').replace('&nbsp;', ' ')
u = m.get('thumborId') or cover or ''
if title: addon += '<h4>' + title + '</h4>\n'
if not u.startswith('http'): u = s3 + u
if not u: print('[extract] no image url for ' + str(m))
if 'cloudinary' in u: u = 'img/lost.svg'
if u != cover or (u == cover and media.index(m) == 0):
addon += '<img src=\"' + u + '\" alt=\"'+ title +'\" />\n'
if addon:
body_orig += addon
# print('[extract] item addon: ' + addon)
# if addon: print('[extract] addon: %s' % addon)
if mbody and mbody not in mbodies:
mbodies.add(mbody)
body_orig += mbody
if len(list(mbodies)) != len(media):
print('[extract] %d/%d media item bodies appended' % (len(list(mbodies)),len(media)))
# print('[extract] media items body: \n' + body_orig)
if not body_orig:
for up in entry.get('bodyHistory', []) or []:
body_orig = up.get('text', '') or ''
if body_orig:
print('[extract] got html body from history')
break
if not body_orig: print('[extract] empty HTML body')
# body_html = str(BeautifulSoup(body_orig, features="html.parser"))
return body_orig
body_orig = entry.get("body") or ""
media = entry.get("media", [])
kind = entry.get("type") or ""
print("[extract] kind: " + kind)
mbodies = set([])
if media:
# print('[extract] media is found')
for m in media:
mbody = m.get("body", "")
addon = ""
if kind == "Literature":
mbody = m.get("literatureBody") or m.get("body", "")
elif kind == "Image":
cover = ""
if "thumborId" in entry:
cover = cdn + "/unsafe/1600x/" + entry["thumborId"]
if not cover:
if "image" in entry:
cover = entry["image"].get("url", "")
if "cloudinary" in cover:
cover = ""
# else: print('[extract] cover: ' + cover)
title = m.get("title", "").replace("\n", " ").replace("&nbsp;", " ")
u = m.get("thumborId") or cover or ""
if title:
addon += "<h4>" + title + "</h4>\n"
if not u.startswith("http"):
u = s3 + u
if not u:
print("[extract] no image url for " + str(m))
if "cloudinary" in u:
u = "img/lost.svg"
if u != cover or (u == cover and media.index(m) == 0):
addon += '<img src="' + u + '" alt="' + title + '" />\n'
if addon:
body_orig += addon
# print('[extract] item addon: ' + addon)
# if addon: print('[extract] addon: %s' % addon)
if mbody and mbody not in mbodies:
mbodies.add(mbody)
body_orig += mbody
if len(list(mbodies)) != len(media):
print(
"[extract] %d/%d media item bodies appended"
% (len(list(mbodies)), len(media))
)
# print('[extract] media items body: \n' + body_orig)
if not body_orig:
for up in entry.get("bodyHistory", []) or []:
body_orig = up.get("text", "") or ""
if body_orig:
print("[extract] got html body from history")
break
if not body_orig:
print("[extract] empty HTML body")
# body_html = str(BeautifulSoup(body_orig, features="html.parser"))
return body_orig

File diff suppressed because it is too large


@@ -1 +1 @@
__all__ = ["users", "tags", "content_items", "comments"],
__all__ = ["users", "tags", "content_items", "comments"]  # trailing comma dropped: it made __all__ a one-element tuple


@@ -8,104 +8,128 @@ from services.stat.reacted import ReactedStorage
ts = datetime.now()
async def migrate(entry, storage):
'''
{
"_id": "hdtwS8fSyFLxXCgSC",
"body": "<p>",
"contentItem": "mnK8KsJHPRi8DrybQ",
"createdBy": "bMFPuyNg6qAD2mhXe",
"thread": "01/",
"createdAt": "2016-04-19 04:33:53+00:00",
"ratings": [
{ "createdBy": "AqmRukvRiExNpAe8C", "value": 1 },
{ "createdBy": "YdE76Wth3yqymKEu5", "value": 1 }
],
"rating": 2,
"updatedAt": "2020-05-27 19:22:57.091000+00:00",
"updatedBy": "0"
}
"""
{
"_id": "hdtwS8fSyFLxXCgSC",
"body": "<p>",
"contentItem": "mnK8KsJHPRi8DrybQ",
"createdBy": "bMFPuyNg6qAD2mhXe",
"thread": "01/",
"createdAt": "2016-04-19 04:33:53+00:00",
"ratings": [
{ "createdBy": "AqmRukvRiExNpAe8C", "value": 1 },
{ "createdBy": "YdE76Wth3yqymKEu5", "value": 1 }
],
"rating": 2,
"updatedAt": "2020-05-27 19:22:57.091000+00:00",
"updatedBy": "0"
}
->
->
type Reaction {
id: Int!
shout: Shout!
createdAt: DateTime!
createdBy: User!
updatedAt: DateTime
deletedAt: DateTime
deletedBy: User
range: String # full / 0:2340
kind: ReactionKind!
body: String
replyTo: Reaction
stat: Stat
old_id: String
old_thread: String
}
'''
reaction_dict = {}
reaction_dict['createdAt'] = ts if not entry.get('createdAt') else date_parse(entry.get('createdAt'))
print('[migration] reaction original date %r' % entry.get('createdAt'))
# print('[migration] comment date %r ' % comment_dict['createdAt'])
reaction_dict['body'] = html2text(entry.get('body', ''))
reaction_dict['oid'] = entry['_id']
if entry.get('createdAt'): reaction_dict['createdAt'] = date_parse(entry.get('createdAt'))
shout_oid = entry.get('contentItem')
if not shout_oid in storage['shouts']['by_oid']:
if len(storage['shouts']['by_oid']) > 0:
return shout_oid
else:
print('[migration] no shouts migrated yet')
raise Exception
return
else:
with local_session() as session:
author = session.query(User).filter(User.oid == entry['createdBy']).first()
shout_dict = storage['shouts']['by_oid'][shout_oid]
if shout_dict:
reaction_dict['shout'] = shout_dict['slug']
reaction_dict['createdBy'] = author.slug if author else 'discours'
reaction_dict['kind'] = ReactionKind.COMMENT
type Reaction {
id: Int!
shout: Shout!
createdAt: DateTime!
createdBy: User!
updatedAt: DateTime
deletedAt: DateTime
deletedBy: User
range: String # full / 0:2340
kind: ReactionKind!
body: String
replyTo: Reaction
stat: Stat
old_id: String
old_thread: String
}
"""
reaction_dict = {}
reaction_dict["createdAt"] = (
ts if not entry.get("createdAt") else date_parse(entry.get("createdAt"))
)
print("[migration] reaction original date %r" % entry.get("createdAt"))
# print('[migration] comment date %r ' % comment_dict['createdAt'])
reaction_dict["body"] = html2text(entry.get("body", ""))
reaction_dict["oid"] = entry["_id"]
if entry.get("createdAt"):
reaction_dict["createdAt"] = date_parse(entry.get("createdAt"))
shout_oid = entry.get("contentItem")
if shout_oid not in storage["shouts"]["by_oid"]:
if len(storage["shouts"]["by_oid"]) > 0:
return shout_oid
else:
print("[migration] no shouts migrated yet")
raise Exception
return
else:
with local_session() as session:
author = session.query(User).filter(User.oid == entry["createdBy"]).first()
shout_dict = storage["shouts"]["by_oid"][shout_oid]
if shout_dict:
reaction_dict["shout"] = shout_dict["slug"]
reaction_dict["createdBy"] = author.slug if author else "discours"
reaction_dict["kind"] = ReactionKind.COMMENT
# creating reaction from old comment
day = (reaction_dict.get("createdAt") or ts).replace(
hour=0, minute=0, second=0, microsecond=0
)
reaction = Reaction.create(**reaction_dict)
await ReactedStorage.increment(reaction)
reaction_dict["id"] = reaction.id
for comment_rating_old in entry.get("ratings", []):
rater = (
session.query(User)
.filter(User.oid == comment_rating_old["createdBy"])
.first()
)
reactedBy = (
rater
if rater
else session.query(User).filter(User.slug == "noname").first()
)
re_reaction_dict = {
"shout": reaction_dict["shout"],
"replyTo": reaction.id,
"kind": ReactionKind.LIKE
if comment_rating_old["value"] > 0
else ReactionKind.DISLIKE,
"createdBy": reactedBy.slug if reactedBy else "discours",
}
cts = comment_rating_old.get("createdAt")
if cts:
re_reaction_dict["createdAt"] = date_parse(cts)
try:
# creating reaction from old rating
rr = Reaction.create(**re_reaction_dict)
await ReactedStorage.increment(rr)
except Exception as e:
print("[migration] comment rating error: %r" % re_reaction_dict)
raise e
else:
print(
"[migration] error: cannot find shout for comment %r"
% reaction_dict
)
return reaction
# creating reaction from old comment
day = (reaction_dict.get('createdAt') or ts).replace(hour=0, minute=0, second=0, microsecond=0)
reaction = Reaction.create(**reaction_dict)
await ReactedStorage.increment(reaction)
reaction_dict['id'] = reaction.id
for comment_rating_old in entry.get('ratings',[]):
rater = session.query(User).filter(User.oid == comment_rating_old['createdBy']).first()
reactedBy = rater if rater else session.query(User).filter(User.slug == 'noname').first()
re_reaction_dict = {
'shout': reaction_dict['shout'],
'replyTo': reaction.id,
'kind': ReactionKind.LIKE if comment_rating_old['value'] > 0 else ReactionKind.DISLIKE,
'createdBy': reactedBy.slug if reactedBy else 'discours'
}
cts = comment_rating_old.get('createdAt')
if cts: re_reaction_dict['createdAt'] = date_parse(cts)
try:
# creating reaction from old rating
rr = Reaction.create(**re_reaction_dict)
await ReactedStorage.increment(rr)
except Exception as e:
print('[migration] comment rating error: %r' % re_reaction_dict)
raise e
else:
print('[migration] error: cannot find shout for comment %r' % reaction_dict)
return reaction
def migrate_2stage(rr, old_new_id):
reply_oid = rr.get('replyTo')
if not reply_oid: return
new_id = old_new_id.get(rr.get('oid'))
if not new_id: return
with local_session() as session:
comment = session.query(Reaction).filter(Reaction.id == new_id).first()
comment.replyTo = old_new_id.get(reply_oid)
comment.save()
session.commit()
if not rr['body']: raise Exception(rr)
reply_oid = rr.get("replyTo")
if not reply_oid:
return
new_id = old_new_id.get(rr.get("oid"))
if not new_id:
return
with local_session() as session:
comment = session.query(Reaction).filter(Reaction.id == new_id).first()
comment.replyTo = old_new_id.get(reply_oid)
comment.save()
session.commit()
if not rr["body"]:
raise Exception(rr)


@@ -10,224 +10,279 @@ from migration.extract import prepare_html_body
from orm.community import Community
from orm.reaction import Reaction, ReactionKind
OLD_DATE = '2016-03-05 22:22:00.350000'
OLD_DATE = "2016-03-05 22:22:00.350000"
ts = datetime.now()
type2layout = {
'Article': 'article',
'Literature': 'prose',
'Music': 'music',
'Video': 'video',
'Image': 'image'
"Article": "article",
"Literature": "prose",
"Music": "music",
"Video": "video",
"Image": "image",
}
def get_shout_slug(entry):
slug = entry.get('slug', '')
if not slug:
for friend in entry.get('friendlySlugs', []):
slug = friend.get('slug', '')
if slug: break
return slug
slug = entry.get("slug", "")
if not slug:
for friend in entry.get("friendlySlugs", []):
slug = friend.get("slug", "")
if slug:
break
return slug
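A small sketch (not from the commit) of the fallback order get_shout_slug implements, run against the function defined just above: the entry's own slug wins, otherwise the first non-empty friendlySlugs entry is used.

entry = {"slug": "", "friendlySlugs": [{"slug": ""}, {"slug": "legacy-slug"}]}
print(get_shout_slug(entry))  # legacy-slug

entry = {"slug": "fresh-slug", "friendlySlugs": [{"slug": "legacy-slug"}]}
print(get_shout_slug(entry))  # fresh-slug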
async def migrate(entry, storage):
# init, set title and layout
r = {
'layout': type2layout[entry['type']],
'title': entry['title'],
'community': Community.default_community.id,
'authors': [],
'topics': set([]),
# 'rating': 0,
# 'ratings': [],
'createdAt': []
}
topics_by_oid = storage['topics']['by_oid']
users_by_oid = storage['users']['by_oid']
# init, set title and layout
r = {
"layout": type2layout[entry["type"]],
"title": entry["title"],
"community": Community.default_community.id,
"authors": [],
"topics": set([]),
# 'rating': 0,
# 'ratings': [],
"createdAt": [],
}
topics_by_oid = storage["topics"]["by_oid"]
users_by_oid = storage["users"]["by_oid"]
# author
# author
oid = entry.get('createdBy', entry.get('_id', entry.get('oid')))
userdata = users_by_oid.get(oid)
if not userdata:
app = entry.get('application')
if app:
userslug = translit(app['name'], 'ru', reversed=True)\
.replace(' ', '-')\
.replace('\'', '')\
.replace('.', '-').lower()
userdata = {
'username': app['email'],
'email': app['email'],
'name': app['name'],
'bio': app.get('bio', ''),
'emailConfirmed': False,
'slug': userslug,
'createdAt': ts,
'wasOnlineAt': ts
}
else:
userdata = User.default_user.dict()
assert userdata, 'no user found for %s from ' % [oid, len(users_by_oid.keys())]
r['authors'] = [userdata, ]
oid = entry.get("createdBy", entry.get("_id", entry.get("oid")))
userdata = users_by_oid.get(oid)
if not userdata:
app = entry.get("application")
if app:
userslug = (
translit(app["name"], "ru", reversed=True)
.replace(" ", "-")
.replace("'", "")
.replace(".", "-")
.lower()
)
userdata = {
"username": app["email"],
"email": app["email"],
"name": app["name"],
"bio": app.get("bio", ""),
"emailConfirmed": False,
"slug": userslug,
"createdAt": ts,
"wasOnlineAt": ts,
}
else:
userdata = User.default_user.dict()
assert userdata, "no user found for %s from " % [oid, len(users_by_oid.keys())]
r["authors"] = [
userdata,
]
# slug
# slug
slug = get_shout_slug(entry)
if slug: r['slug'] = slug
else: raise Exception
# cover
c = ''
if entry.get('thumborId'):
c = 'https://assets.discours.io/unsafe/1600x/' + entry['thumborId']
else:
c = entry.get('image', {}).get('url')
if not c or 'cloudinary' in c: c = ''
r['cover'] = c
slug = get_shout_slug(entry)
if slug:
r["slug"] = slug
else:
raise Exception
# timestamps
# cover
c = ""
if entry.get("thumborId"):
c = "https://assets.discours.io/unsafe/1600x/" + entry["thumborId"]
else:
c = entry.get("image", {}).get("url")
if not c or "cloudinary" in c:
c = ""
r["cover"] = c
r['createdAt'] = date_parse(entry.get('createdAt', OLD_DATE))
r['updatedAt'] = date_parse(entry['updatedAt']) if 'updatedAt' in entry else ts
if entry.get('published'):
r['publishedAt'] = date_parse(entry.get('publishedAt', OLD_DATE))
if 'deletedAt' in entry: r['deletedAt'] = date_parse(entry['deletedAt'])
# timestamps
# topics
category = entry['category']
mainTopic = topics_by_oid.get(category)
if mainTopic:
r['mainTopic'] = storage['replacements'].get(mainTopic["slug"], mainTopic["slug"])
topic_oids = [category, ]
topic_oids.extend(entry.get('tags', []))
for oid in topic_oids:
if oid in storage['topics']['by_oid']:
r['topics'].add(storage['topics']['by_oid'][oid]['slug'])
else:
print('[migration] unknown old topic id: ' + oid)
r['topics'] = list(r['topics'])
entry['topics'] = r['topics']
entry['cover'] = r['cover']
entry['authors'] = r['authors']
r["createdAt"] = date_parse(entry.get("createdAt", OLD_DATE))
r["updatedAt"] = date_parse(entry["updatedAt"]) if "updatedAt" in entry else ts
if entry.get("published"):
r["publishedAt"] = date_parse(entry.get("publishedAt", OLD_DATE))
if "deletedAt" in entry:
r["deletedAt"] = date_parse(entry["deletedAt"])
# body
r['body'] = prepare_html_body(entry)
# topics
category = entry["category"]
mainTopic = topics_by_oid.get(category)
if mainTopic:
r["mainTopic"] = storage["replacements"].get(
mainTopic["slug"], mainTopic["slug"]
)
topic_oids = [
category,
]
topic_oids.extend(entry.get("tags", []))
for oid in topic_oids:
if oid in storage["topics"]["by_oid"]:
r["topics"].add(storage["topics"]["by_oid"][oid]["slug"])
else:
print("[migration] unknown old topic id: " + oid)
r["topics"] = list(r["topics"])
# save shout to db
entry["topics"] = r["topics"]
entry["cover"] = r["cover"]
entry["authors"] = r["authors"]
s = object()
shout_dict = r.copy()
user = None
del shout_dict['topics'] # NOTE: AttributeError: 'str' object has no attribute '_sa_instance_state'
#del shout_dict['rating'] # NOTE: TypeError: 'rating' is an invalid keyword argument for Shout
#del shout_dict['ratings']
email = userdata.get('email')
slug = userdata.get('slug')
if not slug: raise Exception
with local_session() as session:
# c = session.query(Community).all().pop()
if email: user = session.query(User).filter(User.email == email).first()
if not user and slug: user = session.query(User).filter(User.slug == slug).first()
if not user and userdata:
try:
userdata['slug'] = userdata['slug'].lower().strip().replace(' ', '-')
user = User.create(**userdata)
except sqlalchemy.exc.IntegrityError:
print('[migration] user error: ' + userdata)
userdata['id'] = user.id
userdata['createdAt'] = user.createdAt
storage['users']['by_slug'][userdata['slug']] = userdata
storage['users']['by_oid'][entry['_id']] = userdata
assert user, 'could not get a user'
shout_dict['authors'] = [ user, ]
# body
r["body"] = prepare_html_body(entry)
try:
s = Shout.create(**shout_dict)
except sqlalchemy.exc.IntegrityError as e:
with local_session() as session:
s = session.query(Shout).filter(Shout.slug == shout_dict['slug']).first()
bump = False
if s:
for key in shout_dict:
if key in s.__dict__:
if s.__dict__[key] != shout_dict[key]:
print('[migration] shout already exists, but differs in %s' % key)
bump = True
else:
print('[migration] shout already exists, but lacks %s' % key)
bump = True
if bump:
s.update(shout_dict)
else:
print('[migration] something went wrong with shout: \n%r' % shout_dict)
raise e
session.commit()
except Exception as e:
print(e)
print(s)
raise Exception
# save shout to db
# shout topics aftermath
shout_dict['topics'] = []
for tpc in r['topics']:
oldslug = tpc
newslug = storage['replacements'].get(oldslug, oldslug)
if newslug:
with local_session() as session:
shout_topic_old = session.query(ShoutTopic)\
.filter(ShoutTopic.shout == shout_dict['slug'])\
.filter(ShoutTopic.topic == oldslug).first()
if shout_topic_old:
shout_topic_old.update({ 'slug': newslug })
else:
shout_topic_new = session.query(ShoutTopic)\
.filter(ShoutTopic.shout == shout_dict['slug'])\
.filter(ShoutTopic.topic == newslug).first()
if not shout_topic_new:
try: ShoutTopic.create(**{ 'shout': shout_dict['slug'], 'topic': newslug })
except: print('[migration] shout topic error: ' + newslug)
session.commit()
if newslug not in shout_dict['topics']:
shout_dict['topics'].append(newslug)
else:
print('[migration] ignored topic slug: \n%r' % tpc['slug'])
# raise Exception
s = object()
shout_dict = r.copy()
user = None
del shout_dict[
"topics"
] # NOTE: AttributeError: 'str' object has no attribute '_sa_instance_state'
# del shout_dict['rating'] # NOTE: TypeError: 'rating' is an invalid keyword argument for Shout
# del shout_dict['ratings']
email = userdata.get("email")
slug = userdata.get("slug")
if not slug:
raise Exception
with local_session() as session:
# c = session.query(Community).all().pop()
if email:
user = session.query(User).filter(User.email == email).first()
if not user and slug:
user = session.query(User).filter(User.slug == slug).first()
if not user and userdata:
try:
userdata["slug"] = userdata["slug"].lower().strip().replace(" ", "-")
user = User.create(**userdata)
except sqlalchemy.exc.IntegrityError:
print("[migration] user error: " + userdata)
userdata["id"] = user.id
userdata["createdAt"] = user.createdAt
storage["users"]["by_slug"][userdata["slug"]] = userdata
storage["users"]["by_oid"][entry["_id"]] = userdata
assert user, "could not get a user"
shout_dict["authors"] = [
user,
]
# content_item ratings to reactions
try:
for content_rating in entry.get('ratings',[]):
with local_session() as session:
rater = session.query(User).filter(User.oid == content_rating['createdBy']).first()
reactedBy = rater if rater else session.query(User).filter(User.slug == 'noname').first()
if rater:
reaction_dict = {
'kind': ReactionKind.LIKE if content_rating['value'] > 0 else ReactionKind.DISLIKE,
'createdBy': reactedBy.slug,
'shout': shout_dict['slug']
}
cts = content_rating.get('createdAt')
if cts: reaction_dict['createdAt'] = date_parse(cts)
reaction = session.query(Reaction).\
filter(Reaction.shout == reaction_dict['shout']).\
filter(Reaction.createdBy == reaction_dict['createdBy']).\
filter(Reaction.kind == reaction_dict['kind']).first()
if reaction:
reaction_dict['kind'] = ReactionKind.AGREE if content_rating['value'] > 0 else ReactionKind.DISAGREE,
reaction.update(reaction_dict)
else:
day = (reaction_dict.get('createdAt') or ts).replace(hour=0, minute=0, second=0, microsecond=0)
rea = Reaction.create(**reaction_dict)
await ReactedStorage.increment(rea)
# shout_dict['ratings'].append(reaction_dict)
except:
print('[migration] content_item.ratings error: \n%r' % content_rating)
raise Exception
try:
s = Shout.create(**shout_dict)
except sqlalchemy.exc.IntegrityError as e:
with local_session() as session:
s = session.query(Shout).filter(Shout.slug == shout_dict["slug"]).first()
bump = False
if s:
for key in shout_dict:
if key in s.__dict__:
if s.__dict__[key] != shout_dict[key]:
print(
"[migration] shout already exists, but differs in %s"
% key
)
bump = True
else:
print("[migration] shout already exists, but lacks %s" % key)
bump = True
if bump:
s.update(shout_dict)
else:
print("[migration] something went wrong with shout: \n%r" % shout_dict)
raise e
session.commit()
except Exception as e:
print(e)
print(s)
raise Exception
# shout views
ViewedByDay.create( shout = shout_dict['slug'], value = entry.get('views', 1) )
# del shout_dict['ratings']
shout_dict['oid'] = entry.get('_id')
storage['shouts']['by_oid'][entry['_id']] = shout_dict
storage['shouts']['by_slug'][slug] = shout_dict
return shout_dict
# shout topics aftermath
shout_dict["topics"] = []
for tpc in r["topics"]:
oldslug = tpc
newslug = storage["replacements"].get(oldslug, oldslug)
if newslug:
with local_session() as session:
shout_topic_old = (
session.query(ShoutTopic)
.filter(ShoutTopic.shout == shout_dict["slug"])
.filter(ShoutTopic.topic == oldslug)
.first()
)
if shout_topic_old:
shout_topic_old.update({"slug": newslug})
else:
shout_topic_new = (
session.query(ShoutTopic)
.filter(ShoutTopic.shout == shout_dict["slug"])
.filter(ShoutTopic.topic == newslug)
.first()
)
if not shout_topic_new:
try:
ShoutTopic.create(
**{"shout": shout_dict["slug"], "topic": newslug}
)
except:
print("[migration] shout topic error: " + newslug)
session.commit()
if newslug not in shout_dict["topics"]:
shout_dict["topics"].append(newslug)
else:
print("[migration] ignored topic slug: \n%r" % tpc["slug"])
# raise Exception
# content_item ratings to reactions
try:
for content_rating in entry.get("ratings", []):
with local_session() as session:
rater = (
session.query(User)
.filter(User.oid == content_rating["createdBy"])
.first()
)
reactedBy = (
rater
if rater
else session.query(User).filter(User.slug == "noname").first()
)
if rater:
reaction_dict = {
"kind": ReactionKind.LIKE
if content_rating["value"] > 0
else ReactionKind.DISLIKE,
"createdBy": reactedBy.slug,
"shout": shout_dict["slug"],
}
cts = content_rating.get("createdAt")
if cts:
reaction_dict["createdAt"] = date_parse(cts)
reaction = (
session.query(Reaction)
.filter(Reaction.shout == reaction_dict["shout"])
.filter(Reaction.createdBy == reaction_dict["createdBy"])
.filter(Reaction.kind == reaction_dict["kind"])
.first()
)
if reaction:
reaction_dict["kind"] = (
ReactionKind.AGREE
if content_rating["value"] > 0
else ReactionKind.DISAGREE,
)
reaction.update(reaction_dict)
else:
day = (reaction_dict.get("createdAt") or ts).replace(
hour=0, minute=0, second=0, microsecond=0
)
rea = Reaction.create(**reaction_dict)
await ReactedStorage.increment(rea)
# shout_dict['ratings'].append(reaction_dict)
except:
print("[migration] content_item.ratings error: \n%r" % content_rating)
raise Exception
# shout views
ViewedByDay.create(shout=shout_dict["slug"], value=entry.get("views", 1))
# del shout_dict['ratings']
shout_dict["oid"] = entry.get("_id")
storage["shouts"]["by_oid"][entry["_id"]] = shout_dict
storage["shouts"]["by_slug"][slug] = shout_dict
return shout_dict


@@ -4,104 +4,144 @@ from orm import User, UserRating
from dateutil.parser import parse
from base.orm import local_session
def migrate(entry):
if 'subscribedTo' in entry: del entry['subscribedTo']
email = entry['emails'][0]['address']
user_dict = {
'oid': entry['_id'],
'roles': [],
'ratings': [],
'username': email,
'email': email,
'password': entry['services']['password'].get('bcrypt', ''),
'createdAt': parse(entry['createdAt']),
'emailConfirmed': bool(entry['emails'][0]['verified']),
'muted': False, # amnesty
'bio': entry['profile'].get('bio', ''),
'notifications': [],
'createdAt': parse(entry['createdAt']),
'roles': [], # entry['roles'] # roles by community
'ratings': [], # entry['ratings']
'links': [],
'name': 'anonymous'
}
if 'updatedAt' in entry: user_dict['updatedAt'] = parse(entry['updatedAt'])
if 'wasOnineAt' in entry: user_dict['wasOnlineAt'] = parse(entry['wasOnlineAt'])
if entry.get('profile'):
# slug
user_dict['slug'] = entry['profile'].get('path').lower().replace(' ', '-').strip()
user_dict['bio'] = html2text(entry.get('profile').get('bio') or '')
if "subscribedTo" in entry:
del entry["subscribedTo"]
email = entry["emails"][0]["address"]
user_dict = {  # duplicate roles/ratings/createdAt keys collapsed
"oid": entry["_id"],
"username": email,
"email": email,
"password": entry["services"]["password"].get("bcrypt", ""),
"createdAt": parse(entry["createdAt"]),
"emailConfirmed": bool(entry["emails"][0]["verified"]),
"muted": False,  # amnesty
"bio": entry["profile"].get("bio", ""),
"notifications": [],
"roles": [],  # entry['roles'] # roles by community
"ratings": [],  # entry['ratings']
"links": [],
"name": "anonymous",
}
if "updatedAt" in entry:
user_dict["updatedAt"] = parse(entry["updatedAt"])
if "wasOnineAt" in entry:
user_dict["wasOnlineAt"] = parse(entry["wasOnlineAt"])
if entry.get("profile"):
# slug
user_dict["slug"] = (
entry["profile"].get("path").lower().replace(" ", "-").strip()
)
user_dict["bio"] = html2text(entry.get("profile").get("bio") or "")
# userpic
try: user_dict['userpic'] = 'https://assets.discours.io/unsafe/100x/' + entry['profile']['thumborId']
except KeyError:
try: user_dict['userpic'] = entry['profile']['image']['url']
except KeyError: user_dict['userpic'] = ''
# userpic
try:
user_dict["userpic"] = (
"https://assets.discours.io/unsafe/100x/"
+ entry["profile"]["thumborId"]
)
except KeyError:
try:
user_dict["userpic"] = entry["profile"]["image"]["url"]
except KeyError:
user_dict["userpic"] = ""
# name
fn = entry['profile'].get('firstName', '')
ln = entry['profile'].get('lastName', '')
name = user_dict['slug'] if user_dict['slug'] else 'anonymous'
name = fn if fn else name
name = (name + ' ' + ln) if ln else name
name = entry['profile']['path'].lower().strip().replace(' ', '-') if len(name) < 2 else name
user_dict['name'] = name
# name
fn = entry["profile"].get("firstName", "")
ln = entry["profile"].get("lastName", "")
name = user_dict["slug"] if user_dict["slug"] else "anonymous"
name = fn if fn else name
name = (name + " " + ln) if ln else name
name = (
entry["profile"]["path"].lower().strip().replace(" ", "-")
if len(name) < 2
else name
)
user_dict["name"] = name
# links
fb = entry['profile'].get('facebook', False)
if fb: user_dict['links'].append(fb)
vk = entry['profile'].get('vkontakte', False)
if vk: user_dict['links'].append(vk)
tr = entry['profile'].get('twitter', False)
if tr: user_dict['links'].append(tr)
ws = entry['profile'].get('website', False)
if ws: user_dict['links'].append(ws)
# links
fb = entry["profile"].get("facebook", False)
if fb:
user_dict["links"].append(fb)
vk = entry["profile"].get("vkontakte", False)
if vk:
user_dict["links"].append(vk)
tr = entry["profile"].get("twitter", False)
if tr:
user_dict["links"].append(tr)
ws = entry["profile"].get("website", False)
if ws:
user_dict["links"].append(ws)
# some checks
if not user_dict['slug'] and len(user_dict['links']) > 0:
user_dict['slug'] = user_dict['links'][0].split('/')[-1]
# some checks
if not user_dict["slug"] and len(user_dict["links"]) > 0:
user_dict["slug"] = user_dict["links"][0].split("/")[-1]
user_dict["slug"] = user_dict.get("slug", user_dict["email"].split("@")[0])
oid = user_dict["oid"]
user_dict["slug"] = user_dict["slug"].lower().strip().replace(" ", "-")
try:
user = User.create(**user_dict.copy())
except sqlalchemy.exc.IntegrityError:
print("[migration] cannot create user " + user_dict["slug"])
with local_session() as session:
old_user = (
session.query(User).filter(User.slug == user_dict["slug"]).first()
)
old_user.oid = oid
user = old_user
if not user:
print("[migration] ERROR: cannot find user " + user_dict["slug"])
raise Exception
user_dict["id"] = user.id
return user_dict
user_dict['slug'] = user_dict.get('slug', user_dict['email'].split('@')[0])
oid = user_dict['oid']
user_dict['slug'] = user_dict['slug'].lower().strip().replace(' ', '-')
try: user = User.create(**user_dict.copy())
except sqlalchemy.exc.IntegrityError:
print('[migration] cannot create user ' + user_dict['slug'])
with local_session() as session:
old_user = session.query(User).filter(User.slug == user_dict['slug']).first()
old_user.oid = oid
user = old_user
if not user:
print('[migration] ERROR: cannot find user ' + user_dict['slug'])
raise Exception
user_dict['id'] = user.id
return user_dict
def migrate_2stage(entry, id_map):
ce = 0
for rating_entry in entry.get('ratings',[]):
rater_oid = rating_entry['createdBy']
rater_slug = id_map.get(rater_oid)
if not rater_slug:
ce +=1
# print(rating_entry)
continue
oid = entry['_id']
author_slug = id_map.get(oid)
user_rating_dict = {
'value': rating_entry['value'],
'rater': rater_slug,
'user': author_slug
}
with local_session() as session:
try:
user_rating = UserRating.create(**user_rating_dict)
except sqlalchemy.exc.IntegrityError:
old_rating = session.query(UserRating).filter(UserRating.rater == rater_slug).first()
print('[migration] cannot create ' + author_slug + '`s rate from ' + rater_slug)
print('[migration] concat rating value %d+%d=%d' % (old_rating.value, rating_entry['value'], old_rating.value + rating_entry['value']))
old_rating.update({ 'value': old_rating.value + rating_entry['value'] })
session.commit()
except Exception as e:
print(e)
return ce
ce = 0
for rating_entry in entry.get("ratings", []):
rater_oid = rating_entry["createdBy"]
rater_slug = id_map.get(rater_oid)
if not rater_slug:
ce += 1
# print(rating_entry)
continue
oid = entry["_id"]
author_slug = id_map.get(oid)
user_rating_dict = {
"value": rating_entry["value"],
"rater": rater_slug,
"user": author_slug,
}
with local_session() as session:
try:
user_rating = UserRating.create(**user_rating_dict)
except sqlalchemy.exc.IntegrityError:
old_rating = (
session.query(UserRating)
.filter(UserRating.rater == rater_slug)
.first()
)
print(
"[migration] cannot create "
+ author_slug
+ "`s rate from "
+ rater_slug
)
print(
"[migration] concat rating value %d+%d=%d"
% (
old_rating.value,
rating_entry["value"],
old_rating.value + rating_entry["value"],
)
)
old_rating.update({"value": old_rating.value + rating_entry["value"]})
session.commit()
except Exception as e:
print(e)
return ce


@@ -1,9 +1,10 @@
from datetime import datetime
from json import JSONEncoder
class DateTimeEncoder(JSONEncoder):
def default(self, z):
if isinstance(z, datetime):
return (str(z))
return str(z)
else:
return super().default(z)
return super().default(z)
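A usage sketch (not part of the commit) showing why the exporter above can hand datetime values straight to json.dumps via cls=DateTimeEncoder:

import json
from datetime import datetime

payload = {"createdAt": datetime(2016, 4, 19, 4, 33, 53)}
print(json.dumps(payload, cls=DateTimeEncoder, ensure_ascii=False))
# {"createdAt": "2016-04-19 04:33:53"}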


@@ -12,9 +12,19 @@ from services.auth.users import UserStorage
from services.stat.viewed import ViewedStorage
from base.orm import Base, engine, local_session
__all__ = ["User", "Role", "Operation", "Permission", \
"Community", "Shout", "Topic", "TopicFollower", \
"Notification", "Reaction", "UserRating"]
__all__ = [
"User",
"Role",
"Operation",
"Permission",
"Community",
"Shout",
"Topic",
"TopicFollower",
"Notification",
"Reaction",
"UserRating",
]
Base.metadata.create_all(engine)
Operation.init_table()
@@ -24,8 +34,8 @@ Community.init_table()
Role.init_table()
with local_session() as session:
ViewedStorage.init(session)
ReactedStorage.init(session)
RoleStorage.init(session)
UserStorage.init(session)
TopicStorage.init(session)
ViewedStorage.init(session)
ReactedStorage.init(session)
RoleStorage.init(session)
UserStorage.init(session)
TopicStorage.init(session)


@@ -2,21 +2,22 @@ from datetime import datetime
from sqlalchemy import Boolean, Column, String, ForeignKey, DateTime
from base.orm import Base
class CollabAuthor(Base):
__tablename__ = 'collab_author'
id = None
collab = Column(ForeignKey('collab.id'), primary_key = True)
author = Column(ForeignKey('user.slug'), primary_key = True)
accepted = Column(Boolean, default=False)
class CollabAuthor(Base):
__tablename__ = "collab_author"
id = None # type: ignore
collab = Column(ForeignKey("collab.id"), primary_key=True)
author = Column(ForeignKey("user.slug"), primary_key=True)
accepted = Column(Boolean, default=False)
class Collab(Base):
__tablename__ = 'collab'
authors = Column()
title: str = Column(String, nullable=True, comment="Title")
body: str = Column(String, nullable=True, comment="Body")
pic: str = Column(String, nullable=True, comment="Picture")
createdAt: datetime = Column(DateTime, default=datetime.now, comment="Created At")
createdBy: str = Column(ForeignKey('user.id'), comment="Created By")
__tablename__ = "collab"
authors = Column()
title = Column(String, nullable=True, comment="Title")
body = Column(String, nullable=True, comment="Body")
pic = Column(String, nullable=True, comment="Picture")
createdAt = Column(DateTime, default=datetime.now, comment="Created At")
createdBy = Column(ForeignKey("user.id"), comment="Created By")


@@ -1,22 +1,23 @@ from datetime import datetime
from datetime import datetime
from sqlalchemy import Column, String, ForeignKey, DateTime, JSON as JSONType
from sqlalchemy import Column, String, ForeignKey, DateTime
from base.orm import Base
class ShoutCollection(Base):
__tablename__ = 'shout_collection'
id = None
shout = Column(ForeignKey('shout.slug'), primary_key = True)
collection = Column(ForeignKey('collection.slug'), primary_key = True)
__tablename__ = "shout_collection"
id = None # type: ignore
shout = Column(ForeignKey("shout.slug"), primary_key=True)
collection = Column(ForeignKey("collection.slug"), primary_key=True)
class Collection(Base):
__tablename__ = 'collection'
id = None
slug: str = Column(String, primary_key = True)
title: str = Column(String, nullable=False, comment="Title")
body: str = Column(String, nullable=True, comment="Body")
pic: str = Column(String, nullable=True, comment="Picture")
createdAt: datetime = Column(DateTime, default=datetime.now, comment="Created At")
createdBy: str = Column(ForeignKey('user.id'), comment="Created By")
__tablename__ = "collection"
id = None # type: ignore
slug = Column(String, primary_key=True)
title = Column(String, nullable=False, comment="Title")
body = Column(String, nullable=True, comment="Body")
pic = Column(String, nullable=True, comment="Picture")
createdAt = Column(DateTime, default=datetime.now, comment="Created At")
createdBy = Column(ForeignKey("user.id"), comment="Created By")


@@ -2,34 +2,39 @@ from datetime import datetime
from sqlalchemy import Column, String, ForeignKey, DateTime
from base.orm import Base, local_session
class CommunityFollower(Base):
__tablename__ = 'community_followers'
id = None
follower = Column(ForeignKey('user.slug'), primary_key = True)
community = Column(ForeignKey('community.slug'), primary_key = True)
createdAt: str = Column(DateTime, nullable=False, default = datetime.now, comment="Created at")
class CommunityFollower(Base):
__tablename__ = "community_followers"
id = None # type: ignore
follower = Column(ForeignKey("user.slug"), primary_key=True)
community = Column(ForeignKey("community.slug"), primary_key=True)
createdAt = Column(
DateTime, nullable=False, default=datetime.now, comment="Created at"
)
class Community(Base):
__tablename__ = 'community'
__tablename__ = "community"
name: str = Column(String, nullable=False, comment="Name")
slug: str = Column(String, nullable = False, unique=True, comment="Slug")
desc: str = Column(String, nullable=False, default='')
pic: str = Column(String, nullable=False, default='')
createdAt: str = Column(DateTime, nullable=False, default = datetime.now, comment="Created at")
createdBy: str = Column(ForeignKey("user.slug"), nullable=False, comment="Author")
name = Column(String, nullable=False, comment="Name")
slug = Column(String, nullable=False, unique=True, comment="Slug")
desc = Column(String, nullable=False, default="")
pic = Column(String, nullable=False, default="")
createdAt = Column(
DateTime, nullable=False, default=datetime.now, comment="Created at"
)
createdBy = Column(ForeignKey("user.slug"), nullable=False, comment="Author")
@staticmethod
def init_table():
with local_session() as session:
default = session.query(Community).filter(Community.slug == "discours").first()
if not default:
default = Community.create(
name = "Дискурс",
slug = "discours",
createdBy = "discours"
)
@staticmethod
def init_table():
with local_session() as session:
default = (
session.query(Community).filter(Community.slug == "discours").first()
)
if not default:
default = Community.create(
name="Дискурс", slug="discours", createdBy="discours"
)
Community.default_community = default
Community.default_community = default


@@ -1,11 +1,12 @@ from sqlalchemy import Column, String, JSON as JSONType
from sqlalchemy import Column, String, JSON as JSONType
from base.orm import Base
class Notification(Base):
__tablename__ = 'notification'
__tablename__ = "notification"
kind: str = Column(String, unique = True, primary_key = True)
template: str = Column(String, nullable = False)
variables: JSONType = Column(JSONType, nullable = True) # [ <var1>, .. ]
kind = Column(String, unique=True, primary_key=True)
template = Column(String, nullable=False)
variables = Column(JSONType, nullable=True) # [ <var1>, .. ]
# looks like frontend code
# looks like frontend code


@@ -6,92 +6,113 @@ from orm.community import Community
class ClassType(TypeDecorator):
impl = String
impl = String
@property
def python_type(self):
return NotImplemented
@property
def python_type(self):
return NotImplemented
def process_literal_param(self, value, dialect):
return NotImplemented
def process_literal_param(self, value, dialect):
return NotImplemented
def process_bind_param(self, value, dialect):
return value.__name__ if isinstance(value, type) else str(value)
def process_bind_param(self, value, dialect):
return value.__name__ if isinstance(value, type) else str(value)
def process_result_value(self, value, dialect):
class_ = REGISTRY.get(value)
if class_ is None:
warnings.warn(f"Can't find class <{value}>,find it yourself!", stacklevel=2)
return class_
def process_result_value(self, value, dialect):
class_ = REGISTRY.get(value)
if class_ is None:
warnings.warn(f"Can't find class <{value}>,find it yourself!", stacklevel=2)
return class_
class Role(Base):
__tablename__ = 'role'
__tablename__ = "role"
name: str = Column(String, nullable=False, comment="Role Name")
desc: str = Column(String, nullable=True, comment="Role Description")
community: int = Column(ForeignKey("community.id", ondelete="CASCADE"), nullable=False, comment="Community")
permissions = relationship(lambda: Permission)
name = Column(String, nullable=False, comment="Role Name")
desc = Column(String, nullable=True, comment="Role Description")
community = Column(
ForeignKey("community.id", ondelete="CASCADE"),
nullable=False,
comment="Community",
)
permissions = relationship(lambda: Permission)
@staticmethod
def init_table():
with local_session() as session:
default = session.query(Role).filter(Role.name == "author").first()
if default:
Role.default_role = default
return
@staticmethod
def init_table():
with local_session() as session:
default = session.query(Role).filter(Role.name == "author").first()
if default:
Role.default_role = default
return
default = Role.create(
name = "author",
desc = "Role for author",
community = Community.default_community.id
)
default = Role.create(
name="author",
desc="Role for author",
community=Community.default_community.id,
)
Role.default_role = default
Role.default_role = default
class Operation(Base):
__tablename__ = 'operation'
name: str = Column(String, nullable=False, unique=True, comment="Operation Name")
__tablename__ = "operation"
name = Column(String, nullable=False, unique=True, comment="Operation Name")
@staticmethod
def init_table():
with local_session() as session:
edit_op = session.query(Operation).filter(Operation.name == "edit").first()
if not edit_op:
edit_op = Operation.create(name = "edit")
Operation.edit_id = edit_op.id
@staticmethod
def init_table():
with local_session() as session:
edit_op = session.query(Operation).filter(Operation.name == "edit").first()
if not edit_op:
edit_op = Operation.create(name="edit")
Operation.edit_id = edit_op.id # type: ignore
class Resource(Base):
__tablename__ = "resource"
resource_class: str = Column(String, nullable=False, unique=True, comment="Resource class")
name: str = Column(String, nullable=False, unique=True, comment="Resource name")
__tablename__ = "resource"
resource_class = Column(
String, nullable=False, unique=True, comment="Resource class"
)
name = Column(String, nullable=False, unique=True, comment="Resource name")
@staticmethod
def init_table():
with local_session() as session:
shout_res = session.query(Resource).filter(Resource.name == "shout").first()
if not shout_res:
shout_res = Resource.create(name = "shout", resource_class = "shout")
Resource.shout_id = shout_res.id
@staticmethod
def init_table():
with local_session() as session:
shout_res = session.query(Resource).filter(Resource.name == "shout").first()
if not shout_res:
shout_res = Resource.create(name="shout", resource_class="shout")
Resource.shout_id = shout_res.id # type: ignore
class Permission(Base):
__tablename__ = "permission"
__table_args__ = (UniqueConstraint("role_id", "operation_id", "resource_id"), {"extend_existing": True})
__tablename__ = "permission"
__table_args__ = (
UniqueConstraint("role_id", "operation_id", "resource_id"),
{"extend_existing": True},
)
role_id: int = Column(ForeignKey("role.id", ondelete="CASCADE"), nullable=False, comment="Role")
operation_id: int = Column(ForeignKey("operation.id", ondelete="CASCADE"), nullable=False, comment="Operation")
resource_id: int = Column(ForeignKey("resource.id", ondelete="CASCADE"), nullable=False, comment="Resource")
role_id = Column(
ForeignKey("role.id", ondelete="CASCADE"), nullable=False, comment="Role"
)
operation_id = Column(
ForeignKey("operation.id", ondelete="CASCADE"),
nullable=False,
comment="Operation",
)
resource_id = Column(
ForeignKey("resource.id", ondelete="CASCADE"),
nullable=False,
comment="Resource",
)
if __name__ == '__main__':
Base.metadata.create_all(engine)
ops = [
Permission(role_id=1, operation_id=1, resource_id=1),
Permission(role_id=1, operation_id=2, resource_id=1),
Permission(role_id=1, operation_id=3, resource_id=1),
Permission(role_id=1, operation_id=4, resource_id=1),
Permission(role_id=2, operation_id=4, resource_id=1)
]
global_session.add_all(ops)
global_session.commit()
if __name__ == "__main__":
Base.metadata.create_all(engine)
ops = [
Permission(role_id=1, operation_id=1, resource_id=1),
Permission(role_id=1, operation_id=2, resource_id=1),
Permission(role_id=1, operation_id=3, resource_id=1),
Permission(role_id=1, operation_id=4, resource_id=1),
Permission(role_id=2, operation_id=4, resource_id=1),
]
global_session.add_all(ops)
global_session.commit()


@@ -5,27 +5,34 @@ from sqlalchemy import Enum
from services.stat.reacted import ReactedStorage, ReactionKind
from services.stat.viewed import ViewedStorage
class Reaction(Base):
__tablename__ = 'reaction'
body: str = Column(String, nullable=True, comment="Reaction Body")
createdAt = Column(DateTime, nullable=False, default = datetime.now, comment="Created at")
createdBy: str = Column(ForeignKey("user.slug"), nullable=False, comment="Sender")
updatedAt = Column(DateTime, nullable=True, comment="Updated at")
updatedBy = Column(ForeignKey("user.slug"), nullable=True, comment="Last Editor")
deletedAt = Column(DateTime, nullable=True, comment="Deleted at")
deletedBy = Column(ForeignKey("user.slug"), nullable=True, comment="Deleted by")
shout = Column(ForeignKey("shout.slug"), nullable=False)
replyTo: int = Column(ForeignKey("reaction.id"), nullable=True, comment="Reply to reaction ID")
range: str = Column(String, nullable=True, comment="Range in format <start index>:<end>")
kind: int = Column(Enum(ReactionKind), nullable=False, comment="Reaction kind")
oid: str = Column(String, nullable=True, comment="Old ID")
@property
async def stat(self):
return {
"viewed": await ViewedStorage.get_reaction(self.id),
"reacted": len(await ReactedStorage.get_reaction(self.id)),
# TODO: "replied"
"rating": await ReactedStorage.get_reaction_rating(self.id),
"commented": len(await ReactedStorage.get_reaction_comments(self.id))
}
class Reaction(Base):
__tablename__ = "reaction"
body = Column(String, nullable=True, comment="Reaction Body")
createdAt = Column(
DateTime, nullable=False, default=datetime.now, comment="Created at"
)
createdBy = Column(ForeignKey("user.slug"), nullable=False, comment="Sender")
updatedAt = Column(DateTime, nullable=True, comment="Updated at")
updatedBy = Column(ForeignKey("user.slug"), nullable=True, comment="Last Editor")
deletedAt = Column(DateTime, nullable=True, comment="Deleted at")
deletedBy = Column(ForeignKey("user.slug"), nullable=True, comment="Deleted by")
shout = Column(ForeignKey("shout.slug"), nullable=False)
replyTo = Column(
ForeignKey("reaction.id"), nullable=True, comment="Reply to reaction ID"
)
range = Column(
String, nullable=True, comment="Range in format <start index>:<end>"
)
kind = Column(Enum(ReactionKind), nullable=False, comment="Reaction kind")
oid = Column(String, nullable=True, comment="Old ID")
@property
async def stat(self):
return {
"viewed": await ViewedStorage.get_reaction(self.id),
"reacted": len(await ReactedStorage.get_reaction(self.id)),
# TODO: "replied"
"rating": await ReactedStorage.get_reaction_rating(self.id),
"commented": len(await ReactedStorage.get_reaction_comments(self.id)),
}
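# A minimal usage sketch (hypothetical caller, not part of this file): `stat` is an
# async property, so it has to be awaited, e.g.
#
#   reaction = session.query(Reaction).first()
#   stats = await reaction.stat
#   # -> {"viewed": ..., "reacted": ..., "rating": ..., "commented": ...}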

View File

@ -4,62 +4,64 @@ from sqlalchemy.orm import relationship
from orm.user import User
from orm.topic import Topic, ShoutTopic
from orm.reaction import Reaction
from services.stat.reacted import ReactedStorage, ReactionKind
from services.stat.reacted import ReactedStorage
from services.stat.viewed import ViewedStorage
from base.orm import Base
class ShoutReactionsFollower(Base):
__tablename__ = "shout_reactions_followers"
id = None
follower = Column(ForeignKey('user.slug'), primary_key = True)
shout = Column(ForeignKey('shout.slug'), primary_key = True)
auto = Column(Boolean, nullable=False, default = False)
createdAt: str = Column(DateTime, nullable=False, default = datetime.now, comment="Created at")
deletedAt: str = Column(DateTime, nullable=True)
id = None # type: ignore
follower = Column(ForeignKey("user.slug"), primary_key=True)
shout = Column(ForeignKey("shout.slug"), primary_key=True)
auto = Column(Boolean, nullable=False, default=False)
createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
deletedAt = Column(DateTime, nullable=True)
class ShoutAuthor(Base):
__tablename__ = "shout_author"
id = None
shout = Column(ForeignKey('shout.slug'), primary_key = True)
user = Column(ForeignKey('user.slug'), primary_key = True)
caption: str = Column(String, nullable = True, default = "")
id = None # type: ignore
shout = Column(ForeignKey("shout.slug"), primary_key=True)
user = Column(ForeignKey("user.slug"), primary_key=True)
caption = Column(String, nullable=True, default="")
class ShoutAllowed(Base):
__tablename__ = "shout_allowed"
id = None
shout = Column(ForeignKey('shout.slug'), primary_key = True)
user = Column(ForeignKey('user.id'), primary_key = True)
id = None # type: ignore
shout = Column(ForeignKey("shout.slug"), primary_key=True)
user = Column(ForeignKey("user.id"), primary_key=True)
class Shout(Base):
__tablename__ = 'shout'
__tablename__ = "shout"
id = None
slug: str = Column(String, primary_key=True)
community: str = Column(Integer, ForeignKey("community.id"), nullable=False, comment="Community")
body: str = Column(String, nullable=False, comment="Body")
createdAt: str = Column(DateTime, nullable=False, default = datetime.now, comment="Created at")
updatedAt: str = Column(DateTime, nullable=True, comment="Updated at")
replyTo: int = Column(ForeignKey("shout.slug"), nullable=True)
versionOf: int = Column(ForeignKey("shout.slug"), nullable=True)
tags: str = Column(String, nullable=True)
publishedBy: int = Column(ForeignKey("user.id"), nullable=True)
publishedAt: str = Column(DateTime, nullable=True)
cover: str = Column(String, nullable = True)
title: str = Column(String, nullable = True)
subtitle: str = Column(String, nullable = True)
layout: str = Column(String, nullable = True)
id = None # type: ignore
slug = Column(String, primary_key=True)
community = Column(Integer, ForeignKey("community.id"), nullable=False, comment="Community")
body = Column(String, nullable=False, comment="Body")
createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
updatedAt = Column(DateTime, nullable=True, comment="Updated at")
replyTo = Column(ForeignKey("shout.slug"), nullable=True)
versionOf = Column(ForeignKey("shout.slug"), nullable=True)
tags = Column(String, nullable=True)
publishedBy = Column(ForeignKey("user.id"), nullable=True)
publishedAt = Column(DateTime, nullable=True)
cover = Column(String, nullable=True)
title = Column(String, nullable=True)
subtitle = Column(String, nullable=True)
layout = Column(String, nullable=True)
reactions = relationship(lambda: Reaction)
authors = relationship(lambda: User, secondary=ShoutAuthor.__tablename__)
topics = relationship(lambda: Topic, secondary=ShoutTopic.__tablename__)
mainTopic = Column(ForeignKey("topic.slug"), nullable=True)
visibleFor = relationship(lambda: User, secondary=ShoutAllowed.__tablename__)
draft: bool = Column(Boolean, default=True)
oid: str = Column(String, nullable=True)
draft = Column(Boolean, default=True)
oid = Column(String, nullable=True)
@property
async def stat(self):
@ -67,5 +69,5 @@ class Shout(Base):
"viewed": await ViewedStorage.get_shout(self.slug),
"reacted": len(await ReactedStorage.get_shout(self.slug)),
"commented": len(await ReactedStorage.get_comments(self.slug)),
"rating": await ReactedStorage.get_rating(self.slug)
}
"rating": await ReactedStorage.get_rating(self.slug),
}

View File

@ -2,30 +2,32 @@ from datetime import datetime
from sqlalchemy import Column, String, ForeignKey, DateTime, JSON as JSONType
from base.orm import Base
class ShoutTopic(Base):
__tablename__ = 'shout_topic'
id = None
shout = Column(ForeignKey('shout.slug'), primary_key = True)
topic = Column(ForeignKey('topic.slug'), primary_key = True)
__tablename__ = "shout_topic"
id = None # type: ignore
shout = Column(ForeignKey("shout.slug"), primary_key=True)
topic = Column(ForeignKey("topic.slug"), primary_key=True)
class TopicFollower(Base):
__tablename__ = "topic_followers"
id = None
follower = Column(ForeignKey('user.slug'), primary_key = True)
topic = Column(ForeignKey('topic.slug'), primary_key = True)
createdAt: str = Column(DateTime, nullable=False, default = datetime.now, comment="Created at")
__tablename__ = "topic_followers"
id = None # type: ignore
follower = Column(ForeignKey("user.slug"), primary_key=True)
topic = Column(ForeignKey("topic.slug"), primary_key=True)
createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
class Topic(Base):
__tablename__ = 'topic'
id = None
slug: str = Column(String, primary_key = True)
title: str = Column(String, nullable=False, comment="Title")
body: str = Column(String, nullable=True, comment="Body")
pic: str = Column(String, nullable=True, comment="Picture")
children = Column(JSONType, nullable=True, default = [], comment="list of children topics")
community = Column(ForeignKey("community.slug"), nullable=False, comment="Community")
oid: str = Column(String, nullable=True, comment="Old ID")
__tablename__ = "topic"
id = None # type: ignore
slug = Column(String, primary_key=True)
title = Column(String, nullable=False, comment="Title")
body = Column(String, nullable=True, comment="Body")
pic = Column(String, nullable=True, comment="Picture")
children = Column(JSONType, nullable=True, default=[], comment="list of children topics")
community = Column(ForeignKey("community.slug"), nullable=False, comment="Community")
oid = Column(String, nullable=True, comment="Old ID")

View File

@ -1,89 +1,101 @@
from datetime import datetime
from sqlalchemy import Column, Integer, String, ForeignKey, Boolean, DateTime, JSON as JSONType
from sqlalchemy import (
Column,
Integer,
String,
ForeignKey,
Boolean,
DateTime,
JSON as JSONType,
)
from sqlalchemy.orm import relationship
from base.orm import Base, local_session
from orm.rbac import Role
from services.auth.roles import RoleStorage
class UserNotifications(Base):
__tablename__ = 'user_notifications'
id: int = Column(Integer, primary_key = True)
user_id: int = Column(Integer, ForeignKey("user.id"))
kind: str = Column(String, ForeignKey("notification.kind"))
values: JSONType = Column(JSONType, nullable = True) # [ <var1>, .. ]
class UserNotifications(Base):
__tablename__ = "user_notifications"
# id auto
user_id = Column(Integer, ForeignKey("user.id"))
kind = Column(String, ForeignKey("notification.kind"))
values = Column(JSONType, nullable=True) # [ <var1>, .. ]
class UserRating(Base):
__tablename__ = "user_rating"
__tablename__ = "user_rating"
id = None # type: ignore
rater = Column(ForeignKey("user.slug"), primary_key=True)
user = Column(ForeignKey("user.slug"), primary_key=True)
value = Column(Integer)
id = None
rater = Column(ForeignKey('user.slug'), primary_key = True)
user = Column(ForeignKey('user.slug'), primary_key = True)
value = Column(Integer)
class UserRole(Base):
__tablename__ = "user_role"
__tablename__ = "user_role"
id = None # type: ignore
user_id = Column(ForeignKey("user.id"), primary_key=True)
role_id = Column(ForeignKey("role.id"), primary_key=True)
id = None
user_id = Column(ForeignKey('user.id'), primary_key = True)
role_id = Column(ForeignKey('role.id'), primary_key = True)
class AuthorFollower(Base):
__tablename__ = "author_follower"
id = None
follower = Column(ForeignKey('user.slug'), primary_key = True)
author = Column(ForeignKey('user.slug'), primary_key = True)
createdAt = Column(DateTime, nullable=False, default = datetime.now, comment="Created at")
__tablename__ = "author_follower"
id = None # type: ignore
follower = Column(ForeignKey("user.slug"), primary_key=True)
author = Column(ForeignKey("user.slug"), primary_key=True)
createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
class User(Base):
__tablename__ = "user"
__tablename__ = "user"
email: str = Column(String, unique=True, nullable=False, comment="Email")
username: str = Column(String, nullable=False, comment="Login")
password: str = Column(String, nullable=True, comment="Password")
bio: str = Column(String, nullable=True, comment="Bio")
userpic: str = Column(String, nullable=True, comment="Userpic")
name: str = Column(String, nullable=True, comment="Display name")
slug: str = Column(String, unique=True, comment="User's slug")
muted: bool = Column(Boolean, default=False)
emailConfirmed: bool = Column(Boolean, default=False)
createdAt: DateTime = Column(DateTime, nullable=False, default = datetime.now, comment="Created at")
wasOnlineAt: DateTime = Column(DateTime, nullable=False, default = datetime.now, comment="Was online at")
deletedAt: DateTime = Column(DateTime, nullable=True, comment="Deleted at")
links: JSONType = Column(JSONType, nullable=True, comment="Links")
oauth: str = Column(String, nullable=True)
notifications = relationship(lambda: UserNotifications)
ratings = relationship(UserRating, foreign_keys=UserRating.user)
roles = relationship(lambda: Role, secondary=UserRole.__tablename__)
oid: str = Column(String, nullable = True)
@staticmethod
def init_table():
with local_session() as session:
default = session.query(User).filter(User.slug == "discours").first()
if not default:
default = User.create(
id = 0,
email = "welcome@discours.io",
username = "welcome@discours.io",
name = "Дискурс",
slug = "discours",
userpic = 'https://discours.io/images/logo-mini.svg',
)
email = Column(String, unique=True, nullable=False, comment="Email")
username = Column(String, nullable=False, comment="Login")
password = Column(String, nullable=True, comment="Password")
bio = Column(String, nullable=True, comment="Bio")
userpic = Column(String, nullable=True, comment="Userpic")
name = Column(String, nullable=True, comment="Display name")
slug = Column(String, unique=True, comment="User's slug")
muted = Column(Boolean, default=False)
emailConfirmed = Column(Boolean, default=False)
createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
wasOnlineAt = Column(DateTime, nullable=False, default=datetime.now, comment="Was online at")
deletedAt = Column(DateTime, nullable=True, comment="Deleted at")
links = Column(JSONType, nullable=True, comment="Links")
oauth = Column(String, nullable=True)
notifications = relationship(lambda: UserNotifications)
ratings = relationship(UserRating, foreign_keys=UserRating.user)
roles = relationship(lambda: Role, secondary=UserRole.__tablename__)
oid = Column(String, nullable=True)
User.default_user = default
@staticmethod
def init_table():
with local_session() as session:
default = session.query(User).filter(User.slug == "discours").first()
if not default:
default = User.create(
id=0,
email="welcome@discours.io",
username="welcome@discours.io",
name="Дискурс",
slug="discours",
userpic="https://discours.io/images/logo-mini.svg",
)
async def get_permission(self):
scope = {}
for user_role in self.roles:
role = await RoleStorage.get_role(user_role.id)
for p in role.permissions:
if not p.resource_id in scope:
scope[p.resource_id] = set()
scope[p.resource_id].add(p.operation_id)
return scope
User.default_user = default
async def get_permission(self):
scope = {}
for user_role in self.roles:
role: Role = await RoleStorage.get_role(user_role.id) # type: ignore
for p in role.permissions:
if p.resource_id not in scope:
scope[p.resource_id] = set()
scope[p.resource_id].add(p.operation_id)
return scope
if __name__ == "__main__":
print(User.get_permission(user_id=1))
print(User.get_permission(user_id=1)) # type: ignore
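# A minimal sketch (hypothetical helper, assuming the scope shape built in
# get_permission above, where resource_id maps to a set of operation ids):
#
#   async def user_can(user: User, resource_id: int, operation_id: int) -> bool:
#       scope = await user.get_permission()
#       return operation_id in scope.get(resource_id, set())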

View File

@ -16,4 +16,5 @@ transliterate
requests
bcrypt
websockets
bson
bson
flake8

View File

@ -1,76 +1,109 @@
from resolvers.auth import login, sign_out, is_email_used, register, confirm, auth_forget, auth_reset
from resolvers.zine import get_shout_by_slug, follow, unfollow, view_shout, \
top_month, top_overall, recent_published, recent_all, top_viewed, \
shouts_by_authors, shouts_by_topics, shouts_by_communities
from resolvers.profile import get_users_by_slugs, get_current_user, get_user_reacted_shouts, get_user_roles
from resolvers.topics import topic_follow, topic_unfollow, topics_by_author, topics_by_community, topics_all
from resolvers.auth import (
login,
sign_out,
is_email_used,
register,
confirm,
auth_forget,
auth_reset,
)
from resolvers.zine import (
get_shout_by_slug,
follow,
unfollow,
view_shout,
top_month,
top_overall,
recent_published,
recent_all,
top_viewed,
shouts_by_authors,
shouts_by_topics,
shouts_by_communities,
)
from resolvers.profile import (
get_users_by_slugs,
get_current_user,
get_user_reacted_shouts,
get_user_roles,
)
from resolvers.topics import (
topic_follow,
topic_unfollow,
topics_by_author,
topics_by_community,
topics_all,
)
# from resolvers.feed import shouts_for_feed, my_candidates
from resolvers.reactions import create_reaction, delete_reaction, update_reaction, get_all_reactions
from resolvers.reactions import (
create_reaction,
delete_reaction,
update_reaction,
get_all_reactions,
)
from resolvers.collab import invite_author, remove_author
from resolvers.editor import create_shout, delete_shout, update_shout
from resolvers.community import create_community, delete_community, get_community, get_communities
from resolvers.community import (
create_community,
delete_community,
get_community,
get_communities,
)
__all__ = [
"follow",
"unfollow",
# auth
"login",
"register",
"is_email_used",
"confirm",
"auth_forget",
"auth_reset"
"sign_out",
# profile
"get_current_user",
"get_users_by_slugs",
# zine
"shouts_for_feed",
"my_candidates",
"recent_published",
"recent_reacted",
"recent_all",
"shouts_by_topics",
"shouts_by_authors",
"shouts_by_communities",
"get_user_reacted_shouts",
"top_month",
"top_overall",
"top_viewed",
"view_shout",
"view_reaction",
"get_shout_by_slug",
# editor
"create_shout",
"update_shout",
"delete_shout",
# collab
"invite_author",
"remove_author"
# topics
"topics_all",
"topics_by_community",
"topics_by_author",
"topic_follow",
"topic_unfollow",
# communities
"get_community",
"get_communities",
"create_community",
"delete_community",
# reactions
"get_shout_reactions",
"reactions_follow",
"reactions_unfollow",
"create_reaction",
"update_reaction",
"delete_reaction",
"get_all_reactions",
]
# auth
"login",
"register",
"is_email_used",
"confirm",
"auth_forget",
"auth_reset" "sign_out",
# profile
"get_current_user",
"get_users_by_slugs",
# zine
"shouts_for_feed",
"my_candidates",
"recent_published",
"recent_reacted",
"recent_all",
"shouts_by_topics",
"shouts_by_authors",
"shouts_by_communities",
"get_user_reacted_shouts",
"top_month",
"top_overall",
"top_viewed",
"view_shout",
"view_reaction",
"get_shout_by_slug",
# editor
"create_shout",
"update_shout",
"delete_shout",
# collab
"invite_author",
"remove_author"
# topics
"topics_all",
"topics_by_community",
"topics_by_author",
"topic_follow",
"topic_unfollow",
# communities
"get_community",
"get_communities",
"create_community",
"delete_community",
# reactions
"get_shout_reactions",
"reactions_follow",
"reactions_unfollow",
"create_reaction",
"update_reaction",
"delete_reaction",
"get_all_reactions",
]
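# Note: several names listed in __all__ (shouts_for_feed, my_candidates, recent_reacted,
# view_reaction, get_shout_reactions, reactions_follow, reactions_unfollow) are not
# imported above (the feed import is commented out), so `from resolvers import *`
# would fail until those imports are restored.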

View File

@ -13,119 +13,126 @@ from resolvers.profile import get_user_info
from base.exceptions import InvalidPassword, InvalidToken
from settings import JWT_AUTH_HEADER
@mutation.field("confirmEmail")
async def confirm(*_, confirm_token):
''' confirm owning email address '''
auth_token, user = await Authorize.confirm(confirm_token)
if auth_token:
user.emailConfirmed = True
user.save()
return { "token": auth_token, "user" : user}
else:
return { "error": "email not confirmed"}
"""confirm owning email address"""
auth_token, user = await Authorize.confirm(confirm_token)
if auth_token:
user.emailConfirmed = True
user.save()
return {"token": auth_token, "user": user}
else:
return {"error": "email not confirmed"}
@mutation.field("registerUser")
async def register(*_, email: str, password: str = ""):
''' creates new user account '''
with local_session() as session:
user = session.query(User).filter(User.email == email).first()
if user:
return {"error" : "user already exist"}
"""creates new user account"""
with local_session() as session:
user = session.query(User).filter(User.email == email).first()
if user:
return {"error": "user already exist"}
user_dict = { "email": email }
username = email.split('@')[0]
user_dict["username"] = username
user_dict["slug"] = quote_plus(translit(username, 'ru', reversed=True).replace('.', '-').lower())
if password:
user_dict["password"] = Password.encode(password)
user = User(**user_dict)
user.roles.append(Role.default_role)
with local_session() as session:
session.add(user)
session.commit()
user_dict = {"email": email}
username = email.split("@")[0]
user_dict["username"] = username
user_dict["slug"] = quote_plus(
translit(username, "ru", reversed=True).replace(".", "-").lower()
)
if password:
user_dict["password"] = Password.encode(password)
user = User(**user_dict)
user.roles.append(Role.default_role)
with local_session() as session:
session.add(user)
session.commit()
await send_confirm_email(user)
await send_confirm_email(user)
return {"user": user}
return { "user": user }
@mutation.field("requestPasswordUpdate")
async def auth_forget(_, info, email):
''' send email to recover account '''
with local_session() as session:
user = session.query(User).filter(User.email == email).first()
if not user:
return {"error" : "user not exist"}
"""send email to recover account"""
with local_session() as session:
user = session.query(User).filter(User.email == email).first()
if not user:
return {"error": "user not exist"}
await send_reset_password_email(user)
await send_reset_password_email(user)
return {}
return {}
@mutation.field("updatePassword")
async def auth_reset(_, info, password, resetToken):
''' set the new password '''
try:
user_id = await ResetPassword.verify(resetToken)
except InvalidToken as e:
return {"error" : e.message}
"""set the new password"""
try:
user_id = await ResetPassword.verify(resetToken)
except InvalidToken as e:
return {"error": e.message}
with local_session() as session:
user = session.query(User).filter_by(id = user_id).first()
if not user:
return {"error" : "user not exist"}
user.password = Password.encode(password)
session.commit()
with local_session() as session:
user = session.query(User).filter_by(id=user_id).first()
if not user:
return {"error": "user not exist"}
user.password = Password.encode(password)
session.commit()
return {}
return {}
@query.field("signIn")
async def login(_, info: GraphQLResolveInfo, email: str, password: str = ""):
with local_session() as session:
orm_user = session.query(User).filter(User.email == email).first()
if orm_user is None:
print(f"signIn {email}: email not found")
return {"error" : "email not found"}
with local_session() as session:
orm_user = session.query(User).filter(User.email == email).first()
if orm_user is None:
print(f"signIn {email}: email not found")
return {"error": "email not found"}
if not password:
print(f"signIn {email}: send auth email")
await send_auth_email(orm_user)
return {}
if not password:
print(f"signIn {email}: send auth email")
await send_auth_email(orm_user)
return {}
if not orm_user.emailConfirmed:
return {"error" : "email not confirmed"}
if not orm_user.emailConfirmed:
return {"error": "email not confirmed"}
try:
device = info.context["request"].headers['device']
except KeyError:
device = "pc"
auto_delete = False if device == "mobile" else True # why autodelete with mobile?
try:
device = info.context["request"].headers["device"]
except KeyError:
device = "pc"
auto_delete = False if device == "mobile" else True # why autodelete with mobile?
try:
user = Identity.identity(orm_user, password)
except InvalidPassword:
print(f"signIn {email}: invalid password")
return {"error" : "invalid password"}
token = await Authorize.authorize(user, device=device, auto_delete=auto_delete)
print(f"signIn {email}: OK")
try:
user = Identity.identity(orm_user, password)
except InvalidPassword:
print(f"signIn {email}: invalid password")
return {"error": "invalid password"}
return {
"token" : token,
"user": orm_user,
"info": await get_user_info(orm_user.slug)
}
token = await Authorize.authorize(user, device=device, auto_delete=auto_delete)
print(f"signIn {email}: OK")
return {
"token": token,
"user": orm_user,
"info": await get_user_info(orm_user.slug),
}
@query.field("signOut")
@login_required
async def sign_out(_, info: GraphQLResolveInfo):
token = info.context["request"].headers[JWT_AUTH_HEADER]
status = await Authorize.revoke(token)
return True
token = info.context["request"].headers[JWT_AUTH_HEADER]
status = await Authorize.revoke(token)
return True
@query.field("isEmailUsed")
async def is_email_used(_, info, email):
with local_session() as session:
user = session.query(User).filter(User.email == email).first()
return not user is None
with local_session() as session:
user = session.query(User).filter(User.email == email).first()
return user is not None

View File

@ -6,67 +6,69 @@ from orm.user import User
from base.resolvers import query, mutation
from auth.authenticate import login_required
@query.field("getCollabs")
@login_required
async def get_collabs(_, info):
auth = info.context["request"].auth
user_id = auth.user_id
collabs = []
with local_session() as session:
user = session.query(User).where(User.id == user_id).first()
collabs = session.query(Collab).filter(user.slug in Collab.authors)
return collabs
auth = info.context["request"].auth
user_id = auth.user_id
collabs = []
with local_session() as session:
user = session.query(User).where(User.id == user_id).first()
collabs = session.query(Collab).filter(user.slug in Collab.authors)
return collabs
@mutation.field("inviteAuthor")
@login_required
async def invite_author(_, info, author, shout):
auth = info.context["request"].auth
user_id = auth.user_id
auth = info.context["request"].auth
user_id = auth.user_id
with local_session() as session:
shout = session.query(Shout).filter(Shout.slug == shout).first()
if not shout:
return {"error": "invalid shout slug"}
authors = [a.id for a in shout.authors]
if user_id not in authors:
return {"error": "access denied"}
author = session.query(User).filter(User.slug == author).first()
if author.id in authors:
return {"error": "already added"}
shout.authors.append(author)
shout.updated_at = datetime.now()
shout.save()
session.commit()
with local_session() as session:
shout = session.query(Shout).filter(Shout.slug == shout).first()
if not shout:
return {"error": "invalid shout slug"}
authors = [a.id for a in shout.authors]
if user_id not in authors:
return {"error": "access denied"}
author = session.query(User).filter(User.slug == author).first()
if author.id in authors:
return {"error": "already added"}
shout.authors.append(author)
shout.updated_at = datetime.now()
shout.save()
session.commit()
# TODO: email notify
# TODO: email notify
return {}
return {}
@mutation.field("removeAuthor")
@login_required
async def remove_author(_, info, author, shout):
auth = info.context["request"].auth
user_id = auth.user_id
auth = info.context["request"].auth
user_id = auth.user_id
with local_session() as session:
shout = session.query(Shout).filter(Shout.slug == shout).first()
if not shout:
return {"error": "invalid shout slug"}
authors = [author.id for author in shout.authors]
if user_id not in authors:
return {"error": "access denied"}
author = session.query(User).filter(User.slug == author).first()
if author.id not in authors:
return {"error": "not in authors"}
shout.authors.remove(author)
shout.updated_at = datetime.now()
shout.save()
session.commit()
with local_session() as session:
shout = session.query(Shout).filter(Shout.slug == shout).first()
if not shout:
return {"error": "invalid shout slug"}
authors = [author.id for author in shout.authors]
if user_id not in authors:
return {"error": "access denied"}
author = session.query(User).filter(User.slug == author).first()
if author.id not in authors:
return {"error": "not in authors"}
shout.authors.remove(author)
shout.updated_at = datetime.now()
shout.save()
session.commit()
# result = Result("INVITED")
# FIXME: await ShoutStorage.put(result)
# result = Result("INVITED")
# FIXME: await ShoutStorage.put(result)
# TODO: email notify
# TODO: email notify
return {}
return {}

View File

@ -7,93 +7,121 @@ from datetime import datetime
from typing import Collection
from sqlalchemy import and_
@mutation.field("createCollection")
@login_required
async def create_collection(_, info, input):
auth = info.context["request"].auth
user_id = auth.user_id
collection = Collection.create(
slug = input.get('slug', ''),
title = input.get('title', ''),
desc = input.get('desc', ''),
pic = input.get('pic', '')
)
auth = info.context["request"].auth
user_id = auth.user_id
collection = Collection.create(
slug=input.get("slug", ""),
title=input.get("title", ""),
desc=input.get("desc", ""),
pic=input.get("pic", ""),
)
return {"collection": collection}
return {"collection": collection}
@mutation.field("updateCollection")
@login_required
async def update_collection(_, info, input):
auth = info.context["request"].auth
user_id = auth.user_id
collection_slug = input.get('slug', '')
with local_session() as session:
owner = session.query(User).filter(User.id == user_id) # note list here
collection = session.query(Collection).filter(Collection.slug == collection_slug).first()
editors = [e.slug for e in collection.editors]
if not collection:
return {"error": "invalid collection id"}
if collection.createdBy not in (owner + editors):
return {"error": "access denied"}
collection.title = input.get('title', '')
collection.desc = input.get('desc', '')
collection.pic = input.get('pic', '')
collection.updatedAt = datetime.now()
session.commit()
auth = info.context["request"].auth
user_id = auth.user_id
collection_slug = input.get("slug", "")
with local_session() as session:
owner = session.query(User).filter(User.id == user_id) # note list here
collection = (
session.query(Collection).filter(Collection.slug == collection_slug).first()
)
editors = [e.slug for e in collection.editors]
if not collection:
return {"error": "invalid collection id"}
if collection.createdBy not in (owner + editors):
return {"error": "access denied"}
collection.title = input.get("title", "")
collection.desc = input.get("desc", "")
collection.pic = input.get("pic", "")
collection.updatedAt = datetime.now()
session.commit()
@mutation.field("deleteCollection")
@login_required
async def delete_collection(_, info, slug):
auth = info.context["request"].auth
user_id = auth.user_id
with local_session() as session:
collection = session.query(Collection).filter(Collection.slug == slug).first()
if not collection:
return {"error": "invalid collection slug"}
if collection.owner != user_id:
return {"error": "access denied"}
collection.deletedAt = datetime.now()
session.commit()
auth = info.context["request"].auth
user_id = auth.user_id
with local_session() as session:
collection = session.query(Collection).filter(Collection.slug == slug).first()
if not collection:
return {"error": "invalid collection slug"}
if collection.owner != user_id:
return {"error": "access denied"}
collection.deletedAt = datetime.now()
session.commit()
return {}
return {}
@query.field("getUserCollections")
async def get_user_collections(_, info, userslug):
collections = []
with local_session() as session:
user = session.query(User).filter(User.slug == userslug).first()
if user:
# TODO: check rights here
collections = session.\
query(Collection).\
where(and_(Collection.createdBy == userslug, Collection.publishedAt != None)).\
all()
for c in collections:
shouts = session.query(ShoutCollection).filter(ShoutCollection.collection == c.id).all()
c.amount = len(shouts)
return collections
collections = []
with local_session() as session:
user = session.query(User).filter(User.slug == userslug).first()
if user:
# TODO: check rights here
collections = (
session.query(Collection)
.where(
and_(
Collection.createdBy == userslug, Collection.publishedAt != None
)
)
.all()
)
for c in collections:
shouts = (
session.query(ShoutCollection)
.filter(ShoutCollection.collection == c.id)
.all()
)
c.amount = len(shouts)
return collections
@query.field("getMyCollections")
async def get_user_collections(_, info, userslug):
collections = []
with local_session() as session:
user = session.query(User).filter(User.slug == userslug).first()
if user:
# TODO: check rights here
collections = session.\
query(Collection).\
where(and_(Collection.createdBy == userslug, Collection.publishedAt != None)).\
all()
for c in collections:
shouts = session.query(ShoutCollection).filter(ShoutCollection.collection == c.id).all()
c.amount = len(shouts)
return collections
collections = []
with local_session() as session:
user = session.query(User).filter(User.slug == userslug).first()
if user:
# TODO: check rights here
collections = (
session.query(Collection)
.where(
and_(
Collection.createdBy == userslug, Collection.publishedAt != None
)
)
.all()
)
for c in collections:
shouts = (
session.query(ShoutCollection)
.filter(ShoutCollection.collection == c.id)
.all()
)
c.amount = len(shouts)
return collections
@query.field("getMyColelctions")
@login_required
async def get_my_collections(_, info):
auth = info.context["request"].auth
user_id = auth.user_id
with local_session() as session:
collections = session.query(Collection).when(Collection.createdBy == user_id).all()
return collections
auth = info.context["request"].auth
user_id = auth.user_id
with local_session() as session:
collections = (
session.query(Collection).where(Collection.createdBy == user_id).all()
)
return collections

View File

@ -7,96 +7,112 @@ from datetime import datetime
from typing import List
from sqlalchemy import and_
@mutation.field("createCommunity")
@login_required
async def create_community(_, info, input):
auth = info.context["request"].auth
user_id = auth.user_id
auth = info.context["request"].auth
user_id = auth.user_id
community = Community.create(
slug = input.get('slug', ''),
title = input.get('title', ''),
desc = input.get('desc', ''),
pic = input.get('pic', '')
)
community = Community.create(
slug=input.get("slug", ""),
title=input.get("title", ""),
desc=input.get("desc", ""),
pic=input.get("pic", ""),
)
return {"community": community}
return {"community": community}
@mutation.field("updateCommunity")
@login_required
async def update_community(_, info, input):
auth = info.context["request"].auth
user_id = auth.user_id
community_slug = input.get('slug', '')
auth = info.context["request"].auth
user_id = auth.user_id
community_slug = input.get("slug", "")
with local_session() as session:
owner = session.query(User).filter(User.id == user_id) # note list here
community = (
session.query(Community).filter(Community.slug == community_slug).first()
)
editors = [e.slug for e in community.editors]
if not community:
return {"error": "invalid community id"}
if community.createdBy not in (owner + editors):
return {"error": "access denied"}
community.title = input.get("title", "")
community.desc = input.get("desc", "")
community.pic = input.get("pic", "")
community.updatedAt = datetime.now()
session.commit()
with local_session() as session:
owner = session.query(User).filter(User.id == user_id) # note list here
community = session.query(Community).filter(Community.slug == community_slug).first()
editors = [e.slug for e in community.editors]
if not community:
return {"error": "invalid community id"}
if community.createdBy not in (owner + editors):
return {"error": "access denied"}
community.title = input.get('title', '')
community.desc = input.get('desc', '')
community.pic = input.get('pic', '')
community.updatedAt = datetime.now()
session.commit()
@mutation.field("deleteCommunity")
@login_required
async def delete_community(_, info, slug):
auth = info.context["request"].auth
user_id = auth.user_id
auth = info.context["request"].auth
user_id = auth.user_id
with local_session() as session:
community = session.query(Community).filter(Community.slug == slug).first()
if not community:
return {"error": "invalid community slug"}
if community.owner != user_id:
return {"error": "access denied"}
community.deletedAt = datetime.now()
session.commit()
with local_session() as session:
community = session.query(Community).filter(Community.slug == slug).first()
if not community:
return {"error": "invalid community slug"}
if community.owner != user_id:
return {"error": "access denied"}
community.deletedAt = datetime.now()
session.commit()
return {}
return {}
@query.field("getCommunity")
async def get_community(_, info, slug):
with local_session() as session:
community = session.query(Community).filter(Community.slug == slug).first()
if not community:
return {"error": "invalid community id"}
with local_session() as session:
community = session.query(Community).filter(Community.slug == slug).first()
if not community:
return {"error": "invalid community id"}
return community
return community
@query.field("getCommunities")
async def get_communities(_, info):
with local_session() as session:
communities = session.query(Community)
return communities
with local_session() as session:
communities = session.query(Community)
return communities
def community_follow(user, slug):
CommunityFollower.create(
follower = user.slug,
community = slug
)
CommunityFollower.create(follower=user.slug, community=slug)
def community_unfollow(user, slug):
with local_session() as session:
following = session.query(CommunityFollower).\
filter(and_(CommunityFollower.follower == user.slug, CommunityFollower.community == slug)).\
first()
if not following:
raise Exception("[orm.community] following was not exist")
session.delete(following)
session.commit()
with local_session() as session:
following = (
session.query(CommunityFollower)
.filter(
and_(
CommunityFollower.follower == user.slug,
CommunityFollower.community == slug,
)
)
.first()
)
if not following:
raise Exception("[orm.community] following was not exist")
session.delete(following)
session.commit()
@query.field("userFollowedCommunities")
def get_followed_communities(_, user_slug) -> List[Community]:
ccc = []
with local_session() as session:
ccc = session.query(Community.slug).\
join(CommunityFollower).\
where(CommunityFollower.follower == user_slug).\
all()
return ccc
ccc = []
with local_session() as session:
ccc = (
session.query(Community.slug)
.join(CommunityFollower)
.where(CommunityFollower.follower == user_slug)
.all()
)
return ccc

View File

@ -13,106 +13,82 @@ from services.zine.gittask import GitTask
@mutation.field("createShout")
@login_required
async def create_shout(_, info, input):
user = info.context["request"].user
user = info.context["request"].user
topic_slugs = input.get("topic_slugs", [])
if topic_slugs:
del input["topic_slugs"]
topic_slugs = input.get("topic_slugs", [])
if topic_slugs:
del input["topic_slugs"]
new_shout = Shout.create(**input)
ShoutAuthor.create(
shout = new_shout.slug,
user = user.slug
)
new_shout = Shout.create(**input)
ShoutAuthor.create(shout=new_shout.slug, user=user.slug)
reactions_follow(user, new_shout.slug, True)
reactions_follow(user, new_shout.slug, True)
if "mainTopic" in input:
topic_slugs.append(input["mainTopic"])
if "mainTopic" in input:
topic_slugs.append(input["mainTopic"])
for slug in topic_slugs:
topic = ShoutTopic.create(
shout = new_shout.slug,
topic = slug)
new_shout.topic_slugs = topic_slugs
for slug in topic_slugs:
topic = ShoutTopic.create(shout=new_shout.slug, topic=slug)
new_shout.topic_slugs = topic_slugs
task = GitTask(
input,
user.username,
user.email,
"new shout %s" % (new_shout.slug)
)
# await ShoutCommentsStorage.send_shout(new_shout)
task = GitTask(input, user.username, user.email, "new shout %s" % (new_shout.slug))
# await ShoutCommentsStorage.send_shout(new_shout)
return {"shout": new_shout}
return {
"shout" : new_shout
}
@mutation.field("updateShout")
@login_required
async def update_shout(_, info, input):
auth = info.context["request"].auth
user_id = auth.user_id
auth = info.context["request"].auth
user_id = auth.user_id
slug = input["slug"]
slug = input["slug"]
session = local_session()
user = session.query(User).filter(User.id == user_id).first()
shout = session.query(Shout).filter(Shout.slug == slug).first()
session = local_session()
user = session.query(User).filter(User.id == user_id).first()
shout = session.query(Shout).filter(Shout.slug == slug).first()
if not shout:
return {
"error" : "shout not found"
}
if not shout:
return {"error": "shout not found"}
authors = [author.id for author in shout.authors]
if not user_id in authors:
scopes = auth.scopes
print(scopes)
if not Resource.shout_id in scopes:
return {
"error" : "access denied"
}
authors = [author.id for author in shout.authors]
if user_id not in authors:
scopes = auth.scopes
print(scopes)
if Resource.shout_id not in scopes:
return {"error": "access denied"}
shout.update(input)
shout.updatedAt = datetime.now()
session.commit()
session.close()
shout.update(input)
shout.updatedAt = datetime.now()
session.commit()
session.close()
for topic in input.get("topic_slugs", []):
ShoutTopic.create(
shout = slug,
topic = topic)
for topic in input.get("topic_slugs", []):
ShoutTopic.create(shout=slug, topic=topic)
task = GitTask(
input,
user.username,
user.email,
"update shout %s" % (slug)
)
task = GitTask(input, user.username, user.email, "update shout %s" % (slug))
return {"shout": shout}
return {
"shout" : shout
}
@mutation.field("deleteShout")
@login_required
async def delete_shout(_, info, slug):
auth = info.context["request"].auth
user_id = auth.user_id
auth = info.context["request"].auth
user_id = auth.user_id
with local_session() as session:
shout = session.query(Shout).filter(Shout.slug == slug).first()
authors = [a.id for a in shout.authors]
if not shout:
return {"error": "invalid shout slug"}
if user_id not in authors:
return {"error": "access denied"}
for a in authors:
reactions_unfollow(a.slug, slug, True)
shout.deletedAt = datetime.now()
session.commit()
with local_session() as session:
shout = session.query(Shout).filter(Shout.slug == slug).first()
authors = [a.id for a in shout.authors]
if not shout:
return {"error": "invalid shout slug"}
if user_id not in authors:
return {"error": "access denied"}
for a in authors:
reactions_unfollow(a.slug, slug, True)
shout.deletedAt = datetime.now()
session.commit()
return {}
return {}

View File

@ -7,36 +7,44 @@ from orm.topic import TopicFollower
from orm.user import AuthorFollower
from typing import List
@query.field("shoutsForFeed")
@login_required
def get_user_feed(_, info, page, size) -> List[Shout]:
user = info.context["request"].user
shouts = []
with local_session() as session:
shouts = session.query(Shout).\
join(ShoutAuthor).\
join(AuthorFollower).\
where(AuthorFollower.follower == user.slug).\
order_by(desc(Shout.createdAt))
topicrows = session.query(Shout).\
join(ShoutTopic).\
join(TopicFollower).\
where(TopicFollower.follower == user.slug).\
order_by(desc(Shout.createdAt))
shouts = shouts.union(topicrows).limit(size).offset(page * size).all()
return shouts
user = info.context["request"].user
shouts = []
with local_session() as session:
shouts = (
session.query(Shout)
.join(ShoutAuthor)
.join(AuthorFollower)
.where(AuthorFollower.follower == user.slug)
.order_by(desc(Shout.createdAt))
)
topicrows = (
session.query(Shout)
.join(ShoutTopic)
.join(TopicFollower)
.where(TopicFollower.follower == user.slug)
.order_by(desc(Shout.createdAt))
)
shouts = shouts.union(topicrows).limit(size).offset(page * size).all()
return shouts
@query.field("myCandidates")
@login_required
async def user_unpublished_shouts(_, info, page = 1, size = 10) -> List[Shout]:
user = info.context["request"].user
shouts = []
with local_session() as session:
shouts = session.query(Shout).\
join(ShoutAuthor).\
where(and_(Shout.publishedAt == None, ShoutAuthor.user == user.slug)).\
order_by(desc(Shout.createdAt)).\
limit(size).\
offset( page * size).\
all()
return shouts
async def user_unpublished_shouts(_, info, page=1, size=10) -> List[Shout]:
user = info.context["request"].user
shouts = []
with local_session() as session:
shouts = (
session.query(Shout)
.join(ShoutAuthor)
.where(and_(Shout.publishedAt == None, ShoutAuthor.user == user.slug))
.order_by(desc(Shout.createdAt))
.limit(size)
.offset(page * size)
.all()
)
return shouts

View File

@ -4,263 +4,283 @@ import asyncio, uuid, json
from datetime import datetime
from base.redis import redis
class ChatFollowing:
queue = asyncio.Queue()
def __init__(self, chat_id):
self.chat_id = chat_id
class ChatFollowing:
queue = asyncio.Queue()
def __init__(self, chat_id):
self.chat_id = chat_id
class MessagesStorage:
lock = asyncio.Lock()
chats = []
lock = asyncio.Lock()
chats = []
@staticmethod
async def register_chat(chat):
async with MessagesStorage.lock:
MessagesStorage.chats.append(chat)
@staticmethod
async def remove_chat(chat):
async with MessagesStorage.lock:
MessagesStorage.chats.remove(chat)
@staticmethod
async def put(message_result):
async with MessagesStorage.lock:
for chat in MessagesStorage.chats:
if message_result.message["chatId"] == chat.chat_id:
chat.queue.put_nowait(message_result)
@staticmethod
async def register_chat(chat):
async with MessagesStorage.lock:
MessagesStorage.chats.append(chat)
@staticmethod
async def remove_chat(chat):
async with MessagesStorage.lock:
MessagesStorage.chats.remove(chat)
@staticmethod
async def put(message_result):
async with MessagesStorage.lock:
for chat in MessagesStorage.chats:
if message_result.message["chatId"] == chat.chat_id:
chat.queue.put_nowait(message_result)
class MessageResult:
def __init__(self, status, message):
self.status = status
self.message = message
def __init__(self, status, message):
self.status = status
self.message = message
async def get_unread_counter(user_slug):
chats = await redis.execute("GET", f"chats_by_user/{user_slug}")
if not chats:
return 0
chats = await redis.execute("GET", f"chats_by_user/{user_slug}")
if not chats:
return 0
chats = json.loads(chats)
unread = 0
for chat_id in chats:
n = await redis.execute("LLEN", f"chats/{chat_id}/unread/{user_slug}")
unread += n
chats = json.loads(chats)
unread = 0
for chat_id in chats:
n = await redis.execute("LLEN", f"chats/{chat_id}/unread/{user_slug}")
unread += n
return unread
return unread
async def add_user_to_chat(user_slug, chat_id, chat = None):
chats = await redis.execute("GET", f"chats_by_user/{user_slug}")
if not chats:
chats = set()
else:
chats = set(json.loads(chats))
chats.add(str(chat_id))
chats = list(chats)
await redis.execute("SET", f"chats_by_user/{user_slug}", json.dumps(chats))
if chat:
users = set(chat["users"])
users.add(user_slug)
chat["users"] = list(users)
await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
async def add_user_to_chat(user_slug, chat_id, chat=None):
chats = await redis.execute("GET", f"chats_by_user/{user_slug}")
if not chats:
chats = set()
else:
chats = set(json.loads(chats))
chats.add(str(chat_id))
chats = list(chats)
await redis.execute("SET", f"chats_by_user/{user_slug}", json.dumps(chats))
if chat:
users = set(chat["users"])
users.add(user_slug)
chat["users"] = list(users)
await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
@mutation.field("createChat")
@login_required
async def create_chat(_, info, description):
user = info.context["request"].user
user = info.context["request"].user
chat_id = uuid.uuid4()
chat = {
"description" : description,
"createdAt" : str(datetime.now),
"createdBy" : user.slug,
"id" : str(chat_id),
"users" : [user.slug]
}
chat_id = uuid.uuid4()
chat = {
"description": description,
"createdAt": str(datetime.now),
"createdBy": user.slug,
"id": str(chat_id),
"users": [user.slug],
}
await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
await redis.execute("SET", f"chats/{chat_id}/next_message_id", 0)
await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
await redis.execute("SET", f"chats/{chat_id}/next_message_id", 0)
await add_user_to_chat(user.slug, chat_id)
await add_user_to_chat(user.slug, chat_id)
return {"chatId": chat_id}
return { "chatId" : chat_id }
async def load_messages(chatId, size, page):
message_ids = await redis.lrange(f"chats/{chatId}/message_ids",
size * (page -1), size * page - 1)
messages = []
if message_ids:
message_keys = [f"chats/{chatId}/messages/{id.decode('UTF-8')}" for id in message_ids]
messages = await redis.mget(*message_keys)
messages = [json.loads(msg) for msg in messages]
return messages
message_ids = await redis.lrange(
f"chats/{chatId}/message_ids", size * (page - 1), size * page - 1
)
messages = []
if message_ids:
message_keys = [
f"chats/{chatId}/messages/{id.decode('UTF-8')}" for id in message_ids
]
messages = await redis.mget(*message_keys)
messages = [json.loads(msg) for msg in messages]
return messages
@query.field("userChats")
@login_required
async def user_chats(_, info):
user = info.context["request"].user
user = info.context["request"].user
chats = await redis.execute("GET", f"chats_by_user/{user.slug}")
if not chats:
chats = list()
else:
chats = list(json.loads(chats))
chats = await redis.execute("GET", f"chats_by_user/{user.slug}")
if not chats:
chats = list()
else:
chats = list(json.loads(chats))
return {"chats": chats}
return {"chats" : chats}
@query.field("enterChat")
@login_required
async def enter_chat(_, info, chatId, size):
user = info.context["request"].user
user = info.context["request"].user
chat = await redis.execute("GET", f"chats/{chatId}")
if not chat:
return { "error" : "chat not exist" }
chat = json.loads(chat)
chat = await redis.execute("GET", f"chats/{chatId}")
if not chat:
return {"error": "chat not exist"}
chat = json.loads(chat)
messages = await load_messages(chatId, size, 1)
messages = await load_messages(chatId, size, 1)
await add_user_to_chat(user.slug, chatId, chat)
await add_user_to_chat(user.slug, chatId, chat)
return {"chat": chat, "messages": messages}
return {
"chat" : chat,
"messages" : messages
}
@mutation.field("createMessage")
@login_required
async def create_message(_, info, chatId, body, replyTo = None):
user = info.context["request"].user
async def create_message(_, info, chatId, body, replyTo=None):
user = info.context["request"].user
chat = await redis.execute("GET", f"chats/{chatId}")
if not chat:
return { "error" : "chat not exist" }
chat = await redis.execute("GET", f"chats/{chatId}")
if not chat:
return {"error": "chat not exist"}
message_id = await redis.execute("GET", f"chats/{chatId}/next_message_id")
message_id = int(message_id)
message_id = await redis.execute("GET", f"chats/{chatId}/next_message_id")
message_id = int(message_id)
new_message = {
"chatId" : chatId,
"id" : message_id,
"author" : user.slug,
"body" : body,
"replyTo" : replyTo,
"createdAt" : datetime.now().isoformat()
}
new_message = {
"chatId": chatId,
"id": message_id,
"author": user.slug,
"body": body,
"replyTo": replyTo,
"createdAt": datetime.now().isoformat(),
}
await redis.execute("SET", f"chats/{chatId}/messages/{message_id}", json.dumps(new_message))
await redis.execute("LPUSH", f"chats/{chatId}/message_ids", str(message_id))
await redis.execute("SET", f"chats/{chatId}/next_message_id", str(message_id + 1))
await redis.execute(
"SET", f"chats/{chatId}/messages/{message_id}", json.dumps(new_message)
)
await redis.execute("LPUSH", f"chats/{chatId}/message_ids", str(message_id))
await redis.execute("SET", f"chats/{chatId}/next_message_id", str(message_id + 1))
chat = json.loads(chat)
users = chat["users"]
for user_slug in users:
await redis.execute("LPUSH", f"chats/{chatId}/unread/{user_slug}", str(message_id))
chat = json.loads(chat)
users = chat["users"]
for user_slug in users:
await redis.execute(
"LPUSH", f"chats/{chatId}/unread/{user_slug}", str(message_id)
)
result = MessageResult("NEW", new_message)
await MessagesStorage.put(result)
result = MessageResult("NEW", new_message)
await MessagesStorage.put(result)
return {"message": new_message}
return {"message" : new_message}
@query.field("getMessages")
@login_required
async def get_messages(_, info, chatId, size, page):
chat = await redis.execute("GET", f"chats/{chatId}")
if not chat:
return { "error" : "chat not exist" }
chat = await redis.execute("GET", f"chats/{chatId}")
if not chat:
return {"error": "chat not exist"}
messages = await load_messages(chatId, size, page)
messages = await load_messages(chatId, size, page)
return messages
return messages
@mutation.field("updateMessage")
@login_required
async def update_message(_, info, chatId, id, body):
user = info.context["request"].user
user = info.context["request"].user
chat = await redis.execute("GET", f"chats/{chatId}")
if not chat:
return { "error" : "chat not exist" }
chat = await redis.execute("GET", f"chats/{chatId}")
if not chat:
return {"error": "chat not exist"}
message = await redis.execute("GET", f"chats/{chatId}/messages/{id}")
if not message:
return { "error" : "message not exist" }
message = await redis.execute("GET", f"chats/{chatId}/messages/{id}")
if not message:
return {"error": "message not exist"}
message = json.loads(message)
if message["author"] != user.slug:
return { "error" : "access denied" }
message = json.loads(message)
if message["author"] != user.slug:
return {"error": "access denied"}
message["body"] = body
message["updatedAt"] = datetime.now().isoformat()
message["body"] = body
message["updatedAt"] = datetime.now().isoformat()
await redis.execute("SET", f"chats/{chatId}/messages/{id}", json.dumps(message))
await redis.execute("SET", f"chats/{chatId}/messages/{id}", json.dumps(message))
result = MessageResult("UPDATED", message)
await MessagesStorage.put(result)
result = MessageResult("UPDATED", message)
await MessagesStorage.put(result)
return {"message": message}
return {"message" : message}
@mutation.field("deleteMessage")
@login_required
async def delete_message(_, info, chatId, id):
user = info.context["request"].user
user = info.context["request"].user
chat = await redis.execute("GET", f"chats/{chatId}")
if not chat:
return { "error" : "chat not exist" }
chat = await redis.execute("GET", f"chats/{chatId}")
if not chat:
return {"error": "chat not exist"}
message = await redis.execute("GET", f"chats/{chatId}/messages/{id}")
if not message:
return { "error" : "message not exist" }
message = json.loads(message)
if message["author"] != user.slug:
return { "error" : "access denied" }
message = await redis.execute("GET", f"chats/{chatId}/messages/{id}")
if not message:
return {"error": "message not exist"}
message = json.loads(message)
if message["author"] != user.slug:
return {"error": "access denied"}
await redis.execute("LREM", f"chats/{chatId}/message_ids", 0, str(id))
await redis.execute("DEL", f"chats/{chatId}/messages/{id}")
await redis.execute("LREM", f"chats/{chatId}/message_ids", 0, str(id))
await redis.execute("DEL", f"chats/{chatId}/messages/{id}")
chat = json.loads(chat)
users = chat["users"]
for user_slug in users:
await redis.execute("LREM", f"chats/{chatId}/unread/{user_slug}", 0, str(id))
chat = json.loads(chat)
users = chat["users"]
for user_slug in users:
await redis.execute("LREM", f"chats/{chatId}/unread/{user_slug}", 0, str(id))
result = MessageResult("DELETED", message)
await MessagesStorage.put(result)
result = MessageResult("DELETED", message)
await MessagesStorage.put(result)
return {}
return {}
@mutation.field("markAsRead")
@login_required
async def mark_as_read(_, info, chatId, ids):
user = info.context["request"].user
user = info.context["request"].user
chat = await redis.execute("GET", f"chats/{chatId}")
if not chat:
return { "error" : "chat not exist" }
chat = await redis.execute("GET", f"chats/{chatId}")
if not chat:
return {"error": "chat not exist"}
chat = json.loads(chat)
users = set(chat["users"])
if not user.slug in users:
return { "error" : "access denied" }
chat = json.loads(chat)
users = set(chat["users"])
if user.slug not in users:
return {"error": "access denied"}
for id in ids:
await redis.execute("LREM", f"chats/{chatId}/unread/{user.slug}", 0, str(id))
for id in ids:
await redis.execute("LREM", f"chats/{chatId}/unread/{user.slug}", 0, str(id))
return {}
return {}
@subscription.source("chatUpdated")
@login_required
async def message_generator(obj, info, chatId):
try:
following_chat = ChatFollowing(chatId)
await MessagesStorage.register_chat(following_chat)
while True:
msg = await following_chat.queue.get()
yield msg
finally:
await MessagesStorage.remove_chat(following_chat)
try:
following_chat = ChatFollowing(chatId)
await MessagesStorage.register_chat(following_chat)
while True:
msg = await following_chat.queue.get()
yield msg
finally:
await MessagesStorage.remove_chat(following_chat)
@subscription.field("chatUpdated")
def message_resolver(message, info, chatId):
return message
return message

View File

@ -14,154 +14,176 @@ from sqlalchemy import and_, desc
from sqlalchemy.orm import selectinload
from typing import List
@query.field("userReactedShouts")
async def get_user_reacted_shouts(_, info, slug, page, size) -> List[Shout]:
user = await UserStorage.get_user_by_slug(slug)
if not user: return {}
with local_session() as session:
shouts = session.query(Shout).\
join(Reaction).\
where(Reaction.createdBy == user.slug).\
order_by(desc(Reaction.createdAt)).\
limit(size).\
offset(page * size).all()
return shouts
user = await UserStorage.get_user_by_slug(slug)
if not user:
return []
with local_session() as session:
shouts = (
session.query(Shout)
.join(Reaction)
.where(Reaction.createdBy == user.slug)
.order_by(desc(Reaction.createdAt))
.limit(size)
.offset(page * size)
.all()
)
return shouts
@query.field("userFollowedTopics")
@login_required
def get_followed_topics(_, slug) -> List[Topic]:
rows = []
with local_session() as session:
rows = session.query(Topic).\
join(TopicFollower).\
where(TopicFollower.follower == slug).\
all()
return rows
rows = []
with local_session() as session:
rows = (
session.query(Topic)
.join(TopicFollower)
.where(TopicFollower.follower == slug)
.all()
)
return rows
@query.field("userFollowedAuthors")
def get_followed_authors(_, slug) -> List[User]:
authors = []
with local_session() as session:
authors = session.query(User).\
join(AuthorFollower, User.slug == AuthorFollower.author).\
where(AuthorFollower.follower == slug).\
all()
return authors
authors = []
with local_session() as session:
authors = (
session.query(User)
.join(AuthorFollower, User.slug == AuthorFollower.author)
.where(AuthorFollower.follower == slug)
.all()
)
return authors
@query.field("userFollowers")
async def user_followers(_, slug) -> List[User]:
with local_session() as session:
users = session.query(User).\
join(AuthorFollower, User.slug == AuthorFollower.follower).\
where(AuthorFollower.author == slug).\
all()
return users
with local_session() as session:
users = (
session.query(User)
.join(AuthorFollower, User.slug == AuthorFollower.follower)
.where(AuthorFollower.author == slug)
.all()
)
return users
# for mutation.field("refreshSession")
async def get_user_info(slug):
return {
"unread": await get_unread_counter(slug),
"topics": [t.slug for t in get_followed_topics(0, slug)],
"authors": [a.slug for a in get_followed_authors(0, slug)],
"reactions": [r.shout for r in get_shout_reactions(0, slug)],
"communities": [c.slug for c in get_followed_communities(0, slug)]
}
return {
"unread": await get_unread_counter(slug),
"topics": [t.slug for t in get_followed_topics(0, slug)],
"authors": [a.slug for a in get_followed_authors(0, slug)],
"reactions": [r.shout for r in get_shout_reactions(0, slug)],
"communities": [c.slug for c in get_followed_communities(0, slug)],
}
@mutation.field("refreshSession")
@login_required
async def get_current_user(_, info):
user = info.context["request"].user
with local_session() as session:
user.lastSeen = datetime.now()
user.save()
session.commit()
return {
"token": "", # same token?
"user": user,
"info": await get_user_info(user.slug)
}
user = info.context["request"].user
with local_session() as session:
user.lastSeen = datetime.now()
user.save()
session.commit()
return {
"token": "", # same token?
"user": user,
"info": await get_user_info(user.slug),
}
@query.field("getUsersBySlugs")
async def get_users_by_slugs(_, info, slugs):
with local_session() as session:
users = session.query(User).\
options(selectinload(User.ratings)).\
filter(User.slug.in_(slugs)).all()
return users
with local_session() as session:
users = (
session.query(User)
.options(selectinload(User.ratings))
.filter(User.slug.in_(slugs))
.all()
)
return users
@query.field("getUserRoles")
async def get_user_roles(_, info, slug):
with local_session() as session:
user = session.query(User).where(User.slug == slug).first()
roles = session.query(Role).\
options(selectinload(Role.permissions)).\
join(UserRole).\
where(UserRole.user_id == user.id).all()
return roles
with local_session() as session:
user = session.query(User).where(User.slug == slug).first()
roles = (
session.query(Role)
.options(selectinload(Role.permissions))
.join(UserRole)
.where(UserRole.user_id == user.id)
.all()
)
return roles
@mutation.field("updateProfile")
@login_required
async def update_profile(_, info, profile):
auth = info.context["request"].auth
user_id = auth.user_id
with local_session() as session:
user = session.query(User).filter(User.id == user_id).first()
user.update(profile)
session.commit()
return {}
auth = info.context["request"].auth
user_id = auth.user_id
with local_session() as session:
user = session.query(User).filter(User.id == user_id).first()
if user:
User.update(user, **profile)
session.commit()
return {}
@mutation.field("rateUser")
@login_required
async def rate_user(_, info, slug, value):
user = info.context["request"].user
with local_session() as session:
rating = session.query(UserRating).\
filter(and_(UserRating.rater == user.slug, UserRating.user == slug)).\
first()
if rating:
rating.value = value
session.commit()
return {}
try:
UserRating.create(
rater=user.slug,
user=slug,
value=value
)
except Exception as err:
return {"error": err}
return {}
user = info.context["request"].user
with local_session() as session:
rating = (
session.query(UserRating)
.filter(and_(UserRating.rater == user.slug, UserRating.user == slug))
.first()
)
if rating:
rating.value = value
session.commit()
return {}
try:
UserRating.create(rater=user.slug, user=slug, value=value)
except Exception as err:
return {"error": err}
return {}
# for mutation.field("follow")
def author_follow(user, slug):
AuthorFollower.create(
follower=user.slug,
author=slug
)
AuthorFollower.create(follower=user.slug, author=slug)
# for mutation.field("unfollow")
def author_unfollow(user, slug):
with local_session() as session:
flw = session.query(AuthorFollower).\
filter(and_(AuthorFollower.follower == user.slug, AuthorFollower.author == slug)).\
first()
if not flw:
raise Exception("[resolvers.profile] follower not exist, cant unfollow")
else:
session.delete(flw)
session.commit()
with local_session() as session:
flw = (
session.query(AuthorFollower)
.filter(
and_(
AuthorFollower.follower == user.slug, AuthorFollower.author == slug
)
)
.first()
)
if not flw:
raise Exception("[resolvers.profile] follower not exist, cant unfollow")
else:
session.delete(flw)
session.commit()
@query.field("authorsAll")
def get_authors_all(_, info, page, size):
end = page * size
start = end - size
return UserStorage.get_all_users()[start:end]
end = page * size
start = end - size
return list(UserStorage.get_all_users())[start:end] # type: ignore
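For reference, a small sketch of the 1-based page arithmetic used by authorsAll (the helper name is made up):
def page_slice(items, page, size):
    # page=1, size=50 -> items[0:50]; page=2 -> items[50:100]
    end = page * size
    start = end - size
    return items[start:end]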

View File

@@ -10,11 +10,17 @@ from datetime import datetime
from services.auth.users import UserStorage
from services.stat.reacted import ReactedStorage
def reactions_follow(user, slug, auto=False):
with local_session() as session:
fw = session.query(ShoutReactionsFollower).\
filter(ShoutReactionsFollower.follower == user.slug, ShoutReactionsFollower.shout == slug).\
first()
fw = (
session.query(ShoutReactionsFollower)
.filter(
ShoutReactionsFollower.follower == user.slug,
ShoutReactionsFollower.shout == slug,
)
.first()
)
if auto and fw:
return
elif not auto and fw:
@@ -25,17 +31,19 @@ def reactions_follow(user, slug, auto=False):
return
# print("[resolvers.reactions] was followed before")
ShoutReactionsFollower.create(
follower=user.slug,
shout=slug,
auto=auto)
ShoutReactionsFollower.create(follower=user.slug, shout=slug, auto=auto)
def reactions_unfollow(user, slug):
with local_session() as session:
following = session.query(ShoutReactionsFollower).\
filter(ShoutReactionsFollower.follower == user.slug, ShoutReactionsFollower.shout == slug).\
first()
following = (
session.query(ShoutReactionsFollower)
.filter(
ShoutReactionsFollower.follower == user.slug,
ShoutReactionsFollower.shout == slug,
)
.first()
)
if not following:
# print("[resolvers.reactions] was not followed", slug)
return
@@ -56,7 +64,7 @@ async def create_reaction(_, info, inp):
reaction = Reaction.create(**inp)
ReactedStorage.increment(reaction.shout, reaction.replyTo)
try:
reactions_follow(user, inp['shout'], True)
reactions_follow(user, inp["shout"], True)
except Exception as e:
print(f"[resolvers.reactions] error on reactions autofollowing: {e}")
@@ -76,13 +84,13 @@ async def update_reaction(_, info, inp):
return {"error": "invalid reaction id"}
if reaction.createdBy != user.slug:
return {"error": "access denied"}
reaction.body = inp['body']
reaction.body = inp["body"]
reaction.updatedAt = datetime.now()
if reaction.kind != inp['kind']:
if reaction.kind != inp["kind"]:
# NOTE: change mind detection can be here
pass
if inp.get('range'):
reaction.range = inp.get('range')
if inp.get("range"):
reaction.range = inp.get("range")
session.commit()
return {"reaction": reaction}
@@ -104,29 +112,39 @@ async def delete_reaction(_, info, id):
session.commit()
return {}
@query.field("reactionsByShout")
async def get_shout_reactions(_, info, slug, page, size):
offset = page * size
reactions = []
with local_session() as session:
reactions = session.query(Reaction).\
filter(Reaction.shout == slug).\
limit(size).offset(offset).all()
reactions = (
session.query(Reaction)
.filter(Reaction.shout == slug)
.limit(size)
.offset(offset)
.all()
)
for r in reactions:
r.createdBy = await UserStorage.get_user(r.createdBy or 'discours')
r.createdBy = await UserStorage.get_user(r.createdBy or "discours")
return reactions
@query.field("reactionsForSlugs")
async def get_shout_reactions(_, info, slugs, page, size):
offset = page * size
reactions = []
with local_session() as session:
for slug in slugs:
reactions += session.query(Reaction).\
filter(Reaction.shout == slug).\
limit(size).offset(offset).all()
for slug in slugs:
reactions += (
session.query(Reaction)
.filter(Reaction.shout == slug)
.limit(size)
.offset(offset)
.all()
)
for r in reactions:
r.createdBy = await UserStorage.get_user(r.createdBy or 'discours')
r.createdBy = await UserStorage.get_user(r.createdBy or "discours")
return reactions
@@ -135,22 +153,31 @@ async def get_all_reactions(_, info, page=1, size=10):
offset = page * size
reactions = []
with local_session() as session:
reactions = session.query(Reaction).\
filter(Reaction.deletedAt == None).\
order_by(desc("createdAt")).\
offset(offset).limit(size)
reactions = (
session.query(Reaction)
.filter(Reaction.deletedAt == None)
.order_by(desc("createdAt"))
.offset(offset)
.limit(size)
)
for r in reactions:
r.createdBy = await UserStorage.get_user(r.createdBy or 'discours')
r.createdBy = await UserStorage.get_user(r.createdBy or "discours")
reactions = list(reactions)
reactions.sort(key=lambda x: x.createdAt, reverse=True)
return reactions
@query.field("reactionsByAuthor")
async def get_reactions_by_author(_, info, slug, page=1, size=50):
offset = page * size
reactions = []
with local_session() as session:
reactions = session.query(Reaction).filter(Reaction.createdBy == slug).limit(size).offset(offset)
reactions = (
session.query(Reaction)
.filter(Reaction.createdBy == slug)
.limit(size)
.offset(offset)
)
for r in reactions:
r.createdBy = await UserStorage.get_user(r.createdBy or 'discours')
r.createdBy = await UserStorage.get_user(r.createdBy or "discours")
return reactions
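A note on the paging used by these reaction queries, as a small sketch (values are made up):
# offset arithmetic as written above; page appears to be 0-based for reactionsByShout
page, size = 0, 20
offset = page * size            # 0 -> the first 20 reactions
# after the query, each r.createdBy is replaced with a full User object,
# falling back to the "discours" user when createdBy is empty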

View File

@@ -8,68 +8,76 @@ from base.resolvers import mutation, query
from auth.authenticate import login_required
from sqlalchemy import and_
@query.field("topicsAll")
async def topics_all(_, info, page = 1, size = 50):
topics = await TopicStorage.get_topics_all(page, size)
for topic in topics:
topic.stat = await TopicStat.get_stat(topic.slug)
return topics
async def topics_all(_, info, page=1, size=50):
topics = await TopicStorage.get_topics_all(page, size)
for topic in topics:
topic.stat = await TopicStat.get_stat(topic.slug)
return topics
@query.field("topicsByCommunity")
async def topics_by_community(_, info, community):
topics = await TopicStorage.get_topics_by_community(community)
for topic in topics:
topic.stat = await TopicStat.get_stat(topic.slug)
return topics
topics = await TopicStorage.get_topics_by_community(community)
for topic in topics:
topic.stat = await TopicStat.get_stat(topic.slug)
return topics
@query.field("topicsByAuthor")
async def topics_by_author(_, info, author):
slugs = set()
with local_session() as session:
shouts = session.query(Shout).\
filter(Shout.authors.any(User.slug == author))
for shout in shouts:
slugs.update([topic.slug for topic in shout.topics])
return await TopicStorage.get_topics(slugs)
slugs = set()
with local_session() as session:
shouts = session.query(Shout).filter(Shout.authors.any(User.slug == author))
for shout in shouts:
slugs.update([topic.slug for topic in shout.topics])
return await TopicStorage.get_topics(slugs)
@mutation.field("createTopic")
@login_required
async def create_topic(_, info, input):
new_topic = Topic.create(**input)
await TopicStorage.add_topic(new_topic)
new_topic = Topic.create(**input)
await TopicStorage.add_topic(new_topic)
return {"topic": new_topic}
return { "topic" : new_topic }
@mutation.field("updateTopic")
@login_required
async def update_topic(_, info, input):
slug = input["slug"]
slug = input["slug"]
session = local_session()
topic = session.query(Topic).filter(Topic.slug == slug).first()
session = local_session()
topic = session.query(Topic).filter(Topic.slug == slug).first()
if not topic:
return { "error" : "topic not found" }
if not topic:
return {"error": "topic not found"}
topic.update(input)
session.commit()
session.close()
topic.update(input)
session.commit()
session.close()
await TopicStorage.add_topic(topic)
await TopicStorage.add_topic(topic)
return {"topic": topic}
return { "topic" : topic }
def topic_follow(user, slug):
TopicFollower.create(
follower = user.slug,
topic = slug)
TopicFollower.create(follower=user.slug, topic=slug)
def topic_unfollow(user, slug):
with local_session() as session:
sub = session.query(TopicFollower).\
filter(and_(TopicFollower.follower == user.slug, TopicFollower.topic == slug)).\
first()
if not sub:
raise Exception("[resolvers.topics] follower not exist")
session.delete(sub)
session.commit()
with local_session() as session:
sub = (
session.query(TopicFollower)
.filter(
and_(TopicFollower.follower == user.slug, TopicFollower.topic == slug)
)
.first()
)
if not sub:
raise Exception("[resolvers.topics] follower not exist")
session.delete(sub)
session.commit()
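The follow/unfollow mutations elsewhere delegate to these helpers; an illustrative call (the user object and slug are made up):
# sketch: user is any object with a .slug attribute, e.g. the request user
topic_follow(user, "example-topic")     # inserts a TopicFollower row
topic_unfollow(user, "example-topic")   # deletes it, or raises if no such follower exists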

View File

@@ -13,138 +13,171 @@ from resolvers.reactions import reactions_follow, reactions_unfollow
from auth.authenticate import login_required
from sqlalchemy import select, desc, and_, text
from sqlalchemy.orm import selectinload
from sqlalchemy.dialects import postgresql
@query.field("topViewed")
async def top_viewed(_, info, page, size):
async with ShoutsCache.lock:
return ShoutsCache.top_viewed[(page - 1) * size : page * size]
@query.field("topMonth")
async def top_month(_, info, page, size):
async with ShoutsCache.lock:
return ShoutsCache.top_month[(page - 1) * size : page * size]
@query.field("topOverall")
async def top_overall(_, info, page, size):
async with ShoutsCache.lock:
return ShoutsCache.top_overall[(page - 1) * size : page * size]
@query.field("recentPublished")
async def recent_published(_, info, page, size):
async with ShoutsCache.lock:
return ShoutsCache.recent_published[(page - 1) * size : page * size]
@query.field("recentAll")
async def recent_all(_, info, page, size):
async with ShoutsCache.lock:
return ShoutsCache.recent_all[(page - 1) * size : page * size]
@query.field("recentReacted")
async def recent_reacted(_, info, page, size):
async with ShoutsCache.lock:
return ShoutsCache.recent_reacted[(page - 1) * size : page * size]
@mutation.field("viewShout")
async def view_shout(_, info, slug):
await ViewedStorage.inc_shout(slug)
return {"error" : ""}
return {"error": ""}
@query.field("getShoutBySlug")
async def get_shout_by_slug(_, info, slug):
all_fields = [node.name.value for node in info.field_nodes[0].selection_set.selections]
all_fields = [
node.name.value for node in info.field_nodes[0].selection_set.selections
]
selected_fields = set(["authors", "topics"]).intersection(all_fields)
select_options = [selectinload(getattr(Shout, field)) for field in selected_fields]
shout = {}
with local_session() as session:
try: s = text(open('src/queries/shout-by-slug.sql', 'r').read() % slug)
except: pass
shout = session.query(Shout).\
options(select_options).\
filter(Shout.slug == slug).first()
try:
s = text(open("src/queries/shout-by-slug.sql", "r").read() % slug)
except:
pass
shout = (
session.query(Shout)
.options(select_options)
.filter(Shout.slug == slug)
.first()
)
if not shout:
print(f"shout with slug {slug} not exist")
return {"error" : "shout not found"}
return {"error": "shout not found"}
else:
for a in shout.authors:
a.caption = await ShoutAuthorStorage.get_author_caption(slug, a.slug)
return shout
@query.field("shoutsByTopics")
async def shouts_by_topics(_, info, slugs, page, size):
page = page - 1
with local_session() as session:
shouts = session.query(Shout).\
join(ShoutTopic).\
where(and_(ShoutTopic.topic.in_(slugs), Shout.publishedAt != None)).\
order_by(desc(Shout.publishedAt)).\
limit(size).\
offset(page * size)
shouts = (
session.query(Shout)
.join(ShoutTopic)
.where(and_(ShoutTopic.topic.in_(slugs), Shout.publishedAt != None))
.order_by(desc(Shout.publishedAt))
.limit(size)
.offset(page * size)
)
for s in shouts:
for a in s.authors:
a.caption = await ShoutAuthorStorage.get_author_caption(s.slug, a.slug)
return shouts
@query.field("shoutsByCollection")
async def shouts_by_topics(_, info, collection, page, size):
page = page - 1
shouts = []
with local_session() as session:
shouts = session.query(Shout).\
join(ShoutCollection, ShoutCollection.collection == collection).\
where(and_(ShoutCollection.shout == Shout.slug, Shout.publishedAt != None)).\
order_by(desc(Shout.publishedAt)).\
limit(size).\
offset(page * size)
shouts = (
session.query(Shout)
.join(ShoutCollection, ShoutCollection.collection == collection)
.where(and_(ShoutCollection.shout == Shout.slug, Shout.publishedAt != None))
.order_by(desc(Shout.publishedAt))
.limit(size)
.offset(page * size)
)
for s in shouts:
for a in s.authors:
a.caption = await ShoutAuthorStorage.get_author_caption(s.slug, a.slug)
return shouts
@query.field("shoutsByAuthors")
async def shouts_by_authors(_, info, slugs, page, size):
page = page - 1
with local_session() as session:
shouts = session.query(Shout).\
join(ShoutAuthor).\
where(and_(ShoutAuthor.user.in_(slugs), Shout.publishedAt != None)).\
order_by(desc(Shout.publishedAt)).\
limit(size).\
offset(page * size)
shouts = (
session.query(Shout)
.join(ShoutAuthor)
.where(and_(ShoutAuthor.user.in_(slugs), Shout.publishedAt != None))
.order_by(desc(Shout.publishedAt))
.limit(size)
.offset(page * size)
)
for s in shouts:
for a in s.authors:
a.caption = await ShoutAuthorStorage.get_author_caption(s.slug, a.slug)
return shouts
SINGLE_COMMUNITY = True
@query.field("shoutsByCommunities")
async def shouts_by_communities(_, info, slugs, page, size):
if SINGLE_COMMUNITY:
if SINGLE_COMMUNITY:
return recent_published(_, info, page, size)
else:
page = page - 1
with local_session() as session:
#TODO fix postgres high load
shouts = session.query(Shout).distinct().\
join(ShoutTopic).\
where(and_(Shout.publishedAt != None,\
ShoutTopic.topic.in_(\
select(Topic.slug).where(Topic.community.in_(slugs))\
))).\
order_by(desc(Shout.publishedAt)).\
limit(size).\
offset(page * size)
# TODO fix postgres high load
shouts = (
session.query(Shout)
.distinct()
.join(ShoutTopic)
.where(
and_(
Shout.publishedAt != None,
ShoutTopic.topic.in_(
select(Topic.slug).where(Topic.community.in_(slugs))
),
)
)
.order_by(desc(Shout.publishedAt))
.limit(size)
.offset(page * size)
)
for s in shouts:
for a in s.authors:
a.caption = await ShoutAuthorStorage.get_author_caption(s.slug, a.slug)
return shouts
@mutation.field("follow")
@login_required
async def follow(_, info, what, slug):
@@ -159,10 +192,11 @@ async def follow(_, info, what, slug):
elif what == "REACTIONS":
reactions_follow(user, slug)
except Exception as e:
return {"error" : str(e)}
return {"error": str(e)}
return {}
@mutation.field("unfollow")
@login_required
async def unfollow(_, info, what, slug):
@@ -178,6 +212,6 @@ async def unfollow(_, info, what, slug):
elif what == "REACTIONS":
reactions_unfollow(user, slug)
except Exception as e:
return {"error" : str(e)}
return {"error": str(e)}
return {}

View File

@@ -1,35 +1,33 @@
import asyncio
from sqlalchemy.orm import selectinload
from orm.rbac import Role
class RoleStorage:
roles = {}
lock = asyncio.Lock()
roles = {}
lock = asyncio.Lock()
@staticmethod
def init(session):
self = RoleStorage
roles = session.query(Role).\
options(selectinload(Role.permissions)).all()
self.roles = dict([(role.id, role) for role in roles])
print('[auth.roles] %d precached' % len(roles))
@staticmethod
def init(session):
self = RoleStorage
roles = session.query(Role).options(selectinload(Role.permissions)).all()
self.roles = dict([(role.id, role) for role in roles])
print("[auth.roles] %d precached" % len(roles))
@staticmethod
async def get_role(id):
self = RoleStorage
async with self.lock:
return self.roles.get(id)
@staticmethod
async def get_role(id):
self = RoleStorage
async with self.lock:
return self.roles.get(id)
@staticmethod
async def add_role(role):
self = RoleStorage
async with self.lock:
self.roles[id] = role
@staticmethod
async def add_role(role):
self = RoleStorage
async with self.lock:
self.roles[role.id] = role
@staticmethod
async def del_role(id):
self = RoleStorage
async with self.lock:
del self.roles[id]
@staticmethod
async def del_role(id):
self = RoleStorage
async with self.lock:
del self.roles[id]
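An illustrative role lookup, assuming RoleStorage.init(session) ran at startup:
# sketch only; roles are cached by numeric id
role = await RoleStorage.get_role(1)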

View File

@@ -4,47 +4,50 @@ from orm.user import User
class UserStorage:
users = {}
lock = asyncio.Lock()
users = {}
lock = asyncio.Lock()
@staticmethod
def init(session):
self = UserStorage
users = session.query(User).\
options(selectinload(User.roles), selectinload(User.ratings)).all()
self.users = dict([(user.id, user) for user in users])
print('[auth.users] %d precached' % len(self.users))
@staticmethod
def init(session):
self = UserStorage
users = (
session.query(User)
.options(selectinload(User.roles), selectinload(User.ratings))
.all()
)
self.users = dict([(user.id, user) for user in users])
print("[auth.users] %d precached" % len(self.users))
@staticmethod
async def get_user(id):
self = UserStorage
async with self.lock:
return self.users.get(id)
@staticmethod
async def get_user(id):
self = UserStorage
async with self.lock:
return self.users.get(id)
@staticmethod
async def get_all_users():
self = UserStorage
async with self.lock:
aaa = list(self.users.values())
aaa.sort(key=lambda user: user.createdAt)
return aaa
@staticmethod
async def get_all_users():
self = UserStorage
async with self.lock:
aaa = list(self.users.values())
aaa.sort(key=lambda user: user.createdAt)
return aaa
@staticmethod
async def get_user_by_slug(slug):
self = UserStorage
async with self.lock:
for user in self.users.values():
if user.slug == slug:
return user
@staticmethod
async def get_user_by_slug(slug):
self = UserStorage
async with self.lock:
for user in self.users.values():
if user.slug == slug:
return user
@staticmethod
async def add_user(user):
self = UserStorage
async with self.lock:
self.users[user.id] = user
@staticmethod
async def add_user(user):
self = UserStorage
async with self.lock:
self.users[user.id] = user
@staticmethod
async def del_user(id):
self = UserStorage
async with self.lock:
del self.users[id]
@staticmethod
async def del_user(id):
self = UserStorage
async with self.lock:
del self.users[id]
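An illustrative use of this cache, assuming UserStorage.init(session) ran at startup (the slug is made up):
# sketch only
user = await UserStorage.get_user_by_slug("some-author")   # linear scan over the cached users
everyone = await UserStorage.get_all_users()               # returns users sorted by createdAt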

View File

@@ -2,180 +2,209 @@ import asyncio
from datetime import datetime
from sqlalchemy.types import Enum
from sqlalchemy import Column, DateTime, ForeignKey, Boolean
# from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy import Enum
import enum
from base.orm import Base, local_session
from orm.topic import ShoutTopic
class ReactionKind(enum.Enum):
AGREE = 1 # +1
DISAGREE = 2 # -1
PROOF = 3 # +1
DISPROOF = 4 # -1
ASK = 5 # +0 bookmark
PROPOSE = 6 # +0
QUOTE = 7 # +0 bookmark
COMMENT = 8 # +0
ACCEPT = 9 # +1
REJECT = 0 # -1
LIKE = 11 # +1
DISLIKE = 12 # -1
# TYPE = <reaction index> # rating diff
AGREE = 1 # +1
DISAGREE = 2 # -1
PROOF = 3 # +1
DISPROOF = 4 # -1
ASK = 5 # +0 bookmark
PROPOSE = 6 # +0
QUOTE = 7 # +0 bookmark
COMMENT = 8 # +0
ACCEPT = 9 # +1
REJECT = 0 # -1
LIKE = 11 # +1
DISLIKE = 12 # -1
# TYPE = <reaction index> # rating diff
def kind_to_rate(kind) -> int:
if kind in [
ReactionKind.AGREE,
ReactionKind.LIKE,
ReactionKind.PROOF,
ReactionKind.ACCEPT
]: return 1
elif kind in [
ReactionKind.DISAGREE,
ReactionKind.DISLIKE,
ReactionKind.DISPROOF,
ReactionKind.REJECT
]: return -1
else: return 0
class ReactedByDay(Base):
__tablename__ = "reacted_by_day"
if kind in [
ReactionKind.AGREE,
ReactionKind.LIKE,
ReactionKind.PROOF,
ReactionKind.ACCEPT,
]:
return 1
elif kind in [
ReactionKind.DISAGREE,
ReactionKind.DISLIKE,
ReactionKind.DISPROOF,
ReactionKind.REJECT,
]:
return -1
else:
return 0
class ReactedByDay(Base):
__tablename__ = "reacted_by_day"
id = None
reaction = Column(ForeignKey("reaction.id"), primary_key=True)
shout = Column(ForeignKey("shout.slug"), primary_key=True)
replyTo = Column(ForeignKey("reaction.id"), nullable=True)
kind: int = Column(Enum(ReactionKind), nullable=False, comment="Reaction kind")
day = Column(DateTime, primary_key=True, default=datetime.now)
comment = Column(Boolean, default=False)
id = None
reaction = Column(ForeignKey("reaction.id"), primary_key = True)
shout = Column(ForeignKey('shout.slug'), primary_key=True)
replyTo = Column(ForeignKey('reaction.id'), nullable=True)
kind: int = Column(Enum(ReactionKind), nullable=False, comment="Reaction kind")
day = Column(DateTime, primary_key=True, default=datetime.now)
comment = Column(Boolean, default=False)
class ReactedStorage:
reacted = {
'shouts': {},
'topics': {},
'reactions': {}
}
rating = {
'shouts': {},
'topics': {},
'reactions': {}
}
reactions = []
to_flush = []
period = 30*60 # sec
lock = asyncio.Lock()
reacted = {"shouts": {}, "topics": {}, "reactions": {}}
rating = {"shouts": {}, "topics": {}, "reactions": {}}
reactions = []
to_flush = []
period = 30 * 60 # sec
lock = asyncio.Lock()
@staticmethod
async def get_shout(shout_slug):
self = ReactedStorage
async with self.lock:
return self.reacted['shouts'].get(shout_slug, [])
@staticmethod
async def get_topic(topic_slug):
self = ReactedStorage
async with self.lock:
return self.reacted['topics'].get(topic_slug, [])
@staticmethod
async def get_shout(shout_slug):
self = ReactedStorage
async with self.lock:
return self.reacted["shouts"].get(shout_slug, [])
@staticmethod
async def get_comments(shout_slug):
self = ReactedStorage
async with self.lock:
return list(filter(lambda r: r.comment, self.reacted['shouts'].get(shout_slug, [])))
@staticmethod
async def get_topic(topic_slug):
self = ReactedStorage
async with self.lock:
return self.reacted["topics"].get(topic_slug, [])
@staticmethod
async def get_topic_comments(topic_slug):
self = ReactedStorage
async with self.lock:
return list(filter(lambda r: r.comment, self.reacted['topics'].get(topic_slug, [])))
@staticmethod
async def get_comments(shout_slug):
self = ReactedStorage
async with self.lock:
return list(
filter(lambda r: r.comment, self.reacted["shouts"].get(shout_slug, []))
)
@staticmethod
async def get_reaction_comments(reaction_id):
self = ReactedStorage
async with self.lock:
return list(filter(lambda r: r.comment, self.reacted['reactions'].get(reaction_id)))
@staticmethod
async def get_topic_comments(topic_slug):
self = ReactedStorage
async with self.lock:
return list(
filter(lambda r: r.comment, self.reacted["topics"].get(topic_slug, []))
)
@staticmethod
async def get_reaction(reaction_id):
self = ReactedStorage
async with self.lock:
return self.reacted['reactions'].get(reaction_id, [])
@staticmethod
async def get_reaction_comments(reaction_id):
self = ReactedStorage
async with self.lock:
return list(
filter(lambda r: r.comment, self.reacted["reactions"].get(reaction_id))
)
@staticmethod
async def get_rating(shout_slug):
self = ReactedStorage
rating = 0
async with self.lock:
for r in self.reacted['shouts'].get(shout_slug, []):
rating = rating + kind_to_rate(r.kind)
return rating
@staticmethod
async def get_reaction(reaction_id):
self = ReactedStorage
async with self.lock:
return self.reacted["reactions"].get(reaction_id, [])
@staticmethod
async def get_topic_rating(topic_slug):
self = ReactedStorage
rating = 0
async with self.lock:
for r in self.reacted['topics'].get(topic_slug, []):
rating = rating + kind_to_rate(r.kind)
return rating
@staticmethod
async def get_rating(shout_slug):
self = ReactedStorage
rating = 0
async with self.lock:
for r in self.reacted["shouts"].get(shout_slug, []):
rating = rating + kind_to_rate(r.kind)
return rating
@staticmethod
async def get_reaction_rating(reaction_id):
self = ReactedStorage
rating = 0
async with self.lock:
for r in self.reacted['reactions'].get(reaction_id, []):
rating = rating + kind_to_rate(r.kind)
return rating
@staticmethod
async def get_topic_rating(topic_slug):
self = ReactedStorage
rating = 0
async with self.lock:
for r in self.reacted["topics"].get(topic_slug, []):
rating = rating + kind_to_rate(r.kind)
return rating
@staticmethod
async def increment(reaction):
self = ReactedStorage
async with self.lock:
with local_session() as session:
r = {
"day": datetime.now().replace(hour=0, minute=0, second=0, microsecond=0),
"reaction": reaction.id,
"kind": reaction.kind,
"shout": reaction.shout
}
if reaction.replyTo: r['replyTo'] = reaction.replyTo
if reaction.body: r['comment'] = True
reaction = ReactedByDay.create(**r)
self.reacted['shouts'][reaction.shout] = self.reacted['shouts'].get(reaction.shout, [])
self.reacted['shouts'][reaction.shout].append(reaction)
if reaction.replyTo:
self.reacted['reaction'][reaction.replyTo] = self.reacted['reactions'].get(reaction.shout, [])
self.reacted['reaction'][reaction.replyTo].append(reaction)
self.rating['reactions'][reaction.replyTo] = self.rating['reactions'].get(reaction.replyTo, 0) + kind_to_rate(reaction.kind)
else:
self.rating['shouts'][reaction.replyTo] = self.rating['shouts'].get(reaction.shout, 0) + kind_to_rate(reaction.kind)
@staticmethod
async def get_reaction_rating(reaction_id):
self = ReactedStorage
rating = 0
async with self.lock:
for r in self.reacted["reactions"].get(reaction_id, []):
rating = rating + kind_to_rate(r.kind)
return rating
@staticmethod
def init(session):
self = ReactedStorage
all_reactions = session.query(ReactedByDay).all()
print('[stat.reacted] %d reactions total' % len(all_reactions))
for reaction in all_reactions:
shout = reaction.shout
topics = session.query(ShoutTopic.topic).where(ShoutTopic.shout == shout).all()
kind = reaction.kind
self.reacted['shouts'][shout] = self.reacted['shouts'].get(shout, [])
self.reacted['shouts'][shout].append(reaction)
self.rating['shouts'][shout] = self.rating['shouts'].get(shout, 0) + kind_to_rate(kind)
for t in topics:
self.reacted['topics'][t] = self.reacted['topics'].get(t, [])
self.reacted['topics'][t].append(reaction)
self.rating['topics'][t] = self.rating['topics'].get(t, 0) + kind_to_rate(kind) # rating
if reaction.replyTo:
self.reacted['reactions'][reaction.replyTo] = self.reacted['reactions'].get(reaction.replyTo, [])
self.reacted['reactions'][reaction.replyTo].append(reaction)
self.rating['reactions'][reaction.replyTo] = self.rating['reactions'].get(reaction.replyTo, 0) + kind_to_rate(reaction.kind)
ttt = self.reacted['topics'].values()
print('[stat.reacted] %d topics reacted' % len(ttt))
print('[stat.reacted] %d shouts reacted' % len(self.reacted['shouts']))
print('[stat.reacted] %d reactions reacted' % len(self.reacted['reactions']))
@staticmethod
async def increment(reaction):
self = ReactedStorage
async with self.lock:
with local_session() as session:
r = {
"day": datetime.now().replace(
hour=0, minute=0, second=0, microsecond=0
),
"reaction": reaction.id,
"kind": reaction.kind,
"shout": reaction.shout,
}
if reaction.replyTo:
r["replyTo"] = reaction.replyTo
if reaction.body:
r["comment"] = True
reaction = ReactedByDay.create(**r)
self.reacted["shouts"][reaction.shout] = self.reacted["shouts"].get(
reaction.shout, []
)
self.reacted["shouts"][reaction.shout].append(reaction)
if reaction.replyTo:
self.reacted["reaction"][reaction.replyTo] = self.reacted[
"reactions"
].get(reaction.shout, [])
self.reacted["reaction"][reaction.replyTo].append(reaction)
self.rating["reactions"][reaction.replyTo] = self.rating[
"reactions"
].get(reaction.replyTo, 0) + kind_to_rate(reaction.kind)
else:
self.rating["shouts"][reaction.replyTo] = self.rating["shouts"].get(
reaction.shout, 0
) + kind_to_rate(reaction.kind)
@staticmethod
def init(session):
self = ReactedStorage
all_reactions = session.query(ReactedByDay).all()
print("[stat.reacted] %d reactions total" % len(all_reactions))
for reaction in all_reactions:
shout = reaction.shout
topics = (
session.query(ShoutTopic.topic).where(ShoutTopic.shout == shout).all()
)
kind = reaction.kind
self.reacted["shouts"][shout] = self.reacted["shouts"].get(shout, [])
self.reacted["shouts"][shout].append(reaction)
self.rating["shouts"][shout] = self.rating["shouts"].get(
shout, 0
) + kind_to_rate(kind)
for t in topics:
self.reacted["topics"][t] = self.reacted["topics"].get(t, [])
self.reacted["topics"][t].append(reaction)
self.rating["topics"][t] = self.rating["topics"].get(
t, 0
) + kind_to_rate(
kind
) # rating
if reaction.replyTo:
self.reacted["reactions"][reaction.replyTo] = self.reacted[
"reactions"
].get(reaction.replyTo, [])
self.reacted["reactions"][reaction.replyTo].append(reaction)
self.rating["reactions"][reaction.replyTo] = self.rating[
"reactions"
].get(reaction.replyTo, 0) + kind_to_rate(reaction.kind)
ttt = self.reacted["topics"].values()
print("[stat.reacted] %d topics reacted" % len(ttt))
print("[stat.reacted] %d shouts reacted" % len(self.reacted["shouts"]))
print("[stat.reacted] %d reactions reacted" % len(self.reacted["reactions"]))

View File

@@ -5,81 +5,84 @@ from services.stat.viewed import ViewedStorage
from services.zine.shoutauthor import ShoutAuthorStorage
from orm.topic import ShoutTopic, TopicFollower
from typing import Dict
class TopicStat:
shouts_by_topic = {}
authors_by_topic = {}
followers_by_topic = {}
lock = asyncio.Lock()
period = 30*60 #sec
shouts_by_topic = {}
authors_by_topic = {}
followers_by_topic = {}
lock = asyncio.Lock()
period = 30 * 60 # sec
@staticmethod
async def load_stat(session):
self = TopicStat
self.shouts_by_topic = {}
self.authors_by_topic = {}
shout_topics = session.query(ShoutTopic).all()
for shout_topic in shout_topics:
topic = shout_topic.topic
shout = shout_topic.shout
if topic in self.shouts_by_topic:
self.shouts_by_topic[topic].append(shout)
else:
self.shouts_by_topic[topic] = [shout, ]
@staticmethod
async def load_stat(session):
self = TopicStat
self.shouts_by_topic = {}
self.authors_by_topic = {}
shout_topics = session.query(ShoutTopic).all()
for shout_topic in shout_topics:
topic = shout_topic.topic
shout = shout_topic.shout
if topic in self.shouts_by_topic:
self.shouts_by_topic[topic].append(shout)
else:
self.shouts_by_topic[topic] = [
shout,
]
authors = await ShoutAuthorStorage.get_authors(shout)
if topic in self.authors_by_topic:
self.authors_by_topic[topic].update(authors)
else:
self.authors_by_topic[topic] = set(authors)
authors = await ShoutAuthorStorage.get_authors(shout)
if topic in self.authors_by_topic:
self.authors_by_topic[topic].update(authors)
else:
self.authors_by_topic[topic] = set(authors)
print('[stat.topics] authors sorted')
print('[stat.topics] shouts sorted')
self.followers_by_topic = {}
followings = session.query(TopicFollower)
for flw in followings:
topic = flw.topic
user = flw.follower
if topic in self.followers_by_topic:
self.followers_by_topic[topic].append(user)
else:
self.followers_by_topic[topic] = [user]
print('[stat.topics] followers sorted')
print("[stat.topics] authors sorted")
print("[stat.topics] shouts sorted")
@staticmethod
async def get_shouts(topic):
self = TopicStat
async with self.lock:
return self.shouts_by_topic.get(topic, [])
self.followers_by_topic = {}
followings = session.query(TopicFollower)
for flw in followings:
topic = flw.topic
user = flw.follower
if topic in self.followers_by_topic:
self.followers_by_topic[topic].append(user)
else:
self.followers_by_topic[topic] = [user]
print("[stat.topics] followers sorted")
@staticmethod
async def get_stat(topic):
self = TopicStat
async with self.lock:
shouts = self.shouts_by_topic.get(topic, [])
followers = self.followers_by_topic.get(topic, [])
authors = self.authors_by_topic.get(topic, [])
@staticmethod
async def get_shouts(topic):
self = TopicStat
async with self.lock:
return self.shouts_by_topic.get(topic, [])
return {
"shouts" : len(shouts),
"authors" : len(authors),
"followers" : len(followers),
"viewed": await ViewedStorage.get_topic(topic),
"reacted" : len(await ReactedStorage.get_topic(topic)),
"commented": len(await ReactedStorage.get_topic_comments(topic)),
"rating" : await ReactedStorage.get_topic_rating(topic),
}
@staticmethod
async def get_stat(topic):
self = TopicStat
async with self.lock:
shouts = self.shouts_by_topic.get(topic, [])
followers = self.followers_by_topic.get(topic, [])
authors = self.authors_by_topic.get(topic, [])
@staticmethod
async def worker():
self = TopicStat
while True:
try:
with local_session() as session:
async with self.lock:
await self.load_stat(session)
print("[stat.topics] periodical update")
except Exception as err:
print("[stat.topics] errror: %s" % (err))
await asyncio.sleep(self.period)
return {
"shouts": len(shouts),
"authors": len(authors),
"followers": len(followers),
"viewed": await ViewedStorage.get_topic(topic),
"reacted": len(await ReactedStorage.get_topic(topic)),
"commented": len(await ReactedStorage.get_topic_comments(topic)),
"rating": await ReactedStorage.get_topic_rating(topic),
}
@staticmethod
async def worker():
self = TopicStat
while True:
try:
with local_session() as session:
async with self.lock:
await self.load_stat(session)
print("[stat.topics] periodical update")
except Exception as err:
print("[stat.topics] errror: %s" % (err))
await asyncio.sleep(self.period)
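An illustrative read of the aggregated counters (the slug is made up):
# sketch only
stat = await TopicStat.get_stat("example-topic")
# stat is a dict with keys: shouts, authors, followers, viewed, reacted, commented, rating
print(stat["shouts"], stat["followers"], stat["rating"])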

View File

@@ -7,109 +7,112 @@ from orm.topic import ShoutTopic
class ViewedByDay(Base):
__tablename__ = "viewed_by_day"
__tablename__ = "viewed_by_day"
id = None
shout = Column(ForeignKey('shout.slug'), primary_key=True)
day = Column(DateTime, primary_key=True, default=datetime.now)
value = Column(Integer)
id = None
shout = Column(ForeignKey("shout.slug"), primary_key=True)
day = Column(DateTime, primary_key=True, default=datetime.now)
value = Column(Integer)
class ViewedStorage:
viewed = {
'shouts': {},
'topics': {},
'reactions': {}
}
this_day_views = {}
to_flush = []
period = 30*60 # sec
lock = asyncio.Lock()
viewed = {"shouts": {}, "topics": {}, "reactions": {}}
this_day_views = {}
to_flush = []
period = 30 * 60 # sec
lock = asyncio.Lock()
@staticmethod
def init(session):
self = ViewedStorage
views = session.query(ViewedByDay).all()
@staticmethod
def init(session):
self = ViewedStorage
views = session.query(ViewedByDay).all()
for view in views:
shout = view.shout
topics = session.query(ShoutTopic.topic).filter(ShoutTopic.shout == shout).all()
value = view.value
if shout:
old_value = self.viewed['shouts'].get(shout, 0)
self.viewed['shouts'][shout] = old_value + value
for t in topics:
old_topic_value = self.viewed['topics'].get(t, 0)
self.viewed['topics'][t] = old_topic_value + value
if not shout in self.this_day_views:
self.this_day_views[shout] = view
this_day_view = self.this_day_views[shout]
if this_day_view.day < view.day:
self.this_day_views[shout] = view
print('[stat.viewed] %d shouts viewed' % len(views))
for view in views:
shout = view.shout
topics = (
session.query(ShoutTopic.topic).filter(ShoutTopic.shout == shout).all()
)
value = view.value
if shout:
old_value = self.viewed["shouts"].get(shout, 0)
self.viewed["shouts"][shout] = old_value + value
for t in topics:
old_topic_value = self.viewed["topics"].get(t, 0)
self.viewed["topics"][t] = old_topic_value + value
if not shout in self.this_day_views:
self.this_day_views[shout] = view
this_day_view = self.this_day_views[shout]
if this_day_view.day < view.day:
self.this_day_views[shout] = view
@staticmethod
async def get_shout(shout_slug):
self = ViewedStorage
async with self.lock:
return self.viewed['shouts'].get(shout_slug, 0)
print("[stat.viewed] %d shouts viewed" % len(views))
@staticmethod
async def get_topic(topic_slug):
self = ViewedStorage
async with self.lock:
return self.viewed['topics'].get(topic_slug, 0)
@staticmethod
async def get_shout(shout_slug):
self = ViewedStorage
async with self.lock:
return self.viewed["shouts"].get(shout_slug, 0)
@staticmethod
async def get_reaction(reaction_id):
self = ViewedStorage
async with self.lock:
return self.viewed['reactions'].get(reaction_id, 0)
@staticmethod
async def get_topic(topic_slug):
self = ViewedStorage
async with self.lock:
return self.viewed["topics"].get(topic_slug, 0)
@staticmethod
async def increment(shout_slug):
self = ViewedStorage
async with self.lock:
this_day_view = self.this_day_views.get(shout_slug)
day_start = datetime.now().replace(hour=0, minute=0, second=0)
if not this_day_view or this_day_view.day < day_start:
if this_day_view and getattr(this_day_view, "modified", False):
self.to_flush.append(this_day_view)
this_day_view = ViewedByDay.create(shout=shout_slug, value=1)
self.this_day_views[shout_slug] = this_day_view
else:
this_day_view.value = this_day_view.value + 1
this_day_view.modified = True
self.viewed['shouts'][shout_slug] = self.viewed['shouts'].get(shout_slug, 0) + 1
with local_session() as session:
topics = session.query(ShoutTopic.topic).where(ShoutTopic.shout == shout_slug).all()
for t in topics:
self.viewed['topics'][t] = self.viewed['topics'].get(t, 0) + 1
flag_modified(this_day_view, "value")
@staticmethod
async def get_reaction(reaction_id):
self = ViewedStorage
async with self.lock:
return self.viewed["reactions"].get(reaction_id, 0)
@staticmethod
async def flush_changes(session):
self = ViewedStorage
async with self.lock:
for view in self.this_day_views.values():
if getattr(view, "modified", False):
session.add(view)
flag_modified(view, "value")
view.modified = False
for view in self.to_flush:
session.add(view)
self.to_flush.clear()
session.commit()
@staticmethod
async def increment(shout_slug):
self = ViewedStorage
async with self.lock:
this_day_view = self.this_day_views.get(shout_slug)
day_start = datetime.now().replace(hour=0, minute=0, second=0)
if not this_day_view or this_day_view.day < day_start:
if this_day_view and getattr(this_day_view, "modified", False):
self.to_flush.append(this_day_view)
this_day_view = ViewedByDay.create(shout=shout_slug, value=1)
self.this_day_views[shout_slug] = this_day_view
else:
this_day_view.value = this_day_view.value + 1
this_day_view.modified = True
self.viewed["shouts"][shout_slug] = (
self.viewed["shouts"].get(shout_slug, 0) + 1
)
with local_session() as session:
topics = (
session.query(ShoutTopic.topic)
.where(ShoutTopic.shout == shout_slug)
.all()
)
for t in topics:
self.viewed["topics"][t] = self.viewed["topics"].get(t, 0) + 1
flag_modified(this_day_view, "value")
@staticmethod
async def worker():
while True:
try:
with local_session() as session:
await ViewedStorage.flush_changes(session)
print("[stat.viewed] periodical flush")
except Exception as err:
print("[stat.viewed] errror: %s" % (err))
await asyncio.sleep(ViewedStorage.period)
@staticmethod
async def flush_changes(session):
self = ViewedStorage
async with self.lock:
for view in self.this_day_views.values():
if getattr(view, "modified", False):
session.add(view)
flag_modified(view, "value")
view.modified = False
for view in self.to_flush:
session.add(view)
self.to_flush.clear()
session.commit()
@staticmethod
async def worker():
while True:
try:
with local_session() as session:
await ViewedStorage.flush_changes(session)
print("[stat.viewed] periodical flush")
except Exception as err:
print("[stat.viewed] errror: %s" % (err))
await asyncio.sleep(ViewedStorage.period)
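An illustrative view-count round-trip (the slug is made up); writes are buffered per day and flushed by the worker every 30 minutes:
# sketch only
await ViewedStorage.increment("example-shout")          # bumps today's ViewedByDay row in memory
views = await ViewedStorage.get_shout("example-shout")  # total across all cached days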

View File

@@ -3,60 +3,67 @@ from pathlib import Path
import asyncio
from settings import SHOUTS_REPO
class GitTask:
''' every shout update use a new task '''
queue = asyncio.Queue()
"""every shout update use a new task"""
def __init__(self, input, username, user_email, comment):
self.slug = input["slug"]
self.shout_body = input["body"]
self.username = username
self.user_email = user_email
self.comment = comment
queue = asyncio.Queue()
GitTask.queue.put_nowait(self)
def init_repo(self):
repo_path = "%s" % (SHOUTS_REPO)
Path(repo_path).mkdir()
cmd = "cd %s && git init && " \
"git config user.name 'discours' && " \
"git config user.email 'discours@discours.io' && " \
"touch initial && git add initial && " \
"git commit -m 'init repo'" \
% (repo_path)
output = subprocess.check_output(cmd, shell=True)
print(output)
def __init__(self, input, username, user_email, comment):
self.slug = input["slug"]
self.shout_body = input["body"]
self.username = username
self.user_email = user_email
self.comment = comment
def execute(self):
repo_path = "%s" % (SHOUTS_REPO)
if not Path(repo_path).exists():
self.init_repo()
GitTask.queue.put_nowait(self)
#cmd = "cd %s && git checkout master" % (repo_path)
#output = subprocess.check_output(cmd, shell=True)
#print(output)
def init_repo(self):
repo_path = "%s" % (SHOUTS_REPO)
shout_filename = "%s.mdx" % (self.slug)
shout_full_filename = "%s/%s" % (repo_path, shout_filename)
with open(shout_full_filename, mode='w', encoding='utf-8') as shout_file:
shout_file.write(bytes(self.shout_body,'utf-8').decode('utf-8','ignore'))
Path(repo_path).mkdir()
author = "%s <%s>" % (self.username, self.user_email)
cmd = "cd %s && git add %s && git commit -m '%s' --author='%s'" % \
(repo_path, shout_filename, self.comment, author)
output = subprocess.check_output(cmd, shell=True)
print(output)
@staticmethod
async def git_task_worker():
print("[service.git] starting task worker")
while True:
task = await GitTask.queue.get()
try:
task.execute()
except Exception as err:
print("[service.git] worker error: %s" % (err))
cmd = (
"cd %s && git init && "
"git config user.name 'discours' && "
"git config user.email 'discours@discours.io' && "
"touch initial && git add initial && "
"git commit -m 'init repo'" % (repo_path)
)
output = subprocess.check_output(cmd, shell=True)
print(output)
def execute(self):
repo_path = "%s" % (SHOUTS_REPO)
if not Path(repo_path).exists():
self.init_repo()
# cmd = "cd %s && git checkout master" % (repo_path)
# output = subprocess.check_output(cmd, shell=True)
# print(output)
shout_filename = "%s.mdx" % (self.slug)
shout_full_filename = "%s/%s" % (repo_path, shout_filename)
with open(shout_full_filename, mode="w", encoding="utf-8") as shout_file:
shout_file.write(bytes(self.shout_body, "utf-8").decode("utf-8", "ignore"))
author = "%s <%s>" % (self.username, self.user_email)
cmd = "cd %s && git add %s && git commit -m '%s' --author='%s'" % (
repo_path,
shout_filename,
self.comment,
author,
)
output = subprocess.check_output(cmd, shell=True)
print(output)
@staticmethod
async def git_task_worker():
print("[service.git] starting task worker")
while True:
task = await GitTask.queue.get()
try:
task.execute()
except Exception as err:
print("[service.git] worker error: %s" % (err))

View File

@@ -1,47 +1,46 @@
import asyncio
from base.orm import local_session
from orm.shout import ShoutAuthor
class ShoutAuthorStorage:
authors_by_shout = {}
lock = asyncio.Lock()
period = 30*60 #sec
authors_by_shout = {}
lock = asyncio.Lock()
period = 30 * 60 # sec
@staticmethod
async def load(session):
self = ShoutAuthorStorage
sas = session.query(ShoutAuthor).all()
for sa in sas:
self.authors_by_shout[sa.shout] = self.authors_by_shout.get(sa.shout, [])
self.authors_by_shout[sa.shout].append([sa.user, sa.caption])
print('[zine.authors] %d shouts preprocessed' % len(self.authors_by_shout))
@staticmethod
async def load(session):
self = ShoutAuthorStorage
sas = session.query(ShoutAuthor).all()
for sa in sas:
self.authors_by_shout[sa.shout] = self.authors_by_shout.get(sa.shout, [])
self.authors_by_shout[sa.shout].append([sa.user, sa.caption])
print("[zine.authors] %d shouts preprocessed" % len(self.authors_by_shout))
@staticmethod
async def get_authors(shout):
self = ShoutAuthorStorage
async with self.lock:
return self.authors_by_shout.get(shout, [])
@staticmethod
async def get_authors(shout):
self = ShoutAuthorStorage
async with self.lock:
return self.authors_by_shout.get(shout, [])
@staticmethod
async def get_author_caption(shout, author):
self = ShoutAuthorStorage
async with self.lock:
for a in self.authors_by_shout.get(shout, []):
if author in a:
return a[1]
return { "error": "author caption not found" }
@staticmethod
async def get_author_caption(shout, author):
self = ShoutAuthorStorage
async with self.lock:
for a in self.authors_by_shout.get(shout, []):
if author in a:
return a[1]
return {"error": "author caption not found"}
@staticmethod
async def worker():
self = ShoutAuthorStorage
while True:
try:
with local_session() as session:
async with self.lock:
await self.load(session)
print("[zine.authors] state updated")
except Exception as err:
print("[zine.authors] errror: %s" % (err))
await asyncio.sleep(self.period)
@staticmethod
async def worker():
self = ShoutAuthorStorage
while True:
try:
with local_session() as session:
async with self.lock:
await self.load(session)
print("[zine.authors] state updated")
except Exception as err:
print("[zine.authors] errror: %s" % (err))
await asyncio.sleep(self.period)
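An illustrative lookup (slugs are made up); authors_by_shout maps a shout slug to [user, caption] pairs:
# sketch only
authors = await ShoutAuthorStorage.get_authors("example-shout")   # [[user_slug, caption], ...]
caption = await ShoutAuthorStorage.get_author_caption("example-shout", "some-author")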

View File

@@ -1,4 +1,3 @@
import asyncio
from datetime import datetime, timedelta
from sqlalchemy import and_, desc, func, select
@@ -11,148 +10,159 @@ from services.stat.viewed import ViewedByDay
class ShoutsCache:
limit = 200
period = 60*60 #1 hour
lock = asyncio.Lock()
limit = 200
period = 60 * 60 # 1 hour
lock = asyncio.Lock()
@staticmethod
async def prepare_recent_published():
with local_session() as session:
stmt = select(Shout).\
options(selectinload(Shout.authors), selectinload(Shout.topics)).\
where(Shout.publishedAt != None).\
order_by(desc("publishedAt")).\
limit(ShoutsCache.limit)
shouts = []
for row in session.execute(stmt):
shout = row.Shout
shout.rating = await ReactedStorage.get_rating(shout.slug) or 0
shouts.append(shout)
async with ShoutsCache.lock:
ShoutsCache.recent_published = shouts
print("[zine.cache] %d recently published shouts " % len(shouts))
@staticmethod
async def prepare_recent_published():
with local_session() as session:
stmt = (
select(Shout)
.options(selectinload(Shout.authors), selectinload(Shout.topics))
.where(Shout.publishedAt != None)
.order_by(desc("publishedAt"))
.limit(ShoutsCache.limit)
)
shouts = []
for row in session.execute(stmt):
shout = row.Shout
shout.rating = await ReactedStorage.get_rating(shout.slug) or 0
shouts.append(shout)
async with ShoutsCache.lock:
ShoutsCache.recent_published = shouts
print("[zine.cache] %d recently published shouts " % len(shouts))
@staticmethod
async def prepare_recent_all():
with local_session() as session:
stmt = select(Shout).\
options(
selectinload(Shout.authors),
selectinload(Shout.topics)
).\
order_by(desc("createdAt")).\
limit(ShoutsCache.limit)
shouts = []
for row in session.execute(stmt):
shout = row.Shout
# shout.topics = [t.slug for t in shout.topics]
shout.rating = await ReactedStorage.get_rating(shout.slug) or 0
shouts.append(shout)
async with ShoutsCache.lock:
ShoutsCache.recent_all = shouts
print("[zine.cache] %d recently created shouts " % len(shouts))
@staticmethod
async def prepare_recent_all():
with local_session() as session:
stmt = (
select(Shout)
.options(selectinload(Shout.authors), selectinload(Shout.topics))
.order_by(desc("createdAt"))
.limit(ShoutsCache.limit)
)
shouts = []
for row in session.execute(stmt):
shout = row.Shout
# shout.topics = [t.slug for t in shout.topics]
shout.rating = await ReactedStorage.get_rating(shout.slug) or 0
shouts.append(shout)
async with ShoutsCache.lock:
ShoutsCache.recent_all = shouts
print("[zine.cache] %d recently created shouts " % len(shouts))
@staticmethod
async def prepare_recent_reacted():
with local_session() as session:
stmt = select(Shout, func.max(Reaction.createdAt).label("reactionCreatedAt")).\
options(
selectinload(Shout.authors),
selectinload(Shout.topics),
).\
join(Reaction, Reaction.shout == Shout.slug).\
where(and_(Shout.publishedAt != None, Reaction.deletedAt == None)).\
group_by(Shout.slug).\
order_by(desc("reactionCreatedAt")).\
limit(ShoutsCache.limit)
shouts = []
for row in session.execute(stmt):
shout = row.Shout
# shout.topics = [t.slug for t in shout.topics]
shout.rating = await ReactedStorage.get_rating(shout.slug) or 0
shouts.append(shout)
async with ShoutsCache.lock:
ShoutsCache.recent_reacted = shouts
print("[zine.cache] %d recently reacted shouts " % len(shouts))
@staticmethod
async def prepare_recent_reacted():
with local_session() as session:
stmt = (
select(Shout, func.max(Reaction.createdAt).label("reactionCreatedAt"))
.options(
selectinload(Shout.authors),
selectinload(Shout.topics),
)
.join(Reaction, Reaction.shout == Shout.slug)
.where(and_(Shout.publishedAt != None, Reaction.deletedAt == None))
.group_by(Shout.slug)
.order_by(desc("reactionCreatedAt"))
.limit(ShoutsCache.limit)
)
shouts = []
for row in session.execute(stmt):
shout = row.Shout
# shout.topics = [t.slug for t in shout.topics]
shout.rating = await ReactedStorage.get_rating(shout.slug) or 0
shouts.append(shout)
async with ShoutsCache.lock:
ShoutsCache.recent_reacted = shouts
print("[zine.cache] %d recently reacted shouts " % len(shouts))
@staticmethod
async def prepare_top_overall():
with local_session() as session:
# with reacted times counter
stmt = (
select(Shout, func.count(Reaction.id).label("reacted"))
.options(
selectinload(Shout.authors),
selectinload(Shout.topics),
selectinload(Shout.reactions),
)
.join(Reaction)
.where(and_(Shout.publishedAt != None, Reaction.deletedAt == None))
.group_by(Shout.slug)
.order_by(desc("reacted"))
.limit(ShoutsCache.limit)
)
shouts = []
# with rating synthetic counter
for row in session.execute(stmt):
shout = row.Shout
shout.rating = await ReactedStorage.get_rating(shout.slug) or 0
shouts.append(shout)
shouts.sort(key=lambda shout: shout.rating, reverse=True)
async with ShoutsCache.lock:
print("[zine.cache] %d top shouts " % len(shouts))
ShoutsCache.top_overall = shouts
@staticmethod
async def prepare_top_overall():
with local_session() as session:
# with reacted times counter
stmt = select(Shout,
func.count(Reaction.id).label("reacted")).\
options(selectinload(Shout.authors), selectinload(Shout.topics), selectinload(Shout.reactions)).\
join(Reaction).\
where(and_(Shout.publishedAt != None, Reaction.deletedAt == None)).\
group_by(Shout.slug).\
order_by(desc("reacted")).\
limit(ShoutsCache.limit)
shouts = []
# with rating synthetic counter
for row in session.execute(stmt):
shout = row.Shout
shout.rating = await ReactedStorage.get_rating(shout.slug) or 0
shouts.append(shout)
shouts.sort(key = lambda shout: shout.rating, reverse = True)
async with ShoutsCache.lock:
print("[zine.cache] %d top shouts " % len(shouts))
ShoutsCache.top_overall = shouts
@staticmethod
async def prepare_top_month():
month_ago = datetime.now() - timedelta(days=30)
with local_session() as session:
stmt = (
select(Shout, func.count(Reaction.id).label("reacted"))
.options(selectinload(Shout.authors), selectinload(Shout.topics))
.join(Reaction)
.where(and_(Shout.createdAt > month_ago, Shout.publishedAt != None))
.group_by(Shout.slug)
.order_by(desc("reacted"))
.limit(ShoutsCache.limit)
)
shouts = []
for row in session.execute(stmt):
shout = row.Shout
shout.rating = await ReactedStorage.get_rating(shout.slug) or 0
shouts.append(shout)
shouts.sort(key=lambda shout: shout.rating, reverse=True)
async with ShoutsCache.lock:
print("[zine.cache] %d top month shouts " % len(shouts))
ShoutsCache.top_month = shouts
@staticmethod
async def prepare_top_month():
month_ago = datetime.now() - timedelta(days = 30)
with local_session() as session:
stmt = select(Shout, func.count(Reaction.id).label("reacted")).\
options(selectinload(Shout.authors), selectinload(Shout.topics)).\
join(Reaction).\
where(and_(Shout.createdAt > month_ago, Shout.publishedAt != None)).\
group_by(Shout.slug).\
order_by(desc("reacted")).\
limit(ShoutsCache.limit)
shouts = []
for row in session.execute(stmt):
shout = row.Shout
shout.rating = await ReactedStorage.get_rating(shout.slug) or 0
shouts.append(shout)
shouts.sort(key = lambda shout: shout.rating, reverse = True)
async with ShoutsCache.lock:
print("[zine.cache] %d top month shouts " % len(shouts))
ShoutsCache.top_month = shouts
@staticmethod
async def prepare_top_viewed():
month_ago = datetime.now() - timedelta(days=30)
with local_session() as session:
stmt = (
select(Shout, func.sum(ViewedByDay.value).label("viewed"))
.options(selectinload(Shout.authors), selectinload(Shout.topics))
.join(ViewedByDay)
.where(and_(ViewedByDay.day > month_ago, Shout.publishedAt != None))
.group_by(Shout.slug)
.order_by(desc("viewed"))
.limit(ShoutsCache.limit)
)
shouts = []
for row in session.execute(stmt):
shout = row.Shout
shout.rating = await ReactedStorage.get_rating(shout.slug) or 0
shouts.append(shout)
# shouts.sort(key = lambda shout: shout.viewed, reverse = True)
async with ShoutsCache.lock:
print("[zine.cache] %d top viewed shouts " % len(shouts))
ShoutsCache.top_viewed = shouts
@staticmethod
async def prepare_top_viewed():
month_ago = datetime.now() - timedelta(days = 30)
with local_session() as session:
stmt = select(Shout, func.sum(ViewedByDay.value).label("viewed")).\
options(selectinload(Shout.authors), selectinload(Shout.topics)).\
join(ViewedByDay).\
where(and_(ViewedByDay.day > month_ago, Shout.publishedAt != None)).\
group_by(Shout.slug).\
order_by(desc("viewed")).\
limit(ShoutsCache.limit)
shouts = []
for row in session.execute(stmt):
shout = row.Shout
shout.rating = await ReactedStorage.get_rating(shout.slug) or 0
shouts.append(shout)
# shouts.sort(key = lambda shout: shout.viewed, reverse = True)
async with ShoutsCache.lock:
print("[zine.cache] %d top viewed shouts " % len(shouts))
ShoutsCache.top_viewed = shouts
    @staticmethod
    async def worker():
        while True:
            try:
                await ShoutsCache.prepare_top_month()
                await ShoutsCache.prepare_top_overall()
                await ShoutsCache.prepare_top_viewed()
                await ShoutsCache.prepare_recent_published()
                await ShoutsCache.prepare_recent_all()
                await ShoutsCache.prepare_recent_reacted()
                print("[zine.cache] periodical update")
            except Exception as err:
                print("[zine.cache] error: %s" % (err))
                raise err
            await asyncio.sleep(ShoutsCache.period)
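The worker above is an endless refresh loop, so something has to schedule it once when the application starts. A minimal sketch of that wiring, assuming an asyncio-based startup hook; the start_cache_worker name and the way it is registered are illustrative and not part of this commit:

import asyncio

async def start_cache_worker():
    # Run the periodic refresh in the background so request handlers can
    # read ShoutsCache.top_month / top_viewed without touching the database.
    asyncio.create_task(ShoutsCache.worker())

# e.g. Starlette(routes=..., on_startup=[start_cache_worker])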

View File

@@ -3,56 +3,58 @@ from orm.topic import Topic
class TopicStorage:
    topics = {}
    lock = asyncio.Lock()

    topics = {}
    lock = asyncio.Lock()

    @staticmethod
    def init(session):
        self = TopicStorage
        topics = session.query(Topic)
        self.topics = dict([(topic.slug, topic) for topic in topics])
        for topic in self.topics.values():
            self.load_parents(topic)
        print('[zine.topics] %d precached' % len(self.topics.keys()))

    @staticmethod
    def init(session):
        self = TopicStorage
        topics = session.query(Topic)
        self.topics = dict([(topic.slug, topic) for topic in topics])
        for topic in self.topics.values():
            self.load_parents(topic)

    @staticmethod
    def load_parents(topic):
        self = TopicStorage
        parents = []
        for parent in self.topics.values():
            if topic.slug in parent.children:
                parents.append(parent.slug)
        topic.parents = parents
        return topic

        print("[zine.topics] %d precached" % len(self.topics.keys()))

    @staticmethod
    async def get_topics_all(page, size):
        end = page * size
        start = end - size
        self = TopicStorage
        async with self.lock:
            return list(self.topics.values())[start:end]

    @staticmethod
    def load_parents(topic):
        self = TopicStorage
        parents = []
        for parent in self.topics.values():
            if topic.slug in parent.children:
                parents.append(parent.slug)
        topic.parents = parents
        return topic

    @staticmethod
    async def get_topics_by_slugs(slugs):
        self = TopicStorage
        async with self.lock:
            if not slugs:
                return self.topics.values()
            topics = filter(lambda topic: topic.slug in slugs, self.topics.values())
            return list(topics)

    @staticmethod
    async def get_topics_all(page, size):
        end = page * size
        start = end - size
        self = TopicStorage
        async with self.lock:
            return list(self.topics.values())[start:end]

    @staticmethod
    async def get_topics_by_community(community):
        self = TopicStorage
        async with self.lock:
            topics = filter(lambda topic: topic.community == community, self.topics.values())
            return list(topics)

    @staticmethod
    async def get_topics_by_slugs(slugs):
        self = TopicStorage
        async with self.lock:
            if not slugs:
                return self.topics.values()
            topics = filter(lambda topic: topic.slug in slugs, self.topics.values())
            return list(topics)

    @staticmethod
    async def add_topic(topic):
        self = TopicStorage
        async with self.lock:
            self.topics[topic.slug] = topic
            self.load_parents(topic)

    @staticmethod
    async def get_topics_by_community(community):
        self = TopicStorage
        async with self.lock:
            topics = filter(
                lambda topic: topic.community == community, self.topics.values()
            )
            return list(topics)

    @staticmethod
    async def add_topic(topic):
        self = TopicStorage
        async with self.lock:
            self.topics[topic.slug] = topic
            self.load_parents(topic)
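TopicStorage keeps the whole topic catalogue in process memory behind an asyncio.Lock, so callers go through async accessors even though no I/O happens after init(). A rough usage sketch, assuming init() has already been called with a database session at startup; the function name and page/size values below are arbitrary illustrations:

async def list_topic_slugs(page=1, size=50):
    # Pagination is served from the in-memory cache, not the database.
    topics = await TopicStorage.get_topics_all(page, size)
    return [topic.slug for topic in topics]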

View File

@@ -1,4 +1,3 @@
from pathlib import Path
from os import environ
PORT = 8080
@@ -8,9 +7,16 @@ BACKEND_URL = environ.get("BACKEND_URL") or "https://localhost:8080"
OAUTH_CALLBACK_URL = environ.get("OAUTH_CALLBACK_URL") or "https://localhost:8080"
RESET_PWD_URL = environ.get("RESET_PWD_URL") or "https://localhost:8080/reset_pwd"
CONFIRM_EMAIL_URL = environ.get("CONFIRM_EMAIL_URL") or "https://new.discours.io"
ERROR_URL_ON_FRONTEND = environ.get("ERROR_URL_ON_FRONTEND") or "https://new.discours.io"
ERROR_URL_ON_FRONTEND = (
    environ.get("ERROR_URL_ON_FRONTEND") or "https://new.discours.io"
)
DB_URL = environ.get("DATABASE_URL") or environ.get("DB_URL") or "postgresql://postgres@localhost:5432/discoursio" or "sqlite:///db.sqlite3"
DB_URL = (
    environ.get("DATABASE_URL")
    or environ.get("DB_URL")
    or "postgresql://postgres@localhost:5432/discoursio"
    or "sqlite:///db.sqlite3"
)
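Worth noting: an or chain evaluates to its first truthy operand, and the Postgres URL literal in third position is always truthy, so the trailing "sqlite:///db.sqlite3" fallback is unreachable whatever the environment variables are set to. A quick check of that behaviour:

# The non-empty Postgres string wins whenever both env vars are unset (None).
assert (
    None
    or None
    or "postgresql://postgres@localhost:5432/discoursio"
    or "sqlite:///db.sqlite3"
) == "postgresql://postgres@localhost:5432/discoursio"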
JWT_ALGORITHM = "HS256"
JWT_SECRET_KEY = "8f1bd7696ffb482d8486dfbc6e7d16dd-secret-key"
JWT_LIFE_SPAN = 24 * 60 * 60 # seconds
@@ -24,9 +30,9 @@ MAILGUN_DOMAIN = environ.get("MAILGUN_DOMAIN")
OAUTH_PROVIDERS = ("GITHUB", "FACEBOOK", "GOOGLE")
OAUTH_CLIENTS = {}
for provider in OAUTH_PROVIDERS:
OAUTH_CLIENTS[provider] = {
"id" : environ.get(provider + "_OAUTH_ID"),
"key" : environ.get(provider + "_OAUTH_KEY")
}
OAUTH_CLIENTS[provider] = {
"id": environ.get(provider + "_OAUTH_ID"),
"key": environ.get(provider + "_OAUTH_KEY"),
}
SHOUTS_REPO = "content"
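OAUTH_CLIENTS ends up keyed by upper-case provider name, with id/key values that are simply None when the corresponding environment variables are missing. A hedged sketch of a guard a caller could apply before trusting an entry; this helper is illustrative and not part of the commit:

def get_oauth_client(provider):
    # Only return a client whose credentials were actually configured.
    client = OAUTH_CLIENTS.get(provider.upper(), {})
    if client.get("id") and client.get("key"):
        return client
    return None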