format and lint orm

parent 85892a88bc
commit a89a44f660
@@ -2,7 +2,7 @@ root = true
 [*]
 indent_style = tabs
-indent_size = 1
+indent_size = 2
 end_of_line = lf
 charset = utf-8
 trim_trailing_whitespace=true

.flake8 (new file, 5 lines)
@@ -0,0 +1,5 @@
+[flake8]
+ignore = D203
+exclude = .git,__pycache__
+max-complexity = 10
+max-line-length = 108
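The new .flake8 configuration is what the reformatted code in the rest of this commit is checked against. A minimal sketch of running the tools locally, assuming flake8 (and black, whose style the reformatting appears to follow) are installed in the project environment:

    pip install black flake8
    black .     # rewrites files in the style seen in the hunks below
    flake8 .    # picks up ignore/exclude/max-line-length from .flake8 automatically
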
@@ -16,119 +16,126 @@ from settings import JWT_AUTH_HEADER, EMAIL_TOKEN_LIFE_SPAN
 
 
 class _Authenticate:
     @classmethod
     async def verify(cls, token: str):
         """
         Rules for a token to be valid.
         1. token format is legal &&
            token exists in redis database &&
            token is not expired
         2. token format is legal &&
            token exists in redis database &&
            token is expired &&
            token is of specified type
         """
         try:
             payload = JWTCodec.decode(token)
         except ExpiredSignatureError:
             payload = JWTCodec.decode(token, verify_exp=False)
             if not await cls.exists(payload.user_id, token):
                 raise InvalidToken("Login expired, please login again")
             if payload.device == "mobile": # noqa
                 "we cat set mobile token to be valid forever"
                 return payload
         except DecodeError as e:
             raise InvalidToken("token format error") from e
         else:
             if not await cls.exists(payload.user_id, token):
                 raise InvalidToken("Login expired, please login again")
             return payload
 
     @classmethod
     async def exists(cls, user_id, token):
         return await TokenStorage.exist(f"{user_id}-{token}")
 
 
 class JWTAuthenticate(AuthenticationBackend):
     async def authenticate(
         self, request: HTTPConnection
     ) -> Optional[Tuple[AuthCredentials, AuthUser]]:
         if JWT_AUTH_HEADER not in request.headers:
             return AuthCredentials(scopes=[]), AuthUser(user_id=None)
 
         token = request.headers[JWT_AUTH_HEADER]
         try:
             payload = await _Authenticate.verify(token)
         except Exception as exc:
-            return AuthCredentials(scopes=[], error_message=str(exc)), AuthUser(user_id=None)
+            return AuthCredentials(scopes=[], error_message=str(exc)), AuthUser(
+                user_id=None
+            )
 
         if payload is None:
             return AuthCredentials(scopes=[]), AuthUser(user_id=None)
 
         if not payload.device in ("pc", "mobile"):
             return AuthCredentials(scopes=[]), AuthUser(user_id=None)
 
         user = await UserStorage.get_user(payload.user_id)
         if not user:
             return AuthCredentials(scopes=[]), AuthUser(user_id=None)
 
         scopes = await user.get_permission()
-        return AuthCredentials(user_id=payload.user_id, scopes=scopes, logged_in=True), user
+        return (
+            AuthCredentials(user_id=payload.user_id, scopes=scopes, logged_in=True),
+            user,
+        )
 
 
 class EmailAuthenticate:
     @staticmethod
     async def get_email_token(user):
         token = await Authorize.authorize(
-            user,
-            device="email",
-            life_span=EMAIL_TOKEN_LIFE_SPAN
+            user, device="email", life_span=EMAIL_TOKEN_LIFE_SPAN
         )
         return token
 
     @staticmethod
     async def authenticate(token):
         payload = await _Authenticate.verify(token)
         if payload is None:
             raise InvalidToken("invalid token")
         if payload.device != "email":
             raise InvalidToken("invalid token")
         with local_session() as session:
             user = session.query(User).filter_by(id=payload.user_id).first()
             if not user:
                 raise Exception("user not exist")
             if not user.emailConfirmed:
                 user.emailConfirmed = True
                 session.commit()
         auth_token = await Authorize.authorize(user)
         return (auth_token, user)
 
 
 class ResetPassword:
     @staticmethod
     async def get_reset_token(user):
         exp = datetime.utcnow() + timedelta(seconds=EMAIL_TOKEN_LIFE_SPAN)
         token = JWTCodec.encode(user, exp=exp, device="pc")
         await TokenStorage.save(f"{user.id}-reset-{token}", EMAIL_TOKEN_LIFE_SPAN, True)
         return token
 
     @staticmethod
     async def verify(token):
         try:
             payload = JWTCodec.decode(token)
         except ExpiredSignatureError:
             raise InvalidToken("Login expired, please login again")
         except DecodeError as e:
             raise InvalidToken("token format error") from e
         else:
             if not await TokenStorage.exist(f"{payload.user_id}-reset-{token}"):
                 raise InvalidToken("Login expired, please login again")
 
-            return payload.user_id
+        return payload.user_id
 
 
 def login_required(func):
     @wraps(func)
     async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
         auth: AuthCredentials = info.context["request"].auth
         if not auth.logged_in:
-            return {"error" : auth.error_message or "Please login"}
+            return {"error": auth.error_message or "Please login"}
         return await func(parent, info, *args, **kwargs)
+
     return wrap
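For orientation, a minimal sketch of how the login_required decorator above is applied to a GraphQL resolver; the resolver name and return value are illustrative only and not part of this commit:

    @login_required
    async def resolve_something(_, info):
        auth = info.context["request"].auth  # AuthCredentials set by JWTAuthenticate
        return {"user_id": auth.user_id}
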
@@ -5,38 +5,41 @@ from base.redis import redis
 from settings import JWT_LIFE_SPAN
 from auth.validations import User
 
+
 class TokenStorage:
     @staticmethod
     async def save(token_key, life_span, auto_delete=True):
         await redis.execute("SET", token_key, "True")
         if auto_delete:
             expire_at = (datetime.now() + timedelta(seconds=life_span)).timestamp()
             await redis.execute("EXPIREAT", token_key, int(expire_at))
 
     @staticmethod
     async def exist(token_key):
         return await redis.execute("GET", token_key)
 
 
 class Authorize:
     @staticmethod
-    async def authorize(user: User, device: str = "pc", life_span = JWT_LIFE_SPAN, auto_delete=True) -> str:
+    async def authorize(
+        user: User, device: str = "pc", life_span=JWT_LIFE_SPAN, auto_delete=True
+    ) -> str:
         exp = datetime.utcnow() + timedelta(seconds=life_span)
         token = JWTCodec.encode(user, exp=exp, device=device)
         await TokenStorage.save(f"{user.id}-{token}", life_span, auto_delete)
         return token
 
     @staticmethod
     async def revoke(token: str) -> bool:
         try:
             payload = JWTCodec.decode(token)
         except: # noqa
             pass
         else:
             await redis.execute("DEL", f"{payload.user_id}-{token}")
         return True
 
     @staticmethod
     async def revoke_all(user: User):
         tokens = await redis.execute("KEYS", f"{user.id}-*")
         await redis.execute("DEL", *tokens)

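Together these two classes define the token lifecycle used throughout the commit. A short sketch of the calls as they appear in the surrounding code, where user is any model object with an id:

    token = await Authorize.authorize(user, device="pc")    # encode a JWT and store "{user.id}-{token}" in redis
    valid = await TokenStorage.exist(f"{user.id}-{token}")  # truthy while the key exists
    await Authorize.revoke(token)                           # drops the redis key
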
auth/email.py (106 changed lines)
@@ -2,71 +2,83 @@ import requests
 from starlette.responses import RedirectResponse
 from auth.authenticate import EmailAuthenticate, ResetPassword
 from base.orm import local_session
-from settings import BACKEND_URL, MAILGUN_API_KEY, MAILGUN_DOMAIN, RESET_PWD_URL, \
-    CONFIRM_EMAIL_URL, ERROR_URL_ON_FRONTEND
+from settings import (
+    BACKEND_URL,
+    MAILGUN_API_KEY,
+    MAILGUN_DOMAIN,
+    RESET_PWD_URL,
+    CONFIRM_EMAIL_URL,
+    ERROR_URL_ON_FRONTEND,
+)
 
 MAILGUN_API_URL = "https://api.mailgun.net/v3/%s/messages" % (MAILGUN_DOMAIN)
 MAILGUN_FROM = "discours.io <noreply@%s>" % (MAILGUN_DOMAIN)
 
 AUTH_URL = "%s/email_authorize" % (BACKEND_URL)
 
-email_templates = {"confirm_email" : "", "auth_email" : "", "reset_password_email" : ""}
+email_templates = {"confirm_email": "", "auth_email": "", "reset_password_email": ""}
 
 
 def load_email_templates():
     for name in email_templates:
         filename = "auth/templates/%s.tmpl" % name
         with open(filename) as f:
             email_templates[name] = f.read()
     print("[auth.email] templates loaded")
 
 
 async def send_confirm_email(user):
     text = email_templates["confirm_email"]
     token = await EmailAuthenticate.get_email_token(user)
     await send_email(user, AUTH_URL, text, token)
 
 
 async def send_auth_email(user):
     text = email_templates["auth_email"]
     token = await EmailAuthenticate.get_email_token(user)
     await send_email(user, AUTH_URL, text, token)
 
 
 async def send_reset_password_email(user):
     text = email_templates["reset_password_email"]
     token = await ResetPassword.get_reset_token(user)
     await send_email(user, RESET_PWD_URL, text, token)
 
 
 async def send_email(user, url, text, token):
     to = "%s <%s>" % (user.username, user.email)
     url_with_token = "%s?token=%s" % (url, token)
     text = text % (url_with_token)
     response = requests.post(
         MAILGUN_API_URL,
-        auth = ("api", MAILGUN_API_KEY),
-        data = {
+        auth=("api", MAILGUN_API_KEY),
+        data={
             "from": MAILGUN_FROM,
             "to": to,
             "subject": "authorize log in",
-            "html": text
-        }
+            "html": text,
+        },
     )
     response.raise_for_status()
 
 
 async def email_authorize(request):
-    token = request.query_params.get('token')
+    token = request.query_params.get("token")
     if not token:
         url_with_error = "%s?error=%s" % (ERROR_URL_ON_FRONTEND, "INVALID_TOKEN")
-        return RedirectResponse(url = url_with_error)
+        return RedirectResponse(url=url_with_error)
 
     try:
         auth_token, user = await EmailAuthenticate.authenticate(token)
     except:
         url_with_error = "%s?error=%s" % (ERROR_URL_ON_FRONTEND, "INVALID_TOKEN")
-        return RedirectResponse(url = url_with_error)
+        return RedirectResponse(url=url_with_error)
 
     if not user.emailConfirmed:
         with local_session() as session:
             user.emailConfirmed = True
             session.commit()
 
-    response = RedirectResponse(url = CONFIRM_EMAIL_URL)
+    response = RedirectResponse(url=CONFIRM_EMAIL_URL)
     response.set_cookie("token", auth_token)
     return response

@@ -8,26 +8,32 @@ from sqlalchemy import or_
 
 
 class Identity:
     @staticmethod
     def identity(orm_user: OrmUser, password: str) -> User:
         user = User(**orm_user.dict())
         if user.password is None:
             raise InvalidPassword("Wrong user password")
         if not Password.verify(password, user.password):
             raise InvalidPassword("Wrong user password")
         return user
 
     @staticmethod
     def identity_oauth(input) -> User:
         with local_session() as session:
-            user = session.query(OrmUser).filter(
-                or_(OrmUser.oauth == input["oauth"], OrmUser.email == input["email"])
-            ).first()
+            user = (
+                session.query(OrmUser)
+                .filter(
+                    or_(
+                        OrmUser.oauth == input["oauth"], OrmUser.email == input["email"]
+                    )
+                )
+                .first()
+            )
             if not user:
                 user = OrmUser.create(**input)
             if not user.oauth:
                 user.oauth = input["oauth"]
             session.commit()
 
         user = User(**user.dict())
         return user

@@ -5,17 +5,22 @@ from auth.validations import PayLoad, User
 
 
 class JWTCodec:
     @staticmethod
     def encode(user: User, exp: datetime, device: str = "pc") -> str:
-        payload = {"user_id": user.id, "device": device, "exp": exp, "iat": datetime.utcnow()}
+        payload = {
+            "user_id": user.id,
+            "device": device,
+            "exp": exp,
+            "iat": datetime.utcnow(),
+        }
         return jwt.encode(payload, JWT_SECRET_KEY, JWT_ALGORITHM)
 
     @staticmethod
     def decode(token: str, verify_exp: bool = True) -> PayLoad:
         payload = jwt.decode(
             token,
             key=JWT_SECRET_KEY,
             options={"verify_exp": verify_exp},
             algorithms=[JWT_ALGORITHM],
         )
         return PayLoad(**payload)
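A minimal round-trip sketch of the codec above, mirroring how Authorize.authorize and _Authenticate.verify use it; the thirty-day life span is only an example value:

    exp = datetime.utcnow() + timedelta(seconds=30 * 24 * 3600)
    token = JWTCodec.encode(user, exp=exp, device="pc")
    payload = JWTCodec.decode(token)                    # PayLoad with user_id, device, exp, iat
    stale = JWTCodec.decode(token, verify_exp=False)    # same, but skips the expiry check
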
auth/oauth.py (113 changed lines)
@@ -8,79 +8,84 @@ from settings import OAUTH_CLIENTS, BACKEND_URL, OAUTH_CALLBACK_URL
 oauth = OAuth()
 
 oauth.register(
-    name='facebook',
+    name="facebook",
     client_id=OAUTH_CLIENTS["FACEBOOK"]["id"],
     client_secret=OAUTH_CLIENTS["FACEBOOK"]["key"],
-    access_token_url='https://graph.facebook.com/v11.0/oauth/access_token',
+    access_token_url="https://graph.facebook.com/v11.0/oauth/access_token",
     access_token_params=None,
-    authorize_url='https://www.facebook.com/v11.0/dialog/oauth',
+    authorize_url="https://www.facebook.com/v11.0/dialog/oauth",
     authorize_params=None,
-    api_base_url='https://graph.facebook.com/',
-    client_kwargs={'scope': 'public_profile email'},
+    api_base_url="https://graph.facebook.com/",
+    client_kwargs={"scope": "public_profile email"},
 )
 
 oauth.register(
-    name='github',
+    name="github",
     client_id=OAUTH_CLIENTS["GITHUB"]["id"],
     client_secret=OAUTH_CLIENTS["GITHUB"]["key"],
-    access_token_url='https://github.com/login/oauth/access_token',
+    access_token_url="https://github.com/login/oauth/access_token",
    access_token_params=None,
-    authorize_url='https://github.com/login/oauth/authorize',
+    authorize_url="https://github.com/login/oauth/authorize",
     authorize_params=None,
-    api_base_url='https://api.github.com/',
-    client_kwargs={'scope': 'user:email'},
+    api_base_url="https://api.github.com/",
+    client_kwargs={"scope": "user:email"},
 )
 
 oauth.register(
-    name='google',
+    name="google",
     client_id=OAUTH_CLIENTS["GOOGLE"]["id"],
     client_secret=OAUTH_CLIENTS["GOOGLE"]["key"],
     server_metadata_url="https://accounts.google.com/.well-known/openid-configuration",
-    client_kwargs={'scope': 'openid email profile'}
+    client_kwargs={"scope": "openid email profile"},
 )
 
 
 async def google_profile(client, request, token):
     profile = await client.parse_id_token(request, token)
     profile["id"] = profile["sub"]
     return profile
 
 
 async def facebook_profile(client, request, token):
-    profile = await client.get('me?fields=name,id,email', token=token)
+    profile = await client.get("me?fields=name,id,email", token=token)
     return profile.json()
 
 
 async def github_profile(client, request, token):
-    profile = await client.get('user', token=token)
+    profile = await client.get("user", token=token)
     return profile.json()
 
 
 profile_callbacks = {
-    "google" : google_profile,
-    "facebook" : facebook_profile,
-    "github" : github_profile
+    "google": google_profile,
+    "facebook": facebook_profile,
+    "github": github_profile,
 }
 
 
 async def oauth_login(request):
-    provider = request.path_params['provider']
-    request.session['provider'] = provider
+    provider = request.path_params["provider"]
+    request.session["provider"] = provider
     client = oauth.create_client(provider)
-    redirect_uri = "%s/%s" % (BACKEND_URL, 'oauth_authorize')
+    redirect_uri = "%s/%s" % (BACKEND_URL, "oauth_authorize")
     return await client.authorize_redirect(request, redirect_uri)
 
 
 async def oauth_authorize(request):
-    provider = request.session['provider']
+    provider = request.session["provider"]
     client = oauth.create_client(provider)
     token = await client.authorize_access_token(request)
     get_profile = profile_callbacks[provider]
     profile = await get_profile(client, request, token)
     user_oauth_info = "%s:%s" % (provider, profile["id"])
     user_input = {
-        "oauth" : user_oauth_info,
-        "email" : profile["email"],
-        "username" : profile["name"]
+        "oauth": user_oauth_info,
+        "email": profile["email"],
+        "username": profile["name"],
     }
     user = Identity.identity_oauth(user_input)
     token = await Authorize.authorize(user, device="pc")
 
-    response = RedirectResponse(url = OAUTH_CALLBACK_URL)
+    response = RedirectResponse(url=OAUTH_CALLBACK_URL)
     response.set_cookie("token", token)
     return response

base/orm.py (64 changed lines)
@@ -5,50 +5,52 @@ from sqlalchemy.orm import Session
 from sqlalchemy.sql.schema import Table
 from settings import DB_URL
 
-if DB_URL.startswith('sqlite'):
+if DB_URL.startswith("sqlite"):
     engine = create_engine(DB_URL)
 else:
-    engine = create_engine(DB_URL, convert_unicode=True, echo=False, \
-        pool_size=10, max_overflow=20)
+    engine = create_engine(
+        DB_URL, convert_unicode=True, echo=False, pool_size=10, max_overflow=20
+    )
 
 T = TypeVar("T")
 
 REGISTRY: Dict[str, type] = {}
 
 
 def local_session():
     return Session(bind=engine, expire_on_commit=False)
 
 
 class Base(declarative_base()):
     __table__: Table
     __tablename__: str
     __new__: Callable
     __init__: Callable
 
     __abstract__: bool = True
     __table_args__ = {"extend_existing": True}
     id: int = Column(Integer, primary_key=True)
 
     def __init_subclass__(cls, **kwargs):
         REGISTRY[cls.__name__] = cls
 
     @classmethod
     def create(cls: Generic[T], **kwargs) -> Generic[T]:
         instance = cls(**kwargs)
         return instance.save()
 
     def save(self) -> Generic[T]:
         with local_session() as session:
             session.add(self)
             session.commit()
         return self
 
     def update(self, input):
         column_names = self.__table__.columns.keys()
         for (name, value) in input.items():
             if name in column_names:
                 setattr(self, name, value)
 
     def dict(self) -> Dict[str, Any]:
         column_names = self.__table__.columns.keys()
         return {c: getattr(self, c) for c in column_names}

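The commit title points at this module. A short usage sketch of the Base helpers as other files in the commit use them; OrmUser stands for any model class that subclasses Base, as in the identity code above:

    user = OrmUser.create(email="test@example.com")  # instantiate and save() in one call
    user.update({"oauth": "github:123"})             # only known column names are assigned
    data = user.dict()                               # plain dict of column values
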
@@ -1,34 +1,34 @@
 import aioredis
 from settings import REDIS_URL
 
 
 class Redis:
     def __init__(self, uri=REDIS_URL):
         self._uri: str = uri
         self._instance = None
 
     async def connect(self):
         if self._instance is not None:
             return
         self._instance = aioredis.from_url(self._uri, encoding="utf-8")
 
     async def disconnect(self):
         if self._instance is None:
             return
         self._instance.close()
         await self._instance.wait_closed()
         self._instance = None
 
     async def execute(self, command, *args, **kwargs):
         return await self._instance.execute_command(command, *args, **kwargs)
 
     async def lrange(self, key, start, stop):
         return await self._instance.lrange(key, start, stop)
 
     async def mget(self, key, *keys):
         return await self._instance.mget(key, *keys)
 
 
 redis = Redis()
 
-__all__ = ['redis']
+__all__ = ["redis"]
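A brief usage sketch of this wrapper, matching how TokenStorage and main.py call it; the key and value are placeholders:

    await redis.connect()
    await redis.execute("SET", "some-key", "True")
    value = await redis.execute("GET", "some-key")
    await redis.disconnect()
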
@@ -3,9 +3,11 @@ from ariadne import MutationType, QueryType, SubscriptionType, ScalarType
 
 datetime_scalar = ScalarType("DateTime")
 
+
 @datetime_scalar.serializer
 def serialize_datetime(value):
     return value.isoformat()
 
+
 query = QueryType()
 mutation = MutationType()

main.py (41 changed lines)
@@ -19,31 +19,40 @@ from services.stat.topicstat import TopicStat
 from services.zine.shoutauthor import ShoutAuthorStorage
 import asyncio
 
-import_module('resolvers')
-schema = make_executable_schema(load_schema_from_path("schema.graphql"), resolvers)
+import_module("resolvers")
+schema = make_executable_schema(load_schema_from_path("schema.graphql"), resolvers)  # type: ignore
 
 middleware = [
     Middleware(AuthenticationMiddleware, backend=JWTAuthenticate()),
-    Middleware(SessionMiddleware, secret_key="!secret")
+    Middleware(SessionMiddleware, secret_key="!secret"),
 ]
 
 
 async def start_up():
     await redis.connect()
     viewed_storage_task = asyncio.create_task(ViewedStorage.worker())
     # reacted_storage_task = asyncio.create_task(ReactedStorage.worker())
     shouts_cache_task = asyncio.create_task(ShoutsCache.worker())
     shout_author_task = asyncio.create_task(ShoutAuthorStorage.worker())
     topic_stat_task = asyncio.create_task(TopicStat.worker())
     git_task = asyncio.create_task(GitTask.git_task_worker())
 
 
 async def shutdown():
     await redis.disconnect()
 
 
 routes = [
     Route("/oauth/{provider}", endpoint=oauth_login),
     Route("/oauth_authorize", endpoint=oauth_authorize),
-    Route("/email_authorize", endpoint=email_authorize)
+    Route("/email_authorize", endpoint=email_authorize),
 ]
 
-app = Starlette(debug=True, on_startup=[start_up], on_shutdown=[shutdown], middleware=middleware, routes=routes)
+app = Starlette(
+    debug=True,
+    on_startup=[start_up],
+    on_shutdown=[shutdown],
+    middleware=middleware,
+    routes=routes,
+)
 app.mount("/", GraphQL(schema, debug=True))

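With the app assembled like this it still has to be served by an ASGI server. A hedged sketch of a local run, assuming uvicorn is used; the project's actual entry point may differ:

    uvicorn main:app --port 8080 --reload
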
@@ -1,4 +1,4 @@
-''' cmd managed migration '''
+""" cmd managed migration """
 import csv
 import asyncio
 from datetime import datetime

@@ -8,6 +8,7 @@ import sys
 import os
 import bs4
 import numpy as np
 
 # from export import export_email_subscriptions
 from .export import export_mdx, export_slug
 from orm.reaction import Reaction

@@ -21,293 +22,308 @@ from .tables.comments import migrate_2stage as migrateComment_2stage
(formatting-only hunk: re-quoting, line wrapping, and splitting of one-line statements; shown here in its post-commit form)

from settings import DB_URL


TODAY = datetime.strftime(datetime.now(), "%Y%m%d")

OLD_DATE = "2016-03-05 22:22:00.350000"


def users_handle(storage):
    """migrating users first"""
    counter = 0
    id_map = {}
    print("[migration] migrating %d users" % (len(storage["users"]["data"])))
    for entry in storage["users"]["data"]:
        oid = entry["_id"]
        user = migrateUser(entry)
        storage["users"]["by_oid"][oid] = user  # full
        del user["password"]
        del user["notifications"]
        del user["emailConfirmed"]
        del user["username"]
        del user["email"]
        storage["users"]["by_slug"][user["slug"]] = user  # public
        id_map[user["oid"]] = user["slug"]
        counter += 1
    ce = 0
    for entry in storage["users"]["data"]:
        ce += migrateUser_2stage(entry, id_map)
    return storage


def topics_handle(storage):
    """topics from categories and tags"""
    counter = 0
    for t in storage["topics"]["tags"] + storage["topics"]["cats"]:
        if t["slug"] in storage["replacements"]:
            t["slug"] = storage["replacements"][t["slug"]]
            topic = migrateTopic(t)
            storage["topics"]["by_oid"][t["_id"]] = topic
            storage["topics"]["by_slug"][t["slug"]] = topic
            counter += 1
        else:
            print("[migration] topic " + t["slug"] + " ignored")
    for oldslug, newslug in storage["replacements"].items():
        if oldslug != newslug and oldslug in storage["topics"]["by_slug"]:
            oid = storage["topics"]["by_slug"][oldslug]["_id"]
            del storage["topics"]["by_slug"][oldslug]
            storage["topics"]["by_oid"][oid] = storage["topics"]["by_slug"][newslug]
    print("[migration] " + str(counter) + " topics migrated")
    print(
        "[migration] "
        + str(len(storage["topics"]["by_oid"].values()))
        + " topics by oid"
    )
    print(
        "[migration] "
        + str(len(storage["topics"]["by_slug"].values()))
        + " topics by slug"
    )
    # raise Exception
    return storage


async def shouts_handle(storage, args):
    """migrating content items one by one"""
    counter = 0
    discours_author = 0
    pub_counter = 0
    topics_dataset_bodies = []
    topics_dataset_tlist = []
    for entry in storage["shouts"]["data"]:
        # slug
        slug = get_shout_slug(entry)

        # single slug mode
        if "-" in args and slug not in args:
            continue

        # migrate
        shout = await migrateShout(entry, storage)
        storage["shouts"]["by_oid"][entry["_id"]] = shout
        storage["shouts"]["by_slug"][shout["slug"]] = shout
        # shouts.topics
        if not shout["topics"]:
            print("[migration] no topics!")

        # wuth author
        author = shout["authors"][0].slug
        if author == "discours":
            discours_author += 1
        # print('[migration] ' + shout['slug'] + ' with author ' + author)

        if entry.get("published"):
            if "mdx" in args:
                export_mdx(shout)
            pub_counter += 1

        # print main counter
        counter += 1
        line = str(counter + 1) + ": " + shout["slug"] + " @" + author
        print(line)
        b = bs4.BeautifulSoup(shout["body"], "html.parser")
        texts = []
        texts.append(shout["title"].lower().replace(r"[^а-яА-Яa-zA-Z]", ""))
        texts = b.findAll(text=True)
        topics_dataset_bodies.append(" ".join([x.strip().lower() for x in texts]))
        topics_dataset_tlist.append(shout["topics"])

    # np.savetxt('topics_dataset.csv', (topics_dataset_bodies, topics_dataset_tlist), delimiter=',', fmt='%s')

    print("[migration] " + str(counter) + " content items were migrated")
    print("[migration] " + str(pub_counter) + " have been published")
    print("[migration] " + str(discours_author) + " authored by @discours")
    return storage


async def comments_handle(storage):
    id_map = {}
    ignored_counter = 0
    missed_shouts = {}
    for oldcomment in storage["reactions"]["data"]:
        if not oldcomment.get("deleted"):
            reaction = await migrateComment(oldcomment, storage)
            if type(reaction) == str:
                missed_shouts[reaction] = oldcomment
            elif type(reaction) == Reaction:
                reaction = reaction.dict()
                id = reaction["id"]
                oid = reaction["oid"]
                id_map[oid] = id
            else:
                ignored_counter += 1

    for reaction in storage["reactions"]["data"]:
        migrateComment_2stage(reaction, id_map)
    print("[migration] " + str(len(id_map)) + " comments migrated")
    print("[migration] " + str(ignored_counter) + " comments ignored")
    print("[migration] " + str(len(missed_shouts.keys())) + " commented shouts missed")
    missed_counter = 0
    for missed in missed_shouts.values():
        missed_counter += len(missed)
    print("[migration] " + str(missed_counter) + " comments dropped")
    return storage


def bson_handle():
    # decode bson # preparing data
    from migration import bson2json

    bson2json.json_tables()


def export_one(slug, storage, args=None):
    topics_handle(storage)
    users_handle(storage)
    shouts_handle(storage, args)
    export_slug(slug, storage)


async def all_handle(storage, args):
    print("[migration] handle everything")
    users_handle(storage)
    topics_handle(storage)
    await shouts_handle(storage, args)
    await comments_handle(storage)
    # export_email_subscriptions()
    print("[migration] done!")


def data_load():
    storage = {
        "content_items": {
            "by_oid": {},
            "by_slug": {},
        },
        "shouts": {"by_oid": {}, "by_slug": {}, "data": []},
        "reactions": {"by_oid": {}, "by_slug": {}, "by_content": {}, "data": []},
        "topics": {
            "by_oid": {},
            "by_slug": {},
            "cats": [],
            "tags": [],
        },
        "users": {"by_oid": {}, "by_slug": {}, "data": []},
        "replacements": json.loads(open("migration/tables/replacements.json").read()),
    }
    users_data = []
    tags_data = []
    cats_data = []
    comments_data = []
    content_data = []
    try:
        users_data = json.loads(open("migration/data/users.json").read())
        print("[migration.load] " + str(len(users_data)) + " users ")
        tags_data = json.loads(open("migration/data/tags.json").read())
        storage["topics"]["tags"] = tags_data
        print("[migration.load] " + str(len(tags_data)) + " tags ")
        cats_data = json.loads(
            open("migration/data/content_item_categories.json").read()
        )
        storage["topics"]["cats"] = cats_data
        print("[migration.load] " + str(len(cats_data)) + " cats ")
        comments_data = json.loads(open("migration/data/comments.json").read())
        storage["reactions"]["data"] = comments_data
        print("[migration.load] " + str(len(comments_data)) + " comments ")
        content_data = json.loads(open("migration/data/content_items.json").read())
        storage["shouts"]["data"] = content_data
        print("[migration.load] " + str(len(content_data)) + " content items ")
        # fill out storage
        for x in users_data:
            storage["users"]["by_oid"][x["_id"]] = x
            # storage['users']['by_slug'][x['slug']] = x
            # no user.slug yet
        print(
            "[migration.load] "
            + str(len(storage["users"]["by_oid"].keys()))
            + " users by oid"
        )
        for x in tags_data:
            storage["topics"]["by_oid"][x["_id"]] = x
            storage["topics"]["by_slug"][x["slug"]] = x
        for x in cats_data:
            storage["topics"]["by_oid"][x["_id"]] = x
            storage["topics"]["by_slug"][x["slug"]] = x
        print(
            "[migration.load] "
            + str(len(storage["topics"]["by_slug"].keys()))
            + " topics by slug"
        )
        for item in content_data:
            slug = get_shout_slug(item)
            storage["content_items"]["by_slug"][slug] = item
            storage["content_items"]["by_oid"][item["_id"]] = item
        print("[migration.load] " + str(len(content_data)) + " content items")
        for x in comments_data:
            storage["reactions"]["by_oid"][x["_id"]] = x
            cid = x["contentItem"]
            storage["reactions"]["by_content"][cid] = x
            ci = storage["content_items"]["by_oid"].get(cid, {})
            if "slug" in ci:
                storage["reactions"]["by_slug"][ci["slug"]] = x
        print(
            "[migration.load] "
            + str(len(storage["reactions"]["by_content"].keys()))
            + " with comments"
        )
    except Exception as e:
        raise e
    storage["users"]["data"] = users_data
    storage["topics"]["tags"] = tags_data
    storage["topics"]["cats"] = cats_data
    storage["shouts"]["data"] = content_data
    storage["reactions"]["data"] = comments_data
    return storage


def mongo_download(url):
    if not url:
        raise Exception("\n\nYou should set MONGODB_URL enviroment variable\n")
    print("[migration] mongodump " + url)
    subprocess.call(
        [
            "mongodump",
            "--uri",
            url + "/?authSource=admin",
            "--forceTableScan",
        ],
        stderr=subprocess.STDOUT,
    )


def create_pgdump():
    pgurl = DB_URL
    if not pgurl:
        raise Exception("\n\nYou should set DATABASE_URL enviroment variable\n")
    subprocess.call(
        ["pg_dump", pgurl, "-f", TODAY + "-pgdump.sql"], stderr=subprocess.STDOUT
    )
    subprocess.call(["scp", TODAY + "-pgdump.sql", "root@build.discours.io:/root/."])


async def handle_auto():
    print("[migration] no command given, auto mode")
    url = os.getenv("MONGODB_URL")
    if url:
        mongo_download(url)
    bson_handle()
    await all_handle(data_load(), sys.argv)
    create_pgdump()


async def main():
    if len(sys.argv) > 1:
        cmd = sys.argv[1]
        if type(cmd) == str:
            print("[migration] command: " + cmd)
        await handle_auto()
    else:
        print("[migration] usage: python server.py migrate")


def migrate():
    loop = asyncio.get_event_loop()
    loop.run_until_complete(main())


if __name__ == "__main__":
    migrate()

@@ -4,25 +4,27 @@ import json
 
 from .utils import DateTimeEncoder
 
-def json_tables():
-    print('[migration] unpack dump/discours/*.bson to migration/data/*.json')
-    data = {
-        "content_items": [],
-        "content_item_categories": [],
-        "tags": [],
-        "email_subscriptions": [],
-        "users": [],
-        "comments": []
-    }
-    for table in data.keys():
-        lc = []
-        with open('dump/discours/'+table+'.bson', 'rb') as f:
-            bs = f.read()
-            f.close()
-            base = 0
-            while base < len(bs):
-                base, d = bson.decode_document(bs, base)
-                lc.append(d)
-            data[table] = lc
-            open(os.getcwd() + '/migration/data/'+table+'.json', 'w').write(json.dumps(lc,cls=DateTimeEncoder))
+
+def json_tables():
+    print("[migration] unpack dump/discours/*.bson to migration/data/*.json")
+    data = {
+        "content_items": [],
+        "content_item_categories": [],
+        "tags": [],
+        "email_subscriptions": [],
+        "users": [],
+        "comments": [],
+    }
+    for table in data.keys():
+        lc = []
+        with open("dump/discours/" + table + ".bson", "rb") as f:
+            bs = f.read()
+            f.close()
+            base = 0
+            while base < len(bs):
+                base, d = bson.decode_document(bs, base)
+                lc.append(d)
+            data[table] = lc
+            open(os.getcwd() + "/migration/data/" + table + ".json", "w").write(
+                json.dumps(lc, cls=DateTimeEncoder)
+            )


@@ -1,4 +1,3 @@
from datetime import datetime
import json
import os

@@ -6,100 +5,150 @@ import frontmatter

from .extract import extract_html, prepare_html_body
from .utils import DateTimeEncoder

OLD_DATE = "2016-03-05 22:22:00.350000"
EXPORT_DEST = "../discoursio-web/data/"
parentDir = "/".join(os.getcwd().split("/")[:-1])
contentDir = parentDir + "/discoursio-web/content/"
ts = datetime.now()


def get_metadata(r):
    authors = []
    for a in r["authors"]:
        authors.append(
            {  # a short version for public listings
                "slug": a.slug or "discours",
                "name": a.name or "Дискурс",
                "userpic": a.userpic or "https://discours.io/static/img/discours.png",
            }
        )
    metadata = {}
    metadata["title"] = r.get("title", "").replace("{", "(").replace("}", ")")
    metadata["authors"] = authors
    metadata["createdAt"] = r.get("createdAt", ts)
    metadata["layout"] = r["layout"]
    metadata["topics"] = [topic for topic in r["topics"]]
    metadata["topics"].sort()
    if r.get("cover", False):
        metadata["cover"] = r.get("cover")
    return metadata


def export_mdx(r):
    # print('[export] mdx %s' % r['slug'])
    content = ""
    metadata = get_metadata(r)
    content = frontmatter.dumps(frontmatter.Post(r["body"], **metadata))
    ext = "mdx"
    filepath = contentDir + r["slug"]
    bc = bytes(content, "utf-8").decode("utf-8", "ignore")
    open(filepath + "." + ext, "w").write(bc)


def export_body(shout, storage):
    entry = storage["content_items"]["by_oid"][shout["oid"]]
    if entry:
        shout["body"] = prepare_html_body(entry)  # prepare_md_body(entry)
        export_mdx(shout)
        print("[export] html for %s" % shout["slug"])
        body = extract_html(entry)
        open(contentDir + shout["slug"] + ".html", "w").write(body)
    else:
        raise Exception("no content_items entry found")


def export_slug(slug, storage):
    shout = storage["shouts"]["by_slug"][slug]
    shout = storage["shouts"]["by_slug"].get(slug)
    assert shout, "[export] no shout found by slug: %s " % slug
    author = shout["authors"][0]
    assert author, "[export] no author error"
    export_body(shout, storage)


def export_email_subscriptions():
    email_subscriptions_data = json.loads(
        open("migration/data/email_subscriptions.json").read()
    )
    for data in email_subscriptions_data:
        # migrate_email_subscription(data)
        pass
    print(
        "[migration] "
        + str(len(email_subscriptions_data))
        + " email subscriptions exported"
    )


def export_shouts(storage):
    # update what was just migrated or load json again
    if len(storage["users"]["by_slugs"].keys()) == 0:
        storage["users"]["by_slugs"] = json.loads(
            open(EXPORT_DEST + "authors.json").read()
        )
        print(
            "[migration] "
            + str(len(storage["users"]["by_slugs"].keys()))
            + " exported authors "
        )
    if len(storage["shouts"]["by_slugs"].keys()) == 0:
        storage["shouts"]["by_slugs"] = json.loads(
            open(EXPORT_DEST + "articles.json").read()
        )
        print(
            "[migration] "
            + str(len(storage["shouts"]["by_slugs"].keys()))
            + " exported articles "
        )
    for slug in storage["shouts"]["by_slugs"].keys():
        export_slug(slug, storage)


def export_json(
    export_articles={}, export_authors={}, export_topics={}, export_comments={}
):
    open(EXPORT_DEST + "authors.json", "w").write(
        json.dumps(
            export_authors,
            cls=DateTimeEncoder,
            indent=4,
            sort_keys=True,
            ensure_ascii=False,
        )
    )
    print("[migration] " + str(len(export_authors.items())) + " authors exported")
    open(EXPORT_DEST + "topics.json", "w").write(
        json.dumps(
            export_topics,
            cls=DateTimeEncoder,
            indent=4,
            sort_keys=True,
            ensure_ascii=False,
        )
    )
    print("[migration] " + str(len(export_topics.keys())) + " topics exported")

    open(EXPORT_DEST + "articles.json", "w").write(
        json.dumps(
            export_articles,
            cls=DateTimeEncoder,
            indent=4,
            sort_keys=True,
            ensure_ascii=False,
        )
    )
    print("[migration] " + str(len(export_articles.items())) + " articles exported")
    open(EXPORT_DEST + "comments.json", "w").write(
        json.dumps(
            export_comments,
            cls=DateTimeEncoder,
            indent=4,
            sort_keys=True,
            ensure_ascii=False,
        )
    )
    print(
        "[migration] "
        + str(len(export_comments.items()))
        + " exported articles with comments"
    )

@@ -3,322 +3,397 @@ import re

import base64
from .html2text import html2text

TOOLTIP_REGEX = r"(\/\/\/(.+)\/\/\/)"
contentDir = os.path.join(
    os.path.dirname(os.path.realpath(__file__)), "..", "..", "discoursio-web", "content"
)
s3 = "https://discours-io.s3.amazonaws.com/"
cdn = "https://assets.discours.io"


def replace_tooltips(body):
    # change if you prefer regexp
    newbody = body
    matches = list(re.finditer(TOOLTIP_REGEX, body, re.IGNORECASE | re.MULTILINE))[1:]
    for match in matches:
        newbody = body.replace(
            match.group(1), '<Tooltip text="' + match.group(2) + '" />'
        )  # NOTE: doesn't work
    if len(matches) > 0:
        print("[extract] found %d tooltips" % len(matches))
    return newbody


def place_tooltips(body):
    parts = body.split("&&&")
    l = len(parts)
    newparts = list(parts)
    placed = False
    if l & 1:
        if l > 1:
            i = 1
            print("[extract] found %d tooltips" % (l - 1))
            for part in parts[1:]:
                if i & 1:
                    placed = True
                    if 'a class="footnote-url" href=' in part:
                        print("[extract] footnote: " + part)
                        fn = 'a class="footnote-url" href="'
                        link = part.split(fn, 1)[1].split('"', 1)[0]
                        extracted_part = (
                            part.split(fn, 1)[0] + " " + part.split("/", 1)[-1]
                        )
                        newparts[i] = (
                            "<Tooltip"
                            + (' link="' + link + '" ' if link else "")
                            + ">"
                            + extracted_part
                            + "</Tooltip>"
                        )
                    else:
                        newparts[i] = "<Tooltip>%s</Tooltip>" % part
                        # print('[extract] ' + newparts[i])
                else:
                    # print('[extract] ' + part[:10] + '..')
                    newparts[i] = part
                i += 1
    return ("".join(newparts), placed)


IMG_REGEX = r"\!\[(.*?)\]\((data\:image\/(png|jpeg|jpg);base64\,((?:[A-Za-z\d+\/]{4})*(?:[A-Za-z\d+\/]{3}=|[A-Za-z\d+\/]{2}==)))\)"

parentDir = "/".join(os.getcwd().split("/")[:-1])
public = parentDir + "/discoursio-web/public"
cache = {}


def reextract_images(body, oid):
    # change if you prefer regexp
    matches = list(re.finditer(IMG_REGEX, body, re.IGNORECASE | re.MULTILINE))[1:]
    i = 0
    for match in matches:
        print("[extract] image " + match.group(1))
        ext = match.group(3)
        name = oid + str(i)
        link = public + "/upload/image-" + name + "." + ext
        img = match.group(4)
        title = match.group(1)  # NOTE: this is not the title
        if img not in cache:
            content = base64.b64decode(img + "==")
            print(str(len(img)) + " image bytes been written")
            open("../" + link, "wb").write(content)
            cache[img] = name
            i += 1
        else:
            print("[extract] image cached " + cache[img])
        body.replace(
            str(match), ""
        )  # WARNING: this does not work
    return body


IMAGES = {
    "data:image/png": "png",
    "data:image/jpg": "jpg",
    "data:image/jpeg": "jpg",
}

b64 = ";base64,"


def extract_imageparts(bodyparts, prefix):
    # recursive loop
    newparts = list(bodyparts)
    for current in bodyparts:
        i = bodyparts.index(current)
        for mime in IMAGES.keys():
            if mime == current[-len(mime) :] and (i + 1 < len(bodyparts)):
                print("[extract] " + mime)
                next = bodyparts[i + 1]
                ext = IMAGES[mime]
                b64end = next.index(")")
                b64encoded = next[:b64end]
                name = prefix + "-" + str(len(cache))
                link = "/upload/image-" + name + "." + ext
                print("[extract] name: " + name)
                print("[extract] link: " + link)
                print("[extract] %d bytes" % len(b64encoded))
                if b64encoded not in cache:
                    try:
                        content = base64.b64decode(b64encoded + "==")
                        open(public + link, "wb").write(content)
                        print(
                            "[extract] "
                            + str(len(content))
                            + " image bytes been written"
                        )
                        cache[b64encoded] = name
                    except:
                        raise Exception
                        # raise Exception('[extract] error decoding image %r' %b64encoded)
                else:
                    print("[extract] cached link " + cache[b64encoded])
                    name = cache[b64encoded]
                link = cdn + "/upload/image-" + name + "." + ext
                newparts[i] = (
                    current[: -len(mime)]
                    + current[-len(mime) :]
                    + link
                    + next[-b64end:]
                )
                newparts[i + 1] = next[:-b64end]
                break
    return (
        extract_imageparts(
            newparts[i] + newparts[i + 1] + b64.join(bodyparts[i + 2 :]), prefix
        )
        if len(bodyparts) > (i + 1)
        else "".join(newparts)
    )


def extract_dataimages(parts, prefix):
    newparts = list(parts)
    for part in parts:
        i = parts.index(part)
        if part.endswith("]("):
            [ext, rest] = parts[i + 1].split(b64)
            name = prefix + "-" + str(len(cache))
            if ext == "/jpeg":
                ext = "jpg"
            else:
                ext = ext.replace("/", "")
            link = "/upload/image-" + name + "." + ext
            print("[extract] filename: " + link)
            b64end = rest.find(")")
            if b64end != -1:
                b64encoded = rest[:b64end]
                print("[extract] %d text bytes" % len(b64encoded))
                # write if not cached
                if b64encoded not in cache:
                    try:
                        content = base64.b64decode(b64encoded + "==")
                        open(public + link, "wb").write(content)
                        print("[extract] " + str(len(content)) + " image bytes")
                        cache[b64encoded] = name
                    except:
                        raise Exception
                        # raise Exception('[extract] error decoding image %r' %b64encoded)
                else:
                    print("[extract] 0 image bytes, cached for " + cache[b64encoded])
                    name = cache[b64encoded]

                # update link with CDN
                link = cdn + "/upload/image-" + name + "." + ext

                # patch newparts
                newparts[i + 1] = link + rest[b64end:]
            else:
                raise Exception("cannot find the end of base64 encoded string")
        else:
            print("[extract] dataimage skipping part " + str(i))
            continue
    return "".join(newparts)


di = "data:image"


def extract_md_images(body, oid):
    newbody = ""
    body = (
        body.replace("\n! [](" + di, "\n ![](" + di)
        .replace("\n[](" + di, "\n![](" + di)
        .replace(" [](" + di, " ![](" + di)
    )
    parts = body.split(di)
    i = 0
    if len(parts) > 1:
        newbody = extract_dataimages(parts, oid)
    else:
        newbody = body
    return newbody


def cleanup(body):
    newbody = (
        body.replace("<", "")
        .replace(">", "")
        .replace("{", "(")
        .replace("}", ")")
        .replace("…", "...")
        .replace(" __ ", " ")
        .replace("_ _", " ")
        .replace("****", "")
        .replace("\u00a0", " ")
        .replace("\u02c6", "^")
        .replace("\u00a0", " ")
        .replace("\ufeff", "")
        .replace("\u200b", "")
        .replace("\u200c", "")
    )  # .replace('\u2212', '-')
    return newbody


def extract_md(body, oid):
    newbody = body
    if newbody:
        newbody = extract_md_images(newbody, oid)
        if not newbody:
            raise Exception("extract_images error")
        newbody = cleanup(newbody)
        if not newbody:
            raise Exception("cleanup error")
        newbody, placed = place_tooltips(newbody)
        if not newbody:
            raise Exception("place_tooltips error")
        if placed:
            newbody = "import Tooltip from '$/components/Article/Tooltip'\n\n" + newbody
    return newbody


def prepare_md_body(entry):
    # body modifications
    body = ""
    kind = entry.get("type")
    addon = ""
    if kind == "Video":
        addon = ""
        for m in entry.get("media", []):
            if "youtubeId" in m:
                addon += "<VideoPlayer youtubeId='" + m["youtubeId"] + "' />\n"
            elif "vimeoId" in m:
                addon += "<VideoPlayer vimeoId='" + m["vimeoId"] + "' />\n"
            else:
                print("[extract] media is not supported")
                print(m)
        body = "import VideoPlayer from '$/components/Article/VideoPlayer'\n\n" + addon

    elif kind == "Music":
        addon = ""
        for m in entry.get("media", []):
            artist = m.get("performer")
            trackname = ""
            if artist:
                trackname += artist + " - "
            if "title" in m:
                trackname += m.get("title", "")
            addon += (
                '<MusicPlayer src="'
                + m.get("fileUrl", "")
                + '" title="'
                + trackname
                + '" />\n'
            )
        body = "import MusicPlayer from '$/components/Article/MusicPlayer'\n\n" + addon

    body_orig = extract_html(entry)
    if body_orig:
        body += extract_md(html2text(body_orig), entry["_id"])
    if not body:
        print("[extract] empty MDX body")
    return body


def prepare_html_body(entry):
    # body modifications
    body = ""
    kind = entry.get("type")
    addon = ""
    if kind == "Video":
        addon = ""
        for m in entry.get("media", []):
            if "youtubeId" in m:
                addon += '<iframe width="420" height="345" src="http://www.youtube.com/embed/'
                addon += m["youtubeId"]
                addon += '?autoplay=1" frameborder="0" allowfullscreen></iframe>\n'
            elif "vimeoId" in m:
                addon += '<iframe src="https://player.vimeo.com/video/'
                addon += m["vimeoId"]
                addon += ' width="420" height="345" frameborder="0" allow="autoplay; fullscreen" allowfullscreen></iframe>'
            else:
                print("[extract] media is not supported")
                print(m)
        body += addon

    elif kind == "Music":
        addon = ""
        for m in entry.get("media", []):
            artist = m.get("performer")
            trackname = ""
            if artist:
                trackname += artist + " - "
            if "title" in m:
                trackname += m.get("title", "")
            addon += "<figure><figcaption>"
            addon += trackname
            addon += '</figcaption><audio controls src="'
            addon += m.get("fileUrl", "")
            addon += '"></audio></figure>'
        body += addon

    body = extract_html(entry)
    # if body_orig: body += extract_md(html2text(body_orig), entry['_id'])
    if not body:
        print("[extract] empty HTML body")
    return body


def extract_html(entry):
    body_orig = entry.get("body") or ""
    media = entry.get("media", [])
    kind = entry.get("type") or ""
    print("[extract] kind: " + kind)
    mbodies = set([])
    if media:
        # print('[extract] media is found')
        for m in media:
            mbody = m.get("body", "")
            addon = ""
            if kind == "Literature":
                mbody = m.get("literatureBody") or m.get("body", "")
            elif kind == "Image":
                cover = ""
                if "thumborId" in entry:
                    cover = cdn + "/unsafe/1600x/" + entry["thumborId"]
                if not cover:
                    if "image" in entry:
                        cover = entry["image"].get("url", "")
                    if "cloudinary" in cover:
                        cover = ""
                # else: print('[extract] cover: ' + cover)
                title = m.get("title", "").replace("\n", " ").replace(" ", " ")
                u = m.get("thumborId") or cover or ""
                if title:
                    addon += "<h4>" + title + "</h4>\n"
                if not u.startswith("http"):
                    u = s3 + u
                if not u:
                    print("[extract] no image url for " + str(m))
                if "cloudinary" in u:
                    u = "img/lost.svg"
                if u != cover or (u == cover and media.index(m) == 0):
                    addon += '<img src="' + u + '" alt="' + title + '" />\n'
                if addon:
                    body_orig += addon
                    # print('[extract] item addon: ' + addon)
            # if addon: print('[extract] addon: %s' % addon)
            if mbody and mbody not in mbodies:
                mbodies.add(mbody)
                body_orig += mbody
        if len(list(mbodies)) != len(media):
            print(
                "[extract] %d/%d media item bodies appended"
                % (len(list(mbodies)), len(media))
            )
            # print('[extract] media items body: \n' + body_orig)
    if not body_orig:
        for up in entry.get("bodyHistory", []) or []:
            body_orig = up.get("text", "") or ""
            if body_orig:
                print("[extract] got html body from history")
                break
    if not body_orig:
        print("[extract] empty HTML body")
    # body_html = str(BeautifulSoup(body_orig, features="html.parser"))
    return body_orig

File diff suppressed because it is too large

@@ -1 +1 @@
__all__ = (["users", "tags", "content_items", "comments"],)

@@ -8,104 +8,128 @@ from services.stat.reacted import ReactedStorage

ts = datetime.now()


async def migrate(entry, storage):
    """
    {
      "_id": "hdtwS8fSyFLxXCgSC",
      "body": "<p>",
      "contentItem": "mnK8KsJHPRi8DrybQ",
      "createdBy": "bMFPuyNg6qAD2mhXe",
      "thread": "01/",
      "createdAt": "2016-04-19 04:33:53+00:00",
      "ratings": [
        { "createdBy": "AqmRukvRiExNpAe8C", "value": 1 },
        { "createdBy": "YdE76Wth3yqymKEu5", "value": 1 }
      ],
      "rating": 2,
      "updatedAt": "2020-05-27 19:22:57.091000+00:00",
      "updatedBy": "0"
    }

    ->

    type Reaction {
      id: Int!
      shout: Shout!
      createdAt: DateTime!
      createdBy: User!
      updatedAt: DateTime
      deletedAt: DateTime
      deletedBy: User
      range: String # full / 0:2340
      kind: ReactionKind!
      body: String
      replyTo: Reaction
      stat: Stat
      old_id: String
      old_thread: String
    }
    """
    reaction_dict = {}
    reaction_dict["createdAt"] = (
        ts if not entry.get("createdAt") else date_parse(entry.get("createdAt"))
    )
    print("[migration] reaction original date %r" % entry.get("createdAt"))
    # print('[migration] comment date %r ' % comment_dict['createdAt'])
    reaction_dict["body"] = html2text(entry.get("body", ""))
    reaction_dict["oid"] = entry["_id"]
    if entry.get("createdAt"):
        reaction_dict["createdAt"] = date_parse(entry.get("createdAt"))
    shout_oid = entry.get("contentItem")
    if not shout_oid in storage["shouts"]["by_oid"]:
        if len(storage["shouts"]["by_oid"]) > 0:
            return shout_oid
        else:
            print("[migration] no shouts migrated yet")
            raise Exception
        return
    else:
        with local_session() as session:
            author = session.query(User).filter(User.oid == entry["createdBy"]).first()
            shout_dict = storage["shouts"]["by_oid"][shout_oid]
            if shout_dict:
                reaction_dict["shout"] = shout_dict["slug"]
                reaction_dict["createdBy"] = author.slug if author else "discours"
                reaction_dict["kind"] = ReactionKind.COMMENT

                # creating reaction from old comment
                day = (reaction_dict.get("createdAt") or ts).replace(
                    hour=0, minute=0, second=0, microsecond=0
                )
                reaction = Reaction.create(**reaction_dict)
                await ReactedStorage.increment(reaction)

                reaction_dict["id"] = reaction.id
                for comment_rating_old in entry.get("ratings", []):
                    rater = (
                        session.query(User)
                        .filter(User.oid == comment_rating_old["createdBy"])
                        .first()
                    )
                    reactedBy = (
                        rater
                        if rater
                        else session.query(User).filter(User.slug == "noname").first()
                    )
                    re_reaction_dict = {
                        "shout": reaction_dict["shout"],
                        "replyTo": reaction.id,
                        "kind": ReactionKind.LIKE
                        if comment_rating_old["value"] > 0
                        else ReactionKind.DISLIKE,
                        "createdBy": reactedBy.slug if reactedBy else "discours",
                    }
                    cts = comment_rating_old.get("createdAt")
                    if cts:
                        re_reaction_dict["createdAt"] = date_parse(cts)
                    try:
                        # creating reaction from old rating
                        rr = Reaction.create(**re_reaction_dict)
                        await ReactedStorage.increment(rr)

                    except Exception as e:
                        print("[migration] comment rating error: %r" % re_reaction_dict)
                        raise e
            else:
                print(
                    "[migration] error: cannot find shout for comment %r"
                    % reaction_dict
                )
    return reaction


def migrate_2stage(rr, old_new_id):
    reply_oid = rr.get("replyTo")
    if not reply_oid:
        return
    new_id = old_new_id.get(rr.get("oid"))
    if not new_id:
        return
    with local_session() as session:
        comment = session.query(Reaction).filter(Reaction.id == new_id).first()
        comment.replyTo = old_new_id.get(reply_oid)
        comment.save()
        session.commit()
    if not rr["body"]:
        raise Exception(rr)
@ -10,224 +10,279 @@ from migration.extract import prepare_html_body
|
||||||
from orm.community import Community
|
from orm.community import Community
|
||||||
from orm.reaction import Reaction, ReactionKind
|
from orm.reaction import Reaction, ReactionKind
|
||||||
|
|
||||||
OLD_DATE = '2016-03-05 22:22:00.350000'
|
OLD_DATE = "2016-03-05 22:22:00.350000"
|
||||||
ts = datetime.now()
|
ts = datetime.now()
|
||||||
type2layout = {
|
type2layout = {
|
||||||
'Article': 'article',
|
"Article": "article",
|
||||||
'Literature': 'prose',
|
"Literature": "prose",
|
||||||
'Music': 'music',
|
"Music": "music",
|
||||||
'Video': 'video',
|
"Video": "video",
|
||||||
'Image': 'image'
|
"Image": "image",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def get_shout_slug(entry):
|
def get_shout_slug(entry):
|
||||||
slug = entry.get('slug', '')
|
slug = entry.get("slug", "")
|
||||||
if not slug:
|
if not slug:
|
||||||
for friend in entry.get('friendlySlugs', []):
|
for friend in entry.get("friendlySlugs", []):
|
||||||
slug = friend.get('slug', '')
|
slug = friend.get("slug", "")
|
||||||
if slug: break
|
if slug:
|
||||||
return slug
|
break
|
||||||
|
return slug
|
||||||
|
|
||||||
|
|
||||||
async def migrate(entry, storage):
|
async def migrate(entry, storage):
|
||||||
# init, set title and layout
|
# init, set title and layout
|
||||||
r = {
|
r = {
|
||||||
'layout': type2layout[entry['type']],
|
"layout": type2layout[entry["type"]],
|
||||||
'title': entry['title'],
|
"title": entry["title"],
|
||||||
'community': Community.default_community.id,
|
"community": Community.default_community.id,
|
||||||
'authors': [],
|
"authors": [],
|
||||||
'topics': set([]),
|
"topics": set([]),
|
||||||
# 'rating': 0,
|
# 'rating': 0,
|
||||||
# 'ratings': [],
|
# 'ratings': [],
|
||||||
'createdAt': []
|
"createdAt": [],
|
||||||
}
|
}
|
||||||
topics_by_oid = storage['topics']['by_oid']
|
topics_by_oid = storage["topics"]["by_oid"]
|
||||||
users_by_oid = storage['users']['by_oid']
|
users_by_oid = storage["users"]["by_oid"]
|
||||||
|
|
||||||
# author
|
# author
|
||||||
|
|
||||||
oid = entry.get('createdBy', entry.get('_id', entry.get('oid')))
|
oid = entry.get("createdBy", entry.get("_id", entry.get("oid")))
|
||||||
userdata = users_by_oid.get(oid)
|
userdata = users_by_oid.get(oid)
|
||||||
if not userdata:
|
if not userdata:
|
||||||
app = entry.get('application')
|
app = entry.get("application")
|
||||||
if app:
|
if app:
|
||||||
userslug = translit(app['name'], 'ru', reversed=True)\
|
userslug = (
|
||||||
.replace(' ', '-')\
|
translit(app["name"], "ru", reversed=True)
|
||||||
.replace('\'', '')\
|
.replace(" ", "-")
|
||||||
.replace('.', '-').lower()
|
.replace("'", "")
|
||||||
userdata = {
|
.replace(".", "-")
|
||||||
'username': app['email'],
|
.lower()
|
||||||
'email': app['email'],
|
)
|
||||||
'name': app['name'],
|
userdata = {
|
||||||
'bio': app.get('bio', ''),
|
"username": app["email"],
|
||||||
'emailConfirmed': False,
|
"email": app["email"],
|
||||||
'slug': userslug,
|
"name": app["name"],
|
||||||
'createdAt': ts,
|
"bio": app.get("bio", ""),
|
||||||
'wasOnlineAt': ts
|
"emailConfirmed": False,
|
||||||
}
|
"slug": userslug,
|
||||||
else:
|
"createdAt": ts,
|
||||||
userdata = User.default_user.dict()
|
"wasOnlineAt": ts,
|
||||||
assert userdata, 'no user found for %s from ' % [oid, len(users_by_oid.keys())]
|
}
|
||||||
r['authors'] = [userdata, ]
|
else:
|
||||||
|
userdata = User.default_user.dict()
|
||||||
|
assert userdata, "no user found for %s from " % [oid, len(users_by_oid.keys())]
|
||||||
|
r["authors"] = [
|
||||||
|
userdata,
|
||||||
|
]
|
||||||
|
|
||||||
# slug
|
# slug
|
||||||
|
|
||||||
slug = get_shout_slug(entry)
|
slug = get_shout_slug(entry)
|
||||||
if slug: r['slug'] = slug
|
if slug:
|
||||||
else: raise Exception
|
r["slug"] = slug
|
||||||
|
else:
|
||||||
|
raise Exception
|
||||||
|
|
||||||
# cover
|
# cover
|
||||||
c = ''
|
c = ""
|
||||||
if entry.get('thumborId'):
|
if entry.get("thumborId"):
|
||||||
c = 'https://assets.discours.io/unsafe/1600x/' + entry['thumborId']
|
c = "https://assets.discours.io/unsafe/1600x/" + entry["thumborId"]
|
||||||
else:
|
else:
|
||||||
c = entry.get('image', {}).get('url')
|
c = entry.get("image", {}).get("url")
|
||||||
if not c or 'cloudinary' in c: c = ''
|
if not c or "cloudinary" in c:
|
||||||
r['cover'] = c
|
c = ""
|
||||||
|
r["cover"] = c
|
||||||
|
|
||||||
# timestamps
|
# timestamps
|
||||||
|
|
||||||
r['createdAt'] = date_parse(entry.get('createdAt', OLD_DATE))
|
r["createdAt"] = date_parse(entry.get("createdAt", OLD_DATE))
|
||||||
r['updatedAt'] = date_parse(entry['updatedAt']) if 'updatedAt' in entry else ts
|
r["updatedAt"] = date_parse(entry["updatedAt"]) if "updatedAt" in entry else ts
|
||||||
if entry.get('published'):
|
if entry.get("published"):
|
||||||
r['publishedAt'] = date_parse(entry.get('publishedAt', OLD_DATE))
|
r["publishedAt"] = date_parse(entry.get("publishedAt", OLD_DATE))
|
||||||
if 'deletedAt' in entry: r['deletedAt'] = date_parse(entry['deletedAt'])
|
if "deletedAt" in entry:
|
||||||
|
r["deletedAt"] = date_parse(entry["deletedAt"])
|
||||||
|
|
||||||
# topics
|
# topics
|
||||||
category = entry['category']
|
category = entry["category"]
|
||||||
mainTopic = topics_by_oid.get(category)
|
mainTopic = topics_by_oid.get(category)
|
||||||
if mainTopic:
|
if mainTopic:
|
||||||
r['mainTopic'] = storage['replacements'].get(mainTopic["slug"], mainTopic["slug"])
|
r["mainTopic"] = storage["replacements"].get(
|
||||||
topic_oids = [category, ]
|
mainTopic["slug"], mainTopic["slug"]
|
||||||
topic_oids.extend(entry.get('tags', []))
|
)
|
||||||
for oid in topic_oids:
|
topic_oids = [
|
||||||
if oid in storage['topics']['by_oid']:
|
category,
|
||||||
r['topics'].add(storage['topics']['by_oid'][oid]['slug'])
|
]
|
||||||
else:
|
topic_oids.extend(entry.get("tags", []))
|
||||||
print('[migration] unknown old topic id: ' + oid)
|
for oid in topic_oids:
|
||||||
r['topics'] = list(r['topics'])
|
if oid in storage["topics"]["by_oid"]:
|
||||||
|
r["topics"].add(storage["topics"]["by_oid"][oid]["slug"])
|
||||||
|
else:
|
||||||
|
print("[migration] unknown old topic id: " + oid)
|
||||||
|
r["topics"] = list(r["topics"])
|
||||||
|
|
||||||
entry['topics'] = r['topics']
|
entry["topics"] = r["topics"]
|
||||||
entry['cover'] = r['cover']
|
entry["cover"] = r["cover"]
|
||||||
entry['authors'] = r['authors']
|
entry["authors"] = r["authors"]
|
||||||
|
|
||||||
# body
|
# body
|
||||||
r['body'] = prepare_html_body(entry)
|
r["body"] = prepare_html_body(entry)
|
||||||
|
|
||||||
# save shout to db
|
# save shout to db
|
||||||
|
|
||||||
s = object()
|
s = object()
|
||||||
shout_dict = r.copy()
|
shout_dict = r.copy()
|
||||||
user = None
|
user = None
|
||||||
del shout_dict['topics'] # NOTE: AttributeError: 'str' object has no attribute '_sa_instance_state'
|
del shout_dict[
|
||||||
#del shout_dict['rating'] # NOTE: TypeError: 'rating' is an invalid keyword argument for Shout
|
"topics"
|
||||||
#del shout_dict['ratings']
|
] # NOTE: AttributeError: 'str' object has no attribute '_sa_instance_state'
|
||||||
email = userdata.get('email')
|
# del shout_dict['rating'] # NOTE: TypeError: 'rating' is an invalid keyword argument for Shout
|
||||||
slug = userdata.get('slug')
|
# del shout_dict['ratings']
|
||||||
if not slug: raise Exception
|
email = userdata.get("email")
|
||||||
with local_session() as session:
|
slug = userdata.get("slug")
|
||||||
# c = session.query(Community).all().pop()
|
if not slug:
|
||||||
if email: user = session.query(User).filter(User.email == email).first()
|
raise Exception
|
||||||
if not user and slug: user = session.query(User).filter(User.slug == slug).first()
|
with local_session() as session:
|
||||||
if not user and userdata:
|
# c = session.query(Community).all().pop()
|
||||||
try:
|
if email:
|
||||||
userdata['slug'] = userdata['slug'].lower().strip().replace(' ', '-')
|
user = session.query(User).filter(User.email == email).first()
|
||||||
user = User.create(**userdata)
|
if not user and slug:
|
||||||
except sqlalchemy.exc.IntegrityError:
|
user = session.query(User).filter(User.slug == slug).first()
|
||||||
print('[migration] user error: ' + userdata)
|
if not user and userdata:
|
||||||
userdata['id'] = user.id
|
try:
|
||||||
userdata['createdAt'] = user.createdAt
|
userdata["slug"] = userdata["slug"].lower().strip().replace(" ", "-")
|
||||||
storage['users']['by_slug'][userdata['slug']] = userdata
|
user = User.create(**userdata)
|
||||||
storage['users']['by_oid'][entry['_id']] = userdata
|
except sqlalchemy.exc.IntegrityError:
|
||||||
assert user, 'could not get a user'
|
print("[migration] user error: " + userdata)
|
||||||
shout_dict['authors'] = [ user, ]
|
userdata["id"] = user.id
|
||||||
|
userdata["createdAt"] = user.createdAt
|
||||||
|
storage["users"]["by_slug"][userdata["slug"]] = userdata
|
||||||
|
storage["users"]["by_oid"][entry["_id"]] = userdata
|
||||||
|
assert user, "could not get a user"
|
||||||
|
shout_dict["authors"] = [
|
||||||
|
user,
|
||||||
|
]
|
||||||
|
|
||||||
try:
|
try:
|
||||||
s = Shout.create(**shout_dict)
|
s = Shout.create(**shout_dict)
|
||||||
except sqlalchemy.exc.IntegrityError as e:
|
except sqlalchemy.exc.IntegrityError as e:
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
s = session.query(Shout).filter(Shout.slug == shout_dict['slug']).first()
|
s = session.query(Shout).filter(Shout.slug == shout_dict["slug"]).first()
|
||||||
bump = False
|
bump = False
|
||||||
if s:
|
if s:
|
||||||
for key in shout_dict:
|
for key in shout_dict:
|
||||||
if key in s.__dict__:
|
if key in s.__dict__:
|
||||||
if s.__dict__[key] != shout_dict[key]:
|
if s.__dict__[key] != shout_dict[key]:
|
||||||
print('[migration] shout already exists, but differs in %s' % key)
|
print(
|
||||||
bump = True
|
"[migration] shout already exists, but differs in %s"
|
||||||
else:
|
% key
|
||||||
print('[migration] shout already exists, but lacks %s' % key)
|
)
|
||||||
bump = True
|
bump = True
|
||||||
if bump:
|
else:
|
||||||
s.update(shout_dict)
|
print("[migration] shout already exists, but lacks %s" % key)
|
||||||
else:
|
bump = True
|
||||||
print('[migration] something went wrong with shout: \n%r' % shout_dict)
|
if bump:
|
||||||
raise e
|
s.update(shout_dict)
|
||||||
session.commit()
|
else:
|
||||||
except Exception as e:
|
print("[migration] something went wrong with shout: \n%r" % shout_dict)
|
||||||
print(e)
|
raise e
|
||||||
print(s)
|
session.commit()
|
||||||
raise Exception
|
except Exception as e:
|
||||||
|
print(e)
|
||||||
|
print(s)
|
||||||
|
raise Exception
|
||||||
|
|
||||||
|
# shout topics aftermath
|
||||||
|
shout_dict["topics"] = []
|
||||||
|
for tpc in r["topics"]:
|
||||||
|
oldslug = tpc
|
||||||
|
newslug = storage["replacements"].get(oldslug, oldslug)
|
||||||
|
if newslug:
|
||||||
|
            with local_session() as session:
                shout_topic_old = (
                    session.query(ShoutTopic)
                    .filter(ShoutTopic.shout == shout_dict["slug"])
                    .filter(ShoutTopic.topic == oldslug)
                    .first()
                )
                if shout_topic_old:
                    shout_topic_old.update({"slug": newslug})
                else:
                    shout_topic_new = (
                        session.query(ShoutTopic)
                        .filter(ShoutTopic.shout == shout_dict["slug"])
                        .filter(ShoutTopic.topic == newslug)
                        .first()
                    )
                    if not shout_topic_new:
                        try:
                            ShoutTopic.create(
                                **{"shout": shout_dict["slug"], "topic": newslug}
                            )
                        except:
                            print("[migration] shout topic error: " + newslug)
                session.commit()
            if newslug not in shout_dict["topics"]:
                shout_dict["topics"].append(newslug)
        else:
            print("[migration] ignored topic slug: \n%r" % tpc)
            # raise Exception

    # content_item ratings to reactions
    try:
        for content_rating in entry.get("ratings", []):
            with local_session() as session:
                rater = (
                    session.query(User)
                    .filter(User.oid == content_rating["createdBy"])
                    .first()
                )
                reactedBy = (
                    rater
                    if rater
                    else session.query(User).filter(User.slug == "noname").first()
                )
                if rater:
                    reaction_dict = {
                        "kind": ReactionKind.LIKE
                        if content_rating["value"] > 0
                        else ReactionKind.DISLIKE,
                        "createdBy": reactedBy.slug,
                        "shout": shout_dict["slug"],
                    }
                    cts = content_rating.get("createdAt")
                    if cts:
                        reaction_dict["createdAt"] = date_parse(cts)
                    reaction = (
                        session.query(Reaction)
                        .filter(Reaction.shout == reaction_dict["shout"])
                        .filter(Reaction.createdBy == reaction_dict["createdBy"])
                        .filter(Reaction.kind == reaction_dict["kind"])
                        .first()
                    )
                    if reaction:
                        reaction_dict["kind"] = (
                            ReactionKind.AGREE
                            if content_rating["value"] > 0
                            else ReactionKind.DISAGREE
                        )
                        reaction.update(reaction_dict)
                    else:
                        day = (reaction_dict.get("createdAt") or ts).replace(
                            hour=0, minute=0, second=0, microsecond=0
                        )
                        rea = Reaction.create(**reaction_dict)
                        await ReactedStorage.increment(rea)
                        # shout_dict['ratings'].append(reaction_dict)
    except:
        print("[migration] content_item.ratings error: \n%r" % content_rating)
        raise Exception

    # shout views
    ViewedByDay.create(shout=shout_dict["slug"], value=entry.get("views", 1))
    # del shout_dict['ratings']
    shout_dict["oid"] = entry.get("_id")
    storage["shouts"]["by_oid"][entry["_id"]] = shout_dict
    storage["shouts"]["by_slug"][slug] = shout_dict
    return shout_dict
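For orientation, a hedged sketch of the shared storage dict this step reads and fills; the field names come from the code above, while the sample mapping values are hypothetical:

# Hypothetical shape of the `storage` dict assumed by the migration above.
storage = {
    "replacements": {"old-topic-slug": "new-topic-slug"},  # old slug -> new slug
    "shouts": {
        "by_oid": {},   # filled with shout_dict keyed by the old Mongo _id
        "by_slug": {},  # filled with shout_dict keyed by the new slug
    },
}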
@@ -4,104 +4,144 @@ from orm import User, UserRating
from dateutil.parser import parse
from base.orm import local_session


def migrate(entry):
    if "subscribedTo" in entry:
        del entry["subscribedTo"]
    email = entry["emails"][0]["address"]
    user_dict = {
        "oid": entry["_id"],
        "roles": [],
        "ratings": [],
        "username": email,
        "email": email,
        "password": entry["services"]["password"].get("bcrypt", ""),
        "createdAt": parse(entry["createdAt"]),
        "emailConfirmed": bool(entry["emails"][0]["verified"]),
        "muted": False,  # amnesty
        "bio": entry["profile"].get("bio", ""),
        "notifications": [],
        "createdAt": parse(entry["createdAt"]),
        "roles": [],  # entry['roles'] # roles by community
        "ratings": [],  # entry['ratings']
        "links": [],
        "name": "anonymous",
    }
    if "updatedAt" in entry:
        user_dict["updatedAt"] = parse(entry["updatedAt"])
    if "wasOnineAt" in entry:
        user_dict["wasOnlineAt"] = parse(entry["wasOnlineAt"])
    if entry.get("profile"):
        # slug
        user_dict["slug"] = (
            entry["profile"].get("path").lower().replace(" ", "-").strip()
        )
        user_dict["bio"] = html2text(entry.get("profile").get("bio") or "")

        # userpic
        try:
            user_dict["userpic"] = (
                "https://assets.discours.io/unsafe/100x/"
                + entry["profile"]["thumborId"]
            )
        except KeyError:
            try:
                user_dict["userpic"] = entry["profile"]["image"]["url"]
            except KeyError:
                user_dict["userpic"] = ""

        # name
        fn = entry["profile"].get("firstName", "")
        ln = entry["profile"].get("lastName", "")
        name = user_dict["slug"] if user_dict["slug"] else "anonymous"
        name = fn if fn else name
        name = (name + " " + ln) if ln else name
        name = (
            entry["profile"]["path"].lower().strip().replace(" ", "-")
            if len(name) < 2
            else name
        )
        user_dict["name"] = name

        # links
        fb = entry["profile"].get("facebook", False)
        if fb:
            user_dict["links"].append(fb)
        vk = entry["profile"].get("vkontakte", False)
        if vk:
            user_dict["links"].append(vk)
        tr = entry["profile"].get("twitter", False)
        if tr:
            user_dict["links"].append(tr)
        ws = entry["profile"].get("website", False)
        if ws:
            user_dict["links"].append(ws)

        # some checks
        if not user_dict["slug"] and len(user_dict["links"]) > 0:
            user_dict["slug"] = user_dict["links"][0].split("/")[-1]

    user_dict["slug"] = user_dict.get("slug", user_dict["email"].split("@")[0])
    oid = user_dict["oid"]
    user_dict["slug"] = user_dict["slug"].lower().strip().replace(" ", "-")
    try:
        user = User.create(**user_dict.copy())
    except sqlalchemy.exc.IntegrityError:
        print("[migration] cannot create user " + user_dict["slug"])
        with local_session() as session:
            old_user = (
                session.query(User).filter(User.slug == user_dict["slug"]).first()
            )
            old_user.oid = oid
            user = old_user
            if not user:
                print("[migration] ERROR: cannot find user " + user_dict["slug"])
                raise Exception
    user_dict["id"] = user.id
    return user_dict


def migrate_2stage(entry, id_map):
    ce = 0
    for rating_entry in entry.get("ratings", []):
        rater_oid = rating_entry["createdBy"]
        rater_slug = id_map.get(rater_oid)
        if not rater_slug:
            ce += 1
            # print(rating_entry)
            continue
        oid = entry["_id"]
        author_slug = id_map.get(oid)
        user_rating_dict = {
            "value": rating_entry["value"],
            "rater": rater_slug,
            "user": author_slug,
        }
        with local_session() as session:
            try:
                user_rating = UserRating.create(**user_rating_dict)
            except sqlalchemy.exc.IntegrityError:
                old_rating = (
                    session.query(UserRating)
                    .filter(UserRating.rater == rater_slug)
                    .first()
                )
                print(
                    "[migration] cannot create "
                    + author_slug
                    + "`s rate from "
                    + rater_slug
                )
                print(
                    "[migration] concat rating value %d+%d=%d"
                    % (
                        old_rating.value,
                        rating_entry["value"],
                        old_rating.value + rating_entry["value"],
                    )
                )
                old_rating.update({"value": old_rating.value + rating_entry["value"]})
                session.commit()
            except Exception as e:
                print(e)
    return ce
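A hedged sketch of how these two passes might be driven, not part of the commit; the sample entry carries only the fields migrate() reads, all values are made up, and a working database session is assumed:

# Hypothetical Mongo-export entry; only fields read by migrate() are shown.
entry = {
    "_id": "55f4f1d5c4f0d2a8e3b1c111",
    "emails": [{"address": "author@example.com", "verified": True}],
    "services": {"password": {"bcrypt": "$2b$12$..."}},
    "createdAt": "2016-09-15T10:00:00.000Z",
    "profile": {"path": "Some Author", "bio": "<p>bio</p>"},
    "ratings": [{"createdBy": "55f4f1d5c4f0d2a8e3b1c222", "value": 1}],
}

user_dict = migrate(entry)                  # first pass: create or merge the User row
id_map = {entry["_id"]: user_dict["slug"]}  # oid -> slug, built over all entries
skipped = migrate_2stage(entry, id_map)     # second pass: port user-to-user ratings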
@@ -1,9 +1,10 @@
from datetime import datetime
from json import JSONEncoder


class DateTimeEncoder(JSONEncoder):
    def default(self, z):
        if isinstance(z, datetime):
            return str(z)
        else:
            return super().default(z)
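A small usage sketch for the encoder (not part of the diff):

import json
from datetime import datetime

payload = {"slug": "discours", "createdAt": datetime(2022, 1, 1, 12, 0)}
print(json.dumps(payload, cls=DateTimeEncoder))
# {"slug": "discours", "createdAt": "2022-01-01 12:00:00"}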
@@ -12,9 +12,19 @@ from services.auth.users import UserStorage
from services.stat.viewed import ViewedStorage
from base.orm import Base, engine, local_session

__all__ = [
    "User",
    "Role",
    "Operation",
    "Permission",
    "Community",
    "Shout",
    "Topic",
    "TopicFollower",
    "Notification",
    "Reaction",
    "UserRating",
]

Base.metadata.create_all(engine)
Operation.init_table()

@@ -24,8 +34,8 @@ Community.init_table()
Role.init_table()

with local_session() as session:
    ViewedStorage.init(session)
    ReactedStorage.init(session)
    RoleStorage.init(session)
    UserStorage.init(session)
    TopicStorage.init(session)
@@ -2,21 +2,22 @@ from datetime import datetime
from sqlalchemy import Boolean, Column, String, ForeignKey, DateTime
from base.orm import Base


class CollabAuthor(Base):
    __tablename__ = "collab_author"

    id = None  # type: ignore
    collab = Column(ForeignKey("collab.id"), primary_key=True)
    author = Column(ForeignKey("user.slug"), primary_key=True)
    accepted = Column(Boolean, default=False)


class Collab(Base):
    __tablename__ = "collab"

    authors = Column()
    title = Column(String, nullable=True, comment="Title")
    body = Column(String, nullable=True, comment="Body")
    pic = Column(String, nullable=True, comment="Picture")
    createdAt = Column(DateTime, default=datetime.now, comment="Created At")
    createdBy = Column(ForeignKey("user.id"), comment="Created By")
@@ -1,22 +1,23 @@
from datetime import datetime
from sqlalchemy import Column, String, ForeignKey, DateTime
from base.orm import Base


class ShoutCollection(Base):
    __tablename__ = "shout_collection"

    id = None  # type: ignore
    shout = Column(ForeignKey("shout.slug"), primary_key=True)
    collection = Column(ForeignKey("collection.slug"), primary_key=True)


class Collection(Base):
    __tablename__ = "collection"

    id = None  # type: ignore
    slug = Column(String, primary_key=True)
    title = Column(String, nullable=False, comment="Title")
    body = Column(String, nullable=True, comment="Body")
    pic = Column(String, nullable=True, comment="Picture")
    createdAt = Column(DateTime, default=datetime.now, comment="Created At")
    createdBy = Column(ForeignKey("user.id"), comment="Created By")
@@ -2,34 +2,39 @@ from datetime import datetime
from sqlalchemy import Column, String, ForeignKey, DateTime
from base.orm import Base, local_session


class CommunityFollower(Base):
    __tablename__ = "community_followers"

    id = None  # type: ignore
    follower = Column(ForeignKey("user.slug"), primary_key=True)
    community = Column(ForeignKey("community.slug"), primary_key=True)
    createdAt = Column(
        DateTime, nullable=False, default=datetime.now, comment="Created at"
    )


class Community(Base):
    __tablename__ = "community"

    name = Column(String, nullable=False, comment="Name")
    slug = Column(String, nullable=False, unique=True, comment="Slug")
    desc = Column(String, nullable=False, default="")
    pic = Column(String, nullable=False, default="")
    createdAt = Column(
        DateTime, nullable=False, default=datetime.now, comment="Created at"
    )
    createdBy = Column(ForeignKey("user.slug"), nullable=False, comment="Author")

    @staticmethod
    def init_table():
        with local_session() as session:
            default = (
                session.query(Community).filter(Community.slug == "discours").first()
            )
            if not default:
                default = Community.create(
                    name="Дискурс", slug="discours", createdBy="discours"
                )

            Community.default_community = default
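The init_table() above follows the idempotent bootstrap pattern used throughout this commit: look the default row up, create it only if missing, then cache it on the class. A hedged sketch of the startup call, mirroring the Community.init_table() context line in orm/__init__.py:

# Hedged sketch (not part of the commit): startup-time bootstrap.
Community.init_table()                       # creates the "discours" community on first run
default_id = Community.default_community.id  # cached default, reused e.g. by Role.init_table()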
@@ -1,11 +1,12 @@
from sqlalchemy import Column, String, JSON as JSONType
from base.orm import Base


class Notification(Base):
    __tablename__ = "notification"

    kind = Column(String, unique=True, primary_key=True)
    template = Column(String, nullable=False)
    variables = Column(JSONType, nullable=True)  # [ <var1>, .. ]


# looks like frontend code
153
orm/rbac.py
@@ -6,92 +6,113 @@ from orm.community import Community


class ClassType(TypeDecorator):
    impl = String

    @property
    def python_type(self):
        return NotImplemented

    def process_literal_param(self, value, dialect):
        return NotImplemented

    def process_bind_param(self, value, dialect):
        return value.__name__ if isinstance(value, type) else str(value)

    def process_result_value(self, value, dialect):
        class_ = REGISTRY.get(value)
        if class_ is None:
            warnings.warn(f"Can't find class <{value}>,find it yourself!", stacklevel=2)
        return class_


class Role(Base):
    __tablename__ = "role"

    name = Column(String, nullable=False, comment="Role Name")
    desc = Column(String, nullable=True, comment="Role Description")
    community = Column(
        ForeignKey("community.id", ondelete="CASCADE"),
        nullable=False,
        comment="Community",
    )
    permissions = relationship(lambda: Permission)

    @staticmethod
    def init_table():
        with local_session() as session:
            default = session.query(Role).filter(Role.name == "author").first()
            if default:
                Role.default_role = default
                return

            default = Role.create(
                name="author",
                desc="Role for author",
                community=Community.default_community.id,
            )

            Role.default_role = default


class Operation(Base):
    __tablename__ = "operation"
    name = Column(String, nullable=False, unique=True, comment="Operation Name")

    @staticmethod
    def init_table():
        with local_session() as session:
            edit_op = session.query(Operation).filter(Operation.name == "edit").first()
            if not edit_op:
                edit_op = Operation.create(name="edit")
            Operation.edit_id = edit_op.id  # type: ignore


class Resource(Base):
    __tablename__ = "resource"
    resource_class = Column(
        String, nullable=False, unique=True, comment="Resource class"
    )
    name = Column(String, nullable=False, unique=True, comment="Resource name")

    @staticmethod
    def init_table():
        with local_session() as session:
            shout_res = session.query(Resource).filter(Resource.name == "shout").first()
            if not shout_res:
                shout_res = Resource.create(name="shout", resource_class="shout")
            Resource.shout_id = shout_res.id  # type: ignore


class Permission(Base):
    __tablename__ = "permission"
    __table_args__ = (
        UniqueConstraint("role_id", "operation_id", "resource_id"),
        {"extend_existing": True},
    )

    role_id = Column(
        ForeignKey("role.id", ondelete="CASCADE"), nullable=False, comment="Role"
    )
    operation_id = Column(
        ForeignKey("operation.id", ondelete="CASCADE"),
        nullable=False,
        comment="Operation",
    )
    resource_id = Column(
        ForeignKey("resource.id", ondelete="CASCADE"),
        nullable=False,
        comment="Resource",
    )


if __name__ == "__main__":
    Base.metadata.create_all(engine)
    ops = [
        Permission(role_id=1, operation_id=1, resource_id=1),
        Permission(role_id=1, operation_id=2, resource_id=1),
        Permission(role_id=1, operation_id=3, resource_id=1),
        Permission(role_id=1, operation_id=4, resource_id=1),
        Permission(role_id=2, operation_id=4, resource_id=1),
    ]
    global_session.add_all(ops)
    global_session.commit()
@@ -5,27 +5,34 @@ from sqlalchemy import Enum
from services.stat.reacted import ReactedStorage, ReactionKind
from services.stat.viewed import ViewedStorage


class Reaction(Base):
    __tablename__ = "reaction"
    body = Column(String, nullable=True, comment="Reaction Body")
    createdAt = Column(
        DateTime, nullable=False, default=datetime.now, comment="Created at"
    )
    createdBy = Column(ForeignKey("user.slug"), nullable=False, comment="Sender")
    updatedAt = Column(DateTime, nullable=True, comment="Updated at")
    updatedBy = Column(ForeignKey("user.slug"), nullable=True, comment="Last Editor")
    deletedAt = Column(DateTime, nullable=True, comment="Deleted at")
    deletedBy = Column(ForeignKey("user.slug"), nullable=True, comment="Deleted by")
    shout = Column(ForeignKey("shout.slug"), nullable=False)
    replyTo = Column(
        ForeignKey("reaction.id"), nullable=True, comment="Reply to reaction ID"
    )
    range = Column(
        String, nullable=True, comment="Range in format <start index>:<end>"
    )
    kind = Column(Enum(ReactionKind), nullable=False, comment="Reaction kind")
    oid = Column(String, nullable=True, comment="Old ID")

    @property
    async def stat(self):
        return {
            "viewed": await ViewedStorage.get_reaction(self.id),
            "reacted": len(await ReactedStorage.get_reaction(self.id)),
            # TODO: "replied"
            "rating": await ReactedStorage.get_reaction_rating(self.id),
            "commented": len(await ReactedStorage.get_reaction_comments(self.id)),
        }
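Since stat is an async property, it has to be awaited; a hedged sketch of a read from inside an async resolver (the helper function is hypothetical, not part of the commit):

# Hedged sketch: fetch one reaction and read its aggregated counters.
async def reaction_stat_example(reaction_id):
    with local_session() as session:
        reaction = session.query(Reaction).filter(Reaction.id == reaction_id).first()
        # {"viewed": ..., "reacted": ..., "rating": ..., "commented": ...}
        return await reaction.stat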
72
orm/shout.py
@@ -4,7 +4,7 @@ from sqlalchemy.orm import relationship
from orm.user import User
from orm.topic import Topic, ShoutTopic
from orm.reaction import Reaction
from services.stat.reacted import ReactedStorage
from services.stat.viewed import ViewedStorage
from base.orm import Base

@@ -12,54 +12,56 @@ from base.orm import Base
class ShoutReactionsFollower(Base):
    __tablename__ = "shout_reactions_followers"

    id = None  # type: ignore
    follower = Column(ForeignKey("user.slug"), primary_key=True)
    shout = Column(ForeignKey("shout.slug"), primary_key=True)
    auto = Column(Boolean, nullable=False, default=False)
    createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
    deletedAt = Column(DateTime, nullable=True)


class ShoutAuthor(Base):
    __tablename__ = "shout_author"

    id = None  # type: ignore
    shout = Column(ForeignKey("shout.slug"), primary_key=True)
    user = Column(ForeignKey("user.slug"), primary_key=True)
    caption = Column(String, nullable=True, default="")


class ShoutAllowed(Base):
    __tablename__ = "shout_allowed"

    id = None  # type: ignore
    shout = Column(ForeignKey("shout.slug"), primary_key=True)
    user = Column(ForeignKey("user.id"), primary_key=True)


class Shout(Base):
    __tablename__ = "shout"

    id = None  # type: ignore
    slug = Column(String, primary_key=True)
    community = Column(Integer, ForeignKey("community.id"), nullable=False, comment="Community")
    body = Column(String, nullable=False, comment="Body")
    createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
    updatedAt = Column(DateTime, nullable=True, comment="Updated at")
    replyTo = Column(ForeignKey("shout.slug"), nullable=True)
    versionOf = Column(ForeignKey("shout.slug"), nullable=True)
    tags = Column(String, nullable=True)
    publishedBy = Column(ForeignKey("user.id"), nullable=True)
    publishedAt = Column(DateTime, nullable=True)
    cover = Column(String, nullable=True)
    title = Column(String, nullable=True)
    subtitle = Column(String, nullable=True)
    layout = Column(String, nullable=True)

    reactions = relationship(lambda: Reaction)
    authors = relationship(lambda: User, secondary=ShoutAuthor.__tablename__)
    topics = relationship(lambda: Topic, secondary=ShoutTopic.__tablename__)
    mainTopic = Column(ForeignKey("topic.slug"), nullable=True)
    visibleFor = relationship(lambda: User, secondary=ShoutAllowed.__tablename__)
    draft = Column(Boolean, default=True)
    oid = Column(String, nullable=True)

    @property
    async def stat(self):

@@ -67,5 +69,5 @@ class Shout(Base):
            "viewed": await ViewedStorage.get_shout(self.slug),
            "reacted": len(await ReactedStorage.get_shout(self.slug)),
            "commented": len(await ReactedStorage.get_comments(self.slug)),
            "rating": await ReactedStorage.get_rating(self.slug),
        }
42
orm/topic.py
@@ -2,30 +2,32 @@ from datetime import datetime
from sqlalchemy import Column, String, ForeignKey, DateTime, JSON as JSONType
from base.orm import Base


class ShoutTopic(Base):
    __tablename__ = "shout_topic"

    id = None  # type: ignore
    shout = Column(ForeignKey("shout.slug"), primary_key=True)
    topic = Column(ForeignKey("topic.slug"), primary_key=True)


class TopicFollower(Base):
    __tablename__ = "topic_followers"

    id = None  # type: ignore
    follower = Column(ForeignKey("user.slug"), primary_key=True)
    topic = Column(ForeignKey("topic.slug"), primary_key=True)
    createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")


class Topic(Base):
    __tablename__ = "topic"

    id = None  # type: ignore
    slug = Column(String, primary_key=True)
    title = Column(String, nullable=False, comment="Title")
    body = Column(String, nullable=True, comment="Body")
    pic = Column(String, nullable=True, comment="Picture")
    children = Column(JSONType, nullable=True, default=[], comment="list of children topics")
    community = Column(ForeignKey("community.slug"), nullable=False, comment="Community")
    oid = Column(String, nullable=True, comment="Old ID")
140
orm/user.py
@@ -1,89 +1,101 @@
from datetime import datetime
from sqlalchemy import (
    Column,
    Integer,
    String,
    ForeignKey,
    Boolean,
    DateTime,
    JSON as JSONType,
)
from sqlalchemy.orm import relationship
from base.orm import Base, local_session
from orm.rbac import Role
from services.auth.roles import RoleStorage


class UserNotifications(Base):
    __tablename__ = "user_notifications"
    # id auto
    user_id = Column(Integer, ForeignKey("user.id"))
    kind = Column(String, ForeignKey("notification.kind"))
    values = Column(JSONType, nullable=True)  # [ <var1>, .. ]


class UserRating(Base):
    __tablename__ = "user_rating"

    id = None  # type: ignore
    rater = Column(ForeignKey("user.slug"), primary_key=True)
    user = Column(ForeignKey("user.slug"), primary_key=True)
    value = Column(Integer)


class UserRole(Base):
    __tablename__ = "user_role"

    id = None  # type: ignore
    user_id = Column(ForeignKey("user.id"), primary_key=True)
    role_id = Column(ForeignKey("role.id"), primary_key=True)


class AuthorFollower(Base):
    __tablename__ = "author_follower"

    id = None  # type: ignore
    follower = Column(ForeignKey("user.slug"), primary_key=True)
    author = Column(ForeignKey("user.slug"), primary_key=True)
    createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")


class User(Base):
    __tablename__ = "user"

    email = Column(String, unique=True, nullable=False, comment="Email")
    username = Column(String, nullable=False, comment="Login")
    password = Column(String, nullable=True, comment="Password")
    bio = Column(String, nullable=True, comment="Bio")
    userpic = Column(String, nullable=True, comment="Userpic")
    name = Column(String, nullable=True, comment="Display name")
    slug = Column(String, unique=True, comment="User's slug")
    muted = Column(Boolean, default=False)
    emailConfirmed = Column(Boolean, default=False)
    createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
    wasOnlineAt = Column(DateTime, nullable=False, default=datetime.now, comment="Was online at")
    deletedAt = Column(DateTime, nullable=True, comment="Deleted at")
    links = Column(JSONType, nullable=True, comment="Links")
    oauth = Column(String, nullable=True)
    notifications = relationship(lambda: UserNotifications)
    ratings = relationship(UserRating, foreign_keys=UserRating.user)
    roles = relationship(lambda: Role, secondary=UserRole.__tablename__)
    oid = Column(String, nullable=True)

    @staticmethod
    def init_table():
        with local_session() as session:
            default = session.query(User).filter(User.slug == "discours").first()
            if not default:
                default = User.create(
                    id=0,
                    email="welcome@discours.io",
                    username="welcome@discours.io",
                    name="Дискурс",
                    slug="discours",
                    userpic="https://discours.io/images/logo-mini.svg",
                )

            User.default_user = default

    async def get_permission(self):
        scope = {}
        for user_role in self.roles:
            role: Role = await RoleStorage.get_role(user_role.id)  # type: ignore
            for p in role.permissions:
                if p.resource_id not in scope:
                    scope[p.resource_id] = set()
                scope[p.resource_id].add(p.operation_id)
        return scope


if __name__ == "__main__":
    print(User.get_permission(user_id=1))  # type: ignore
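A hedged sketch of how the scope dict built by get_permission() might be consumed; the resolver-side helper is hypothetical and assumes Operation and Resource from orm/rbac.py have been initialized:

# Hedged sketch (not part of the commit): "may this user edit shouts?"
async def can_edit_shout(user: User) -> bool:
    scope = await user.get_permission()  # {resource_id: {operation_id, ...}}
    return Operation.edit_id in scope.get(Resource.shout_id, set())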
@@ -17,3 +17,4 @@ requests
bcrypt
websockets
bson
flake8
@@ -1,76 +1,109 @@
from resolvers.auth import (
    login,
    sign_out,
    is_email_used,
    register,
    confirm,
    auth_forget,
    auth_reset,
)
from resolvers.zine import (
    get_shout_by_slug,
    follow,
    unfollow,
    view_shout,
    top_month,
    top_overall,
    recent_published,
    recent_all,
    top_viewed,
    shouts_by_authors,
    shouts_by_topics,
    shouts_by_communities,
)
from resolvers.profile import (
    get_users_by_slugs,
    get_current_user,
    get_user_reacted_shouts,
    get_user_roles,
)
from resolvers.topics import (
    topic_follow,
    topic_unfollow,
    topics_by_author,
    topics_by_community,
    topics_all,
)

# from resolvers.feed import shouts_for_feed, my_candidates
from resolvers.reactions import (
    create_reaction,
    delete_reaction,
    update_reaction,
    get_all_reactions,
)
from resolvers.collab import invite_author, remove_author
from resolvers.editor import create_shout, delete_shout, update_shout
from resolvers.community import (
    create_community,
    delete_community,
    get_community,
    get_communities,
)

__all__ = [
    "follow",
    "unfollow",
    # auth
    "login",
    "register",
    "is_email_used",
    "confirm",
    "auth_forget",
    "auth_reset",
    "sign_out",
    # profile
    "get_current_user",
    "get_users_by_slugs",
    # zine
    "shouts_for_feed",
    "my_candidates",
    "recent_published",
    "recent_reacted",
    "recent_all",
    "shouts_by_topics",
    "shouts_by_authors",
    "shouts_by_communities",
    "get_user_reacted_shouts",
    "top_month",
    "top_overall",
    "top_viewed",
    "view_shout",
    "view_reaction",
    "get_shout_by_slug",
    # editor
    "create_shout",
    "update_shout",
    "delete_shout",
    # collab
    "invite_author",
    "remove_author",
    # topics
    "topics_all",
    "topics_by_community",
    "topics_by_author",
    "topic_follow",
    "topic_unfollow",
    # communities
    "get_community",
    "get_communities",
    "create_community",
    "delete_community",
    # reactions
    "get_shout_reactions",
    "reactions_follow",
    "reactions_unfollow",
    "create_reaction",
    "update_reaction",
    "delete_reaction",
    "get_all_reactions",
]
@ -13,119 +13,126 @@ from resolvers.profile import get_user_info
|
||||||
from base.exceptions import InvalidPassword, InvalidToken
|
from base.exceptions import InvalidPassword, InvalidToken
|
||||||
from settings import JWT_AUTH_HEADER
|
from settings import JWT_AUTH_HEADER
|
||||||
|
|
||||||
|
|
||||||
@mutation.field("confirmEmail")
|
@mutation.field("confirmEmail")
|
||||||
async def confirm(*_, confirm_token):
|
async def confirm(*_, confirm_token):
|
||||||
''' confirm owning email address '''
|
"""confirm owning email address"""
|
||||||
auth_token, user = await Authorize.confirm(confirm_token)
|
auth_token, user = await Authorize.confirm(confirm_token)
|
||||||
if auth_token:
|
if auth_token:
|
||||||
user.emailConfirmed = True
|
user.emailConfirmed = True
|
||||||
user.save()
|
user.save()
|
||||||
return { "token": auth_token, "user" : user}
|
return {"token": auth_token, "user": user}
|
||||||
else:
|
else:
|
||||||
return { "error": "email not confirmed"}
|
return {"error": "email not confirmed"}
|
||||||
|
|
||||||
|
|
||||||
@mutation.field("registerUser")
|
@mutation.field("registerUser")
|
||||||
async def register(*_, email: str, password: str = ""):
|
async def register(*_, email: str, password: str = ""):
|
||||||
''' creates new user account '''
|
"""creates new user account"""
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
user = session.query(User).filter(User.email == email).first()
|
user = session.query(User).filter(User.email == email).first()
|
||||||
if user:
|
if user:
|
||||||
return {"error" : "user already exist"}
|
return {"error": "user already exist"}
|
||||||
|
|
||||||
user_dict = { "email": email }
|
user_dict = {"email": email}
|
||||||
username = email.split('@')[0]
|
username = email.split("@")[0]
|
||||||
user_dict["username"] = username
|
user_dict["username"] = username
|
||||||
user_dict["slug"] = quote_plus(translit(username, 'ru', reversed=True).replace('.', '-').lower())
|
user_dict["slug"] = quote_plus(
|
||||||
if password:
|
translit(username, "ru", reversed=True).replace(".", "-").lower()
|
||||||
user_dict["password"] = Password.encode(password)
|
)
|
||||||
user = User(**user_dict)
|
if password:
|
||||||
user.roles.append(Role.default_role)
|
user_dict["password"] = Password.encode(password)
|
||||||
with local_session() as session:
|
user = User(**user_dict)
|
||||||
session.add(user)
|
user.roles.append(Role.default_role)
|
||||||
session.commit()
|
with local_session() as session:
|
||||||
|
session.add(user)
|
||||||
|
session.commit()
|
||||||
|
|
||||||
await send_confirm_email(user)
|
await send_confirm_email(user)
|
||||||
|
|
||||||
|
return {"user": user}
|
||||||
|
|
||||||
return { "user": user }
|
|
||||||
|
|
||||||
@mutation.field("requestPasswordUpdate")
|
@mutation.field("requestPasswordUpdate")
|
||||||
async def auth_forget(_, info, email):
|
async def auth_forget(_, info, email):
|
||||||
''' send email to recover account '''
|
"""send email to recover account"""
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
user = session.query(User).filter(User.email == email).first()
|
user = session.query(User).filter(User.email == email).first()
|
||||||
if not user:
|
if not user:
|
||||||
return {"error" : "user not exist"}
|
return {"error": "user not exist"}
|
||||||
|
|
||||||
await send_reset_password_email(user)
|
await send_reset_password_email(user)
|
||||||
|
|
||||||
|
return {}
|
||||||
|
|
||||||
return {}
|
|
||||||
|
|
||||||
@mutation.field("updatePassword")
|
@mutation.field("updatePassword")
|
||||||
async def auth_reset(_, info, password, resetToken):
|
async def auth_reset(_, info, password, resetToken):
|
||||||
''' set the new password '''
|
"""set the new password"""
|
||||||
try:
|
try:
|
||||||
user_id = await ResetPassword.verify(resetToken)
|
user_id = await ResetPassword.verify(resetToken)
|
||||||
except InvalidToken as e:
|
except InvalidToken as e:
|
||||||
return {"error" : e.message}
|
return {"error": e.message}
|
||||||
|
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
user = session.query(User).filter_by(id = user_id).first()
|
user = session.query(User).filter_by(id=user_id).first()
|
||||||
if not user:
|
if not user:
|
||||||
return {"error" : "user not exist"}
|
return {"error": "user not exist"}
|
||||||
user.password = Password.encode(password)
|
user.password = Password.encode(password)
|
||||||
session.commit()
|
session.commit()
|
||||||
|
|
||||||
|
return {}
|
||||||
|
|
||||||
return {}
|
|
||||||
|
|
||||||
@query.field("signIn")
|
@query.field("signIn")
|
||||||
async def login(_, info: GraphQLResolveInfo, email: str, password: str = ""):
|
async def login(_, info: GraphQLResolveInfo, email: str, password: str = ""):
|
||||||
|
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
orm_user = session.query(User).filter(User.email == email).first()
|
orm_user = session.query(User).filter(User.email == email).first()
|
||||||
if orm_user is None:
|
if orm_user is None:
|
||||||
print(f"signIn {email}: email not found")
|
print(f"signIn {email}: email not found")
|
||||||
return {"error" : "email not found"}
|
return {"error": "email not found"}
|
||||||
|
|
||||||
if not password:
|
if not password:
|
||||||
print(f"signIn {email}: send auth email")
|
print(f"signIn {email}: send auth email")
|
||||||
await send_auth_email(orm_user)
|
await send_auth_email(orm_user)
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
if not orm_user.emailConfirmed:
|
if not orm_user.emailConfirmed:
|
||||||
return {"error" : "email not confirmed"}
|
return {"error": "email not confirmed"}
|
||||||
|
|
||||||
try:
|
try:
|
||||||
device = info.context["request"].headers['device']
|
device = info.context["request"].headers["device"]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
device = "pc"
|
device = "pc"
|
||||||
auto_delete = False if device == "mobile" else True # why autodelete with mobile?
|
auto_delete = device != "mobile"  # why autodelete with mobile?
|
||||||
|
|
||||||
try:
|
try:
|
||||||
user = Identity.identity(orm_user, password)
|
user = Identity.identity(orm_user, password)
|
||||||
except InvalidPassword:
|
except InvalidPassword:
|
||||||
print(f"signIn {email}: invalid password")
|
print(f"signIn {email}: invalid password")
|
||||||
return {"error" : "invalid password"}
|
return {"error": "invalid password"}
|
||||||
|
|
||||||
token = await Authorize.authorize(user, device=device, auto_delete=auto_delete)
|
token = await Authorize.authorize(user, device=device, auto_delete=auto_delete)
|
||||||
print(f"signIn {email}: OK")
|
print(f"signIn {email}: OK")
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"token" : token,
|
"token": token,
|
||||||
"user": orm_user,
|
"user": orm_user,
|
||||||
"info": await get_user_info(orm_user.slug)
|
"info": await get_user_info(orm_user.slug),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@query.field("signOut")
|
@query.field("signOut")
|
||||||
@login_required
|
@login_required
|
||||||
async def sign_out(_, info: GraphQLResolveInfo):
|
async def sign_out(_, info: GraphQLResolveInfo):
|
||||||
token = info.context["request"].headers[JWT_AUTH_HEADER]
|
token = info.context["request"].headers[JWT_AUTH_HEADER]
|
||||||
status = await Authorize.revoke(token)
|
await Authorize.revoke(token)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
@query.field("isEmailUsed")
|
@query.field("isEmailUsed")
|
||||||
async def is_email_used(_, info, email):
|
async def is_email_used(_, info, email):
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
user = session.query(User).filter(User.email == email).first()
|
user = session.query(User).filter(User.email == email).first()
|
||||||
return not user is None
|
return user is not None
|
||||||
|
|
|
@@ -6,67 +6,69 @@ from orm.user import User
|
||||||
from base.resolvers import query, mutation
|
from base.resolvers import query, mutation
|
||||||
from auth.authenticate import login_required
|
from auth.authenticate import login_required
|
||||||
|
|
||||||
|
|
||||||
@query.field("getCollabs")
|
@query.field("getCollabs")
|
||||||
@login_required
|
@login_required
|
||||||
async def get_collabs(_, info):
|
async def get_collabs(_, info):
|
||||||
auth = info.context["request"].auth
|
auth = info.context["request"].auth
|
||||||
user_id = auth.user_id
|
user_id = auth.user_id
|
||||||
collabs = []
|
collabs = []
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
user = session.query(User).where(User.id == user_id).first()
|
user = session.query(User).where(User.id == user_id).first()
|
||||||
collabs = session.query(Collab).filter(user.slug in Collab.authors)
|
collabs = session.query(Collab).filter(Collab.authors.contains(user.slug))  # assumes Collab.authors holds author slugs
|
||||||
return collabs
|
return collabs
|
||||||
|
|
||||||
|
|
||||||
@mutation.field("inviteAuthor")
|
@mutation.field("inviteAuthor")
|
||||||
@login_required
|
@login_required
|
||||||
async def invite_author(_, info, author, shout):
|
async def invite_author(_, info, author, shout):
|
||||||
auth = info.context["request"].auth
|
auth = info.context["request"].auth
|
||||||
user_id = auth.user_id
|
user_id = auth.user_id
|
||||||
|
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
shout = session.query(Shout).filter(Shout.slug == shout).first()
|
shout = session.query(Shout).filter(Shout.slug == shout).first()
|
||||||
if not shout:
|
if not shout:
|
||||||
return {"error": "invalid shout slug"}
|
return {"error": "invalid shout slug"}
|
||||||
authors = [a.id for a in shout.authors]
|
authors = [a.id for a in shout.authors]
|
||||||
if user_id not in authors:
|
if user_id not in authors:
|
||||||
return {"error": "access denied"}
|
return {"error": "access denied"}
|
||||||
author = session.query(User).filter(User.slug == author).first()
|
author = session.query(User).filter(User.slug == author).first()
|
||||||
if author.id in authors:
|
if author.id in authors:
|
||||||
return {"error": "already added"}
|
return {"error": "already added"}
|
||||||
shout.authors.append(author)
|
shout.authors.append(author)
|
||||||
shout.updated_at = datetime.now()
|
shout.updated_at = datetime.now()
|
||||||
shout.save()
|
shout.save()
|
||||||
session.commit()
|
session.commit()
|
||||||
|
|
||||||
# TODO: email notify
|
# TODO: email notify
|
||||||
|
|
||||||
|
return {}
|
||||||
|
|
||||||
return {}
|
|
||||||
|
|
||||||
@mutation.field("removeAuthor")
|
@mutation.field("removeAuthor")
|
||||||
@login_required
|
@login_required
|
||||||
async def remove_author(_, info, author, shout):
|
async def remove_author(_, info, author, shout):
|
||||||
auth = info.context["request"].auth
|
auth = info.context["request"].auth
|
||||||
user_id = auth.user_id
|
user_id = auth.user_id
|
||||||
|
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
shout = session.query(Shout).filter(Shout.slug == shout).first()
|
shout = session.query(Shout).filter(Shout.slug == shout).first()
|
||||||
if not shout:
|
if not shout:
|
||||||
return {"error": "invalid shout slug"}
|
return {"error": "invalid shout slug"}
|
||||||
authors = [author.id for author in shout.authors]
|
authors = [author.id for author in shout.authors]
|
||||||
if user_id not in authors:
|
if user_id not in authors:
|
||||||
return {"error": "access denied"}
|
return {"error": "access denied"}
|
||||||
author = session.query(User).filter(User.slug == author).first()
|
author = session.query(User).filter(User.slug == author).first()
|
||||||
if author.id not in authors:
|
if author.id not in authors:
|
||||||
return {"error": "not in authors"}
|
return {"error": "not in authors"}
|
||||||
shout.authors.remove(author)
|
shout.authors.remove(author)
|
||||||
shout.updated_at = datetime.now()
|
shout.updated_at = datetime.now()
|
||||||
shout.save()
|
shout.save()
|
||||||
session.commit()
|
session.commit()
|
||||||
|
|
||||||
# result = Result("INVITED")
|
# result = Result("INVITED")
|
||||||
# FIXME: await ShoutStorage.put(result)
|
# FIXME: await ShoutStorage.put(result)
|
||||||
|
|
||||||
# TODO: email notify
|
# TODO: email notify
|
||||||
|
|
||||||
return {}
|
return {}
|
||||||
|
|
|
@@ -7,93 +7,121 @@ from datetime import datetime
|
||||||
from typing import Collection
|
from typing import Collection
|
||||||
from sqlalchemy import and_
|
from sqlalchemy import and_
|
||||||
|
|
||||||
|
|
||||||
@mutation.field("createCollection")
|
@mutation.field("createCollection")
|
||||||
@login_required
|
@login_required
|
||||||
async def create_collection(_, info, input):
|
async def create_collection(_, info, input):
|
||||||
auth = info.context["request"].auth
|
auth = info.context["request"].auth
|
||||||
user_id = auth.user_id
|
user_id = auth.user_id
|
||||||
collection = Collection.create(
|
collection = Collection.create(
|
||||||
slug = input.get('slug', ''),
|
slug=input.get("slug", ""),
|
||||||
title = input.get('title', ''),
|
title=input.get("title", ""),
|
||||||
desc = input.get('desc', ''),
|
desc=input.get("desc", ""),
|
||||||
pic = input.get('pic', '')
|
pic=input.get("pic", ""),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
return {"collection": collection}
|
||||||
|
|
||||||
return {"collection": collection}
|
|
||||||
|
|
||||||
@mutation.field("updateCollection")
|
@mutation.field("updateCollection")
|
||||||
@login_required
|
@login_required
|
||||||
async def update_collection(_, info, input):
|
async def update_collection(_, info, input):
|
||||||
auth = info.context["request"].auth
|
auth = info.context["request"].auth
|
||||||
user_id = auth.user_id
|
user_id = auth.user_id
|
||||||
collection_slug = input.get('slug', '')
|
collection_slug = input.get("slug", "")
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
owner = session.query(User).filter(User.id == user_id) # note list here
|
owner = session.query(User).filter(User.id == user_id) # note list here
|
||||||
collection = session.query(Collection).filter(Collection.slug == collection_slug).first()
|
collection = (
|
||||||
editors = [e.slug for e in collection.editors]
|
session.query(Collection).filter(Collection.slug == collection_slug).first()
|
||||||
if not collection:
|
)
|
||||||
return {"error": "invalid collection id"}
|
editors = [e.slug for e in collection.editors]
|
||||||
if collection.createdBy not in (owner + editors):
|
if not collection:
|
||||||
return {"error": "access denied"}
|
return {"error": "invalid collection id"}
|
||||||
collection.title = input.get('title', '')
|
if collection.createdBy not in (owner + editors):
|
||||||
collection.desc = input.get('desc', '')
|
return {"error": "access denied"}
|
||||||
collection.pic = input.get('pic', '')
|
collection.title = input.get("title", "")
|
||||||
collection.updatedAt = datetime.now()
|
collection.desc = input.get("desc", "")
|
||||||
session.commit()
|
collection.pic = input.get("pic", "")
|
||||||
|
collection.updatedAt = datetime.now()
|
||||||
|
session.commit()
|
||||||
|
|
||||||
|
|
||||||
@mutation.field("deleteCollection")
|
@mutation.field("deleteCollection")
|
||||||
@login_required
|
@login_required
|
||||||
async def delete_collection(_, info, slug):
|
async def delete_collection(_, info, slug):
|
||||||
auth = info.context["request"].auth
|
auth = info.context["request"].auth
|
||||||
user_id = auth.user_id
|
user_id = auth.user_id
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
collection = session.query(Collection).filter(Collection.slug == slug).first()
|
collection = session.query(Collection).filter(Collection.slug == slug).first()
|
||||||
if not collection:
|
if not collection:
|
||||||
return {"error": "invalid collection slug"}
|
return {"error": "invalid collection slug"}
|
||||||
if collection.owner != user_id:
|
if collection.owner != user_id:
|
||||||
return {"error": "access denied"}
|
return {"error": "access denied"}
|
||||||
collection.deletedAt = datetime.now()
|
collection.deletedAt = datetime.now()
|
||||||
session.commit()
|
session.commit()
|
||||||
|
|
||||||
|
return {}
|
||||||
|
|
||||||
return {}
|
|
||||||
|
|
||||||
@query.field("getUserCollections")
|
@query.field("getUserCollections")
|
||||||
async def get_user_collections(_, info, userslug):
|
async def get_user_collections(_, info, userslug):
|
||||||
collections = []
|
collections = []
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
user = session.query(User).filter(User.slug == userslug).first()
|
user = session.query(User).filter(User.slug == userslug).first()
|
||||||
if user:
|
if user:
|
||||||
# TODO: check rights here
|
# TODO: check rights here
|
||||||
collections = session.\
|
collections = (
|
||||||
query(Collection).\
|
session.query(Collection)
|
||||||
where(and_(Collection.createdBy == userslug, Collection.publishedAt != None)).\
|
.where(
|
||||||
all()
|
and_(
|
||||||
for c in collections:
|
Collection.createdBy == userslug, Collection.publishedAt.isnot(None)
|
||||||
shouts = session.query(ShoutCollection).filter(ShoutCollection.collection == c.id).all()
|
)
|
||||||
c.amount = len(shouts)
|
)
|
||||||
return collections
|
.all()
|
||||||
|
)
|
||||||
|
for c in collections:
|
||||||
|
shouts = (
|
||||||
|
session.query(ShoutCollection)
|
||||||
|
.filter(ShoutCollection.collection == c.id)
|
||||||
|
.all()
|
||||||
|
)
|
||||||
|
c.amount = len(shouts)
|
||||||
|
return collections
|
||||||
|
|
||||||
|
|
||||||
@query.field("getMyCollections")
|
@query.field("getMyCollections")
|
||||||
async def get_user_collections(_, info, userslug):
|
async def get_my_collections_by_user(_, info, userslug):
|
||||||
collections = []
|
collections = []
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
user = session.query(User).filter(User.slug == userslug).first()
|
user = session.query(User).filter(User.slug == userslug).first()
|
||||||
if user:
|
if user:
|
||||||
# TODO: check rights here
|
# TODO: check rights here
|
||||||
collections = session.\
|
collections = (
|
||||||
query(Collection).\
|
session.query(Collection)
|
||||||
where(and_(Collection.createdBy == userslug, Collection.publishedAt != None)).\
|
.where(
|
||||||
all()
|
and_(
|
||||||
for c in collections:
|
Collection.createdBy == userslug, Collection.publishedAt.isnot(None)
|
||||||
shouts = session.query(ShoutCollection).filter(ShoutCollection.collection == c.id).all()
|
)
|
||||||
c.amount = len(shouts)
|
)
|
||||||
return collections
|
.all()
|
||||||
|
)
|
||||||
|
for c in collections:
|
||||||
|
shouts = (
|
||||||
|
session.query(ShoutCollection)
|
||||||
|
.filter(ShoutCollection.collection == c.id)
|
||||||
|
.all()
|
||||||
|
)
|
||||||
|
c.amount = len(shouts)
|
||||||
|
return collections
|
||||||
|
|
||||||
|
|
||||||
@query.field("getMyColelctions")
|
@query.field("getMyColelctions")
|
||||||
@login_required
|
@login_required
|
||||||
async def get_my_collections(_, info):
|
async def get_my_collections(_, info):
|
||||||
auth = info.context["request"].auth
|
auth = info.context["request"].auth
|
||||||
user_id = auth.user_id
|
user_id = auth.user_id
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
collections = session.query(Collection).when(Collection.createdBy == user_id).all()
|
collections = (
|
||||||
return collections
|
session.query(Collection).filter(Collection.createdBy == user_id).all()
|
||||||
|
)
|
||||||
|
return collections
|
||||||
|
|
|
@@ -7,96 +7,112 @@ from datetime import datetime
|
||||||
from typing import List
|
from typing import List
|
||||||
from sqlalchemy import and_
|
from sqlalchemy import and_
|
||||||
|
|
||||||
|
|
||||||
@mutation.field("createCommunity")
|
@mutation.field("createCommunity")
|
||||||
@login_required
|
@login_required
|
||||||
async def create_community(_, info, input):
|
async def create_community(_, info, input):
|
||||||
auth = info.context["request"].auth
|
auth = info.context["request"].auth
|
||||||
user_id = auth.user_id
|
user_id = auth.user_id
|
||||||
|
|
||||||
community = Community.create(
|
community = Community.create(
|
||||||
slug = input.get('slug', ''),
|
slug=input.get("slug", ""),
|
||||||
title = input.get('title', ''),
|
title=input.get("title", ""),
|
||||||
desc = input.get('desc', ''),
|
desc=input.get("desc", ""),
|
||||||
pic = input.get('pic', '')
|
pic=input.get("pic", ""),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
return {"community": community}
|
||||||
|
|
||||||
return {"community": community}
|
|
||||||
|
|
||||||
@mutation.field("updateCommunity")
|
@mutation.field("updateCommunity")
|
||||||
@login_required
|
@login_required
|
||||||
async def update_community(_, info, input):
|
async def update_community(_, info, input):
|
||||||
auth = info.context["request"].auth
|
auth = info.context["request"].auth
|
||||||
user_id = auth.user_id
|
user_id = auth.user_id
|
||||||
community_slug = input.get('slug', '')
|
community_slug = input.get("slug", "")
|
||||||
|
|
||||||
|
with local_session() as session:
|
||||||
|
owner = session.query(User).filter(User.id == user_id) # note list here
|
||||||
|
community = (
|
||||||
|
session.query(Community).filter(Community.slug == community_slug).first()
|
||||||
|
)
|
||||||
|
editors = [e.slug for e in community.editors]
|
||||||
|
if not community:
|
||||||
|
return {"error": "invalid community id"}
|
||||||
|
if community.createdBy not in (owner + editors):
|
||||||
|
return {"error": "access denied"}
|
||||||
|
community.title = input.get("title", "")
|
||||||
|
community.desc = input.get("desc", "")
|
||||||
|
community.pic = input.get("pic", "")
|
||||||
|
community.updatedAt = datetime.now()
|
||||||
|
session.commit()
|
||||||
|
|
||||||
with local_session() as session:
|
|
||||||
owner = session.query(User).filter(User.id == user_id) # note list here
|
|
||||||
community = session.query(Community).filter(Community.slug == community_slug).first()
|
|
||||||
editors = [e.slug for e in community.editors]
|
|
||||||
if not community:
|
|
||||||
return {"error": "invalid community id"}
|
|
||||||
if community.createdBy not in (owner + editors):
|
|
||||||
return {"error": "access denied"}
|
|
||||||
community.title = input.get('title', '')
|
|
||||||
community.desc = input.get('desc', '')
|
|
||||||
community.pic = input.get('pic', '')
|
|
||||||
community.updatedAt = datetime.now()
|
|
||||||
session.commit()
|
|
||||||
|
|
||||||
@mutation.field("deleteCommunity")
|
@mutation.field("deleteCommunity")
|
||||||
@login_required
|
@login_required
|
||||||
async def delete_community(_, info, slug):
|
async def delete_community(_, info, slug):
|
||||||
auth = info.context["request"].auth
|
auth = info.context["request"].auth
|
||||||
user_id = auth.user_id
|
user_id = auth.user_id
|
||||||
|
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
community = session.query(Community).filter(Community.slug == slug).first()
|
community = session.query(Community).filter(Community.slug == slug).first()
|
||||||
if not community:
|
if not community:
|
||||||
return {"error": "invalid community slug"}
|
return {"error": "invalid community slug"}
|
||||||
if community.owner != user_id:
|
if community.owner != user_id:
|
||||||
return {"error": "access denied"}
|
return {"error": "access denied"}
|
||||||
community.deletedAt = datetime.now()
|
community.deletedAt = datetime.now()
|
||||||
session.commit()
|
session.commit()
|
||||||
|
|
||||||
|
return {}
|
||||||
|
|
||||||
return {}
|
|
||||||
|
|
||||||
@query.field("getCommunity")
|
@query.field("getCommunity")
|
||||||
async def get_community(_, info, slug):
|
async def get_community(_, info, slug):
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
community = session.query(Community).filter(Community.slug == slug).first()
|
community = session.query(Community).filter(Community.slug == slug).first()
|
||||||
if not community:
|
if not community:
|
||||||
return {"error": "invalid community id"}
|
return {"error": "invalid community id"}
|
||||||
|
|
||||||
|
return community
|
||||||
|
|
||||||
return community
|
|
||||||
|
|
||||||
@query.field("getCommunities")
|
@query.field("getCommunities")
|
||||||
async def get_communities(_, info):
|
async def get_communities(_, info):
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
communities = session.query(Community)
|
communities = session.query(Community)
|
||||||
return communities
|
return communities
|
||||||
|
|
||||||
|
|
||||||
def community_follow(user, slug):
|
def community_follow(user, slug):
|
||||||
CommunityFollower.create(
|
CommunityFollower.create(follower=user.slug, community=slug)
|
||||||
follower = user.slug,
|
|
||||||
community = slug
|
|
||||||
)
|
|
||||||
|
|
||||||
def community_unfollow(user, slug):
|
def community_unfollow(user, slug):
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
following = session.query(CommunityFollower).\
|
following = (
|
||||||
filter(and_(CommunityFollower.follower == user.slug, CommunityFollower.community == slug)).\
|
session.query(CommunityFollower)
|
||||||
first()
|
.filter(
|
||||||
if not following:
|
and_(
|
||||||
raise Exception("[orm.community] following was not exist")
|
CommunityFollower.follower == user.slug,
|
||||||
session.delete(following)
|
CommunityFollower.community == slug,
|
||||||
session.commit()
|
)
|
||||||
|
)
|
||||||
|
.first()
|
||||||
|
)
|
||||||
|
if not following:
|
||||||
|
raise Exception("[orm.community] following was not exist")
|
||||||
|
session.delete(following)
|
||||||
|
session.commit()
|
||||||
|
|
||||||
|
|
||||||
@query.field("userFollowedCommunities")
|
@query.field("userFollowedCommunities")
|
||||||
def get_followed_communities(_, user_slug) -> List[Community]:
|
def get_followed_communities(_, user_slug) -> List[Community]:
|
||||||
ccc = []
|
ccc = []
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
ccc = session.query(Community.slug).\
|
ccc = (
|
||||||
join(CommunityFollower).\
|
session.query(Community.slug)
|
||||||
where(CommunityFollower.follower == user_slug).\
|
.join(CommunityFollower)
|
||||||
all()
|
.where(CommunityFollower.follower == user_slug)
|
||||||
return ccc
|
.all()
|
||||||
|
)
|
||||||
|
return ccc
|
||||||
|
|
|
@ -13,106 +13,82 @@ from services.zine.gittask import GitTask
|
||||||
@mutation.field("createShout")
|
@mutation.field("createShout")
|
||||||
@login_required
|
@login_required
|
||||||
async def create_shout(_, info, input):
|
async def create_shout(_, info, input):
|
||||||
user = info.context["request"].user
|
user = info.context["request"].user
|
||||||
|
|
||||||
topic_slugs = input.get("topic_slugs", [])
|
topic_slugs = input.get("topic_slugs", [])
|
||||||
if topic_slugs:
|
if topic_slugs:
|
||||||
del input["topic_slugs"]
|
del input["topic_slugs"]
|
||||||
|
|
||||||
new_shout = Shout.create(**input)
|
new_shout = Shout.create(**input)
|
||||||
ShoutAuthor.create(
|
ShoutAuthor.create(shout=new_shout.slug, user=user.slug)
|
||||||
shout = new_shout.slug,
|
|
||||||
user = user.slug
|
|
||||||
)
|
|
||||||
|
|
||||||
reactions_follow(user, new_shout.slug, True)
|
reactions_follow(user, new_shout.slug, True)
|
||||||
|
|
||||||
if "mainTopic" in input:
|
if "mainTopic" in input:
|
||||||
topic_slugs.append(input["mainTopic"])
|
topic_slugs.append(input["mainTopic"])
|
||||||
|
|
||||||
for slug in topic_slugs:
|
for slug in topic_slugs:
|
||||||
topic = ShoutTopic.create(
|
topic = ShoutTopic.create(shout=new_shout.slug, topic=slug)
|
||||||
shout = new_shout.slug,
|
new_shout.topic_slugs = topic_slugs
|
||||||
topic = slug)
|
|
||||||
new_shout.topic_slugs = topic_slugs
|
|
||||||
|
|
||||||
task = GitTask(
|
task = GitTask(input, user.username, user.email, "new shout %s" % (new_shout.slug))
|
||||||
input,
|
|
||||||
user.username,
|
|
||||||
user.email,
|
|
||||||
"new shout %s" % (new_shout.slug)
|
|
||||||
)
|
|
||||||
|
|
||||||
# await ShoutCommentsStorage.send_shout(new_shout)
|
# await ShoutCommentsStorage.send_shout(new_shout)
|
||||||
|
|
||||||
|
return {"shout": new_shout}
|
||||||
|
|
||||||
return {
|
|
||||||
"shout" : new_shout
|
|
||||||
}
|
|
||||||
|
|
||||||
@mutation.field("updateShout")
|
@mutation.field("updateShout")
|
||||||
@login_required
|
@login_required
|
||||||
async def update_shout(_, info, input):
|
async def update_shout(_, info, input):
|
||||||
auth = info.context["request"].auth
|
auth = info.context["request"].auth
|
||||||
user_id = auth.user_id
|
user_id = auth.user_id
|
||||||
|
|
||||||
slug = input["slug"]
|
slug = input["slug"]
|
||||||
|
|
||||||
session = local_session()
|
session = local_session()
|
||||||
user = session.query(User).filter(User.id == user_id).first()
|
user = session.query(User).filter(User.id == user_id).first()
|
||||||
shout = session.query(Shout).filter(Shout.slug == slug).first()
|
shout = session.query(Shout).filter(Shout.slug == slug).first()
|
||||||
|
|
||||||
if not shout:
|
if not shout:
|
||||||
return {
|
return {"error": "shout not found"}
|
||||||
"error" : "shout not found"
|
|
||||||
}
|
|
||||||
|
|
||||||
authors = [author.id for author in shout.authors]
|
authors = [author.id for author in shout.authors]
|
||||||
if not user_id in authors:
|
if user_id not in authors:
|
||||||
scopes = auth.scopes
|
scopes = auth.scopes
|
||||||
print(scopes)
|
print(scopes)
|
||||||
if not Resource.shout_id in scopes:
|
if Resource.shout_id not in scopes:
|
||||||
return {
|
return {"error": "access denied"}
|
||||||
"error" : "access denied"
|
|
||||||
}
|
|
||||||
|
|
||||||
shout.update(input)
|
shout.update(input)
|
||||||
shout.updatedAt = datetime.now()
|
shout.updatedAt = datetime.now()
|
||||||
session.commit()
|
session.commit()
|
||||||
session.close()
|
session.close()
|
||||||
|
|
||||||
for topic in input.get("topic_slugs", []):
|
for topic in input.get("topic_slugs", []):
|
||||||
ShoutTopic.create(
|
ShoutTopic.create(shout=slug, topic=topic)
|
||||||
shout = slug,
|
|
||||||
topic = topic)
|
|
||||||
|
|
||||||
task = GitTask(
|
task = GitTask(input, user.username, user.email, "update shout %s" % (slug))
|
||||||
input,
|
|
||||||
user.username,
|
return {"shout": shout}
|
||||||
user.email,
|
|
||||||
"update shout %s" % (slug)
|
|
||||||
)
|
|
||||||
|
|
||||||
return {
|
|
||||||
"shout" : shout
|
|
||||||
}
|
|
||||||
|
|
||||||
@mutation.field("deleteShout")
|
@mutation.field("deleteShout")
|
||||||
@login_required
|
@login_required
|
||||||
async def delete_shout(_, info, slug):
|
async def delete_shout(_, info, slug):
|
||||||
auth = info.context["request"].auth
|
auth = info.context["request"].auth
|
||||||
user_id = auth.user_id
|
user_id = auth.user_id
|
||||||
|
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
shout = session.query(Shout).filter(Shout.slug == slug).first()
|
shout = session.query(Shout).filter(Shout.slug == slug).first()
|
||||||
authors = [a.id for a in shout.authors]
|
authors = [a.id for a in shout.authors]
|
||||||
if not shout:
|
if not shout:
|
||||||
return {"error": "invalid shout slug"}
|
return {"error": "invalid shout slug"}
|
||||||
if user_id not in authors:
|
if user_id not in authors:
|
||||||
return {"error": "access denied"}
|
return {"error": "access denied"}
|
||||||
for a in authors:
|
for a in authors:
|
||||||
reactions_unfollow(a.slug, slug, True)
|
reactions_unfollow(a.slug, slug, True)
|
||||||
shout.deletedAt = datetime.now()
|
shout.deletedAt = datetime.now()
|
||||||
session.commit()
|
session.commit()
|
||||||
|
|
||||||
|
return {}
|
||||||
return {}
|
|
||||||
|
|
|
@@ -7,36 +7,44 @@ from orm.topic import TopicFollower
|
||||||
from orm.user import AuthorFollower
|
from orm.user import AuthorFollower
|
||||||
from typing import List
|
from typing import List
|
||||||
|
|
||||||
|
|
||||||
@query.field("shoutsForFeed")
|
@query.field("shoutsForFeed")
|
||||||
@login_required
|
@login_required
|
||||||
def get_user_feed(_, info, page, size) -> List[Shout]:
|
def get_user_feed(_, info, page, size) -> List[Shout]:
|
||||||
user = info.context["request"].user
|
user = info.context["request"].user
|
||||||
shouts = []
|
shouts = []
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
shouts = session.query(Shout).\
|
shouts = (
|
||||||
join(ShoutAuthor).\
|
session.query(Shout)
|
||||||
join(AuthorFollower).\
|
.join(ShoutAuthor)
|
||||||
where(AuthorFollower.follower == user.slug).\
|
.join(AuthorFollower)
|
||||||
order_by(desc(Shout.createdAt))
|
.where(AuthorFollower.follower == user.slug)
|
||||||
topicrows = session.query(Shout).\
|
.order_by(desc(Shout.createdAt))
|
||||||
join(ShoutTopic).\
|
)
|
||||||
join(TopicFollower).\
|
topicrows = (
|
||||||
where(TopicFollower.follower == user.slug).\
|
session.query(Shout)
|
||||||
order_by(desc(Shout.createdAt))
|
.join(ShoutTopic)
|
||||||
shouts = shouts.union(topicrows).limit(size).offset(page * size).all()
|
.join(TopicFollower)
|
||||||
return shouts
|
.where(TopicFollower.follower == user.slug)
|
||||||
|
.order_by(desc(Shout.createdAt))
|
||||||
|
)
|
||||||
|
shouts = shouts.union(topicrows).limit(size).offset(page * size).all()
|
||||||
|
return shouts
|
||||||
|
|
||||||
|
|
||||||
@query.field("myCandidates")
|
@query.field("myCandidates")
|
||||||
@login_required
|
@login_required
|
||||||
async def user_unpublished_shouts(_, info, page = 1, size = 10) -> List[Shout]:
|
async def user_unpublished_shouts(_, info, page=1, size=10) -> List[Shout]:
|
||||||
user = info.context["request"].user
|
user = info.context["request"].user
|
||||||
shouts = []
|
shouts = []
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
shouts = session.query(Shout).\
|
shouts = (
|
||||||
join(ShoutAuthor).\
|
session.query(Shout)
|
||||||
where(and_(Shout.publishedAt == None, ShoutAuthor.user == user.slug)).\
|
.join(ShoutAuthor)
|
||||||
order_by(desc(Shout.createdAt)).\
|
.where(and_(Shout.publishedAt.is_(None), ShoutAuthor.user == user.slug))
|
||||||
limit(size).\
|
.order_by(desc(Shout.createdAt))
|
||||||
offset( page * size).\
|
.limit(size)
|
||||||
all()
|
.offset(page * size)
|
||||||
return shouts
|
.all()
|
||||||
|
)
|
||||||
|
return shouts
|
||||||
|
|
|
@@ -4,263 +4,283 @@ import asyncio, uuid, json
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from base.redis import redis
|
from base.redis import redis
|
||||||
|
|
||||||
class ChatFollowing:
|
|
||||||
queue = asyncio.Queue()
|
|
||||||
|
|
||||||
def __init__(self, chat_id):
|
class ChatFollowing:
|
||||||
self.chat_id = chat_id
|
queue = asyncio.Queue()
|
||||||
|
|
||||||
|
def __init__(self, chat_id):
|
||||||
|
self.chat_id = chat_id
|
||||||
|
|
||||||
|
|
||||||
class MessagesStorage:
|
class MessagesStorage:
|
||||||
lock = asyncio.Lock()
|
lock = asyncio.Lock()
|
||||||
chats = []
|
chats = []
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def register_chat(chat):
|
async def register_chat(chat):
|
||||||
async with MessagesStorage.lock:
|
async with MessagesStorage.lock:
|
||||||
MessagesStorage.chats.append(chat)
|
MessagesStorage.chats.append(chat)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def remove_chat(chat):
|
async def remove_chat(chat):
|
||||||
async with MessagesStorage.lock:
|
async with MessagesStorage.lock:
|
||||||
MessagesStorage.chats.remove(chat)
|
MessagesStorage.chats.remove(chat)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def put(message_result):
|
||||||
|
async with MessagesStorage.lock:
|
||||||
|
for chat in MessagesStorage.chats:
|
||||||
|
if message_result.message["chatId"] == chat.chat_id:
|
||||||
|
chat.queue.put_nowait(message_result)
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
async def put(message_result):
|
|
||||||
async with MessagesStorage.lock:
|
|
||||||
for chat in MessagesStorage.chats:
|
|
||||||
if message_result.message["chatId"] == chat.chat_id:
|
|
||||||
chat.queue.put_nowait(message_result)
|
|
||||||
|
|
||||||
class MessageResult:
|
class MessageResult:
|
||||||
def __init__(self, status, message):
|
def __init__(self, status, message):
|
||||||
self.status = status
|
self.status = status
|
||||||
self.message = message
|
self.message = message
|
||||||
|
|
||||||
|
|
||||||
async def get_unread_counter(user_slug):
|
async def get_unread_counter(user_slug):
|
||||||
chats = await redis.execute("GET", f"chats_by_user/{user_slug}")
|
chats = await redis.execute("GET", f"chats_by_user/{user_slug}")
|
||||||
if not chats:
|
if not chats:
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
chats = json.loads(chats)
|
chats = json.loads(chats)
|
||||||
unread = 0
|
unread = 0
|
||||||
for chat_id in chats:
|
for chat_id in chats:
|
||||||
n = await redis.execute("LLEN", f"chats/{chat_id}/unread/{user_slug}")
|
n = await redis.execute("LLEN", f"chats/{chat_id}/unread/{user_slug}")
|
||||||
unread += n
|
unread += n
|
||||||
|
|
||||||
return unread
|
return unread
|
||||||
|
|
||||||
async def add_user_to_chat(user_slug, chat_id, chat = None):
|
|
||||||
chats = await redis.execute("GET", f"chats_by_user/{user_slug}")
|
|
||||||
if not chats:
|
|
||||||
chats = set()
|
|
||||||
else:
|
|
||||||
chats = set(json.loads(chats))
|
|
||||||
chats.add(str(chat_id))
|
|
||||||
chats = list(chats)
|
|
||||||
await redis.execute("SET", f"chats_by_user/{user_slug}", json.dumps(chats))
|
|
||||||
|
|
||||||
if chat:
|
async def add_user_to_chat(user_slug, chat_id, chat=None):
|
||||||
users = set(chat["users"])
|
chats = await redis.execute("GET", f"chats_by_user/{user_slug}")
|
||||||
users.add(user_slug)
|
if not chats:
|
||||||
chat["users"] = list(users)
|
chats = set()
|
||||||
await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
|
else:
|
||||||
|
chats = set(json.loads(chats))
|
||||||
|
chats.add(str(chat_id))
|
||||||
|
chats = list(chats)
|
||||||
|
await redis.execute("SET", f"chats_by_user/{user_slug}", json.dumps(chats))
|
||||||
|
|
||||||
|
if chat:
|
||||||
|
users = set(chat["users"])
|
||||||
|
users.add(user_slug)
|
||||||
|
chat["users"] = list(users)
|
||||||
|
await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
|
||||||
|
|
||||||
|
|
||||||
@mutation.field("createChat")
|
@mutation.field("createChat")
|
||||||
@login_required
|
@login_required
|
||||||
async def create_chat(_, info, description):
|
async def create_chat(_, info, description):
|
||||||
user = info.context["request"].user
|
user = info.context["request"].user
|
||||||
|
|
||||||
chat_id = uuid.uuid4()
|
chat_id = uuid.uuid4()
|
||||||
chat = {
|
chat = {
|
||||||
"description" : description,
|
"description": description,
|
||||||
"createdAt" : str(datetime.now),
|
"createdAt": str(datetime.now),
|
||||||
"createdBy" : user.slug,
|
"createdBy": user.slug,
|
||||||
"id" : str(chat_id),
|
"id": str(chat_id),
|
||||||
"users" : [user.slug]
|
"users": [user.slug],
|
||||||
}
|
}
|
||||||
|
|
||||||
await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
|
await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
|
||||||
await redis.execute("SET", f"chats/{chat_id}/next_message_id", 0)
|
await redis.execute("SET", f"chats/{chat_id}/next_message_id", 0)
|
||||||
|
|
||||||
await add_user_to_chat(user.slug, chat_id)
|
await add_user_to_chat(user.slug, chat_id)
|
||||||
|
|
||||||
|
return {"chatId": chat_id}
|
||||||
|
|
||||||
return { "chatId" : chat_id }
|
|
||||||
|
|
||||||
async def load_messages(chatId, size, page):
|
async def load_messages(chatId, size, page):
|
||||||
message_ids = await redis.lrange(f"chats/{chatId}/message_ids",
|
message_ids = await redis.lrange(
|
||||||
size * (page -1), size * page - 1)
|
f"chats/{chatId}/message_ids", size * (page - 1), size * page - 1
|
||||||
messages = []
|
)
|
||||||
if message_ids:
|
messages = []
|
||||||
message_keys = [f"chats/{chatId}/messages/{id.decode('UTF-8')}" for id in message_ids]
|
if message_ids:
|
||||||
messages = await redis.mget(*message_keys)
|
message_keys = [
|
||||||
messages = [json.loads(msg) for msg in messages]
|
f"chats/{chatId}/messages/{id.decode('UTF-8')}" for id in message_ids
|
||||||
return messages
|
]
|
||||||
|
messages = await redis.mget(*message_keys)
|
||||||
|
messages = [json.loads(msg) for msg in messages]
|
||||||
|
return messages
|
||||||
|
|
||||||
|
|
||||||
@query.field("userChats")
|
@query.field("userChats")
|
||||||
@login_required
|
@login_required
|
||||||
async def user_chats(_, info):
|
async def user_chats(_, info):
|
||||||
user = info.context["request"].user
|
user = info.context["request"].user
|
||||||
|
|
||||||
chats = await redis.execute("GET", f"chats_by_user/{user.slug}")
|
chats = await redis.execute("GET", f"chats_by_user/{user.slug}")
|
||||||
if not chats:
|
if not chats:
|
||||||
chats = list()
|
chats = list()
|
||||||
else:
|
else:
|
||||||
chats = list(json.loads(chats))
|
chats = list(json.loads(chats))
|
||||||
|
|
||||||
|
return {"chats": chats}
|
||||||
|
|
||||||
return {"chats" : chats}
|
|
||||||
|
|
||||||
@query.field("enterChat")
|
@query.field("enterChat")
|
||||||
@login_required
|
@login_required
|
||||||
async def enter_chat(_, info, chatId, size):
|
async def enter_chat(_, info, chatId, size):
|
||||||
user = info.context["request"].user
|
user = info.context["request"].user
|
||||||
|
|
||||||
chat = await redis.execute("GET", f"chats/{chatId}")
|
chat = await redis.execute("GET", f"chats/{chatId}")
|
||||||
if not chat:
|
if not chat:
|
||||||
return { "error" : "chat not exist" }
|
return {"error": "chat not exist"}
|
||||||
chat = json.loads(chat)
|
chat = json.loads(chat)
|
||||||
|
|
||||||
messages = await load_messages(chatId, size, 1)
|
messages = await load_messages(chatId, size, 1)
|
||||||
|
|
||||||
await add_user_to_chat(user.slug, chatId, chat)
|
await add_user_to_chat(user.slug, chatId, chat)
|
||||||
|
|
||||||
|
return {"chat": chat, "messages": messages}
|
||||||
|
|
||||||
return {
|
|
||||||
"chat" : chat,
|
|
||||||
"messages" : messages
|
|
||||||
}
|
|
||||||
|
|
||||||
@mutation.field("createMessage")
|
@mutation.field("createMessage")
|
||||||
@login_required
|
@login_required
|
||||||
async def create_message(_, info, chatId, body, replyTo = None):
|
async def create_message(_, info, chatId, body, replyTo=None):
|
||||||
user = info.context["request"].user
|
user = info.context["request"].user
|
||||||
|
|
||||||
chat = await redis.execute("GET", f"chats/{chatId}")
|
chat = await redis.execute("GET", f"chats/{chatId}")
|
||||||
if not chat:
|
if not chat:
|
||||||
return { "error" : "chat not exist" }
|
return {"error": "chat not exist"}
|
||||||
|
|
||||||
message_id = await redis.execute("GET", f"chats/{chatId}/next_message_id")
|
message_id = await redis.execute("GET", f"chats/{chatId}/next_message_id")
|
||||||
message_id = int(message_id)
|
message_id = int(message_id)
|
||||||
|
|
||||||
new_message = {
|
new_message = {
|
||||||
"chatId" : chatId,
|
"chatId": chatId,
|
||||||
"id" : message_id,
|
"id": message_id,
|
||||||
"author" : user.slug,
|
"author": user.slug,
|
||||||
"body" : body,
|
"body": body,
|
||||||
"replyTo" : replyTo,
|
"replyTo": replyTo,
|
||||||
"createdAt" : datetime.now().isoformat()
|
"createdAt": datetime.now().isoformat(),
|
||||||
}
|
}
|
||||||
|
|
||||||
await redis.execute("SET", f"chats/{chatId}/messages/{message_id}", json.dumps(new_message))
|
await redis.execute(
|
||||||
await redis.execute("LPUSH", f"chats/{chatId}/message_ids", str(message_id))
|
"SET", f"chats/{chatId}/messages/{message_id}", json.dumps(new_message)
|
||||||
await redis.execute("SET", f"chats/{chatId}/next_message_id", str(message_id + 1))
|
)
|
||||||
|
await redis.execute("LPUSH", f"chats/{chatId}/message_ids", str(message_id))
|
||||||
|
await redis.execute("SET", f"chats/{chatId}/next_message_id", str(message_id + 1))
|
||||||
|
|
||||||
chat = json.loads(chat)
|
chat = json.loads(chat)
|
||||||
users = chat["users"]
|
users = chat["users"]
|
||||||
for user_slug in users:
|
for user_slug in users:
|
||||||
await redis.execute("LPUSH", f"chats/{chatId}/unread/{user_slug}", str(message_id))
|
await redis.execute(
|
||||||
|
"LPUSH", f"chats/{chatId}/unread/{user_slug}", str(message_id)
|
||||||
|
)
|
||||||
|
|
||||||
result = MessageResult("NEW", new_message)
|
result = MessageResult("NEW", new_message)
|
||||||
await MessagesStorage.put(result)
|
await MessagesStorage.put(result)
|
||||||
|
|
||||||
|
return {"message": new_message}
|
||||||
|
|
||||||
return {"message" : new_message}
|
|
||||||
|
|
||||||
@query.field("getMessages")
|
@query.field("getMessages")
|
||||||
@login_required
|
@login_required
|
||||||
async def get_messages(_, info, chatId, size, page):
|
async def get_messages(_, info, chatId, size, page):
|
||||||
chat = await redis.execute("GET", f"chats/{chatId}")
|
chat = await redis.execute("GET", f"chats/{chatId}")
|
||||||
if not chat:
|
if not chat:
|
||||||
return { "error" : "chat not exist" }
|
return {"error": "chat not exist"}
|
||||||
|
|
||||||
messages = await load_messages(chatId, size, page)
|
messages = await load_messages(chatId, size, page)
|
||||||
|
|
||||||
|
return messages
|
||||||
|
|
||||||
return messages
|
|
||||||
|
|
||||||
@mutation.field("updateMessage")
|
@mutation.field("updateMessage")
|
||||||
@login_required
|
@login_required
|
||||||
async def update_message(_, info, chatId, id, body):
|
async def update_message(_, info, chatId, id, body):
|
||||||
user = info.context["request"].user
|
user = info.context["request"].user
|
||||||
|
|
||||||
chat = await redis.execute("GET", f"chats/{chatId}")
|
chat = await redis.execute("GET", f"chats/{chatId}")
|
||||||
if not chat:
|
if not chat:
|
||||||
return { "error" : "chat not exist" }
|
return {"error": "chat not exist"}
|
||||||
|
|
||||||
message = await redis.execute("GET", f"chats/{chatId}/messages/{id}")
|
message = await redis.execute("GET", f"chats/{chatId}/messages/{id}")
|
||||||
if not message:
|
if not message:
|
||||||
return { "error" : "message not exist" }
|
return {"error": "message not exist"}
|
||||||
|
|
||||||
message = json.loads(message)
|
message = json.loads(message)
|
||||||
if message["author"] != user.slug:
|
if message["author"] != user.slug:
|
||||||
return { "error" : "access denied" }
|
return {"error": "access denied"}
|
||||||
|
|
||||||
message["body"] = body
|
message["body"] = body
|
||||||
message["updatedAt"] = datetime.now().isoformat()
|
message["updatedAt"] = datetime.now().isoformat()
|
||||||
|
|
||||||
await redis.execute("SET", f"chats/{chatId}/messages/{id}", json.dumps(message))
|
await redis.execute("SET", f"chats/{chatId}/messages/{id}", json.dumps(message))
|
||||||
|
|
||||||
result = MessageResult("UPDATED", message)
|
result = MessageResult("UPDATED", message)
|
||||||
await MessagesStorage.put(result)
|
await MessagesStorage.put(result)
|
||||||
|
|
||||||
|
return {"message": message}
|
||||||
|
|
||||||
return {"message" : message}
|
|
||||||
|
|
||||||
@mutation.field("deleteMessage")
|
@mutation.field("deleteMessage")
|
||||||
@login_required
|
@login_required
|
||||||
async def delete_message(_, info, chatId, id):
|
async def delete_message(_, info, chatId, id):
|
||||||
user = info.context["request"].user
|
user = info.context["request"].user
|
||||||
|
|
||||||
chat = await redis.execute("GET", f"chats/{chatId}")
|
chat = await redis.execute("GET", f"chats/{chatId}")
|
||||||
if not chat:
|
if not chat:
|
||||||
return { "error" : "chat not exist" }
|
return {"error": "chat not exist"}
|
||||||
|
|
||||||
message = await redis.execute("GET", f"chats/{chatId}/messages/{id}")
|
message = await redis.execute("GET", f"chats/{chatId}/messages/{id}")
|
||||||
if not message:
|
if not message:
|
||||||
return { "error" : "message not exist" }
|
return {"error": "message not exist"}
|
||||||
message = json.loads(message)
|
message = json.loads(message)
|
||||||
if message["author"] != user.slug:
|
if message["author"] != user.slug:
|
||||||
return { "error" : "access denied" }
|
return {"error": "access denied"}
|
||||||
|
|
||||||
await redis.execute("LREM", f"chats/{chatId}/message_ids", 0, str(id))
|
await redis.execute("LREM", f"chats/{chatId}/message_ids", 0, str(id))
|
||||||
await redis.execute("DEL", f"chats/{chatId}/messages/{id}")
|
await redis.execute("DEL", f"chats/{chatId}/messages/{id}")
|
||||||
|
|
||||||
chat = json.loads(chat)
|
chat = json.loads(chat)
|
||||||
users = chat["users"]
|
users = chat["users"]
|
||||||
for user_slug in users:
|
for user_slug in users:
|
||||||
await redis.execute("LREM", f"chats/{chatId}/unread/{user_slug}", 0, str(id))
|
await redis.execute("LREM", f"chats/{chatId}/unread/{user_slug}", 0, str(id))
|
||||||
|
|
||||||
result = MessageResult("DELETED", message)
|
result = MessageResult("DELETED", message)
|
||||||
await MessagesStorage.put(result)
|
await MessagesStorage.put(result)
|
||||||
|
|
||||||
|
return {}
|
||||||
|
|
||||||
return {}
|
|
||||||
|
|
||||||
@mutation.field("markAsRead")
|
@mutation.field("markAsRead")
|
||||||
@login_required
|
@login_required
|
||||||
async def mark_as_read(_, info, chatId, ids):
|
async def mark_as_read(_, info, chatId, ids):
|
||||||
user = info.context["request"].user
|
user = info.context["request"].user
|
||||||
|
|
||||||
chat = await redis.execute("GET", f"chats/{chatId}")
|
chat = await redis.execute("GET", f"chats/{chatId}")
|
||||||
if not chat:
|
if not chat:
|
||||||
return { "error" : "chat not exist" }
|
return {"error": "chat not exist"}
|
||||||
|
|
||||||
chat = json.loads(chat)
|
chat = json.loads(chat)
|
||||||
users = set(chat["users"])
|
users = set(chat["users"])
|
||||||
if not user.slug in users:
|
if user.slug not in users:
|
||||||
return { "error" : "access denied" }
|
return {"error": "access denied"}
|
||||||
|
|
||||||
for id in ids:
|
for id in ids:
|
||||||
await redis.execute("LREM", f"chats/{chatId}/unread/{user.slug}", 0, str(id))
|
await redis.execute("LREM", f"chats/{chatId}/unread/{user.slug}", 0, str(id))
|
||||||
|
|
||||||
|
return {}
|
||||||
|
|
||||||
return {}
|
|
||||||
|
|
||||||
@subscription.source("chatUpdated")
|
@subscription.source("chatUpdated")
|
||||||
@login_required
|
@login_required
|
||||||
async def message_generator(obj, info, chatId):
|
async def message_generator(obj, info, chatId):
|
||||||
try:
|
try:
|
||||||
following_chat = ChatFollowing(chatId)
|
following_chat = ChatFollowing(chatId)
|
||||||
await MessagesStorage.register_chat(following_chat)
|
await MessagesStorage.register_chat(following_chat)
|
||||||
while True:
|
while True:
|
||||||
msg = await following_chat.queue.get()
|
msg = await following_chat.queue.get()
|
||||||
yield msg
|
yield msg
|
||||||
finally:
|
finally:
|
||||||
await MessagesStorage.remove_chat(following_chat)
|
await MessagesStorage.remove_chat(following_chat)
|
||||||
|
|
||||||
|
|
||||||
@subscription.field("chatUpdated")
|
@subscription.field("chatUpdated")
|
||||||
def message_resolver(message, info, chatId):
|
def message_resolver(message, info, chatId):
|
||||||
return message
|
return message
|
||||||
|
|
|
@@ -14,154 +14,176 @@ from sqlalchemy import and_, desc
|
||||||
from sqlalchemy.orm import selectinload
|
from sqlalchemy.orm import selectinload
|
||||||
from typing import List
|
from typing import List
|
||||||
|
|
||||||
|
|
||||||
@query.field("userReactedShouts")
|
@query.field("userReactedShouts")
|
||||||
async def get_user_reacted_shouts(_, info, slug, page, size) -> List[Shout]:
|
async def get_user_reacted_shouts(_, info, slug, page, size) -> List[Shout]:
|
||||||
user = await UserStorage.get_user_by_slug(slug)
|
user = await UserStorage.get_user_by_slug(slug)
|
||||||
if not user: return {}
|
if not user:
|
||||||
with local_session() as session:
|
return []
|
||||||
shouts = session.query(Shout).\
|
with local_session() as session:
|
||||||
join(Reaction).\
|
shouts = (
|
||||||
where(Reaction.createdBy == user.slug).\
|
session.query(Shout)
|
||||||
order_by(desc(Reaction.createdAt)).\
|
.join(Reaction)
|
||||||
limit(size).\
|
.where(Reaction.createdBy == user.slug)
|
||||||
offset(page * size).all()
|
.order_by(desc(Reaction.createdAt))
|
||||||
return shouts
|
.limit(size)
|
||||||
|
.offset(page * size)
|
||||||
|
.all()
|
||||||
|
)
|
||||||
|
return shouts
|
||||||
|
|
||||||
|
|
||||||
@query.field("userFollowedTopics")
|
@query.field("userFollowedTopics")
|
||||||
@login_required
|
@login_required
|
||||||
def get_followed_topics(_, slug) -> List[Topic]:
|
def get_followed_topics(_, slug) -> List[Topic]:
|
||||||
rows = []
|
rows = []
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
rows = session.query(Topic).\
|
rows = (
|
||||||
join(TopicFollower).\
|
session.query(Topic)
|
||||||
where(TopicFollower.follower == slug).\
|
.join(TopicFollower)
|
||||||
all()
|
.where(TopicFollower.follower == slug)
|
||||||
return rows
|
.all()
|
||||||
|
)
|
||||||
|
return rows
|
||||||
|
|
||||||
|
|
||||||
@query.field("userFollowedAuthors")
|
@query.field("userFollowedAuthors")
|
||||||
def get_followed_authors(_, slug) -> List[User]:
|
def get_followed_authors(_, slug) -> List[User]:
|
||||||
authors = []
|
authors = []
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
authors = session.query(User).\
|
authors = (
|
||||||
join(AuthorFollower, User.slug == AuthorFollower.author).\
|
session.query(User)
|
||||||
where(AuthorFollower.follower == slug).\
|
.join(AuthorFollower, User.slug == AuthorFollower.author)
|
||||||
all()
|
.where(AuthorFollower.follower == slug)
|
||||||
return authors
|
.all()
|
||||||
|
)
|
||||||
|
return authors
|
||||||
|
|
||||||
|
|
||||||
@query.field("userFollowers")
|
@query.field("userFollowers")
|
||||||
async def user_followers(_, slug) -> List[User]:
|
async def user_followers(_, slug) -> List[User]:
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
users = session.query(User).\
|
users = (
|
||||||
join(AuthorFollower, User.slug == AuthorFollower.follower).\
|
session.query(User)
|
||||||
where(AuthorFollower.author == slug).\
|
.join(AuthorFollower, User.slug == AuthorFollower.follower)
|
||||||
all()
|
.where(AuthorFollower.author == slug)
|
||||||
return users
|
.all()
|
||||||
|
)
|
||||||
|
return users
|
||||||
|
|
||||||
|
|
||||||
# for mutation.field("refreshSession")
|
# for mutation.field("refreshSession")
|
||||||
async def get_user_info(slug):
|
async def get_user_info(slug):
|
||||||
return {
|
return {
|
||||||
"unread": await get_unread_counter(slug),
|
"unread": await get_unread_counter(slug),
|
||||||
"topics": [t.slug for t in get_followed_topics(0, slug)],
|
"topics": [t.slug for t in get_followed_topics(0, slug)],
|
||||||
"authors": [a.slug for a in get_followed_authors(0, slug)],
|
"authors": [a.slug for a in get_followed_authors(0, slug)],
|
||||||
"reactions": [r.shout for r in get_shout_reactions(0, slug)],
|
"reactions": [r.shout for r in get_shout_reactions(0, slug)],
|
||||||
"communities": [c.slug for c in get_followed_communities(0, slug)]
|
"communities": [c.slug for c in get_followed_communities(0, slug)],
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@mutation.field("refreshSession")
|
@mutation.field("refreshSession")
|
||||||
@login_required
|
@login_required
|
||||||
async def get_current_user(_, info):
|
async def get_current_user(_, info):
|
||||||
user = info.context["request"].user
|
user = info.context["request"].user
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
user.lastSeen = datetime.now()
|
user.lastSeen = datetime.now()
|
||||||
user.save()
|
user.save()
|
||||||
session.commit()
|
session.commit()
|
||||||
return {
|
return {
|
||||||
"token": "", # same token?
|
"token": "", # same token?
|
||||||
"user": user,
|
"user": user,
|
||||||
"info": await get_user_info(user.slug)
|
"info": await get_user_info(user.slug),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@query.field("getUsersBySlugs")
|
@query.field("getUsersBySlugs")
|
||||||
async def get_users_by_slugs(_, info, slugs):
|
async def get_users_by_slugs(_, info, slugs):
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
users = session.query(User).\
|
users = (
|
||||||
options(selectinload(User.ratings)).\
|
session.query(User)
|
||||||
filter(User.slug.in_(slugs)).all()
|
.options(selectinload(User.ratings))
|
||||||
return users
|
.filter(User.slug.in_(slugs))
|
||||||
|
.all()
|
||||||
|
)
|
||||||
|
return users
|
||||||
|
|
||||||
|
|
||||||
@query.field("getUserRoles")
|
@query.field("getUserRoles")
|
||||||
async def get_user_roles(_, info, slug):
|
async def get_user_roles(_, info, slug):
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
user = session.query(User).where(User.slug == slug).first()
|
user = session.query(User).where(User.slug == slug).first()
|
||||||
roles = session.query(Role).\
|
roles = (
|
||||||
options(selectinload(Role.permissions)).\
|
session.query(Role)
|
||||||
join(UserRole).\
|
.options(selectinload(Role.permissions))
|
||||||
where(UserRole.user_id == user.id).all()
|
.join(UserRole)
|
||||||
return roles
|
.where(UserRole.user_id == user.id)
|
||||||
|
.all()
|
||||||
|
)
|
||||||
|
return roles
|
||||||
|
|
||||||
|
|
||||||
@mutation.field("updateProfile")
|
@mutation.field("updateProfile")
|
||||||
@login_required
|
@login_required
|
||||||
async def update_profile(_, info, profile):
|
async def update_profile(_, info, profile):
|
||||||
auth = info.context["request"].auth
|
auth = info.context["request"].auth
|
||||||
user_id = auth.user_id
|
user_id = auth.user_id
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
user = session.query(User).filter(User.id == user_id).first()
|
user = session.query(User).filter(User.id == user_id).first()
|
||||||
user.update(profile)
|
if user:
|
||||||
session.commit()
|
User.update(user, **profile)
|
||||||
return {}
|
session.commit()
|
||||||
|
return {}
|
||||||
|
|
||||||
|
|
||||||
@mutation.field("rateUser")
|
@mutation.field("rateUser")
|
||||||
@login_required
|
@login_required
|
||||||
async def rate_user(_, info, slug, value):
|
async def rate_user(_, info, slug, value):
|
||||||
user = info.context["request"].user
|
user = info.context["request"].user
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
rating = session.query(UserRating).\
|
rating = (
|
||||||
filter(and_(UserRating.rater == user.slug, UserRating.user == slug)).\
|
session.query(UserRating)
|
||||||
first()
|
.filter(and_(UserRating.rater == user.slug, UserRating.user == slug))
|
||||||
if rating:
|
.first()
|
||||||
rating.value = value
|
)
|
||||||
session.commit()
|
if rating:
|
||||||
return {}
|
rating.value = value
|
||||||
try:
|
session.commit()
|
||||||
UserRating.create(
|
return {}
|
||||||
rater=user.slug,
|
try:
|
||||||
user=slug,
|
UserRating.create(rater=user.slug, user=slug, value=value)
|
||||||
value=value
|
except Exception as err:
|
||||||
)
|
return {"error": err}
|
||||||
except Exception as err:
|
return {}
|
||||||
return {"error": err}
|
|
||||||
return {}
|
|
||||||
|
|
||||||
# for mutation.field("follow")
|
# for mutation.field("follow")
|
||||||
def author_follow(user, slug):
|
def author_follow(user, slug):
|
||||||
AuthorFollower.create(
|
AuthorFollower.create(follower=user.slug, author=slug)
|
||||||
follower=user.slug,
|
|
||||||
author=slug
|
|
||||||
)
|
|
||||||
|
|
||||||
# for mutation.field("unfollow")
|
# for mutation.field("unfollow")
|
||||||
def author_unfollow(user, slug):
|
def author_unfollow(user, slug):
|
||||||
with local_session() as session:
|
with local_session() as session:
|
||||||
flw = session.query(AuthorFollower).\
|
flw = (
|
||||||
filter(and_(AuthorFollower.follower == user.slug, AuthorFollower.author == slug)).\
|
session.query(AuthorFollower)
|
||||||
first()
|
.filter(
|
||||||
if not flw:
|
and_(
|
||||||
raise Exception("[resolvers.profile] follower not exist, cant unfollow")
|
AuthorFollower.follower == user.slug, AuthorFollower.author == slug
|
||||||
else:
|
)
|
||||||
session.delete(flw)
|
)
|
||||||
session.commit()
|
.first()
|
||||||
|
)
|
||||||
|
if not flw:
|
||||||
|
raise Exception("[resolvers.profile] follower not exist, cant unfollow")
|
||||||
|
else:
|
||||||
|
session.delete(flw)
|
||||||
|
session.commit()
|
||||||
|
|
||||||
|
|
||||||
@query.field("authorsAll")
|
@query.field("authorsAll")
|
||||||
def get_authors_all(_, info, page, size):
|
def get_authors_all(_, info, page, size):
|
||||||
end = page * size
|
end = page * size
|
||||||
start = end - size
|
start = end - size
|
||||||
return UserStorage.get_all_users()[start:end]
|
return list(UserStorage.get_all_users())[start:end] # type: ignore
|
||||||
|
|
||||||
|
|
|
@@ -10,11 +10,17 @@ from datetime import datetime
from services.auth.users import UserStorage
from services.stat.reacted import ReactedStorage


def reactions_follow(user, slug, auto=False):
    with local_session() as session:
        fw = (
            session.query(ShoutReactionsFollower)
            .filter(
                ShoutReactionsFollower.follower == user.slug,
                ShoutReactionsFollower.shout == slug,
            )
            .first()
        )
        if auto and fw:
            return
        elif not auto and fw:

@@ -25,17 +31,19 @@ def reactions_follow(user, slug, auto=False):
            return
        # print("[resolvers.reactions] was followed before")

    ShoutReactionsFollower.create(follower=user.slug, shout=slug, auto=auto)


def reactions_unfollow(user, slug):
    with local_session() as session:
        following = (
            session.query(ShoutReactionsFollower)
            .filter(
                ShoutReactionsFollower.follower == user.slug,
                ShoutReactionsFollower.shout == slug,
            )
            .first()
        )
        if not following:
            # print("[resolvers.reactions] was not followed", slug)
            return

@@ -56,7 +64,7 @@ async def create_reaction(_, info, inp):
    reaction = Reaction.create(**inp)
    ReactedStorage.increment(reaction.shout, reaction.replyTo)
    try:
        reactions_follow(user, inp["shout"], True)
    except Exception as e:
        print(f"[resolvers.reactions] error on reactions autofollowing: {e}")

@@ -76,13 +84,13 @@ async def update_reaction(_, info, inp):
        return {"error": "invalid reaction id"}
    if reaction.createdBy != user.slug:
        return {"error": "access denied"}
    reaction.body = inp["body"]
    reaction.updatedAt = datetime.now()
    if reaction.kind != inp["kind"]:
        # NOTE: change mind detection can be here
        pass
    if inp.get("range"):
        reaction.range = inp.get("range")
    session.commit()

    return {"reaction": reaction}

@@ -104,29 +112,39 @@ async def delete_reaction(_, info, id):
        session.commit()
    return {}


@query.field("reactionsByShout")
async def get_shout_reactions(_, info, slug, page, size):
    offset = page * size
    reactions = []
    with local_session() as session:
        reactions = (
            session.query(Reaction)
            .filter(Reaction.shout == slug)
            .limit(size)
            .offset(offset)
            .all()
        )
    for r in reactions:
        r.createdBy = await UserStorage.get_user(r.createdBy or "discours")
    return reactions


@query.field("reactionsForSlugs")
async def get_shout_reactions(_, info, slugs, page, size):
    offset = page * size
    reactions = []
    with local_session() as session:
        for slug in slugs:
            reactions += (
                session.query(Reaction)
                .filter(Reaction.shout == slug)
                .limit(size)
                .offset(offset)
                .all()
            )
    for r in reactions:
        r.createdBy = await UserStorage.get_user(r.createdBy or "discours")
    return reactions

@@ -135,22 +153,31 @@ async def get_all_reactions(_, info, page=1, size=10):
    offset = page * size
    reactions = []
    with local_session() as session:
        reactions = (
            session.query(Reaction)
            .filter(Reaction.deletedAt == None)
            .order_by(desc("createdAt"))
            .offset(offset)
            .limit(size)
        )
    for r in reactions:
        r.createdBy = await UserStorage.get_user(r.createdBy or "discours")
    reactions = list(reactions)
    reactions.sort(key=lambda x: x.createdAt, reverse=True)
    return reactions


@query.field("reactionsByAuthor")
async def get_reactions_by_author(_, info, slug, page=1, size=50):
    offset = page * size
    reactions = []
    with local_session() as session:
        reactions = (
            session.query(Reaction)
            .filter(Reaction.createdBy == slug)
            .limit(size)
            .offset(offset)
        )
    for r in reactions:
        r.createdBy = await UserStorage.get_user(r.createdBy or "discours")
    return reactions
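# Usage sketch (illustration only, not part of this commit): every paginated
# resolver above computes its window the same way, offset = page * size with a
# LIMIT of size. The helper below shows that arithmetic on a plain list; the
# names `page` and `size` mirror the resolver arguments, nothing else is assumed.
def page_window(items, page, size):
    offset = page * size
    return items[offset : offset + size]


assert page_window(list(range(10)), 0, 3) == [0, 1, 2]
assert page_window(list(range(10)), 2, 3) == [6, 7, 8]
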
@@ -8,68 +8,76 @@ from base.resolvers import mutation, query
from auth.authenticate import login_required
from sqlalchemy import and_


@query.field("topicsAll")
async def topics_all(_, info, page=1, size=50):
    topics = await TopicStorage.get_topics_all(page, size)
    for topic in topics:
        topic.stat = await TopicStat.get_stat(topic.slug)
    return topics


@query.field("topicsByCommunity")
async def topics_by_community(_, info, community):
    topics = await TopicStorage.get_topics_by_community(community)
    for topic in topics:
        topic.stat = await TopicStat.get_stat(topic.slug)
    return topics


@query.field("topicsByAuthor")
async def topics_by_author(_, info, author):
    slugs = set()
    with local_session() as session:
        shouts = session.query(Shout).filter(Shout.authors.any(User.slug == author))
        for shout in shouts:
            slugs.update([topic.slug for topic in shout.topics])
    return await TopicStorage.get_topics(slugs)


@mutation.field("createTopic")
@login_required
async def create_topic(_, info, input):
    new_topic = Topic.create(**input)
    await TopicStorage.add_topic(new_topic)

    return {"topic": new_topic}


@mutation.field("updateTopic")
@login_required
async def update_topic(_, info, input):
    slug = input["slug"]

    session = local_session()
    topic = session.query(Topic).filter(Topic.slug == slug).first()

    if not topic:
        return {"error": "topic not found"}

    topic.update(input)
    session.commit()
    session.close()

    await TopicStorage.add_topic(topic)

    return {"topic": topic}


def topic_follow(user, slug):
    TopicFollower.create(follower=user.slug, topic=slug)


def topic_unfollow(user, slug):
    with local_session() as session:
        sub = (
            session.query(TopicFollower)
            .filter(
                and_(TopicFollower.follower == user.slug, TopicFollower.topic == slug)
            )
            .first()
        )
        if not sub:
            raise Exception("[resolvers.topics] follower not exist")
        session.delete(sub)
        session.commit()
@@ -13,115 +13,140 @@ from resolvers.reactions import reactions_follow, reactions_unfollow
from auth.authenticate import login_required
from sqlalchemy import select, desc, and_, text
from sqlalchemy.orm import selectinload


@query.field("topViewed")
async def top_viewed(_, info, page, size):
    async with ShoutsCache.lock:
        return ShoutsCache.top_viewed[(page - 1) * size : page * size]


@query.field("topMonth")
async def top_month(_, info, page, size):
    async with ShoutsCache.lock:
        return ShoutsCache.top_month[(page - 1) * size : page * size]


@query.field("topOverall")
async def top_overall(_, info, page, size):
    async with ShoutsCache.lock:
        return ShoutsCache.top_overall[(page - 1) * size : page * size]


@query.field("recentPublished")
async def recent_published(_, info, page, size):
    async with ShoutsCache.lock:
        return ShoutsCache.recent_published[(page - 1) * size : page * size]


@query.field("recentAll")
async def recent_all(_, info, page, size):
    async with ShoutsCache.lock:
        return ShoutsCache.recent_all[(page - 1) * size : page * size]


@query.field("recentReacted")
async def recent_reacted(_, info, page, size):
    async with ShoutsCache.lock:
        return ShoutsCache.recent_reacted[(page - 1) * size : page * size]


@mutation.field("viewShout")
async def view_shout(_, info, slug):
    await ViewedStorage.inc_shout(slug)
    return {"error": ""}


@query.field("getShoutBySlug")
async def get_shout_by_slug(_, info, slug):
    all_fields = [
        node.name.value for node in info.field_nodes[0].selection_set.selections
    ]
    selected_fields = set(["authors", "topics"]).intersection(all_fields)
    select_options = [selectinload(getattr(Shout, field)) for field in selected_fields]
    shout = {}
    with local_session() as session:
        try:
            s = text(open("src/queries/shout-by-slug.sql", "r").read() % slug)
        except:
            pass
        shout = (
            session.query(Shout)
            .options(select_options)
            .filter(Shout.slug == slug)
            .first()
        )

    if not shout:
        print(f"shout with slug {slug} not exist")
        return {"error": "shout not found"}
    else:
        for a in shout.authors:
            a.caption = await ShoutAuthorStorage.get_author_caption(slug, a.slug)
    return shout


@query.field("shoutsByTopics")
async def shouts_by_topics(_, info, slugs, page, size):
    page = page - 1
    with local_session() as session:
        shouts = (
            session.query(Shout)
            .join(ShoutTopic)
            .where(and_(ShoutTopic.topic.in_(slugs), Shout.publishedAt != None))
            .order_by(desc(Shout.publishedAt))
            .limit(size)
            .offset(page * size)
        )

    for s in shouts:
        for a in s.authors:
            a.caption = await ShoutAuthorStorage.get_author_caption(s.slug, a.slug)
    return shouts


@query.field("shoutsByCollection")
async def shouts_by_topics(_, info, collection, page, size):
    page = page - 1
    shouts = []
    with local_session() as session:
        shouts = (
            session.query(Shout)
            .join(ShoutCollection, ShoutCollection.collection == collection)
            .where(and_(ShoutCollection.shout == Shout.slug, Shout.publishedAt != None))
            .order_by(desc(Shout.publishedAt))
            .limit(size)
            .offset(page * size)
        )
    for s in shouts:
        for a in s.authors:
            a.caption = await ShoutAuthorStorage.get_author_caption(s.slug, a.slug)
    return shouts


@query.field("shoutsByAuthors")
async def shouts_by_authors(_, info, slugs, page, size):
    page = page - 1
    with local_session() as session:

        shouts = (
            session.query(Shout)
            .join(ShoutAuthor)
            .where(and_(ShoutAuthor.user.in_(slugs), Shout.publishedAt != None))
            .order_by(desc(Shout.publishedAt))
            .limit(size)
            .offset(page * size)
        )

    for s in shouts:
        for a in s.authors:
            a.caption = await ShoutAuthorStorage.get_author_caption(s.slug, a.slug)
    return shouts


SINGLE_COMMUNITY = True


@query.field("shoutsByCommunities")
async def shouts_by_communities(_, info, slugs, page, size):
    if SINGLE_COMMUNITY:

@@ -129,22 +154,30 @@ async def shouts_by_communities(_, info, slugs, page, size):
    else:
        page = page - 1
        with local_session() as session:
            # TODO fix postgres high load
            shouts = (
                session.query(Shout)
                .distinct()
                .join(ShoutTopic)
                .where(
                    and_(
                        Shout.publishedAt != None,
                        ShoutTopic.topic.in_(
                            select(Topic.slug).where(Topic.community.in_(slugs))
                        ),
                    )
                )
                .order_by(desc(Shout.publishedAt))
                .limit(size)
                .offset(page * size)
            )

    for s in shouts:
        for a in s.authors:
            a.caption = await ShoutAuthorStorage.get_author_caption(s.slug, a.slug)
    return shouts


@mutation.field("follow")
@login_required
async def follow(_, info, what, slug):

@@ -159,10 +192,11 @@ async def follow(_, info, what, slug):
        elif what == "REACTIONS":
            reactions_follow(user, slug)
    except Exception as e:
        return {"error": str(e)}

    return {}


@mutation.field("unfollow")
@login_required
async def unfollow(_, info, what, slug):

@@ -178,6 +212,6 @@ async def unfollow(_, info, what, slug):
        elif what == "REACTIONS":
            reactions_unfollow(user, slug)
    except Exception as e:
        return {"error": str(e)}

    return {}
@@ -1,35 +1,33 @@
import asyncio
from sqlalchemy.orm import selectinload
from orm.rbac import Role


class RoleStorage:
    roles = {}
    lock = asyncio.Lock()

    @staticmethod
    def init(session):
        self = RoleStorage
        roles = session.query(Role).options(selectinload(Role.permissions)).all()
        self.roles = dict([(role.id, role) for role in roles])
        print("[auth.roles] %d precached" % len(roles))

    @staticmethod
    async def get_role(id):
        self = RoleStorage
        async with self.lock:
            return self.roles.get(id)

    @staticmethod
    async def add_role(role):
        self = RoleStorage
        async with self.lock:
            self.roles[id] = role

    @staticmethod
    async def del_role(id):
        self = RoleStorage
        async with self.lock:
            del self.roles[id]
@@ -4,47 +4,50 @@ from orm.user import User


class UserStorage:
    users = {}
    lock = asyncio.Lock()

    @staticmethod
    def init(session):
        self = UserStorage
        users = (
            session.query(User)
            .options(selectinload(User.roles), selectinload(User.ratings))
            .all()
        )
        self.users = dict([(user.id, user) for user in users])
        print("[auth.users] %d precached" % len(self.users))

    @staticmethod
    async def get_user(id):
        self = UserStorage
        async with self.lock:
            return self.users.get(id)

    @staticmethod
    async def get_all_users():
        self = UserStorage
        async with self.lock:
            aaa = list(self.users.values())
            aaa.sort(key=lambda user: user.createdAt)
            return aaa

    @staticmethod
    async def get_user_by_slug(slug):
        self = UserStorage
        async with self.lock:
            for user in self.users.values():
                if user.slug == slug:
                    return user

    @staticmethod
    async def add_user(user):
        self = UserStorage
        async with self.lock:
            self.users[user.id] = user

    @staticmethod
    async def del_user(id):
        self = UserStorage
        async with self.lock:
            del self.users[id]
@@ -2,180 +2,209 @@ import asyncio
from datetime import datetime
from sqlalchemy.types import Enum
from sqlalchemy import Column, DateTime, ForeignKey, Boolean

# from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy import Enum
import enum
from base.orm import Base, local_session
from orm.topic import ShoutTopic


class ReactionKind(enum.Enum):
    AGREE = 1  # +1
    DISAGREE = 2  # -1
    PROOF = 3  # +1
    DISPROOF = 4  # -1
    ASK = 5  # +0 bookmark
    PROPOSE = 6  # +0
    QUOTE = 7  # +0 bookmark
    COMMENT = 8  # +0
    ACCEPT = 9  # +1
    REJECT = 0  # -1
    LIKE = 11  # +1
    DISLIKE = 12  # -1
    # TYPE = <reaction index> # rating diff


def kind_to_rate(kind) -> int:
    if kind in [
        ReactionKind.AGREE,
        ReactionKind.LIKE,
        ReactionKind.PROOF,
        ReactionKind.ACCEPT,
    ]:
        return 1
    elif kind in [
        ReactionKind.DISAGREE,
        ReactionKind.DISLIKE,
        ReactionKind.DISPROOF,
        ReactionKind.REJECT,
    ]:
        return -1
    else:
        return 0


class ReactedByDay(Base):
    __tablename__ = "reacted_by_day"

    id = None
    reaction = Column(ForeignKey("reaction.id"), primary_key=True)
    shout = Column(ForeignKey("shout.slug"), primary_key=True)
    replyTo = Column(ForeignKey("reaction.id"), nullable=True)
    kind: int = Column(Enum(ReactionKind), nullable=False, comment="Reaction kind")
    day = Column(DateTime, primary_key=True, default=datetime.now)
    comment = Column(Boolean, default=False)


class ReactedStorage:
    reacted = {"shouts": {}, "topics": {}, "reactions": {}}
    rating = {"shouts": {}, "topics": {}, "reactions": {}}
    reactions = []
    to_flush = []
    period = 30 * 60  # sec
    lock = asyncio.Lock()

    @staticmethod
    async def get_shout(shout_slug):
        self = ReactedStorage
        async with self.lock:
            return self.reacted["shouts"].get(shout_slug, [])

    @staticmethod
    async def get_topic(topic_slug):
        self = ReactedStorage
        async with self.lock:
            return self.reacted["topics"].get(topic_slug, [])

    @staticmethod
    async def get_comments(shout_slug):
        self = ReactedStorage
        async with self.lock:
            return list(
                filter(lambda r: r.comment, self.reacted["shouts"].get(shout_slug, []))
            )

    @staticmethod
    async def get_topic_comments(topic_slug):
        self = ReactedStorage
        async with self.lock:
            return list(
                filter(lambda r: r.comment, self.reacted["topics"].get(topic_slug, []))
            )

    @staticmethod
    async def get_reaction_comments(reaction_id):
        self = ReactedStorage
        async with self.lock:
            return list(
                filter(lambda r: r.comment, self.reacted["reactions"].get(reaction_id))
            )

    @staticmethod
    async def get_reaction(reaction_id):
        self = ReactedStorage
        async with self.lock:
            return self.reacted["reactions"].get(reaction_id, [])

    @staticmethod
    async def get_rating(shout_slug):
        self = ReactedStorage
        rating = 0
        async with self.lock:
            for r in self.reacted["shouts"].get(shout_slug, []):
                rating = rating + kind_to_rate(r.kind)
        return rating

    @staticmethod
    async def get_topic_rating(topic_slug):
        self = ReactedStorage
        rating = 0
        async with self.lock:
            for r in self.reacted["topics"].get(topic_slug, []):
                rating = rating + kind_to_rate(r.kind)
        return rating

    @staticmethod
    async def get_reaction_rating(reaction_id):
        self = ReactedStorage
        rating = 0
        async with self.lock:
            for r in self.reacted["reactions"].get(reaction_id, []):
                rating = rating + kind_to_rate(r.kind)
        return rating

    @staticmethod
    async def increment(reaction):
        self = ReactedStorage
        async with self.lock:
            with local_session() as session:
                r = {
                    "day": datetime.now().replace(
                        hour=0, minute=0, second=0, microsecond=0
                    ),
                    "reaction": reaction.id,
                    "kind": reaction.kind,
                    "shout": reaction.shout,
                }
                if reaction.replyTo:
                    r["replyTo"] = reaction.replyTo
                if reaction.body:
                    r["comment"] = True
                reaction = ReactedByDay.create(**r)
                self.reacted["shouts"][reaction.shout] = self.reacted["shouts"].get(
                    reaction.shout, []
                )
                self.reacted["shouts"][reaction.shout].append(reaction)
                if reaction.replyTo:
                    self.reacted["reaction"][reaction.replyTo] = self.reacted[
                        "reactions"
                    ].get(reaction.shout, [])
                    self.reacted["reaction"][reaction.replyTo].append(reaction)
                    self.rating["reactions"][reaction.replyTo] = self.rating[
                        "reactions"
                    ].get(reaction.replyTo, 0) + kind_to_rate(reaction.kind)
                else:
                    self.rating["shouts"][reaction.replyTo] = self.rating["shouts"].get(
                        reaction.shout, 0
                    ) + kind_to_rate(reaction.kind)

    @staticmethod
    def init(session):
        self = ReactedStorage
        all_reactions = session.query(ReactedByDay).all()
        print("[stat.reacted] %d reactions total" % len(all_reactions))
        for reaction in all_reactions:
            shout = reaction.shout
            topics = (
                session.query(ShoutTopic.topic).where(ShoutTopic.shout == shout).all()
            )
            kind = reaction.kind

            self.reacted["shouts"][shout] = self.reacted["shouts"].get(shout, [])
            self.reacted["shouts"][shout].append(reaction)
            self.rating["shouts"][shout] = self.rating["shouts"].get(
                shout, 0
            ) + kind_to_rate(kind)

            for t in topics:
                self.reacted["topics"][t] = self.reacted["topics"].get(t, [])
                self.reacted["topics"][t].append(reaction)
                self.rating["topics"][t] = self.rating["topics"].get(
                    t, 0
                ) + kind_to_rate(
                    kind
                )  # rating

            if reaction.replyTo:
                self.reacted["reactions"][reaction.replyTo] = self.reacted[
                    "reactions"
                ].get(reaction.replyTo, [])
                self.reacted["reactions"][reaction.replyTo].append(reaction)
                self.rating["reactions"][reaction.replyTo] = self.rating[
                    "reactions"
                ].get(reaction.replyTo, 0) + kind_to_rate(reaction.kind)
        ttt = self.reacted["topics"].values()
        print("[stat.reacted] %d topics reacted" % len(ttt))
        print("[stat.reacted] %d shouts reacted" % len(self.reacted["shouts"]))
        print("[stat.reacted] %d reactions reacted" % len(self.reacted["reactions"]))
@@ -5,81 +5,84 @@ from services.stat.viewed import ViewedStorage
from services.zine.shoutauthor import ShoutAuthorStorage
from orm.topic import ShoutTopic, TopicFollower
from typing import Dict


class TopicStat:
    shouts_by_topic = {}
    authors_by_topic = {}
    followers_by_topic = {}
    lock = asyncio.Lock()
    period = 30 * 60  # sec

    @staticmethod
    async def load_stat(session):
        self = TopicStat
        self.shouts_by_topic = {}
        self.authors_by_topic = {}
        shout_topics = session.query(ShoutTopic).all()
        for shout_topic in shout_topics:
            topic = shout_topic.topic
            shout = shout_topic.shout
            if topic in self.shouts_by_topic:
                self.shouts_by_topic[topic].append(shout)
            else:
                self.shouts_by_topic[topic] = [
                    shout,
                ]

            authors = await ShoutAuthorStorage.get_authors(shout)
            if topic in self.authors_by_topic:
                self.authors_by_topic[topic].update(authors)
            else:
                self.authors_by_topic[topic] = set(authors)

        print("[stat.topics] authors sorted")
        print("[stat.topics] shouts sorted")

        self.followers_by_topic = {}
        followings = session.query(TopicFollower)
        for flw in followings:
            topic = flw.topic
            user = flw.follower
            if topic in self.followers_by_topic:
                self.followers_by_topic[topic].append(user)
            else:
                self.followers_by_topic[topic] = [user]
        print("[stat.topics] followers sorted")

    @staticmethod
    async def get_shouts(topic):
        self = TopicStat
        async with self.lock:
            return self.shouts_by_topic.get(topic, [])

    @staticmethod
    async def get_stat(topic):
        self = TopicStat
        async with self.lock:
            shouts = self.shouts_by_topic.get(topic, [])
            followers = self.followers_by_topic.get(topic, [])
            authors = self.authors_by_topic.get(topic, [])

            return {
                "shouts": len(shouts),
                "authors": len(authors),
                "followers": len(followers),
                "viewed": await ViewedStorage.get_topic(topic),
                "reacted": len(await ReactedStorage.get_topic(topic)),
                "commented": len(await ReactedStorage.get_topic_comments(topic)),
                "rating": await ReactedStorage.get_topic_rating(topic),
            }

    @staticmethod
    async def worker():
        self = TopicStat
        while True:
            try:
                with local_session() as session:
                    async with self.lock:
                        await self.load_stat(session)
                        print("[stat.topics] periodical update")
            except Exception as err:
                print("[stat.topics] error: %s" % (err))
            await asyncio.sleep(self.period)
@@ -7,109 +7,112 @@ from orm.topic import ShoutTopic


class ViewedByDay(Base):
    __tablename__ = "viewed_by_day"

    id = None
    shout = Column(ForeignKey("shout.slug"), primary_key=True)
    day = Column(DateTime, primary_key=True, default=datetime.now)
    value = Column(Integer)


class ViewedStorage:
    viewed = {"shouts": {}, "topics": {}, "reactions": {}}
    this_day_views = {}
    to_flush = []
    period = 30 * 60  # sec
    lock = asyncio.Lock()

    @staticmethod
    def init(session):
        self = ViewedStorage
        views = session.query(ViewedByDay).all()

        for view in views:
            shout = view.shout
            topics = (
                session.query(ShoutTopic.topic).filter(ShoutTopic.shout == shout).all()
            )
            value = view.value
            if shout:
                old_value = self.viewed["shouts"].get(shout, 0)
                self.viewed["shouts"][shout] = old_value + value
                for t in topics:
                    old_topic_value = self.viewed["topics"].get(t, 0)
                    self.viewed["topics"][t] = old_topic_value + value
            if not shout in self.this_day_views:
                self.this_day_views[shout] = view
            this_day_view = self.this_day_views[shout]
            if this_day_view.day < view.day:
                self.this_day_views[shout] = view

        print("[stat.viewed] %d shouts viewed" % len(views))

    @staticmethod
    async def get_shout(shout_slug):
        self = ViewedStorage
        async with self.lock:
            return self.viewed["shouts"].get(shout_slug, 0)

    @staticmethod
    async def get_topic(topic_slug):
        self = ViewedStorage
        async with self.lock:
            return self.viewed["topics"].get(topic_slug, 0)

    @staticmethod
    async def get_reaction(reaction_id):
        self = ViewedStorage
        async with self.lock:
            return self.viewed["reactions"].get(reaction_id, 0)

    @staticmethod
    async def increment(shout_slug):
        self = ViewedStorage
        async with self.lock:
            this_day_view = self.this_day_views.get(shout_slug)
            day_start = datetime.now().replace(hour=0, minute=0, second=0)
            if not this_day_view or this_day_view.day < day_start:
                if this_day_view and getattr(this_day_view, "modified", False):
                    self.to_flush.append(this_day_view)
                this_day_view = ViewedByDay.create(shout=shout_slug, value=1)
                self.this_day_views[shout_slug] = this_day_view
            else:
                this_day_view.value = this_day_view.value + 1
            this_day_view.modified = True
            self.viewed["shouts"][shout_slug] = (
                self.viewed["shouts"].get(shout_slug, 0) + 1
            )
            with local_session() as session:
                topics = (
                    session.query(ShoutTopic.topic)
                    .where(ShoutTopic.shout == shout_slug)
                    .all()
                )
                for t in topics:
                    self.viewed["topics"][t] = self.viewed["topics"].get(t, 0) + 1
            flag_modified(this_day_view, "value")

    @staticmethod
    async def flush_changes(session):
        self = ViewedStorage
        async with self.lock:
            for view in self.this_day_views.values():
                if getattr(view, "modified", False):
                    session.add(view)
                    flag_modified(view, "value")
                    view.modified = False
            for view in self.to_flush:
                session.add(view)
            self.to_flush.clear()
            session.commit()

    @staticmethod
    async def worker():
        while True:
            try:
                with local_session() as session:
                    await ViewedStorage.flush_changes(session)
                    print("[stat.viewed] periodical flush")
            except Exception as err:
                print("[stat.viewed] error: %s" % (err))
            await asyncio.sleep(ViewedStorage.period)
@@ -3,60 +3,67 @@ from pathlib import Path

import asyncio

from settings import SHOUTS_REPO


class GitTask:
    """every shout update use a new task"""

    queue = asyncio.Queue()

    def __init__(self, input, username, user_email, comment):
        self.slug = input["slug"]
        self.shout_body = input["body"]
        self.username = username
        self.user_email = user_email
        self.comment = comment

        GitTask.queue.put_nowait(self)

    def init_repo(self):
        repo_path = "%s" % (SHOUTS_REPO)

        Path(repo_path).mkdir()

        cmd = (
            "cd %s && git init && "
            "git config user.name 'discours' && "
            "git config user.email 'discours@discours.io' && "
            "touch initial && git add initial && "
            "git commit -m 'init repo'" % (repo_path)
        )
        output = subprocess.check_output(cmd, shell=True)
        print(output)

    def execute(self):
        repo_path = "%s" % (SHOUTS_REPO)

        if not Path(repo_path).exists():
            self.init_repo()

        # cmd = "cd %s && git checkout master" % (repo_path)
        # output = subprocess.check_output(cmd, shell=True)
        # print(output)

        shout_filename = "%s.mdx" % (self.slug)
        shout_full_filename = "%s/%s" % (repo_path, shout_filename)
        with open(shout_full_filename, mode="w", encoding="utf-8") as shout_file:
            shout_file.write(bytes(self.shout_body, "utf-8").decode("utf-8", "ignore"))

        author = "%s <%s>" % (self.username, self.user_email)
        cmd = "cd %s && git add %s && git commit -m '%s' --author='%s'" % (
            repo_path,
            shout_filename,
            self.comment,
            author,
        )
        output = subprocess.check_output(cmd, shell=True)
        print(output)

    @staticmethod
    async def git_task_worker():
        print("[service.git] starting task worker")
        while True:
            task = await GitTask.queue.get()
            try:
                task.execute()
            except Exception as err:
                print("[service.git] worker error: %s" % (err))
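A rough usage sketch for the queue above: constructing a GitTask enqueues it via put_nowait, and git_task_worker drains the queue. The import path and the literal input dict are assumptions for illustration (GitTask is normally built from a resolver's input), not part of this commit.

import asyncio

from services.zine.gittask import GitTask  # module path assumed


async def main():
    # consumer: pops tasks and commits each shout into the SHOUTS_REPO checkout
    worker = asyncio.create_task(GitTask.git_task_worker())

    # producer: __init__ calls GitTask.queue.put_nowait(self)
    GitTask(
        {"slug": "example-shout", "body": "# hello"},  # illustrative values only
        "discours",
        "discours@discours.io",
        "create example-shout",
    )

    await asyncio.sleep(1)  # let the worker pick the task up
    worker.cancel()


asyncio.run(main())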
@@ -1,47 +1,46 @@

import asyncio

from base.orm import local_session
from orm.shout import ShoutAuthor


class ShoutAuthorStorage:
    authors_by_shout = {}
    lock = asyncio.Lock()
    period = 30 * 60  # sec

    @staticmethod
    async def load(session):
        self = ShoutAuthorStorage
        sas = session.query(ShoutAuthor).all()
        for sa in sas:
            self.authors_by_shout[sa.shout] = self.authors_by_shout.get(sa.shout, [])
            self.authors_by_shout[sa.shout].append([sa.user, sa.caption])
        print("[zine.authors] %d shouts preprocessed" % len(self.authors_by_shout))

    @staticmethod
    async def get_authors(shout):
        self = ShoutAuthorStorage
        async with self.lock:
            return self.authors_by_shout.get(shout, [])

    @staticmethod
    async def get_author_caption(shout, author):
        self = ShoutAuthorStorage
        async with self.lock:
            for a in self.authors_by_shout.get(shout, []):
                if author in a:
                    return a[1]
            return {"error": "author caption not found"}

    @staticmethod
    async def worker():
        self = ShoutAuthorStorage
        while True:
            try:
                with local_session() as session:
                    async with self.lock:
                        await self.load(session)
                        print("[zine.authors] state updated")
            except Exception as err:
                print("[zine.authors] error: %s" % (err))
            await asyncio.sleep(self.period)
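ShoutAuthorStorage is a process-wide cache that worker() refreshes every 30 minutes; callers only need the two async getters. A minimal reading sketch (module path, slug and user id assumed):

import asyncio

from services.zine.shoutauthor import ShoutAuthorStorage  # module path assumed


async def show_caption():
    # authors_by_shout maps a shout slug to [user, caption] pairs filled by load()
    authors = await ShoutAuthorStorage.get_authors("example-shout")
    caption = await ShoutAuthorStorage.get_author_caption("example-shout", 1)
    print(authors, caption)


asyncio.run(show_caption())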
@@ -1,4 +1,3 @@

import asyncio
from datetime import datetime, timedelta
from sqlalchemy import and_, desc, func, select
@@ -11,148 +10,159 @@ from services.stat.viewed import ViewedByDay


class ShoutsCache:
    limit = 200
    period = 60 * 60  # 1 hour
    lock = asyncio.Lock()

    @staticmethod
    async def prepare_recent_published():
        with local_session() as session:
            stmt = (
                select(Shout)
                .options(selectinload(Shout.authors), selectinload(Shout.topics))
                .where(Shout.publishedAt != None)
                .order_by(desc("publishedAt"))
                .limit(ShoutsCache.limit)
            )
            shouts = []
            for row in session.execute(stmt):
                shout = row.Shout
                shout.rating = await ReactedStorage.get_rating(shout.slug) or 0
                shouts.append(shout)
            async with ShoutsCache.lock:
                ShoutsCache.recent_published = shouts
                print("[zine.cache] %d recently published shouts " % len(shouts))

    @staticmethod
    async def prepare_recent_all():
        with local_session() as session:
            stmt = (
                select(Shout)
                .options(selectinload(Shout.authors), selectinload(Shout.topics))
                .order_by(desc("createdAt"))
                .limit(ShoutsCache.limit)
            )
            shouts = []
            for row in session.execute(stmt):
                shout = row.Shout
                # shout.topics = [t.slug for t in shout.topics]
                shout.rating = await ReactedStorage.get_rating(shout.slug) or 0
                shouts.append(shout)
            async with ShoutsCache.lock:
                ShoutsCache.recent_all = shouts
                print("[zine.cache] %d recently created shouts " % len(shouts))

    @staticmethod
    async def prepare_recent_reacted():
        with local_session() as session:
            stmt = (
                select(Shout, func.max(Reaction.createdAt).label("reactionCreatedAt"))
                .options(
                    selectinload(Shout.authors),
                    selectinload(Shout.topics),
                )
                .join(Reaction, Reaction.shout == Shout.slug)
                .where(and_(Shout.publishedAt != None, Reaction.deletedAt == None))
                .group_by(Shout.slug)
                .order_by(desc("reactionCreatedAt"))
                .limit(ShoutsCache.limit)
            )
            shouts = []
            for row in session.execute(stmt):
                shout = row.Shout
                # shout.topics = [t.slug for t in shout.topics]
                shout.rating = await ReactedStorage.get_rating(shout.slug) or 0
                shouts.append(shout)
            async with ShoutsCache.lock:
                ShoutsCache.recent_reacted = shouts
                print("[zine.cache] %d recently reacted shouts " % len(shouts))

    @staticmethod
    async def prepare_top_overall():
        with local_session() as session:
            # with reacted times counter
            stmt = (
                select(Shout, func.count(Reaction.id).label("reacted"))
                .options(
                    selectinload(Shout.authors),
                    selectinload(Shout.topics),
                    selectinload(Shout.reactions),
                )
                .join(Reaction)
                .where(and_(Shout.publishedAt != None, Reaction.deletedAt == None))
                .group_by(Shout.slug)
                .order_by(desc("reacted"))
                .limit(ShoutsCache.limit)
            )
            shouts = []
            # with rating synthetic counter
            for row in session.execute(stmt):
                shout = row.Shout
                shout.rating = await ReactedStorage.get_rating(shout.slug) or 0
                shouts.append(shout)
            shouts.sort(key=lambda shout: shout.rating, reverse=True)
            async with ShoutsCache.lock:
                print("[zine.cache] %d top shouts " % len(shouts))
                ShoutsCache.top_overall = shouts

    @staticmethod
    async def prepare_top_month():
        month_ago = datetime.now() - timedelta(days=30)
        with local_session() as session:
            stmt = (
                select(Shout, func.count(Reaction.id).label("reacted"))
                .options(selectinload(Shout.authors), selectinload(Shout.topics))
                .join(Reaction)
                .where(and_(Shout.createdAt > month_ago, Shout.publishedAt != None))
                .group_by(Shout.slug)
                .order_by(desc("reacted"))
                .limit(ShoutsCache.limit)
            )
            shouts = []
            for row in session.execute(stmt):
                shout = row.Shout
                shout.rating = await ReactedStorage.get_rating(shout.slug) or 0
                shouts.append(shout)
            shouts.sort(key=lambda shout: shout.rating, reverse=True)
            async with ShoutsCache.lock:
                print("[zine.cache] %d top month shouts " % len(shouts))
                ShoutsCache.top_month = shouts

    @staticmethod
    async def prepare_top_viewed():
        month_ago = datetime.now() - timedelta(days=30)
        with local_session() as session:
            stmt = (
                select(Shout, func.sum(ViewedByDay.value).label("viewed"))
                .options(selectinload(Shout.authors), selectinload(Shout.topics))
                .join(ViewedByDay)
                .where(and_(ViewedByDay.day > month_ago, Shout.publishedAt != None))
                .group_by(Shout.slug)
                .order_by(desc("viewed"))
                .limit(ShoutsCache.limit)
            )
            shouts = []
            for row in session.execute(stmt):
                shout = row.Shout
                shout.rating = await ReactedStorage.get_rating(shout.slug) or 0
                shouts.append(shout)
            # shouts.sort(key = lambda shout: shout.viewed, reverse = True)
            async with ShoutsCache.lock:
                print("[zine.cache] %d top viewed shouts " % len(shouts))
                ShoutsCache.top_viewed = shouts

    @staticmethod
    async def worker():
        while True:
            try:
                await ShoutsCache.prepare_top_month()
                await ShoutsCache.prepare_top_overall()
                await ShoutsCache.prepare_top_viewed()
                await ShoutsCache.prepare_recent_published()
                await ShoutsCache.prepare_recent_all()
                await ShoutsCache.prepare_recent_reacted()
                print("[zine.cache] periodical update")
            except Exception as err:
                print("[zine.cache] error: %s" % (err))
                raise err
            await asyncio.sleep(ShoutsCache.period)
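Each prepare_* coroutine only repopulates a class attribute under ShoutsCache.lock, so a single background task keeps all lists warm. A sketch of scheduling it at startup (module path assumed; the real entry point may differ):

import asyncio

from services.zine.shoutscache import ShoutsCache  # module path assumed


async def start_shouts_cache():
    # runs ShoutsCache.worker() forever; readers just access ShoutsCache.top_month,
    # ShoutsCache.recent_published, etc. after the first refresh
    return asyncio.create_task(ShoutsCache.worker())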
@@ -3,56 +3,58 @@ from orm.topic import Topic


class TopicStorage:
    topics = {}
    lock = asyncio.Lock()

    @staticmethod
    def init(session):
        self = TopicStorage
        topics = session.query(Topic)
        self.topics = dict([(topic.slug, topic) for topic in topics])
        for topic in self.topics.values():
            self.load_parents(topic)

        print("[zine.topics] %d precached" % len(self.topics.keys()))

    @staticmethod
    def load_parents(topic):
        self = TopicStorage
        parents = []
        for parent in self.topics.values():
            if topic.slug in parent.children:
                parents.append(parent.slug)
        topic.parents = parents
        return topic

    @staticmethod
    async def get_topics_all(page, size):
        end = page * size
        start = end - size
        self = TopicStorage
        async with self.lock:
            return list(self.topics.values())[start:end]

    @staticmethod
    async def get_topics_by_slugs(slugs):
        self = TopicStorage
        async with self.lock:
            if not slugs:
                return self.topics.values()
            topics = filter(lambda topic: topic.slug in slugs, self.topics.values())
            return list(topics)

    @staticmethod
    async def get_topics_by_community(community):
        self = TopicStorage
        async with self.lock:
            topics = filter(
                lambda topic: topic.community == community, self.topics.values()
            )
            return list(topics)

    @staticmethod
    async def add_topic(topic):
        self = TopicStorage
        async with self.lock:
            self.topics[topic.slug] = topic
            self.load_parents(topic)
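get_topics_all(page, size) slices the cached topic list with end = page * size and start = end - size, i.e. page numbering starts at 1. A quick check of that arithmetic:

# page=1 -> items 0..9, page=2 -> items 10..19 of the cached topic list
page, size = 2, 10
end = page * size
start = end - size
assert (start, end) == (10, 20)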
20 settings.py
@@ -1,4 +1,3 @@

from os import environ

PORT = 8080
@@ -8,9 +7,16 @@ BACKEND_URL = environ.get("BACKEND_URL") or "https://localhost:8080"

OAUTH_CALLBACK_URL = environ.get("OAUTH_CALLBACK_URL") or "https://localhost:8080"
RESET_PWD_URL = environ.get("RESET_PWD_URL") or "https://localhost:8080/reset_pwd"
CONFIRM_EMAIL_URL = environ.get("CONFIRM_EMAIL_URL") or "https://new.discours.io"
ERROR_URL_ON_FRONTEND = (
    environ.get("ERROR_URL_ON_FRONTEND") or "https://new.discours.io"
)

DB_URL = (
    environ.get("DATABASE_URL")
    or environ.get("DB_URL")
    or "postgresql://postgres@localhost:5432/discoursio"
    or "sqlite:///db.sqlite3"
)
JWT_ALGORITHM = "HS256"
JWT_SECRET_KEY = "8f1bd7696ffb482d8486dfbc6e7d16dd-secret-key"
JWT_LIFE_SPAN = 24 * 60 * 60  # seconds
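In the DB_URL chain the first truthy value wins: DATABASE_URL, then DB_URL, then the Postgres default; the trailing SQLite literal can never be reached because the Postgres string is always truthy. A small check of that precedence (example values only):

from os import environ

environ.pop("DATABASE_URL", None)
environ["DB_URL"] = "postgresql://example@db:5432/discoursio"  # illustrative value

DB_URL = (
    environ.get("DATABASE_URL")
    or environ.get("DB_URL")
    or "postgresql://postgres@localhost:5432/discoursio"
    or "sqlite:///db.sqlite3"  # unreachable: the previous literal is truthy
)
assert DB_URL == "postgresql://example@db:5432/discoursio"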
@@ -24,9 +30,9 @@ MAILGUN_DOMAIN = environ.get("MAILGUN_DOMAIN")

OAUTH_PROVIDERS = ("GITHUB", "FACEBOOK", "GOOGLE")
OAUTH_CLIENTS = {}
for provider in OAUTH_PROVIDERS:
    OAUTH_CLIENTS[provider] = {
        "id": environ.get(provider + "_OAUTH_ID"),
        "key": environ.get(provider + "_OAUTH_KEY"),
    }

SHOUTS_REPO = "content"
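The loop expects two environment variables per provider, e.g. GITHUB_OAUTH_ID and GITHUB_OAUTH_KEY; unset providers simply end up with None values. A sketch with made-up credentials (the lowercase clients dict is only for illustration, not the settings variable itself):

from os import environ

environ["GITHUB_OAUTH_ID"] = "example-id"      # illustrative values only
environ["GITHUB_OAUTH_KEY"] = "example-secret"

clients = {
    provider: {
        "id": environ.get(provider + "_OAUTH_ID"),
        "key": environ.get(provider + "_OAUTH_KEY"),
    }
    for provider in ("GITHUB", "FACEBOOK", "GOOGLE")
}
assert clients["GITHUB"]["id"] == "example-id"
# assuming FACEBOOK_OAUTH_ID is not set in this environment:
assert clients["FACEBOOK"]["id"] is None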