format and lint orm

tonyrewin 2022-09-03 13:50:14 +03:00
parent 85892a88bc
commit a89a44f660
55 changed files with 4811 additions and 4174 deletions

View File

@@ -2,7 +2,7 @@ root = true
[*]
indent_style = tabs
indent_size = 1
indent_size = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace=true

5 .flake8 Normal file
View File

@@ -0,0 +1,5 @@
[flake8]
ignore = D203
exclude = .git,__pycache__
max-complexity = 10
max-line-length = 108
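
The .flake8 limits above (max complexity 10, line length 108) are picked up automatically when flake8 runs from the repository root. The reformatting in the rest of this diff — double quotes, trailing commas, and wrapped call sites with a hanging closing parenthesis — matches what the black formatter produces; the commit message does not name the tool, so black here is an inference, not something the commit states. A minimal sketch under that assumption, reproducing one transformation visible later in this diff through black's public format_str API:

# Hedged sketch, assuming black is installed (pip install black); black is
# inferred from the diff style and is not named anywhere in the commit itself.
import black

src = (
    "async def get_email_token(user):\n"
    "    token = await Authorize.authorize(user, device='email', life_span=EMAIL_TOKEN_LIFE_SPAN)\n"
    "    return token\n"
)

print(black.format_str(src, mode=black.Mode()))
# Prints the same shape the diff shows for auth/authenticate.py:
# async def get_email_token(user):
#     token = await Authorize.authorize(
#         user, device="email", life_span=EMAIL_TOKEN_LIFE_SPAN
#     )
#     return token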

View File

@@ -60,7 +60,9 @@ class JWTAuthenticate(AuthenticationBackend):
try:
payload = await _Authenticate.verify(token)
except Exception as exc:
return AuthCredentials(scopes=[], error_message=str(exc)), AuthUser(user_id=None)
return AuthCredentials(scopes=[], error_message=str(exc)), AuthUser(
user_id=None
)
if payload is None:
return AuthCredentials(scopes=[]), AuthUser(user_id=None)
@@ -73,15 +75,17 @@ class JWTAuthenticate(AuthenticationBackend):
return AuthCredentials(scopes=[]), AuthUser(user_id=None)
scopes = await user.get_permission()
return AuthCredentials(user_id=payload.user_id, scopes=scopes, logged_in=True), user
return (
AuthCredentials(user_id=payload.user_id, scopes=scopes, logged_in=True),
user,
)
class EmailAuthenticate:
@staticmethod
async def get_email_token(user):
token = await Authorize.authorize(
user,
device="email",
life_span=EMAIL_TOKEN_LIFE_SPAN
user, device="email", life_span=EMAIL_TOKEN_LIFE_SPAN
)
return token
@@ -102,6 +106,7 @@ class EmailAuthenticate:
auth_token = await Authorize.authorize(user)
return (auth_token, user)
class ResetPassword:
@staticmethod
async def get_reset_token(user):
@@ -124,6 +129,7 @@ class ResetPassword:
return payload.user_id
def login_required(func):
@wraps(func)
async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
@@ -131,4 +137,5 @@ def login_required(func):
if not auth.logged_in:
return {"error": auth.error_message or "Please login"}
return await func(parent, info, *args, **kwargs)
return wrap

View File

@@ -5,6 +5,7 @@ from base.redis import redis
from settings import JWT_LIFE_SPAN
from auth.validations import User
class TokenStorage:
@staticmethod
async def save(token_key, life_span, auto_delete=True):
@@ -20,7 +21,9 @@ class TokenStorage:
class Authorize:
@staticmethod
async def authorize(user: User, device: str = "pc", life_span = JWT_LIFE_SPAN, auto_delete=True) -> str:
async def authorize(
user: User, device: str = "pc", life_span=JWT_LIFE_SPAN, auto_delete=True
) -> str:
exp = datetime.utcnow() + timedelta(seconds=life_span)
token = JWTCodec.encode(user, exp=exp, device=device)
await TokenStorage.save(f"{user.id}-{token}", life_span, auto_delete)

View File

@@ -2,8 +2,14 @@ import requests
from starlette.responses import RedirectResponse
from auth.authenticate import EmailAuthenticate, ResetPassword
from base.orm import local_session
from settings import BACKEND_URL, MAILGUN_API_KEY, MAILGUN_DOMAIN, RESET_PWD_URL, \
CONFIRM_EMAIL_URL, ERROR_URL_ON_FRONTEND
from settings import (
BACKEND_URL,
MAILGUN_API_KEY,
MAILGUN_DOMAIN,
RESET_PWD_URL,
CONFIRM_EMAIL_URL,
ERROR_URL_ON_FRONTEND,
)
MAILGUN_API_URL = "https://api.mailgun.net/v3/%s/messages" % (MAILGUN_DOMAIN)
MAILGUN_FROM = "discours.io <noreply@%s>" % (MAILGUN_DOMAIN)
@@ -12,6 +18,7 @@ AUTH_URL = "%s/email_authorize" % (BACKEND_URL)
email_templates = {"confirm_email": "", "auth_email": "", "reset_password_email": ""}
def load_email_templates():
for name in email_templates:
filename = "auth/templates/%s.tmpl" % name
@@ -19,21 +26,25 @@ def load_email_templates():
email_templates[name] = f.read()
print("[auth.email] templates loaded")
async def send_confirm_email(user):
text = email_templates["confirm_email"]
token = await EmailAuthenticate.get_email_token(user)
await send_email(user, AUTH_URL, text, token)
async def send_auth_email(user):
text = email_templates["auth_email"]
token = await EmailAuthenticate.get_email_token(user)
await send_email(user, AUTH_URL, text, token)
async def send_reset_password_email(user):
text = email_templates["reset_password_email"]
token = await ResetPassword.get_reset_token(user)
await send_email(user, RESET_PWD_URL, text, token)
async def send_email(user, url, text, token):
to = "%s <%s>" % (user.username, user.email)
url_with_token = "%s?token=%s" % (url, token)
@@ -45,13 +56,14 @@ async def send_email(user, url, text, token):
"from": MAILGUN_FROM,
"to": to,
"subject": "authorize log in",
"html": text
}
"html": text,
},
)
response.raise_for_status()
async def email_authorize(request):
token = request.query_params.get('token')
token = request.query_params.get("token")
if not token:
url_with_error = "%s?error=%s" % (ERROR_URL_ON_FRONTEND, "INVALID_TOKEN")
return RedirectResponse(url=url_with_error)

View File

@@ -20,9 +20,15 @@ class Identity:
@staticmethod
def identity_oauth(input) -> User:
with local_session() as session:
user = session.query(OrmUser).filter(
or_(OrmUser.oauth == input["oauth"], OrmUser.email == input["email"])
).first()
user = (
session.query(OrmUser)
.filter(
or_(
OrmUser.oauth == input["oauth"], OrmUser.email == input["email"]
)
)
.first()
)
if not user:
user = OrmUser.create(**input)
if not user.oauth:

View File

@@ -7,7 +7,12 @@ from auth.validations import PayLoad, User
class JWTCodec:
@staticmethod
def encode(user: User, exp: datetime, device: str = "pc") -> str:
payload = {"user_id": user.id, "device": device, "exp": exp, "iat": datetime.utcnow()}
payload = {
"user_id": user.id,
"device": device,
"exp": exp,
"iat": datetime.utcnow(),
}
return jwt.encode(payload, JWT_SECRET_KEY, JWT_ALGORITHM)
@staticmethod

View File

@@ -8,66 +8,71 @@ from settings import OAUTH_CLIENTS, BACKEND_URL, OAUTH_CALLBACK_URL
oauth = OAuth()
oauth.register(
name='facebook',
name="facebook",
client_id=OAUTH_CLIENTS["FACEBOOK"]["id"],
client_secret=OAUTH_CLIENTS["FACEBOOK"]["key"],
access_token_url='https://graph.facebook.com/v11.0/oauth/access_token',
access_token_url="https://graph.facebook.com/v11.0/oauth/access_token",
access_token_params=None,
authorize_url='https://www.facebook.com/v11.0/dialog/oauth',
authorize_url="https://www.facebook.com/v11.0/dialog/oauth",
authorize_params=None,
api_base_url='https://graph.facebook.com/',
client_kwargs={'scope': 'public_profile email'},
api_base_url="https://graph.facebook.com/",
client_kwargs={"scope": "public_profile email"},
)
oauth.register(
name='github',
name="github",
client_id=OAUTH_CLIENTS["GITHUB"]["id"],
client_secret=OAUTH_CLIENTS["GITHUB"]["key"],
access_token_url='https://github.com/login/oauth/access_token',
access_token_url="https://github.com/login/oauth/access_token",
access_token_params=None,
authorize_url='https://github.com/login/oauth/authorize',
authorize_url="https://github.com/login/oauth/authorize",
authorize_params=None,
api_base_url='https://api.github.com/',
client_kwargs={'scope': 'user:email'},
api_base_url="https://api.github.com/",
client_kwargs={"scope": "user:email"},
)
oauth.register(
name='google',
name="google",
client_id=OAUTH_CLIENTS["GOOGLE"]["id"],
client_secret=OAUTH_CLIENTS["GOOGLE"]["key"],
server_metadata_url="https://accounts.google.com/.well-known/openid-configuration",
client_kwargs={'scope': 'openid email profile'}
client_kwargs={"scope": "openid email profile"},
)
async def google_profile(client, request, token):
profile = await client.parse_id_token(request, token)
profile["id"] = profile["sub"]
return profile
async def facebook_profile(client, request, token):
profile = await client.get('me?fields=name,id,email', token=token)
profile = await client.get("me?fields=name,id,email", token=token)
return profile.json()
async def github_profile(client, request, token):
profile = await client.get('user', token=token)
profile = await client.get("user", token=token)
return profile.json()
profile_callbacks = {
"google": google_profile,
"facebook": facebook_profile,
"github" : github_profile
"github": github_profile,
}
async def oauth_login(request):
provider = request.path_params['provider']
request.session['provider'] = provider
provider = request.path_params["provider"]
request.session["provider"] = provider
client = oauth.create_client(provider)
redirect_uri = "%s/%s" % (BACKEND_URL, 'oauth_authorize')
redirect_uri = "%s/%s" % (BACKEND_URL, "oauth_authorize")
return await client.authorize_redirect(request, redirect_uri)
async def oauth_authorize(request):
provider = request.session['provider']
provider = request.session["provider"]
client = oauth.create_client(provider)
token = await client.authorize_access_token(request)
get_profile = profile_callbacks[provider]
@@ -76,7 +81,7 @@ async def oauth_authorize(request):
user_input = {
"oauth": user_oauth_info,
"email": profile["email"],
"username" : profile["name"]
"username": profile["name"],
}
user = Identity.identity_oauth(user_input)
token = await Authorize.authorize(user, device="pc")

View File

@@ -5,16 +5,18 @@ from sqlalchemy.orm import Session
from sqlalchemy.sql.schema import Table
from settings import DB_URL
if DB_URL.startswith('sqlite'):
if DB_URL.startswith("sqlite"):
engine = create_engine(DB_URL)
else:
engine = create_engine(DB_URL, convert_unicode=True, echo=False, \
pool_size=10, max_overflow=20)
engine = create_engine(
DB_URL, convert_unicode=True, echo=False, pool_size=10, max_overflow=20
)
T = TypeVar("T")
REGISTRY: Dict[str, type] = {}
def local_session():
return Session(bind=engine, expire_on_commit=False)

View File

@@ -1,6 +1,7 @@
import aioredis
from settings import REDIS_URL
class Redis:
def __init__(self, uri=REDIS_URL):
self._uri: str = uri
@@ -30,5 +31,4 @@ class Redis:
redis = Redis()
__all__ = ['redis']
__all__ = ["redis"]

View File

@@ -3,10 +3,12 @@ from ariadne import MutationType, QueryType, SubscriptionType, ScalarType
datetime_scalar = ScalarType("DateTime")
@datetime_scalar.serializer
def serialize_datetime(value):
return value.isoformat()
query = QueryType()
mutation = MutationType()
subscription = SubscriptionType()

19 main.py
View File

@@ -19,14 +19,15 @@ from services.stat.topicstat import TopicStat
from services.zine.shoutauthor import ShoutAuthorStorage
import asyncio
import_module('resolvers')
schema = make_executable_schema(load_schema_from_path("schema.graphql"), resolvers)
import_module("resolvers")
schema = make_executable_schema(load_schema_from_path("schema.graphql"), resolvers) # type: ignore
middleware = [
Middleware(AuthenticationMiddleware, backend=JWTAuthenticate()),
Middleware(SessionMiddleware, secret_key="!secret")
Middleware(SessionMiddleware, secret_key="!secret"),
]
async def start_up():
await redis.connect()
viewed_storage_task = asyncio.create_task(ViewedStorage.worker())
@@ -36,14 +37,22 @@ async def start_up():
topic_stat_task = asyncio.create_task(TopicStat.worker())
git_task = asyncio.create_task(GitTask.git_task_worker())
async def shutdown():
await redis.disconnect()
routes = [
Route("/oauth/{provider}", endpoint=oauth_login),
Route("/oauth_authorize", endpoint=oauth_authorize),
Route("/email_authorize", endpoint=email_authorize)
Route("/email_authorize", endpoint=email_authorize),
]
app = Starlette(debug=True, on_startup=[start_up], on_shutdown=[shutdown], middleware=middleware, routes=routes)
app = Starlette(
debug=True,
on_startup=[start_up],
on_shutdown=[shutdown],
middleware=middleware,
routes=routes,
)
app.mount("/", GraphQL(schema, debug=True))

View File

@@ -1,4 +1,4 @@
''' cmd managed migration '''
""" cmd managed migration """
import csv
import asyncio
from datetime import datetime
@@ -8,6 +8,7 @@ import sys
import os
import bs4
import numpy as np
# from export import export_email_subscriptions
from .export import export_mdx, export_slug
from orm.reaction import Reaction
@@ -21,106 +22,116 @@ from .tables.comments import migrate_2stage as migrateComment_2stage
from settings import DB_URL
TODAY = datetime.strftime(datetime.now(), '%Y%m%d')
TODAY = datetime.strftime(datetime.now(), "%Y%m%d")
OLD_DATE = '2016-03-05 22:22:00.350000'
OLD_DATE = "2016-03-05 22:22:00.350000"
def users_handle(storage):
''' migrating users first '''
"""migrating users first"""
counter = 0
id_map = {}
print('[migration] migrating %d users' % (len(storage['users']['data'])))
for entry in storage['users']['data']:
oid = entry['_id']
print("[migration] migrating %d users" % (len(storage["users"]["data"])))
for entry in storage["users"]["data"]:
oid = entry["_id"]
user = migrateUser(entry)
storage['users']['by_oid'][oid] = user # full
del user['password']
del user['notifications']
del user['emailConfirmed']
del user['username']
del user['email']
storage['users']['by_slug'][user['slug']] = user # public
id_map[user['oid']] = user['slug']
storage["users"]["by_oid"][oid] = user # full
del user["password"]
del user["notifications"]
del user["emailConfirmed"]
del user["username"]
del user["email"]
storage["users"]["by_slug"][user["slug"]] = user # public
id_map[user["oid"]] = user["slug"]
counter += 1
ce = 0
for entry in storage['users']['data']:
for entry in storage["users"]["data"]:
ce += migrateUser_2stage(entry, id_map)
return storage
def topics_handle(storage):
''' topics from categories and tags '''
"""topics from categories and tags"""
counter = 0
for t in (storage['topics']['tags'] + storage['topics']['cats']):
if t['slug'] in storage['replacements']:
t['slug'] = storage['replacements'][t['slug']]
for t in storage["topics"]["tags"] + storage["topics"]["cats"]:
if t["slug"] in storage["replacements"]:
t["slug"] = storage["replacements"][t["slug"]]
topic = migrateTopic(t)
storage['topics']['by_oid'][t['_id']] = topic
storage['topics']['by_slug'][t['slug']] = topic
storage["topics"]["by_oid"][t["_id"]] = topic
storage["topics"]["by_slug"][t["slug"]] = topic
counter += 1
else:
print('[migration] topic ' + t['slug'] + ' ignored')
for oldslug, newslug in storage['replacements'].items():
if oldslug != newslug and oldslug in storage['topics']['by_slug']:
oid = storage['topics']['by_slug'][oldslug]['_id']
del storage['topics']['by_slug'][oldslug]
storage['topics']['by_oid'][oid] = storage['topics']['by_slug'][newslug]
print('[migration] ' + str(counter) + ' topics migrated')
print('[migration] ' + str(len(storage['topics']
['by_oid'].values())) + ' topics by oid')
print('[migration] ' + str(len(storage['topics']
['by_slug'].values())) + ' topics by slug')
print("[migration] topic " + t["slug"] + " ignored")
for oldslug, newslug in storage["replacements"].items():
if oldslug != newslug and oldslug in storage["topics"]["by_slug"]:
oid = storage["topics"]["by_slug"][oldslug]["_id"]
del storage["topics"]["by_slug"][oldslug]
storage["topics"]["by_oid"][oid] = storage["topics"]["by_slug"][newslug]
print("[migration] " + str(counter) + " topics migrated")
print(
"[migration] "
+ str(len(storage["topics"]["by_oid"].values()))
+ " topics by oid"
)
print(
"[migration] "
+ str(len(storage["topics"]["by_slug"].values()))
+ " topics by slug"
)
# raise Exception
return storage
async def shouts_handle(storage, args):
''' migrating content items one by one '''
"""migrating content items one by one"""
counter = 0
discours_author = 0
pub_counter = 0
topics_dataset_bodies = []
topics_dataset_tlist = []
for entry in storage['shouts']['data']:
for entry in storage["shouts"]["data"]:
# slug
slug = get_shout_slug(entry)
# single slug mode
if '-' in args and slug not in args: continue
if "-" in args and slug not in args:
continue
# migrate
shout = await migrateShout(entry, storage)
storage['shouts']['by_oid'][entry['_id']] = shout
storage['shouts']['by_slug'][shout['slug']] = shout
storage["shouts"]["by_oid"][entry["_id"]] = shout
storage["shouts"]["by_slug"][shout["slug"]] = shout
# shouts.topics
if not shout['topics']: print('[migration] no topics!')
if not shout["topics"]:
print("[migration] no topics!")
# wuth author
author = shout['authors'][0].slug
if author == 'discours': discours_author += 1
author = shout["authors"][0].slug
if author == "discours":
discours_author += 1
# print('[migration] ' + shout['slug'] + ' with author ' + author)
if entry.get('published'):
if 'mdx' in args: export_mdx(shout)
if entry.get("published"):
if "mdx" in args:
export_mdx(shout)
pub_counter += 1
# print main counter
counter += 1
line = str(counter+1) + ': ' + shout['slug'] + " @" + author
line = str(counter + 1) + ": " + shout["slug"] + " @" + author
print(line)
b = bs4.BeautifulSoup(shout['body'], 'html.parser')
b = bs4.BeautifulSoup(shout["body"], "html.parser")
texts = []
texts.append(shout['title'].lower().replace(r'[^а-яА-Яa-zA-Z]', ''))
texts.append(shout["title"].lower().replace(r"[^а-яА-Яa-zA-Z]", ""))
texts = b.findAll(text=True)
topics_dataset_bodies.append(u" ".join([x.strip().lower() for x in texts]))
topics_dataset_tlist.append(shout['topics'])
topics_dataset_bodies.append(" ".join([x.strip().lower() for x in texts]))
topics_dataset_tlist.append(shout["topics"])
# np.savetxt('topics_dataset.csv', (topics_dataset_bodies, topics_dataset_tlist), delimiter=',', fmt='%s')
print('[migration] ' + str(counter) + ' content items were migrated')
print('[migration] ' + str(pub_counter) + ' have been published')
print('[migration] ' + str(discours_author) + ' authored by @discours')
print("[migration] " + str(counter) + " content items were migrated")
print("[migration] " + str(pub_counter) + " have been published")
print("[migration] " + str(discours_author) + " authored by @discours")
return storage
@@ -128,35 +139,35 @@ async def comments_handle(storage):
id_map = {}
ignored_counter = 0
missed_shouts = {}
for oldcomment in storage['reactions']['data']:
if not oldcomment.get('deleted'):
for oldcomment in storage["reactions"]["data"]:
if not oldcomment.get("deleted"):
reaction = await migrateComment(oldcomment, storage)
if type(reaction) == str:
missed_shouts[reaction] = oldcomment
elif type(reaction) == Reaction:
reaction = reaction.dict()
id = reaction['id']
oid = reaction['oid']
id = reaction["id"]
oid = reaction["oid"]
id_map[oid] = id
else:
ignored_counter += 1
for reaction in storage['reactions']['data']: migrateComment_2stage(
reaction, id_map)
print('[migration] ' + str(len(id_map)) + ' comments migrated')
print('[migration] ' + str(ignored_counter) + ' comments ignored')
print('[migration] ' + str(len(missed_shouts.keys())) +
' commented shouts missed')
for reaction in storage["reactions"]["data"]:
migrateComment_2stage(reaction, id_map)
print("[migration] " + str(len(id_map)) + " comments migrated")
print("[migration] " + str(ignored_counter) + " comments ignored")
print("[migration] " + str(len(missed_shouts.keys())) + " commented shouts missed")
missed_counter = 0
for missed in missed_shouts.values():
missed_counter += len(missed)
print('[migration] ' + str(missed_counter) + ' comments dropped')
print("[migration] " + str(missed_counter) + " comments dropped")
return storage
def bson_handle():
# decode bson # preparing data
from migration import bson2json
bson2json.json_tables()
@@ -168,44 +179,31 @@ def export_one(slug, storage, args = None):
async def all_handle(storage, args):
print('[migration] handle everything')
print("[migration] handle everything")
users_handle(storage)
topics_handle(storage)
await shouts_handle(storage, args)
await comments_handle(storage)
# export_email_subscriptions()
print('[migration] done!')
print("[migration] done!")
def data_load():
storage = {
'content_items': {
'by_oid': {},
'by_slug': {},
"content_items": {
"by_oid": {},
"by_slug": {},
},
'shouts': {
'by_oid': {},
'by_slug': {},
'data': []
"shouts": {"by_oid": {}, "by_slug": {}, "data": []},
"reactions": {"by_oid": {}, "by_slug": {}, "by_content": {}, "data": []},
"topics": {
"by_oid": {},
"by_slug": {},
"cats": [],
"tags": [],
},
'reactions': {
'by_oid': {},
'by_slug': {},
'by_content': {},
'data': []
},
'topics': {
'by_oid': {},
'by_slug': {},
'cats': [],
'tags': [],
},
'users': {
'by_oid': {},
'by_slug': {},
'data': []
},
'replacements': json.loads(open('migration/tables/replacements.json').read())
"users": {"by_oid": {}, "by_slug": {}, "data": []},
"replacements": json.loads(open("migration/tables/replacements.json").read()),
}
users_data = []
tags_data = []
@@ -213,101 +211,119 @@ def data_load():
comments_data = []
content_data = []
try:
users_data = json.loads(open('migration/data/users.json').read())
print('[migration.load] ' + str(len(users_data)) + ' users ')
tags_data = json.loads(open('migration/data/tags.json').read())
storage['topics']['tags'] = tags_data
print('[migration.load] ' + str(len(tags_data)) + ' tags ')
users_data = json.loads(open("migration/data/users.json").read())
print("[migration.load] " + str(len(users_data)) + " users ")
tags_data = json.loads(open("migration/data/tags.json").read())
storage["topics"]["tags"] = tags_data
print("[migration.load] " + str(len(tags_data)) + " tags ")
cats_data = json.loads(
open('migration/data/content_item_categories.json').read())
storage['topics']['cats'] = cats_data
print('[migration.load] ' + str(len(cats_data)) + ' cats ')
comments_data = json.loads(open('migration/data/comments.json').read())
storage['reactions']['data'] = comments_data
print('[migration.load] ' + str(len(comments_data)) + ' comments ')
content_data = json.loads(open('migration/data/content_items.json').read())
storage['shouts']['data'] = content_data
print('[migration.load] ' + str(len(content_data)) + ' content items ')
open("migration/data/content_item_categories.json").read()
)
storage["topics"]["cats"] = cats_data
print("[migration.load] " + str(len(cats_data)) + " cats ")
comments_data = json.loads(open("migration/data/comments.json").read())
storage["reactions"]["data"] = comments_data
print("[migration.load] " + str(len(comments_data)) + " comments ")
content_data = json.loads(open("migration/data/content_items.json").read())
storage["shouts"]["data"] = content_data
print("[migration.load] " + str(len(content_data)) + " content items ")
# fill out storage
for x in users_data:
storage['users']['by_oid'][x['_id']] = x
storage["users"]["by_oid"][x["_id"]] = x
# storage['users']['by_slug'][x['slug']] = x
# no user.slug yet
print('[migration.load] ' + str(len(storage['users']
['by_oid'].keys())) + ' users by oid')
print(
"[migration.load] "
+ str(len(storage["users"]["by_oid"].keys()))
+ " users by oid"
)
for x in tags_data:
storage['topics']['by_oid'][x['_id']] = x
storage['topics']['by_slug'][x['slug']] = x
storage["topics"]["by_oid"][x["_id"]] = x
storage["topics"]["by_slug"][x["slug"]] = x
for x in cats_data:
storage['topics']['by_oid'][x['_id']] = x
storage['topics']['by_slug'][x['slug']] = x
print('[migration.load] ' + str(len(storage['topics']
['by_slug'].keys())) + ' topics by slug')
storage["topics"]["by_oid"][x["_id"]] = x
storage["topics"]["by_slug"][x["slug"]] = x
print(
"[migration.load] "
+ str(len(storage["topics"]["by_slug"].keys()))
+ " topics by slug"
)
for item in content_data:
slug = get_shout_slug(item)
storage['content_items']['by_slug'][slug] = item
storage['content_items']['by_oid'][item['_id']] = item
print('[migration.load] ' + str(len(content_data)) + ' content items')
storage["content_items"]["by_slug"][slug] = item
storage["content_items"]["by_oid"][item["_id"]] = item
print("[migration.load] " + str(len(content_data)) + " content items")
for x in comments_data:
storage['reactions']['by_oid'][x['_id']] = x
cid = x['contentItem']
storage['reactions']['by_content'][cid] = x
ci = storage['content_items']['by_oid'].get(cid, {})
if 'slug' in ci: storage['reactions']['by_slug'][ci['slug']] = x
print('[migration.load] ' + str(len(storage['reactions']
['by_content'].keys())) + ' with comments')
except Exception as e: raise e
storage['users']['data'] = users_data
storage['topics']['tags'] = tags_data
storage['topics']['cats'] = cats_data
storage['shouts']['data'] = content_data
storage['reactions']['data'] = comments_data
storage["reactions"]["by_oid"][x["_id"]] = x
cid = x["contentItem"]
storage["reactions"]["by_content"][cid] = x
ci = storage["content_items"]["by_oid"].get(cid, {})
if "slug" in ci:
storage["reactions"]["by_slug"][ci["slug"]] = x
print(
"[migration.load] "
+ str(len(storage["reactions"]["by_content"].keys()))
+ " with comments"
)
except Exception as e:
raise e
storage["users"]["data"] = users_data
storage["topics"]["tags"] = tags_data
storage["topics"]["cats"] = cats_data
storage["shouts"]["data"] = content_data
storage["reactions"]["data"] = comments_data
return storage
def mongo_download(url):
if not url: raise Exception('\n\nYou should set MONGODB_URL enviroment variable\n')
print('[migration] mongodump ' + url)
subprocess.call([
'mongodump',
'--uri', url + '/?authSource=admin',
'--forceTableScan',
], stderr = subprocess.STDOUT)
if not url:
raise Exception("\n\nYou should set MONGODB_URL enviroment variable\n")
print("[migration] mongodump " + url)
subprocess.call(
[
"mongodump",
"--uri",
url + "/?authSource=admin",
"--forceTableScan",
],
stderr=subprocess.STDOUT,
)
def create_pgdump():
pgurl = DB_URL
if not pgurl: raise Exception('\n\nYou should set DATABASE_URL enviroment variable\n')
if not pgurl:
raise Exception("\n\nYou should set DATABASE_URL enviroment variable\n")
subprocess.call(
[ 'pg_dump', pgurl, '-f', TODAY + '-pgdump.sql'],
stderr = subprocess.STDOUT
["pg_dump", pgurl, "-f", TODAY + "-pgdump.sql"], stderr=subprocess.STDOUT
)
subprocess.call([
'scp',
TODAY + '-pgdump.sql',
'root@build.discours.io:/root/.'
])
subprocess.call(["scp", TODAY + "-pgdump.sql", "root@build.discours.io:/root/."])
async def handle_auto():
print('[migration] no command given, auto mode')
url = os.getenv('MONGODB_URL')
if url: mongo_download(url)
print("[migration] no command given, auto mode")
url = os.getenv("MONGODB_URL")
if url:
mongo_download(url)
bson_handle()
await all_handle(data_load(), sys.argv)
create_pgdump()
async def main():
if len(sys.argv) > 1:
cmd = sys.argv[1]
if type(cmd) == str: print('[migration] command: ' + cmd)
if type(cmd) == str:
print("[migration] command: " + cmd)
await handle_auto()
else:
print('[migration] usage: python server.py migrate')
print("[migration] usage: python server.py migrate")
def migrate():
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
if __name__ == '__main__':
if __name__ == "__main__":
migrate()

View File

@@ -4,19 +4,20 @@ import json
from .utils import DateTimeEncoder
def json_tables():
print('[migration] unpack dump/discours/*.bson to migration/data/*.json')
print("[migration] unpack dump/discours/*.bson to migration/data/*.json")
data = {
"content_items": [],
"content_item_categories": [],
"tags": [],
"email_subscriptions": [],
"users": [],
"comments": []
"comments": [],
}
for table in data.keys():
lc = []
with open('dump/discours/'+table+'.bson', 'rb') as f:
with open("dump/discours/" + table + ".bson", "rb") as f:
bs = f.read()
f.close()
base = 0
@@ -24,5 +25,6 @@ def json_tables():
base, d = bson.decode_document(bs, base)
lc.append(d)
data[table] = lc
open(os.getcwd() + '/migration/data/'+table+'.json', 'w').write(json.dumps(lc,cls=DateTimeEncoder))
open(os.getcwd() + "/migration/data/" + table + ".json", "w").write(
json.dumps(lc, cls=DateTimeEncoder)
)

View File

@@ -1,4 +1,3 @@
from datetime import datetime
import json
import os
@@ -6,100 +5,150 @@ import frontmatter
from .extract import extract_html, prepare_html_body
from .utils import DateTimeEncoder
OLD_DATE = '2016-03-05 22:22:00.350000'
EXPORT_DEST = '../discoursio-web/data/'
parentDir = '/'.join(os.getcwd().split('/')[:-1])
contentDir = parentDir + '/discoursio-web/content/'
OLD_DATE = "2016-03-05 22:22:00.350000"
EXPORT_DEST = "../discoursio-web/data/"
parentDir = "/".join(os.getcwd().split("/")[:-1])
contentDir = parentDir + "/discoursio-web/content/"
ts = datetime.now()
def get_metadata(r):
authors = []
for a in r['authors']:
authors.append({ # a short version for public listings
'slug': a.slug or 'discours',
'name': a.name or 'Дискурс',
'userpic': a.userpic or 'https://discours.io/static/img/discours.png'
})
for a in r["authors"]:
authors.append(
{ # a short version for public listings
"slug": a.slug or "discours",
"name": a.name or "Дискурс",
"userpic": a.userpic or "https://discours.io/static/img/discours.png",
}
)
metadata = {}
metadata['title'] = r.get('title', '').replace('{', '(').replace('}', ')')
metadata['authors'] = authors
metadata['createdAt'] = r.get('createdAt', ts)
metadata['layout'] = r['layout']
metadata['topics'] = [topic for topic in r['topics']]
metadata['topics'].sort()
if r.get('cover', False): metadata['cover'] = r.get('cover')
metadata["title"] = r.get("title", "").replace("{", "(").replace("}", ")")
metadata["authors"] = authors
metadata["createdAt"] = r.get("createdAt", ts)
metadata["layout"] = r["layout"]
metadata["topics"] = [topic for topic in r["topics"]]
metadata["topics"].sort()
if r.get("cover", False):
metadata["cover"] = r.get("cover")
return metadata
def export_mdx(r):
# print('[export] mdx %s' % r['slug'])
content = ''
content = ""
metadata = get_metadata(r)
content = frontmatter.dumps(frontmatter.Post(r['body'], **metadata))
ext = 'mdx'
filepath = contentDir + r['slug']
bc = bytes(content,'utf-8').decode('utf-8','ignore')
open(filepath + '.' + ext, 'w').write(bc)
content = frontmatter.dumps(frontmatter.Post(r["body"], **metadata))
ext = "mdx"
filepath = contentDir + r["slug"]
bc = bytes(content, "utf-8").decode("utf-8", "ignore")
open(filepath + "." + ext, "w").write(bc)
def export_body(shout, storage):
entry = storage['content_items']['by_oid'][shout['oid']]
entry = storage["content_items"]["by_oid"][shout["oid"]]
if entry:
shout['body'] = prepare_html_body(entry) # prepare_md_body(entry)
shout["body"] = prepare_html_body(entry) # prepare_md_body(entry)
export_mdx(shout)
print('[export] html for %s' % shout['slug'])
print("[export] html for %s" % shout["slug"])
body = extract_html(entry)
open(contentDir + shout['slug'] + '.html', 'w').write(body)
open(contentDir + shout["slug"] + ".html", "w").write(body)
else:
raise Exception('no content_items entry found')
raise Exception("no content_items entry found")
def export_slug(slug, storage):
shout = storage['shouts']['by_slug'][slug]
shout = storage['shouts']['by_slug'].get(slug)
assert shout, '[export] no shout found by slug: %s ' % slug
author = shout['authors'][0]
assert author, '[export] no author error'
shout = storage["shouts"]["by_slug"][slug]
shout = storage["shouts"]["by_slug"].get(slug)
assert shout, "[export] no shout found by slug: %s " % slug
author = shout["authors"][0]
assert author, "[export] no author error"
export_body(shout, storage)
def export_email_subscriptions():
email_subscriptions_data = json.loads(open('migration/data/email_subscriptions.json').read())
email_subscriptions_data = json.loads(
open("migration/data/email_subscriptions.json").read()
)
for data in email_subscriptions_data:
# migrate_email_subscription(data)
pass
print('[migration] ' + str(len(email_subscriptions_data)) + ' email subscriptions exported')
print(
"[migration] "
+ str(len(email_subscriptions_data))
+ " email subscriptions exported"
)
def export_shouts(storage):
# update what was just migrated or load json again
if len(storage['users']['by_slugs'].keys()) == 0:
storage['users']['by_slugs'] = json.loads(open(EXPORT_DEST + 'authors.json').read())
print('[migration] ' + str(len(storage['users']['by_slugs'].keys())) + ' exported authors ')
if len(storage['shouts']['by_slugs'].keys()) == 0:
storage['shouts']['by_slugs'] = json.loads(open(EXPORT_DEST + 'articles.json').read())
print('[migration] ' + str(len(storage['shouts']['by_slugs'].keys())) + ' exported articles ')
for slug in storage['shouts']['by_slugs'].keys(): export_slug(slug, storage)
if len(storage["users"]["by_slugs"].keys()) == 0:
storage["users"]["by_slugs"] = json.loads(
open(EXPORT_DEST + "authors.json").read()
)
print(
"[migration] "
+ str(len(storage["users"]["by_slugs"].keys()))
+ " exported authors "
)
if len(storage["shouts"]["by_slugs"].keys()) == 0:
storage["shouts"]["by_slugs"] = json.loads(
open(EXPORT_DEST + "articles.json").read()
)
print(
"[migration] "
+ str(len(storage["shouts"]["by_slugs"].keys()))
+ " exported articles "
)
for slug in storage["shouts"]["by_slugs"].keys():
export_slug(slug, storage)
def export_json(export_articles = {}, export_authors = {}, export_topics = {}, export_comments = {}):
open(EXPORT_DEST + 'authors.json', 'w').write(json.dumps(export_authors,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False))
print('[migration] ' + str(len(export_authors.items())) + ' authors exported')
open(EXPORT_DEST + 'topics.json', 'w').write(json.dumps(export_topics,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False))
print('[migration] ' + str(len(export_topics.keys())) + ' topics exported')
open(EXPORT_DEST + 'articles.json', 'w').write(json.dumps(export_articles,
def export_json(
export_articles={}, export_authors={}, export_topics={}, export_comments={}
):
open(EXPORT_DEST + "authors.json", "w").write(
json.dumps(
export_authors,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False))
print('[migration] ' + str(len(export_articles.items())) + ' articles exported')
open(EXPORT_DEST + 'comments.json', 'w').write(json.dumps(export_comments,
ensure_ascii=False,
)
)
print("[migration] " + str(len(export_authors.items())) + " authors exported")
open(EXPORT_DEST + "topics.json", "w").write(
json.dumps(
export_topics,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False))
print('[migration] ' + str(len(export_comments.items())) + ' exported articles with comments')
ensure_ascii=False,
)
)
print("[migration] " + str(len(export_topics.keys())) + " topics exported")
open(EXPORT_DEST + "articles.json", "w").write(
json.dumps(
export_articles,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False,
)
)
print("[migration] " + str(len(export_articles.items())) + " articles exported")
open(EXPORT_DEST + "comments.json", "w").write(
json.dumps(
export_comments,
cls=DateTimeEncoder,
indent=4,
sort_keys=True,
ensure_ascii=False,
)
)
print(
"[migration] "
+ str(len(export_comments.items()))
+ " exported articles with comments"
)

View File

@@ -3,53 +3,67 @@ import re
import base64
from .html2text import html2text
TOOLTIP_REGEX = r'(\/\/\/(.+)\/\/\/)'
contentDir = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'discoursio-web', 'content')
s3 = 'https://discours-io.s3.amazonaws.com/'
cdn = 'https://assets.discours.io'
TOOLTIP_REGEX = r"(\/\/\/(.+)\/\/\/)"
contentDir = os.path.join(
os.path.dirname(os.path.realpath(__file__)), "..", "..", "discoursio-web", "content"
)
s3 = "https://discours-io.s3.amazonaws.com/"
cdn = "https://assets.discours.io"
def replace_tooltips(body):
# change if you prefer regexp
newbody = body
matches = list(re.finditer(TOOLTIP_REGEX, body, re.IGNORECASE | re.MULTILINE))[1:]
for match in matches:
newbody = body.replace(match.group(1), '<Tooltip text="' + match.group(2) + '" />') # NOTE: doesn't work
newbody = body.replace(
match.group(1), '<Tooltip text="' + match.group(2) + '" />'
) # NOTE: doesn't work
if len(matches) > 0:
print('[extract] found %d tooltips' % len(matches))
print("[extract] found %d tooltips" % len(matches))
return newbody
def place_tooltips(body):
parts = body.split('&&&')
parts = body.split("&&&")
l = len(parts)
newparts = list(parts)
placed = False
if l & 1:
if l > 1:
i = 1
print('[extract] found %d tooltips' % (l-1))
print("[extract] found %d tooltips" % (l - 1))
for part in parts[1:]:
if i & 1:
placed = True
if 'a class="footnote-url" href=' in part:
print('[extract] footnote: ' + part)
print("[extract] footnote: " + part)
fn = 'a class="footnote-url" href="'
link = part.split(fn, 1)[1].split('"', 1)[0]
extracted_part = part.split(fn,1)[0] + ' ' + part.split('/', 1)[-1]
newparts[i] = '<Tooltip' + (' link="' + link + '" ' if link else '') + '>' + extracted_part + '</Tooltip>'
extracted_part = (
part.split(fn, 1)[0] + " " + part.split("/", 1)[-1]
)
newparts[i] = (
"<Tooltip"
+ (' link="' + link + '" ' if link else "")
+ ">"
+ extracted_part
+ "</Tooltip>"
)
else:
newparts[i] = '<Tooltip>%s</Tooltip>' % part
newparts[i] = "<Tooltip>%s</Tooltip>" % part
# print('[extract] ' + newparts[i])
else:
# print('[extract] ' + part[:10] + '..')
newparts[i] = part
i += 1
return (''.join(newparts), placed)
return ("".join(newparts), placed)
IMG_REGEX = r"\!\[(.*?)\]\((data\:image\/(png|jpeg|jpg);base64\,((?:[A-Za-z\d+\/]{4})*(?:[A-Za-z\d+\/]{3}=|[A-Za-z\d+\/]{2}==)))\)"
parentDir = '/'.join(os.getcwd().split('/')[:-1])
public = parentDir + '/discoursio-web/public'
parentDir = "/".join(os.getcwd().split("/")[:-1])
public = parentDir + "/discoursio-web/public"
cache = {}
@@ -58,30 +72,34 @@ def reextract_images(body, oid):
matches = list(re.finditer(IMG_REGEX, body, re.IGNORECASE | re.MULTILINE))[1:]
i = 0
for match in matches:
print('[extract] image ' + match.group(1))
print("[extract] image " + match.group(1))
ext = match.group(3)
name = oid + str(i)
link = public + '/upload/image-' + name + '.' + ext
link = public + "/upload/image-" + name + "." + ext
img = match.group(4)
title = match.group(1) # NOTE: this is not the title
if img not in cache:
content = base64.b64decode(img + '==')
print(str(len(img)) + ' image bytes been written')
open('../' + link, 'wb').write(content)
content = base64.b64decode(img + "==")
print(str(len(img)) + " image bytes been written")
open("../" + link, "wb").write(content)
cache[img] = name
i += 1
else:
print('[extract] image cached ' + cache[img])
body.replace(str(match), '![' + title + '](' + cdn + link + ')') # WARNING: this does not work
print("[extract] image cached " + cache[img])
body.replace(
str(match), "![" + title + "](" + cdn + link + ")"
) # WARNING: this does not work
return body
IMAGES = {
'data:image/png': 'png',
'data:image/jpg': 'jpg',
'data:image/jpeg': 'jpg',
"data:image/png": "png",
"data:image/jpg": "jpg",
"data:image/jpeg": "jpg",
}
b64 = ';base64,'
b64 = ";base64,"
def extract_imageparts(bodyparts, prefix):
# recursive loop
@@ -90,219 +108,272 @@ def extract_imageparts(bodyparts, prefix):
i = bodyparts.index(current)
for mime in IMAGES.keys():
if mime == current[-len(mime) :] and (i + 1 < len(bodyparts)):
print('[extract] ' + mime)
print("[extract] " + mime)
next = bodyparts[i + 1]
ext = IMAGES[mime]
b64end = next.index(')')
b64end = next.index(")")
b64encoded = next[:b64end]
name = prefix + '-' + str(len(cache))
link = '/upload/image-' + name + '.' + ext
print('[extract] name: ' + name)
print('[extract] link: ' + link)
print('[extract] %d bytes' % len(b64encoded))
name = prefix + "-" + str(len(cache))
link = "/upload/image-" + name + "." + ext
print("[extract] name: " + name)
print("[extract] link: " + link)
print("[extract] %d bytes" % len(b64encoded))
if b64encoded not in cache:
try:
content = base64.b64decode(b64encoded + '==')
open(public + link, 'wb').write(content)
print('[extract] ' +str(len(content)) + ' image bytes been written')
content = base64.b64decode(b64encoded + "==")
open(public + link, "wb").write(content)
print(
"[extract] "
+ str(len(content))
+ " image bytes been written"
)
cache[b64encoded] = name
except:
raise Exception
# raise Exception('[extract] error decoding image %r' %b64encoded)
else:
print('[extract] cached link ' + cache[b64encoded])
print("[extract] cached link " + cache[b64encoded])
name = cache[b64encoded]
link = cdn + '/upload/image-' + name + '.' + ext
newparts[i] = current[:-len(mime)] + current[-len(mime):] + link + next[-b64end:]
link = cdn + "/upload/image-" + name + "." + ext
newparts[i] = (
current[: -len(mime)]
+ current[-len(mime) :]
+ link
+ next[-b64end:]
)
newparts[i + 1] = next[:-b64end]
break
return extract_imageparts(newparts[i] + newparts[i+1] + b64.join(bodyparts[i+2:]), prefix) \
if len(bodyparts) > (i + 1) else ''.join(newparts)
return (
extract_imageparts(
newparts[i] + newparts[i + 1] + b64.join(bodyparts[i + 2 :]), prefix
)
if len(bodyparts) > (i + 1)
else "".join(newparts)
)
def extract_dataimages(parts, prefix):
newparts = list(parts)
for part in parts:
i = parts.index(part)
if part.endswith(']('):
if part.endswith("]("):
[ext, rest] = parts[i + 1].split(b64)
name = prefix + '-' + str(len(cache))
if ext == '/jpeg': ext = 'jpg'
else: ext = ext.replace('/', '')
link = '/upload/image-' + name + '.' + ext
print('[extract] filename: ' + link)
b64end = rest.find(')')
name = prefix + "-" + str(len(cache))
if ext == "/jpeg":
ext = "jpg"
else:
ext = ext.replace("/", "")
link = "/upload/image-" + name + "." + ext
print("[extract] filename: " + link)
b64end = rest.find(")")
if b64end != -1:
b64encoded = rest[:b64end]
print('[extract] %d text bytes' % len(b64encoded))
print("[extract] %d text bytes" % len(b64encoded))
# write if not cached
if b64encoded not in cache:
try:
content = base64.b64decode(b64encoded + '==')
open(public + link, 'wb').write(content)
print('[extract] ' +str(len(content)) + ' image bytes')
content = base64.b64decode(b64encoded + "==")
open(public + link, "wb").write(content)
print("[extract] " + str(len(content)) + " image bytes")
cache[b64encoded] = name
except:
raise Exception
# raise Exception('[extract] error decoding image %r' %b64encoded)
else:
print('[extract] 0 image bytes, cached for ' + cache[b64encoded])
print("[extract] 0 image bytes, cached for " + cache[b64encoded])
name = cache[b64encoded]
# update link with CDN
link = cdn + '/upload/image-' + name + '.' + ext
link = cdn + "/upload/image-" + name + "." + ext
# patch newparts
newparts[i + 1] = link + rest[b64end:]
else:
raise Exception('cannot find the end of base64 encoded string')
raise Exception("cannot find the end of base64 encoded string")
else:
print('[extract] dataimage skipping part ' + str(i))
print("[extract] dataimage skipping part " + str(i))
continue
return ''.join(newparts)
return "".join(newparts)
di = "data:image"
di = 'data:image'
def extract_md_images(body, oid):
newbody = ''
body = body\
.replace('\n! []('+di, '\n ![]('+di)\
.replace('\n[]('+di, '\n![]('+di)\
.replace(' []('+di, ' ![]('+di)
newbody = ""
body = (
body.replace("\n! [](" + di, "\n ![](" + di)
.replace("\n[](" + di, "\n![](" + di)
.replace(" [](" + di, " ![](" + di)
)
parts = body.split(di)
i = 0
if len(parts) > 1: newbody = extract_dataimages(parts, oid)
else: newbody = body
if len(parts) > 1:
newbody = extract_dataimages(parts, oid)
else:
newbody = body
return newbody
def cleanup(body):
newbody = body\
.replace('<', '').replace('>', '')\
.replace('{', '(').replace('}', ')')\
.replace('…', '...')\
.replace(' __ ', ' ')\
.replace('_ _', ' ')\
.replace('****', '')\
.replace('\u00a0', ' ')\
.replace('\u02c6', '^')\
.replace('\u00a0',' ')\
.replace('\ufeff', '')\
.replace('\u200b', '')\
.replace('\u200c', '')\
# .replace('\u2212', '-')
newbody = (
body.replace("<", "")
.replace(">", "")
.replace("{", "(")
.replace("}", ")")
.replace("", "...")
.replace(" __ ", " ")
.replace("_ _", " ")
.replace("****", "")
.replace("\u00a0", " ")
.replace("\u02c6", "^")
.replace("\u00a0", " ")
.replace("\ufeff", "")
.replace("\u200b", "")
.replace("\u200c", "")
) # .replace('\u2212', '-')
return newbody
def extract_md(body, oid):
newbody = body
if newbody:
newbody = extract_md_images(newbody, oid)
if not newbody: raise Exception('extract_images error')
if not newbody:
raise Exception("extract_images error")
newbody = cleanup(newbody)
if not newbody: raise Exception('cleanup error')
if not newbody:
raise Exception("cleanup error")
newbody, placed = place_tooltips(newbody)
if not newbody: raise Exception('place_tooltips error')
if not newbody:
raise Exception("place_tooltips error")
if placed:
newbody = 'import Tooltip from \'$/components/Article/Tooltip\'\n\n' + newbody
newbody = "import Tooltip from '$/components/Article/Tooltip'\n\n" + newbody
return newbody
def prepare_md_body(entry):
# body modifications
body = ''
kind = entry.get('type')
addon = ''
if kind == 'Video':
addon = ''
for m in entry.get('media', []):
if 'youtubeId' in m: addon += '<VideoPlayer youtubeId=\'' + m['youtubeId'] + '\' />\n'
elif 'vimeoId' in m: addon += '<VideoPlayer vimeoId=\'' + m['vimeoId'] + '\' />\n'
body = ""
kind = entry.get("type")
addon = ""
if kind == "Video":
addon = ""
for m in entry.get("media", []):
if "youtubeId" in m:
addon += "<VideoPlayer youtubeId='" + m["youtubeId"] + "' />\n"
elif "vimeoId" in m:
addon += "<VideoPlayer vimeoId='" + m["vimeoId"] + "' />\n"
else:
print('[extract] media is not supported')
print("[extract] media is not supported")
print(m)
body = 'import VideoPlayer from \'$/components/Article/VideoPlayer\'\n\n' + addon
body = "import VideoPlayer from '$/components/Article/VideoPlayer'\n\n" + addon
elif kind == 'Music':
addon = ''
for m in entry.get('media', []):
artist = m.get('performer')
trackname = ''
if artist: trackname += artist + ' - '
if 'title' in m: trackname += m.get('title','')
addon += '<MusicPlayer src=\"' + m.get('fileUrl','') + '\" title=\"' + trackname + '\" />\n'
body = 'import MusicPlayer from \'$/components/Article/MusicPlayer\'\n\n' + addon
elif kind == "Music":
addon = ""
for m in entry.get("media", []):
artist = m.get("performer")
trackname = ""
if artist:
trackname += artist + " - "
if "title" in m:
trackname += m.get("title", "")
addon += (
'<MusicPlayer src="'
+ m.get("fileUrl", "")
+ '" title="'
+ trackname
+ '" />\n'
)
body = "import MusicPlayer from '$/components/Article/MusicPlayer'\n\n" + addon
body_orig = extract_html(entry)
if body_orig: body += extract_md(html2text(body_orig), entry['_id'])
if not body: print('[extract] empty MDX body')
if body_orig:
body += extract_md(html2text(body_orig), entry["_id"])
if not body:
print("[extract] empty MDX body")
return body
def prepare_html_body(entry):
# body modifications
body = ''
kind = entry.get('type')
addon = ''
if kind == 'Video':
addon = ''
for m in entry.get('media', []):
if 'youtubeId' in m:
body = ""
kind = entry.get("type")
addon = ""
if kind == "Video":
addon = ""
for m in entry.get("media", []):
if "youtubeId" in m:
addon += '<iframe width="420" height="345" src="http://www.youtube.com/embed/'
addon += m['youtubeId']
addon += m["youtubeId"]
addon += '?autoplay=1" frameborder="0" allowfullscreen></iframe>\n'
elif 'vimeoId' in m:
elif "vimeoId" in m:
addon += '<iframe src="https://player.vimeo.com/video/'
addon += m['vimeoId']
addon += m["vimeoId"]
addon += ' width="420" height="345" frameborder="0" allow="autoplay; fullscreen" allowfullscreen></iframe>'
else:
print('[extract] media is not supported')
print("[extract] media is not supported")
print(m)
body += addon
elif kind == 'Music':
addon = ''
for m in entry.get('media', []):
artist = m.get('performer')
trackname = ''
if artist: trackname += artist + ' - '
if 'title' in m: trackname += m.get('title','')
addon += '<figure><figcaption>'
elif kind == "Music":
addon = ""
for m in entry.get("media", []):
artist = m.get("performer")
trackname = ""
if artist:
trackname += artist + " - "
if "title" in m:
trackname += m.get("title", "")
addon += "<figure><figcaption>"
addon += trackname
addon += '</figcaption><audio controls src="'
addon += m.get('fileUrl','')
addon += m.get("fileUrl", "")
addon += '"></audio></figure>'
body += addon
body = extract_html(entry)
# if body_orig: body += extract_md(html2text(body_orig), entry['_id'])
if not body: print('[extract] empty HTML body')
if not body:
print("[extract] empty HTML body")
return body
def extract_html(entry):
body_orig = entry.get('body') or ''
media = entry.get('media', [])
kind = entry.get('type') or ''
print('[extract] kind: ' + kind)
body_orig = entry.get("body") or ""
media = entry.get("media", [])
kind = entry.get("type") or ""
print("[extract] kind: " + kind)
mbodies = set([])
if media:
# print('[extract] media is found')
for m in media:
mbody = m.get('body', '')
addon = ''
if kind == 'Literature':
mbody = m.get('literatureBody') or m.get('body', '')
elif kind == 'Image':
cover = ''
if 'thumborId' in entry: cover = cdn + '/unsafe/1600x/' + entry['thumborId']
mbody = m.get("body", "")
addon = ""
if kind == "Literature":
mbody = m.get("literatureBody") or m.get("body", "")
elif kind == "Image":
cover = ""
if "thumborId" in entry:
cover = cdn + "/unsafe/1600x/" + entry["thumborId"]
if not cover:
if 'image' in entry: cover = entry['image'].get('url', '')
if 'cloudinary' in cover: cover = ''
if "image" in entry:
cover = entry["image"].get("url", "")
if "cloudinary" in cover:
cover = ""
# else: print('[extract] cover: ' + cover)
title = m.get('title','').replace('\n', ' ').replace('&nbsp;', ' ')
u = m.get('thumborId') or cover or ''
if title: addon += '<h4>' + title + '</h4>\n'
if not u.startswith('http'): u = s3 + u
if not u: print('[extract] no image url for ' + str(m))
if 'cloudinary' in u: u = 'img/lost.svg'
title = m.get("title", "").replace("\n", " ").replace("&nbsp;", " ")
u = m.get("thumborId") or cover or ""
if title:
addon += "<h4>" + title + "</h4>\n"
if not u.startswith("http"):
u = s3 + u
if not u:
print("[extract] no image url for " + str(m))
if "cloudinary" in u:
u = "img/lost.svg"
if u != cover or (u == cover and media.index(m) == 0):
addon += '<img src=\"' + u + '\" alt=\"'+ title +'\" />\n'
addon += '<img src="' + u + '" alt="' + title + '" />\n'
if addon:
body_orig += addon
# print('[extract] item addon: ' + addon)
@@ -311,14 +382,18 @@ def extract_html(entry):
mbodies.add(mbody)
body_orig += mbody
if len(list(mbodies)) != len(media):
print('[extract] %d/%d media item bodies appended' % (len(list(mbodies)),len(media)))
print(
"[extract] %d/%d media item bodies appended"
% (len(list(mbodies)), len(media))
)
# print('[extract] media items body: \n' + body_orig)
if not body_orig:
for up in entry.get('bodyHistory', []) or []:
body_orig = up.get('text', '') or ''
for up in entry.get("bodyHistory", []) or []:
body_orig = up.get("text", "") or ""
if body_orig:
print('[extract] got html body from history')
print("[extract] got html body from history")
break
if not body_orig: print('[extract] empty HTML body')
if not body_orig:
print("[extract] empty HTML body")
# body_html = str(BeautifulSoup(body_orig, features="html.parser"))
return body_orig

View File

@@ -351,46 +351,50 @@ class HTML2Text(html.parser.HTMLParser):
self.space = False
self.o(hn(tag) * "#" + " ")
self.o("[")
self.header_id = attrs.get('id')
self.header_id = attrs.get("id")
else:
self.p()
if start:
self.inheader = True
self.o(hn(tag) * "#" + " ")
if self.header_id:
self.o(' {#' + self.header_id + '}')
self.o(" {#" + self.header_id + "}")
self.header_id = None
else:
self.inheader = False
return # prevent redundant emphasis marks on headers
if 'class' in attrs:
self.current_class = attrs.get('class', '')
if "class" in attrs:
self.current_class = attrs.get("class", "")
# self.p()
if not start:
self.current_class = ''
self.current_class = ""
if tag == 'span':
if 'style' in attrs:
if attrs.get('style') == 'text-align: center':
self.current_class = 'center'
if tag == "span":
if "style" in attrs:
if attrs.get("style") == "text-align: center":
self.current_class = "center"
if not start:
self.current_class = ''
self.current_class = ""
if start:
if self.current_class == 'highlight' and \
self.inheader == False and \
self.span_lead == False and \
self.astack == False:
self.o('`') # NOTE: same as <code>
if (
self.current_class == "highlight"
and self.inheader == False
and self.span_lead == False
and self.astack == False
):
self.o("`") # NOTE: same as <code>
self.span_highlight = True
elif self.current_class == 'lead' and \
self.inheader == False and \
self.span_highlight == False:
elif (
self.current_class == "lead"
and self.inheader == False
and self.span_highlight == False
):
# self.o("==") # NOTE: CriticMarkup {==
self.span_lead = True
else:
if self.span_highlight:
self.o('`')
self.o("`")
self.span_highlight = False
elif self.span_lead:
# self.o('==')
@@ -469,8 +473,12 @@ class HTML2Text(html.parser.HTMLParser):
# without it, Markdown won't render the resulting *** correctly.
# (Don't add a space otherwise, though, since there isn't one in the
# original HTML.)
if not self.inheader and not self.astack \
and not self.span_lead and not self.span_highlight:
if (
not self.inheader
and not self.astack
and not self.span_lead
and not self.span_highlight
):
if (
start
and self.preceding_data
@@ -532,16 +540,21 @@ class HTML2Text(html.parser.HTMLParser):
if tag == "a" and not self.ignore_links:
if start:
if 'data-original-title' in attrs:
if "data-original-title" in attrs:
# WARNING: old discours specific code
self.o('&&&%s&&&' % attrs['data-original-title'])
self.o("&&&%s&&&" % attrs["data-original-title"])
else:
if (
"href" in attrs
and not attrs["href"].startswith('#_ftn')
and not attrs["href"].startswith("#_ftn")
and attrs["href"] is not None
and not (self.skip_internal_links and attrs["href"].startswith("#"))
and not (self.ignore_mailto_links and attrs["href"].startswith("mailto:"))
and not (
self.skip_internal_links and attrs["href"].startswith("#")
)
and not (
self.ignore_mailto_links
and attrs["href"].startswith("mailto:")
)
):
self.astack.append(attrs)
self.maybe_automatic_link = attrs["href"]
@@ -578,7 +591,7 @@ class HTML2Text(html.parser.HTMLParser):
if tag == "img" and start and not self.ignore_images:
# skip cloudinary images
if "src" in attrs and 'cloudinary' not in attrs['src']:
if "src" in attrs and "cloudinary" not in attrs["src"]:
assert attrs["src"] is not None
if not self.images_to_alt:
attrs["href"] = attrs["src"]
@@ -1030,8 +1043,10 @@ class HTML2Text(html.parser.HTMLParser):
return result
def html2text(html: str, baseurl: str = "", bodywidth: Optional[int] = config.BODY_WIDTH) -> str:
h = html.strip() or ''
def html2text(
html: str, baseurl: str = "", bodywidth: Optional[int] = config.BODY_WIDTH
) -> str:
h = html.strip() or ""
if h:
h = HTML2Text(baseurl=baseurl, bodywidth=bodywidth)
h = h.handle(html.strip())

View File

@@ -1 +1 @@
__all__ = ["users", "tags", "content_items", "comments"],
__all__ = (["users", "tags", "content_items", "comments"],)

View File

@@ -8,8 +8,9 @@ from services.stat.reacted import ReactedStorage
ts = datetime.now()
async def migrate(entry, storage):
'''
"""
{
"_id": "hdtwS8fSyFLxXCgSC",
"body": "<p>",
@@ -44,68 +45,91 @@ async def migrate(entry, storage):
old_id: String
old_thread: String
}
'''
"""
reaction_dict = {}
reaction_dict['createdAt'] = ts if not entry.get('createdAt') else date_parse(entry.get('createdAt'))
print('[migration] reaction original date %r' % entry.get('createdAt'))
reaction_dict["createdAt"] = (
ts if not entry.get("createdAt") else date_parse(entry.get("createdAt"))
)
print("[migration] reaction original date %r" % entry.get("createdAt"))
# print('[migration] comment date %r ' % comment_dict['createdAt'])
reaction_dict['body'] = html2text(entry.get('body', ''))
reaction_dict['oid'] = entry['_id']
if entry.get('createdAt'): reaction_dict['createdAt'] = date_parse(entry.get('createdAt'))
shout_oid = entry.get('contentItem')
if not shout_oid in storage['shouts']['by_oid']:
if len(storage['shouts']['by_oid']) > 0:
reaction_dict["body"] = html2text(entry.get("body", ""))
reaction_dict["oid"] = entry["_id"]
if entry.get("createdAt"):
reaction_dict["createdAt"] = date_parse(entry.get("createdAt"))
shout_oid = entry.get("contentItem")
if not shout_oid in storage["shouts"]["by_oid"]:
if len(storage["shouts"]["by_oid"]) > 0:
return shout_oid
else:
print('[migration] no shouts migrated yet')
print("[migration] no shouts migrated yet")
raise Exception
return
else:
with local_session() as session:
author = session.query(User).filter(User.oid == entry['createdBy']).first()
shout_dict = storage['shouts']['by_oid'][shout_oid]
author = session.query(User).filter(User.oid == entry["createdBy"]).first()
shout_dict = storage["shouts"]["by_oid"][shout_oid]
if shout_dict:
reaction_dict['shout'] = shout_dict['slug']
reaction_dict['createdBy'] = author.slug if author else 'discours'
reaction_dict['kind'] = ReactionKind.COMMENT
reaction_dict["shout"] = shout_dict["slug"]
reaction_dict["createdBy"] = author.slug if author else "discours"
reaction_dict["kind"] = ReactionKind.COMMENT
# creating reaction from old comment
day = (reaction_dict.get('createdAt') or ts).replace(hour=0, minute=0, second=0, microsecond=0)
day = (reaction_dict.get("createdAt") or ts).replace(
hour=0, minute=0, second=0, microsecond=0
)
reaction = Reaction.create(**reaction_dict)
await ReactedStorage.increment(reaction)
reaction_dict['id'] = reaction.id
for comment_rating_old in entry.get('ratings',[]):
rater = session.query(User).filter(User.oid == comment_rating_old['createdBy']).first()
reactedBy = rater if rater else session.query(User).filter(User.slug == 'noname').first()
reaction_dict["id"] = reaction.id
for comment_rating_old in entry.get("ratings", []):
rater = (
session.query(User)
.filter(User.oid == comment_rating_old["createdBy"])
.first()
)
reactedBy = (
rater
if rater
else session.query(User).filter(User.slug == "noname").first()
)
re_reaction_dict = {
'shout': reaction_dict['shout'],
'replyTo': reaction.id,
'kind': ReactionKind.LIKE if comment_rating_old['value'] > 0 else ReactionKind.DISLIKE,
'createdBy': reactedBy.slug if reactedBy else 'discours'
"shout": reaction_dict["shout"],
"replyTo": reaction.id,
"kind": ReactionKind.LIKE
if comment_rating_old["value"] > 0
else ReactionKind.DISLIKE,
"createdBy": reactedBy.slug if reactedBy else "discours",
}
cts = comment_rating_old.get('createdAt')
if cts: re_reaction_dict['createdAt'] = date_parse(cts)
cts = comment_rating_old.get("createdAt")
if cts:
re_reaction_dict["createdAt"] = date_parse(cts)
try:
# creating reaction from old rating
rr = Reaction.create(**re_reaction_dict)
await ReactedStorage.increment(rr)
except Exception as e:
print('[migration] comment rating error: %r' % re_reaction_dict)
print("[migration] comment rating error: %r" % re_reaction_dict)
raise e
else:
print('[migration] error: cannot find shout for comment %r' % reaction_dict)
print(
"[migration] error: cannot find shout for comment %r"
% reaction_dict
)
return reaction
def migrate_2stage(rr, old_new_id):
reply_oid = rr.get('replyTo')
if not reply_oid: return
new_id = old_new_id.get(rr.get('oid'))
if not new_id: return
reply_oid = rr.get("replyTo")
if not reply_oid:
return
new_id = old_new_id.get(rr.get("oid"))
if not new_id:
return
with local_session() as session:
comment = session.query(Reaction).filter(Reaction.id == new_id).first()
comment.replyTo = old_new_id.get(reply_oid)
comment.save()
session.commit()
if not rr['body']: raise Exception(rr)
if not rr["body"]:
raise Exception(rr)

View File

@ -10,156 +10,182 @@ from migration.extract import prepare_html_body
from orm.community import Community
from orm.reaction import Reaction, ReactionKind
OLD_DATE = '2016-03-05 22:22:00.350000'
OLD_DATE = "2016-03-05 22:22:00.350000"
ts = datetime.now()
type2layout = {
'Article': 'article',
'Literature': 'prose',
'Music': 'music',
'Video': 'video',
'Image': 'image'
"Article": "article",
"Literature": "prose",
"Music": "music",
"Video": "video",
"Image": "image",
}
def get_shout_slug(entry):
slug = entry.get('slug', '')
slug = entry.get("slug", "")
if not slug:
for friend in entry.get('friendlySlugs', []):
slug = friend.get('slug', '')
if slug: break
for friend in entry.get("friendlySlugs", []):
slug = friend.get("slug", "")
if slug:
break
return slug
async def migrate(entry, storage):
# init, set title and layout
r = {
'layout': type2layout[entry['type']],
'title': entry['title'],
'community': Community.default_community.id,
'authors': [],
'topics': set([]),
"layout": type2layout[entry["type"]],
"title": entry["title"],
"community": Community.default_community.id,
"authors": [],
"topics": set([]),
# 'rating': 0,
# 'ratings': [],
'createdAt': []
"createdAt": [],
}
topics_by_oid = storage['topics']['by_oid']
users_by_oid = storage['users']['by_oid']
topics_by_oid = storage["topics"]["by_oid"]
users_by_oid = storage["users"]["by_oid"]
# author
oid = entry.get('createdBy', entry.get('_id', entry.get('oid')))
oid = entry.get("createdBy", entry.get("_id", entry.get("oid")))
userdata = users_by_oid.get(oid)
if not userdata:
app = entry.get('application')
app = entry.get("application")
if app:
userslug = translit(app['name'], 'ru', reversed=True)\
.replace(' ', '-')\
.replace('\'', '')\
.replace('.', '-').lower()
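# derive a user slug by transliterating the application name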
userslug = (
translit(app["name"], "ru", reversed=True)
.replace(" ", "-")
.replace("'", "")
.replace(".", "-")
.lower()
)
userdata = {
'username': app['email'],
'email': app['email'],
'name': app['name'],
'bio': app.get('bio', ''),
'emailConfirmed': False,
'slug': userslug,
'createdAt': ts,
'wasOnlineAt': ts
"username": app["email"],
"email": app["email"],
"name": app["name"],
"bio": app.get("bio", ""),
"emailConfirmed": False,
"slug": userslug,
"createdAt": ts,
"wasOnlineAt": ts,
}
else:
userdata = User.default_user.dict()
assert userdata, 'no user found for %s from ' % [oid, len(users_by_oid.keys())]
r['authors'] = [userdata, ]
assert userdata, "no user found for %s from " % [oid, len(users_by_oid.keys())]
r["authors"] = [
userdata,
]
# slug
slug = get_shout_slug(entry)
if slug: r['slug'] = slug
else: raise Exception
if slug:
r["slug"] = slug
else:
raise Exception
# cover
c = ''
if entry.get('thumborId'):
c = 'https://assets.discours.io/unsafe/1600x/' + entry['thumborId']
c = ""
if entry.get("thumborId"):
c = "https://assets.discours.io/unsafe/1600x/" + entry["thumborId"]
else:
c = entry.get('image', {}).get('url')
if not c or 'cloudinary' in c: c = ''
r['cover'] = c
c = entry.get("image", {}).get("url")
if not c or "cloudinary" in c:
c = ""
r["cover"] = c
# timestamps
r['createdAt'] = date_parse(entry.get('createdAt', OLD_DATE))
r['updatedAt'] = date_parse(entry['updatedAt']) if 'updatedAt' in entry else ts
if entry.get('published'):
r['publishedAt'] = date_parse(entry.get('publishedAt', OLD_DATE))
if 'deletedAt' in entry: r['deletedAt'] = date_parse(entry['deletedAt'])
r["createdAt"] = date_parse(entry.get("createdAt", OLD_DATE))
r["updatedAt"] = date_parse(entry["updatedAt"]) if "updatedAt" in entry else ts
if entry.get("published"):
r["publishedAt"] = date_parse(entry.get("publishedAt", OLD_DATE))
if "deletedAt" in entry:
r["deletedAt"] = date_parse(entry["deletedAt"])
# topics
category = entry['category']
category = entry["category"]
mainTopic = topics_by_oid.get(category)
if mainTopic:
r['mainTopic'] = storage['replacements'].get(mainTopic["slug"], mainTopic["slug"])
topic_oids = [category, ]
topic_oids.extend(entry.get('tags', []))
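# map the main topic slug through the replacements table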
r["mainTopic"] = storage["replacements"].get(
mainTopic["slug"], mainTopic["slug"]
)
topic_oids = [
category,
]
topic_oids.extend(entry.get("tags", []))
for oid in topic_oids:
if oid in storage['topics']['by_oid']:
r['topics'].add(storage['topics']['by_oid'][oid]['slug'])
if oid in storage["topics"]["by_oid"]:
r["topics"].add(storage["topics"]["by_oid"][oid]["slug"])
else:
print('[migration] unknown old topic id: ' + oid)
r['topics'] = list(r['topics'])
print("[migration] unknown old topic id: " + oid)
r["topics"] = list(r["topics"])
entry['topics'] = r['topics']
entry['cover'] = r['cover']
entry['authors'] = r['authors']
entry["topics"] = r["topics"]
entry["cover"] = r["cover"]
entry["authors"] = r["authors"]
# body
r['body'] = prepare_html_body(entry)
r["body"] = prepare_html_body(entry)
# save shout to db
s = object()
shout_dict = r.copy()
user = None
del shout_dict['topics'] # NOTE: AttributeError: 'str' object has no attribute '_sa_instance_state'
del shout_dict[
"topics"
] # NOTE: AttributeError: 'str' object has no attribute '_sa_instance_state'
# del shout_dict['rating'] # NOTE: TypeError: 'rating' is an invalid keyword argument for Shout
# del shout_dict['ratings']
email = userdata.get('email')
slug = userdata.get('slug')
if not slug: raise Exception
email = userdata.get("email")
slug = userdata.get("slug")
if not slug:
raise Exception
with local_session() as session:
# c = session.query(Community).all().pop()
if email: user = session.query(User).filter(User.email == email).first()
if not user and slug: user = session.query(User).filter(User.slug == slug).first()
if email:
user = session.query(User).filter(User.email == email).first()
if not user and slug:
user = session.query(User).filter(User.slug == slug).first()
if not user and userdata:
try:
userdata['slug'] = userdata['slug'].lower().strip().replace(' ', '-')
userdata["slug"] = userdata["slug"].lower().strip().replace(" ", "-")
user = User.create(**userdata)
except sqlalchemy.exc.IntegrityError:
print('[migration] user error: ' + userdata)
userdata['id'] = user.id
userdata['createdAt'] = user.createdAt
storage['users']['by_slug'][userdata['slug']] = userdata
storage['users']['by_oid'][entry['_id']] = userdata
assert user, 'could not get a user'
shout_dict['authors'] = [ user, ]
print("[migration] user error: " + userdata)
userdata["id"] = user.id
userdata["createdAt"] = user.createdAt
storage["users"]["by_slug"][userdata["slug"]] = userdata
storage["users"]["by_oid"][entry["_id"]] = userdata
assert user, "could not get a user"
shout_dict["authors"] = [
user,
]
try:
s = Shout.create(**shout_dict)
except sqlalchemy.exc.IntegrityError as e:
with local_session() as session:
s = session.query(Shout).filter(Shout.slug == shout_dict['slug']).first()
s = session.query(Shout).filter(Shout.slug == shout_dict["slug"]).first()
bump = False
if s:
for key in shout_dict:
if key in s.__dict__:
if s.__dict__[key] != shout_dict[key]:
print('[migration] shout already exists, but differs in %s' % key)
print(
"[migration] shout already exists, but differs in %s"
% key
)
bump = True
else:
print('[migration] shout already exists, but lacks %s' % key)
print("[migration] shout already exists, but lacks %s" % key)
bump = True
if bump:
s.update(shout_dict)
else:
print('[migration] something went wrong with shout: \n%r' % shout_dict)
print("[migration] something went wrong with shout: \n%r" % shout_dict)
raise e
session.commit()
except Exception as e:
@ -167,67 +193,96 @@ async def migrate(entry, storage):
print(s)
raise Exception
# shout topics aftermath
shout_dict['topics'] = []
for tpc in r['topics']:
shout_dict["topics"] = []
for tpc in r["topics"]:
oldslug = tpc
newslug = storage['replacements'].get(oldslug, oldslug)
newslug = storage["replacements"].get(oldslug, oldslug)
if newslug:
with local_session() as session:
shout_topic_old = session.query(ShoutTopic)\
.filter(ShoutTopic.shout == shout_dict['slug'])\
.filter(ShoutTopic.topic == oldslug).first()
shout_topic_old = (
session.query(ShoutTopic)
.filter(ShoutTopic.shout == shout_dict["slug"])
.filter(ShoutTopic.topic == oldslug)
.first()
)
if shout_topic_old:
shout_topic_old.update({ 'slug': newslug })
shout_topic_old.update({"slug": newslug})
else:
shout_topic_new = session.query(ShoutTopic)\
.filter(ShoutTopic.shout == shout_dict['slug'])\
.filter(ShoutTopic.topic == newslug).first()
shout_topic_new = (
session.query(ShoutTopic)
.filter(ShoutTopic.shout == shout_dict["slug"])
.filter(ShoutTopic.topic == newslug)
.first()
)
if not shout_topic_new:
try: ShoutTopic.create(**{ 'shout': shout_dict['slug'], 'topic': newslug })
except: print('[migration] shout topic error: ' + newslug)
try:
ShoutTopic.create(
**{"shout": shout_dict["slug"], "topic": newslug}
)
except Exception:
print("[migration] shout topic error: " + newslug)
session.commit()
if newslug not in shout_dict['topics']:
shout_dict['topics'].append(newslug)
if newslug not in shout_dict["topics"]:
shout_dict["topics"].append(newslug)
else:
print('[migration] ignored topic slug: \n%r' % tpc['slug'])
print("[migration] ignored topic slug: \n%r" % tpc["slug"])
# raise Exception
# content_item ratings to reactions
try:
for content_rating in entry.get('ratings',[]):
for content_rating in entry.get("ratings", []):
with local_session() as session:
rater = session.query(User).filter(User.oid == content_rating['createdBy']).first()
reactedBy = rater if rater else session.query(User).filter(User.slug == 'noname').first()
rater = (
session.query(User)
.filter(User.oid == content_rating["createdBy"])
.first()
)
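# use the "noname" account if the rater was not found among migrated users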
reactedBy = (
rater
if rater
else session.query(User).filter(User.slug == "noname").first()
)
if rater:
reaction_dict = {
'kind': ReactionKind.LIKE if content_rating['value'] > 0 else ReactionKind.DISLIKE,
'createdBy': reactedBy.slug,
'shout': shout_dict['slug']
"kind": ReactionKind.LIKE
if content_rating["value"] > 0
else ReactionKind.DISLIKE,
"createdBy": reactedBy.slug,
"shout": shout_dict["slug"],
}
cts = content_rating.get('createdAt')
if cts: reaction_dict['createdAt'] = date_parse(cts)
reaction = session.query(Reaction).\
filter(Reaction.shout == reaction_dict['shout']).\
filter(Reaction.createdBy == reaction_dict['createdBy']).\
filter(Reaction.kind == reaction_dict['kind']).first()
cts = content_rating.get("createdAt")
if cts:
reaction_dict["createdAt"] = date_parse(cts)
reaction = (
session.query(Reaction)
.filter(Reaction.shout == reaction_dict["shout"])
.filter(Reaction.createdBy == reaction_dict["createdBy"])
.filter(Reaction.kind == reaction_dict["kind"])
.first()
)
if reaction:
reaction_dict['kind'] = ReactionKind.AGREE if content_rating['value'] > 0 else ReactionKind.DISAGREE,
reaction_dict["kind"] = (
ReactionKind.AGREE
if content_rating["value"] > 0
else ReactionKind.DISAGREE,
)
reaction.update(reaction_dict)
else:
day = (reaction_dict.get('createdAt') or ts).replace(hour=0, minute=0, second=0, microsecond=0)
day = (reaction_dict.get("createdAt") or ts).replace(
hour=0, minute=0, second=0, microsecond=0
)
rea = Reaction.create(**reaction_dict)
await ReactedStorage.increment(rea)
# shout_dict['ratings'].append(reaction_dict)
except Exception:
print('[migration] content_item.ratings error: \n%r' % content_rating)
print("[migration] content_item.ratings error: \n%r" % content_rating)
raise Exception
# shout views
ViewedByDay.create( shout = shout_dict['slug'], value = entry.get('views', 1) )
ViewedByDay.create(shout=shout_dict["slug"], value=entry.get("views", 1))
# del shout_dict['ratings']
shout_dict['oid'] = entry.get('_id')
storage['shouts']['by_oid'][entry['_id']] = shout_dict
storage['shouts']['by_slug'][slug] = shout_dict
shout_dict["oid"] = entry.get("_id")
storage["shouts"]["by_oid"][entry["_id"]] = shout_dict
storage["shouts"]["by_slug"][slug] = shout_dict
return shout_dict

View File

@ -4,103 +4,143 @@ from orm import User, UserRating
from dateutil.parser import parse
from base.orm import local_session
def migrate(entry):
if 'subscribedTo' in entry: del entry['subscribedTo']
email = entry['emails'][0]['address']
if "subscribedTo" in entry:
del entry["subscribedTo"]
email = entry["emails"][0]["address"]
user_dict = {
'oid': entry['_id'],
'roles': [],
'ratings': [],
'username': email,
'email': email,
'password': entry['services']['password'].get('bcrypt', ''),
'createdAt': parse(entry['createdAt']),
'emailConfirmed': bool(entry['emails'][0]['verified']),
'muted': False, # amnesty
'bio': entry['profile'].get('bio', ''),
'notifications': [],
'createdAt': parse(entry['createdAt']),
'roles': [], # entry['roles'] # roles by community
'ratings': [], # entry['ratings']
'links': [],
'name': 'anonymous'
"oid": entry["_id"],
"roles": [],
"ratings": [],
"username": email,
"email": email,
"password": entry["services"]["password"].get("bcrypt", ""),
"createdAt": parse(entry["createdAt"]),
"emailConfirmed": bool(entry["emails"][0]["verified"]),
"muted": False, # amnesty
"bio": entry["profile"].get("bio", ""),
"notifications": [],
"createdAt": parse(entry["createdAt"]),
"roles": [], # entry['roles'] # roles by community
"ratings": [], # entry['ratings']
"links": [],
"name": "anonymous",
}
if 'updatedAt' in entry: user_dict['updatedAt'] = parse(entry['updatedAt'])
if 'wasOnineAt' in entry: user_dict['wasOnlineAt'] = parse(entry['wasOnlineAt'])
if entry.get('profile'):
if "updatedAt" in entry:
user_dict["updatedAt"] = parse(entry["updatedAt"])
if "wasOnineAt" in entry:
user_dict["wasOnlineAt"] = parse(entry["wasOnlineAt"])
if entry.get("profile"):
# slug
user_dict['slug'] = entry['profile'].get('path').lower().replace(' ', '-').strip()
user_dict['bio'] = html2text(entry.get('profile').get('bio') or '')
user_dict["slug"] = (
entry["profile"].get("path").lower().replace(" ", "-").strip()
)
user_dict["bio"] = html2text(entry.get("profile").get("bio") or "")
# userpic
try: user_dict['userpic'] = 'https://assets.discours.io/unsafe/100x/' + entry['profile']['thumborId']
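# prefer the Thumbor-proxied userpic and fall back to the raw profile image URL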
try:
user_dict["userpic"] = (
"https://assets.discours.io/unsafe/100x/"
+ entry["profile"]["thumborId"]
)
except KeyError:
try: user_dict['userpic'] = entry['profile']['image']['url']
except KeyError: user_dict['userpic'] = ''
try:
user_dict["userpic"] = entry["profile"]["image"]["url"]
except KeyError:
user_dict["userpic"] = ""
# name
fn = entry['profile'].get('firstName', '')
ln = entry['profile'].get('lastName', '')
name = user_dict['slug'] if user_dict['slug'] else 'anonymous'
fn = entry["profile"].get("firstName", "")
ln = entry["profile"].get("lastName", "")
name = user_dict["slug"] if user_dict["slug"] else "anonymous"
name = fn if fn else name
name = (name + ' ' + ln) if ln else name
name = entry['profile']['path'].lower().strip().replace(' ', '-') if len(name) < 2 else name
user_dict['name'] = name
name = (name + " " + ln) if ln else name
name = (
entry["profile"]["path"].lower().strip().replace(" ", "-")
if len(name) < 2
else name
)
user_dict["name"] = name
# links
fb = entry['profile'].get('facebook', False)
if fb: user_dict['links'].append(fb)
vk = entry['profile'].get('vkontakte', False)
if vk: user_dict['links'].append(vk)
tr = entry['profile'].get('twitter', False)
if tr: user_dict['links'].append(tr)
ws = entry['profile'].get('website', False)
if ws: user_dict['links'].append(ws)
fb = entry["profile"].get("facebook", False)
if fb:
user_dict["links"].append(fb)
vk = entry["profile"].get("vkontakte", False)
if vk:
user_dict["links"].append(vk)
tr = entry["profile"].get("twitter", False)
if tr:
user_dict["links"].append(tr)
ws = entry["profile"].get("website", False)
if ws:
user_dict["links"].append(ws)
# some checks
if not user_dict['slug'] and len(user_dict['links']) > 0:
user_dict['slug'] = user_dict['links'][0].split('/')[-1]
if not user_dict["slug"] and len(user_dict["links"]) > 0:
user_dict["slug"] = user_dict["links"][0].split("/")[-1]
user_dict['slug'] = user_dict.get('slug', user_dict['email'].split('@')[0])
oid = user_dict['oid']
user_dict['slug'] = user_dict['slug'].lower().strip().replace(' ', '-')
try: user = User.create(**user_dict.copy())
user_dict["slug"] = user_dict.get("slug", user_dict["email"].split("@")[0])
oid = user_dict["oid"]
user_dict["slug"] = user_dict["slug"].lower().strip().replace(" ", "-")
try:
user = User.create(**user_dict.copy())
except sqlalchemy.exc.IntegrityError:
print('[migration] cannot create user ' + user_dict['slug'])
print("[migration] cannot create user " + user_dict["slug"])
with local_session() as session:
old_user = session.query(User).filter(User.slug == user_dict['slug']).first()
old_user = (
session.query(User).filter(User.slug == user_dict["slug"]).first()
)
old_user.oid = oid
user = old_user
if not user:
print('[migration] ERROR: cannot find user ' + user_dict['slug'])
print("[migration] ERROR: cannot find user " + user_dict["slug"])
raise Exception
user_dict['id'] = user.id
user_dict["id"] = user.id
return user_dict
def migrate_2stage(entry, id_map):
ce = 0
for rating_entry in entry.get('ratings',[]):
rater_oid = rating_entry['createdBy']
for rating_entry in entry.get("ratings", []):
rater_oid = rating_entry["createdBy"]
rater_slug = id_map.get(rater_oid)
if not rater_slug:
ce += 1
# print(rating_entry)
continue
oid = entry['_id']
oid = entry["_id"]
author_slug = id_map.get(oid)
user_rating_dict = {
'value': rating_entry['value'],
'rater': rater_slug,
'user': author_slug
"value": rating_entry["value"],
"rater": rater_slug,
"user": author_slug,
}
with local_session() as session:
try:
user_rating = UserRating.create(**user_rating_dict)
except sqlalchemy.exc.IntegrityError:
old_rating = session.query(UserRating).filter(UserRating.rater == rater_slug).first()
print('[migration] cannot create ' + author_slug + '`s rate from ' + rater_slug)
print('[migration] concat rating value %d+%d=%d' % (old_rating.value, rating_entry['value'], old_rating.value + rating_entry['value']))
old_rating.update({ 'value': old_rating.value + rating_entry['value'] })
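# the rater already rated this user: merge the values instead of inserting a duplicate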
old_rating = (
session.query(UserRating)
.filter(UserRating.rater == rater_slug)
.first()
)
print(
"[migration] cannot create "
+ author_slug
+ "`s rate from "
+ rater_slug
)
print(
"[migration] concat rating value %d+%d=%d"
% (
old_rating.value,
rating_entry["value"],
old_rating.value + rating_entry["value"],
)
)
old_rating.update({"value": old_rating.value + rating_entry["value"]})
session.commit()
except Exception as e:
print(e)

View File

@ -1,9 +1,10 @@
from datetime import datetime
from json import JSONEncoder
class DateTimeEncoder(JSONEncoder):
def default(self, z):
if isinstance(z, datetime):
return (str(z))
return str(z)
else:
return super().default(z)

View File

@ -12,9 +12,19 @@ from services.auth.users import UserStorage
from services.stat.viewed import ViewedStorage
from base.orm import Base, engine, local_session
__all__ = ["User", "Role", "Operation", "Permission", \
"Community", "Shout", "Topic", "TopicFollower", \
"Notification", "Reaction", "UserRating"]
__all__ = [
"User",
"Role",
"Operation",
"Permission",
"Community",
"Shout",
"Topic",
"TopicFollower",
"Notification",
"Reaction",
"UserRating",
]
Base.metadata.create_all(engine)
Operation.init_table()

View File

@ -2,21 +2,22 @@ from datetime import datetime
from sqlalchemy import Boolean, Column, String, ForeignKey, DateTime
from base.orm import Base
class CollabAuthor(Base):
__tablename__ = 'collab_author'
id = None
collab = Column(ForeignKey('collab.id'), primary_key = True)
author = Column(ForeignKey('user.slug'), primary_key = True)
class CollabAuthor(Base):
__tablename__ = "collab_author"
id = None # type: ignore
collab = Column(ForeignKey("collab.id"), primary_key=True)
author = Column(ForeignKey("user.slug"), primary_key=True)
accepted = Column(Boolean, default=False)
class Collab(Base):
__tablename__ = 'collab'
__tablename__ = "collab"
authors = Column()
title: str = Column(String, nullable=True, comment="Title")
body: str = Column(String, nullable=True, comment="Body")
pic: str = Column(String, nullable=True, comment="Picture")
createdAt: datetime = Column(DateTime, default=datetime.now, comment="Created At")
createdBy: str = Column(ForeignKey('user.id'), comment="Created By")
title = Column(String, nullable=True, comment="Title")
body = Column(String, nullable=True, comment="Body")
pic = Column(String, nullable=True, comment="Picture")
createdAt = Column(DateTime, default=datetime.now, comment="Created At")
createdBy = Column(ForeignKey("user.id"), comment="Created By")

View File

@ -1,22 +1,23 @@
from datetime import datetime
from sqlalchemy import Column, String, ForeignKey, DateTime, JSON as JSONType
from sqlalchemy import Column, String, ForeignKey, DateTime
from base.orm import Base
class ShoutCollection(Base):
__tablename__ = 'shout_collection'
id = None
shout = Column(ForeignKey('shout.slug'), primary_key = True)
collection = Column(ForeignKey('collection.slug'), primary_key = True)
class ShoutCollection(Base):
__tablename__ = "shout_collection"
id = None # type: ignore
shout = Column(ForeignKey("shout.slug"), primary_key=True)
collection = Column(ForeignKey("collection.slug"), primary_key=True)
class Collection(Base):
__tablename__ = 'collection'
id = None
slug: str = Column(String, primary_key = True)
title: str = Column(String, nullable=False, comment="Title")
body: str = Column(String, nullable=True, comment="Body")
pic: str = Column(String, nullable=True, comment="Picture")
createdAt: datetime = Column(DateTime, default=datetime.now, comment="Created At")
createdBy: str = Column(ForeignKey('user.id'), comment="Created By")
__tablename__ = "collection"
id = None # type: ignore
slug = Column(String, primary_key=True)
title = Column(String, nullable=False, comment="Title")
body = Column(String, nullable=True, comment="Body")
pic = Column(String, nullable=True, comment="Picture")
createdAt = Column(DateTime, default=datetime.now, comment="Created At")
createdBy = Column(ForeignKey("user.id"), comment="Created By")

View File

@ -2,34 +2,39 @@ from datetime import datetime
from sqlalchemy import Column, String, ForeignKey, DateTime
from base.orm import Base, local_session
class CommunityFollower(Base):
__tablename__ = 'community_followers'
id = None
follower = Column(ForeignKey('user.slug'), primary_key = True)
community = Column(ForeignKey('community.slug'), primary_key = True)
createdAt: str = Column(DateTime, nullable=False, default = datetime.now, comment="Created at")
class CommunityFollower(Base):
__tablename__ = "community_followers"
id = None # type: ignore
follower = Column(ForeignKey("user.slug"), primary_key=True)
community = Column(ForeignKey("community.slug"), primary_key=True)
createdAt = Column(
DateTime, nullable=False, default=datetime.now, comment="Created at"
)
class Community(Base):
__tablename__ = 'community'
__tablename__ = "community"
name: str = Column(String, nullable=False, comment="Name")
slug: str = Column(String, nullable = False, unique=True, comment="Slug")
desc: str = Column(String, nullable=False, default='')
pic: str = Column(String, nullable=False, default='')
createdAt: str = Column(DateTime, nullable=False, default = datetime.now, comment="Created at")
createdBy: str = Column(ForeignKey("user.slug"), nullable=False, comment="Author")
name = Column(String, nullable=False, comment="Name")
slug = Column(String, nullable=False, unique=True, comment="Slug")
desc = Column(String, nullable=False, default="")
pic = Column(String, nullable=False, default="")
createdAt = Column(
DateTime, nullable=False, default=datetime.now, comment="Created at"
)
createdBy = Column(ForeignKey("user.slug"), nullable=False, comment="Author")
@staticmethod
def init_table():
with local_session() as session:
default = session.query(Community).filter(Community.slug == "discours").first()
default = (
session.query(Community).filter(Community.slug == "discours").first()
)
if not default:
default = Community.create(
name = "Дискурс",
slug = "discours",
createdBy = "discours"
name="Дискурс", slug="discours", createdBy="discours"
)
Community.default_community = default

View File

@ -1,11 +1,12 @@
from sqlalchemy import Column, String, JSON as JSONType
from base.orm import Base
class Notification(Base):
__tablename__ = 'notification'
kind: str = Column(String, unique = True, primary_key = True)
template: str = Column(String, nullable = False)
variables: JSONType = Column(JSONType, nullable = True) # [ <var1>, .. ]
class Notification(Base):
__tablename__ = "notification"
kind = Column(String, unique=True, primary_key=True)
template = Column(String, nullable=False)
variables = Column(JSONType, nullable=True) # [ <var1>, .. ]
# looks like frontend code

View File

@ -24,12 +24,17 @@ class ClassType(TypeDecorator):
warnings.warn(f"Can't find class <{value}>,find it yourself!", stacklevel=2)
return class_
class Role(Base):
__tablename__ = 'role'
name: str = Column(String, nullable=False, comment="Role Name")
desc: str = Column(String, nullable=True, comment="Role Description")
community: int = Column(ForeignKey("community.id", ondelete="CASCADE"), nullable=False, comment="Community")
class Role(Base):
__tablename__ = "role"
name = Column(String, nullable=False, comment="Role Name")
desc = Column(String, nullable=True, comment="Role Description")
community = Column(
ForeignKey("community.id", ondelete="CASCADE"),
nullable=False,
comment="Community",
)
permissions = relationship(lambda: Permission)
@staticmethod
@ -43,14 +48,15 @@ class Role(Base):
default = Role.create(
name="author",
desc="Role for author",
community = Community.default_community.id
community=Community.default_community.id,
)
Role.default_role = default
class Operation(Base):
__tablename__ = 'operation'
name: str = Column(String, nullable=False, unique=True, comment="Operation Name")
__tablename__ = "operation"
name = Column(String, nullable=False, unique=True, comment="Operation Name")
@staticmethod
def init_table():
@ -58,13 +64,15 @@ class Operation(Base):
edit_op = session.query(Operation).filter(Operation.name == "edit").first()
if not edit_op:
edit_op = Operation.create(name="edit")
Operation.edit_id = edit_op.id
Operation.edit_id = edit_op.id # type: ignore
class Resource(Base):
__tablename__ = "resource"
resource_class: str = Column(String, nullable=False, unique=True, comment="Resource class")
name: str = Column(String, nullable=False, unique=True, comment="Resource name")
resource_class = Column(
String, nullable=False, unique=True, comment="Resource class"
)
name = Column(String, nullable=False, unique=True, comment="Resource name")
@staticmethod
def init_table():
@ -72,26 +80,39 @@ class Resource(Base):
shout_res = session.query(Resource).filter(Resource.name == "shout").first()
if not shout_res:
shout_res = Resource.create(name="shout", resource_class="shout")
Resource.shout_id = shout_res.id
Resource.shout_id = shout_res.id # type: ignore
class Permission(Base):
__tablename__ = "permission"
__table_args__ = (UniqueConstraint("role_id", "operation_id", "resource_id"), {"extend_existing": True})
__table_args__ = (
UniqueConstraint("role_id", "operation_id", "resource_id"),
{"extend_existing": True},
)
role_id: int = Column(ForeignKey("role.id", ondelete="CASCADE"), nullable=False, comment="Role")
operation_id: int = Column(ForeignKey("operation.id", ondelete="CASCADE"), nullable=False, comment="Operation")
resource_id: int = Column(ForeignKey("resource.id", ondelete="CASCADE"), nullable=False, comment="Resource")
role_id = Column(
ForeignKey("role.id", ondelete="CASCADE"), nullable=False, comment="Role"
)
operation_id = Column(
ForeignKey("operation.id", ondelete="CASCADE"),
nullable=False,
comment="Operation",
)
resource_id = Column(
ForeignKey("resource.id", ondelete="CASCADE"),
nullable=False,
comment="Resource",
)
if __name__ == '__main__':
if __name__ == "__main__":
Base.metadata.create_all(engine)
ops = [
Permission(role_id=1, operation_id=1, resource_id=1),
Permission(role_id=1, operation_id=2, resource_id=1),
Permission(role_id=1, operation_id=3, resource_id=1),
Permission(role_id=1, operation_id=4, resource_id=1),
Permission(role_id=2, operation_id=4, resource_id=1)
Permission(role_id=2, operation_id=4, resource_id=1),
]
global_session.add_all(ops)
global_session.commit()

View File

@ -5,20 +5,27 @@ from sqlalchemy import Enum
from services.stat.reacted import ReactedStorage, ReactionKind
from services.stat.viewed import ViewedStorage
class Reaction(Base):
__tablename__ = 'reaction'
body: str = Column(String, nullable=True, comment="Reaction Body")
createdAt = Column(DateTime, nullable=False, default = datetime.now, comment="Created at")
createdBy: str = Column(ForeignKey("user.slug"), nullable=False, comment="Sender")
__tablename__ = "reaction"
body = Column(String, nullable=True, comment="Reaction Body")
createdAt = Column(
DateTime, nullable=False, default=datetime.now, comment="Created at"
)
createdBy = Column(ForeignKey("user.slug"), nullable=False, comment="Sender")
updatedAt = Column(DateTime, nullable=True, comment="Updated at")
updatedBy = Column(ForeignKey("user.slug"), nullable=True, comment="Last Editor")
deletedAt = Column(DateTime, nullable=True, comment="Deleted at")
deletedBy = Column(ForeignKey("user.slug"), nullable=True, comment="Deleted by")
shout = Column(ForeignKey("shout.slug"), nullable=False)
replyTo: int = Column(ForeignKey("reaction.id"), nullable=True, comment="Reply to reaction ID")
range: str = Column(String, nullable=True, comment="Range in format <start index>:<end>")
kind: int = Column(Enum(ReactionKind), nullable=False, comment="Reaction kind")
oid: str = Column(String, nullable=True, comment="Old ID")
replyTo = Column(
ForeignKey("reaction.id"), nullable=True, comment="Reply to reaction ID"
)
range = Column(
String, nullable=True, comment="Range in format <start index>:<end>"
)
kind = Column(Enum(ReactionKind), nullable=False, comment="Reaction kind")
oid = Column(String, nullable=True, comment="Old ID")
@property
async def stat(self):
@ -27,5 +34,5 @@ class Reaction(Base):
"reacted": len(await ReactedStorage.get_reaction(self.id)),
# TODO: "replied"
"rating": await ReactedStorage.get_reaction_rating(self.id),
"commented": len(await ReactedStorage.get_reaction_comments(self.id))
"commented": len(await ReactedStorage.get_reaction_comments(self.id)),
}

View File

@ -4,7 +4,7 @@ from sqlalchemy.orm import relationship
from orm.user import User
from orm.topic import Topic, ShoutTopic
from orm.reaction import Reaction
from services.stat.reacted import ReactedStorage, ReactionKind
from services.stat.reacted import ReactedStorage
from services.stat.viewed import ViewedStorage
from base.orm import Base
@ -12,54 +12,56 @@ from base.orm import Base
class ShoutReactionsFollower(Base):
__tablename__ = "shout_reactions_followers"
id = None
follower = Column(ForeignKey('user.slug'), primary_key = True)
shout = Column(ForeignKey('shout.slug'), primary_key = True)
id = None # type: ignore
follower = Column(ForeignKey("user.slug"), primary_key=True)
shout = Column(ForeignKey("shout.slug"), primary_key=True)
auto = Column(Boolean, nullable=False, default=False)
createdAt: str = Column(DateTime, nullable=False, default = datetime.now, comment="Created at")
deletedAt: str = Column(DateTime, nullable=True)
createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
deletedAt = Column(DateTime, nullable=True)
class ShoutAuthor(Base):
__tablename__ = "shout_author"
id = None
shout = Column(ForeignKey('shout.slug'), primary_key = True)
user = Column(ForeignKey('user.slug'), primary_key = True)
caption: str = Column(String, nullable = True, default = "")
id = None # type: ignore
shout = Column(ForeignKey("shout.slug"), primary_key=True)
user = Column(ForeignKey("user.slug"), primary_key=True)
caption = Column(String, nullable=True, default="")
class ShoutAllowed(Base):
__tablename__ = "shout_allowed"
id = None
shout = Column(ForeignKey('shout.slug'), primary_key = True)
user = Column(ForeignKey('user.id'), primary_key = True)
id = None # type: ignore
shout = Column(ForeignKey("shout.slug"), primary_key=True)
user = Column(ForeignKey("user.id"), primary_key=True)
class Shout(Base):
__tablename__ = 'shout'
__tablename__ = "shout"
id = None
slug: str = Column(String, primary_key=True)
community: str = Column(Integer, ForeignKey("community.id"), nullable=False, comment="Community")
body: str = Column(String, nullable=False, comment="Body")
createdAt: str = Column(DateTime, nullable=False, default = datetime.now, comment="Created at")
updatedAt: str = Column(DateTime, nullable=True, comment="Updated at")
replyTo: int = Column(ForeignKey("shout.slug"), nullable=True)
versionOf: int = Column(ForeignKey("shout.slug"), nullable=True)
tags: str = Column(String, nullable=True)
publishedBy: int = Column(ForeignKey("user.id"), nullable=True)
publishedAt: str = Column(DateTime, nullable=True)
cover: str = Column(String, nullable = True)
title: str = Column(String, nullable = True)
subtitle: str = Column(String, nullable = True)
layout: str = Column(String, nullable = True)
id = None # type: ignore
slug = Column(String, primary_key=True)
community = Column(Integer, ForeignKey("community.id"), nullable=False, comment="Community")
body = Column(String, nullable=False, comment="Body")
createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
updatedAt = Column(DateTime, nullable=True, comment="Updated at")
replyTo = Column(ForeignKey("shout.slug"), nullable=True)
versionOf = Column(ForeignKey("shout.slug"), nullable=True)
tags = Column(String, nullable=True)
publishedBy = Column(ForeignKey("user.id"), nullable=True)
publishedAt = Column(DateTime, nullable=True)
cover = Column(String, nullable=True)
title = Column(String, nullable=True)
subtitle = Column(String, nullable=True)
layout = Column(String, nullable=True)
reactions = relationship(lambda: Reaction)
authors = relationship(lambda: User, secondary=ShoutAuthor.__tablename__)
topics = relationship(lambda: Topic, secondary=ShoutTopic.__tablename__)
mainTopic = Column(ForeignKey("topic.slug"), nullable=True)
visibleFor = relationship(lambda: User, secondary=ShoutAllowed.__tablename__)
draft: bool = Column(Boolean, default=True)
oid: str = Column(String, nullable=True)
draft = Column(Boolean, default=True)
oid = Column(String, nullable=True)
@property
async def stat(self):
@ -67,5 +69,5 @@ class Shout(Base):
"viewed": await ViewedStorage.get_shout(self.slug),
"reacted": len(await ReactedStorage.get_shout(self.slug)),
"commented": len(await ReactedStorage.get_comments(self.slug)),
"rating": await ReactedStorage.get_rating(self.slug)
"rating": await ReactedStorage.get_rating(self.slug),
}

View File

@ -2,30 +2,32 @@ from datetime import datetime
from sqlalchemy import Column, String, ForeignKey, DateTime, JSON as JSONType
from base.orm import Base
class ShoutTopic(Base):
__tablename__ = 'shout_topic'
id = None
shout = Column(ForeignKey('shout.slug'), primary_key = True)
topic = Column(ForeignKey('topic.slug'), primary_key = True)
class ShoutTopic(Base):
__tablename__ = "shout_topic"
id = None # type: ignore
shout = Column(ForeignKey("shout.slug"), primary_key=True)
topic = Column(ForeignKey("topic.slug"), primary_key=True)
class TopicFollower(Base):
__tablename__ = "topic_followers"
id = None
follower = Column(ForeignKey('user.slug'), primary_key = True)
topic = Column(ForeignKey('topic.slug'), primary_key = True)
createdAt: str = Column(DateTime, nullable=False, default = datetime.now, comment="Created at")
id = None # type: ignore
follower = Column(ForeignKey("user.slug"), primary_key=True)
topic = Column(ForeignKey("topic.slug"), primary_key=True)
createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
class Topic(Base):
__tablename__ = 'topic'
__tablename__ = "topic"
id = None
slug: str = Column(String, primary_key = True)
title: str = Column(String, nullable=False, comment="Title")
body: str = Column(String, nullable=True, comment="Body")
pic: str = Column(String, nullable=True, comment="Picture")
id = None # type: ignore
slug = Column(String, primary_key=True)
title = Column(String, nullable=False, comment="Title")
body = Column(String, nullable=True, comment="Body")
pic = Column(String, nullable=True, comment="Picture")
children = Column(JSONType, nullable=True, default=[], comment="list of children topics")
community = Column(ForeignKey("community.slug"), nullable=False, comment="Community")
oid: str = Column(String, nullable=True, comment="Old ID")
oid = Column(String, nullable=True, comment="Old ID")

View File

@ -1,62 +1,74 @@
from datetime import datetime
from sqlalchemy import Column, Integer, String, ForeignKey, Boolean, DateTime, JSON as JSONType
from sqlalchemy import (
Column,
Integer,
String,
ForeignKey,
Boolean,
DateTime,
JSON as JSONType,
)
from sqlalchemy.orm import relationship
from base.orm import Base, local_session
from orm.rbac import Role
from services.auth.roles import RoleStorage
class UserNotifications(Base):
__tablename__ = 'user_notifications'
id: int = Column(Integer, primary_key = True)
user_id: int = Column(Integer, ForeignKey("user.id"))
kind: str = Column(String, ForeignKey("notification.kind"))
values: JSONType = Column(JSONType, nullable = True) # [ <var1>, .. ]
class UserNotifications(Base):
__tablename__ = "user_notifications"
# id auto
user_id = Column(Integer, ForeignKey("user.id"))
kind = Column(String, ForeignKey("notification.kind"))
values = Column(JSONType, nullable=True) # [ <var1>, .. ]
class UserRating(Base):
__tablename__ = "user_rating"
id = None
rater = Column(ForeignKey('user.slug'), primary_key = True)
user = Column(ForeignKey('user.slug'), primary_key = True)
id = None # type: ignore
rater = Column(ForeignKey("user.slug"), primary_key=True)
user = Column(ForeignKey("user.slug"), primary_key=True)
value = Column(Integer)
class UserRole(Base):
__tablename__ = "user_role"
id = None
user_id = Column(ForeignKey('user.id'), primary_key = True)
role_id = Column(ForeignKey('role.id'), primary_key = True)
id = None # type: ignore
user_id = Column(ForeignKey("user.id"), primary_key=True)
role_id = Column(ForeignKey("role.id"), primary_key=True)
class AuthorFollower(Base):
__tablename__ = "author_follower"
id = None
follower = Column(ForeignKey('user.slug'), primary_key = True)
author = Column(ForeignKey('user.slug'), primary_key = True)
id = None # type: ignore
follower = Column(ForeignKey("user.slug"), primary_key=True)
author = Column(ForeignKey("user.slug"), primary_key=True)
createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
class User(Base):
__tablename__ = "user"
email: str = Column(String, unique=True, nullable=False, comment="Email")
username: str = Column(String, nullable=False, comment="Login")
password: str = Column(String, nullable=True, comment="Password")
bio: str = Column(String, nullable=True, comment="Bio")
userpic: str = Column(String, nullable=True, comment="Userpic")
name: str = Column(String, nullable=True, comment="Display name")
slug: str = Column(String, unique=True, comment="User's slug")
muted: bool = Column(Boolean, default=False)
emailConfirmed: bool = Column(Boolean, default=False)
createdAt: DateTime = Column(DateTime, nullable=False, default = datetime.now, comment="Created at")
wasOnlineAt: DateTime = Column(DateTime, nullable=False, default = datetime.now, comment="Was online at")
deletedAt: DateTime = Column(DateTime, nullable=True, comment="Deleted at")
links: JSONType = Column(JSONType, nullable=True, comment="Links")
oauth: str = Column(String, nullable=True)
email = Column(String, unique=True, nullable=False, comment="Email")
username = Column(String, nullable=False, comment="Login")
password = Column(String, nullable=True, comment="Password")
bio = Column(String, nullable=True, comment="Bio")
userpic = Column(String, nullable=True, comment="Userpic")
name = Column(String, nullable=True, comment="Display name")
slug = Column(String, unique=True, comment="User's slug")
muted = Column(Boolean, default=False)
emailConfirmed = Column(Boolean, default=False)
createdAt = Column(DateTime, nullable=False, default=datetime.now, comment="Created at")
wasOnlineAt = Column(DateTime, nullable=False, default=datetime.now, comment="Was online at")
deletedAt = Column(DateTime, nullable=True, comment="Deleted at")
links = Column(JSONType, nullable=True, comment="Links")
oauth = Column(String, nullable=True)
notifications = relationship(lambda: UserNotifications)
ratings = relationship(UserRating, foreign_keys=UserRating.user)
roles = relationship(lambda: Role, secondary=UserRole.__tablename__)
oid: str = Column(String, nullable = True)
oid = Column(String, nullable=True)
@staticmethod
def init_table():
@ -69,7 +81,7 @@ class User(Base):
username="welcome@discours.io",
name="Дискурс",
slug="discours",
userpic = 'https://discours.io/images/logo-mini.svg',
userpic="https://discours.io/images/logo-mini.svg",
)
User.default_user = default
@ -77,13 +89,13 @@ class User(Base):
async def get_permission(self):
scope = {}
for user_role in self.roles:
role = await RoleStorage.get_role(user_role.id)
role: Role = await RoleStorage.get_role(user_role.id) # type: ignore
for p in role.permissions:
if not p.resource_id in scope:
if p.resource_id not in scope:
scope[p.resource_id] = set()
scope[p.resource_id].add(p.operation_id)
return scope
if __name__ == "__main__":
print(User.get_permission(user_id=1))
print(User.get_permission(user_id=1)) # type: ignore

View File

@ -17,3 +17,4 @@ requests
bcrypt
websockets
bson
flake8

View File

@ -1,32 +1,69 @@
from resolvers.auth import login, sign_out, is_email_used, register, confirm, auth_forget, auth_reset
from resolvers.zine import get_shout_by_slug, follow, unfollow, view_shout, \
top_month, top_overall, recent_published, recent_all, top_viewed, \
shouts_by_authors, shouts_by_topics, shouts_by_communities
from resolvers.profile import get_users_by_slugs, get_current_user, get_user_reacted_shouts, get_user_roles
from resolvers.topics import topic_follow, topic_unfollow, topics_by_author, topics_by_community, topics_all
from resolvers.auth import (
login,
sign_out,
is_email_used,
register,
confirm,
auth_forget,
auth_reset,
)
from resolvers.zine import (
get_shout_by_slug,
follow,
unfollow,
view_shout,
top_month,
top_overall,
recent_published,
recent_all,
top_viewed,
shouts_by_authors,
shouts_by_topics,
shouts_by_communities,
)
from resolvers.profile import (
get_users_by_slugs,
get_current_user,
get_user_reacted_shouts,
get_user_roles,
)
from resolvers.topics import (
topic_follow,
topic_unfollow,
topics_by_author,
topics_by_community,
topics_all,
)
# from resolvers.feed import shouts_for_feed, my_candidates
from resolvers.reactions import create_reaction, delete_reaction, update_reaction, get_all_reactions
from resolvers.reactions import (
create_reaction,
delete_reaction,
update_reaction,
get_all_reactions,
)
from resolvers.collab import invite_author, remove_author
from resolvers.editor import create_shout, delete_shout, update_shout
from resolvers.community import create_community, delete_community, get_community, get_communities
from resolvers.community import (
create_community,
delete_community,
get_community,
get_communities,
)
__all__ = [
"follow",
"unfollow",
# auth
"login",
"register",
"is_email_used",
"confirm",
"auth_forget",
"auth_reset"
"sign_out",
"auth_reset" "sign_out",
# profile
"get_current_user",
"get_users_by_slugs",
# zine
"shouts_for_feed",
"my_candidates",
@ -43,7 +80,6 @@ __all__ = [
"view_shout",
"view_reaction",
"get_shout_by_slug",
# editor
"create_shout",
"update_shout",
@ -51,20 +87,17 @@ __all__ = [
# collab
"invite_author",
"remove_author"
# topics
"topics_all",
"topics_by_community",
"topics_by_author",
"topic_follow",
"topic_unfollow",
# communities
"get_community",
"get_communities",
"create_community",
"delete_community",
# reactions
"get_shout_reactions",
"reactions_follow",

View File

@ -13,9 +13,10 @@ from resolvers.profile import get_user_info
from base.exceptions import InvalidPassword, InvalidToken
from settings import JWT_AUTH_HEADER
@mutation.field("confirmEmail")
async def confirm(*_, confirm_token):
''' confirm owning email address '''
"""confirm owning email address"""
auth_token, user = await Authorize.confirm(confirm_token)
if auth_token:
user.emailConfirmed = True
@ -27,16 +28,18 @@ async def confirm(*_, confirm_token):
@mutation.field("registerUser")
async def register(*_, email: str, password: str = ""):
''' creates new user account '''
"""creates new user account"""
with local_session() as session:
user = session.query(User).filter(User.email == email).first()
if user:
return {"error": "user already exist"}
user_dict = {"email": email}
username = email.split('@')[0]
username = email.split("@")[0]
user_dict["username"] = username
user_dict["slug"] = quote_plus(translit(username, 'ru', reversed=True).replace('.', '-').lower())
user_dict["slug"] = quote_plus(
translit(username, "ru", reversed=True).replace(".", "-").lower()
)
if password:
user_dict["password"] = Password.encode(password)
user = User(**user_dict)
@ -49,9 +52,10 @@ async def register(*_, email: str, password: str = ""):
return {"user": user}
@mutation.field("requestPasswordUpdate")
async def auth_forget(_, info, email):
''' send email to recover account '''
"""send email to recover account"""
with local_session() as session:
user = session.query(User).filter(User.email == email).first()
if not user:
@ -61,9 +65,10 @@ async def auth_forget(_, info, email):
return {}
@mutation.field("updatePassword")
async def auth_reset(_, info, password, resetToken):
''' set the new password '''
"""set the new password"""
try:
user_id = await ResetPassword.verify(resetToken)
except InvalidToken as e:
@ -78,6 +83,7 @@ async def auth_reset(_, info, password, resetToken):
return {}
@query.field("signIn")
async def login(_, info: GraphQLResolveInfo, email: str, password: str = ""):
@ -96,7 +102,7 @@ async def login(_, info: GraphQLResolveInfo, email: str, password: str = ""):
return {"error": "email not confirmed"}
try:
device = info.context["request"].headers['device']
device = info.context["request"].headers["device"]
except KeyError:
device = "pc"
auto_delete = False if device == "mobile" else True # why autodelete with mobile?
@ -113,7 +119,7 @@ async def login(_, info: GraphQLResolveInfo, email: str, password: str = ""):
return {
"token": token,
"user": orm_user,
"info": await get_user_info(orm_user.slug)
"info": await get_user_info(orm_user.slug),
}
@ -124,6 +130,7 @@ async def sign_out(_, info: GraphQLResolveInfo):
status = await Authorize.revoke(token)
return True
@query.field("isEmailUsed")
async def is_email_used(_, info, email):
with local_session() as session:

View File

@ -6,6 +6,7 @@ from orm.user import User
from base.resolvers import query, mutation
from auth.authenticate import login_required
@query.field("getCollabs")
@login_required
async def get_collabs(_, info):
@ -43,6 +44,7 @@ async def invite_author(_, info, author, shout):
return {}
@mutation.field("removeAuthor")
@login_required
async def remove_author(_, info, author, shout):

View File

@ -7,40 +7,45 @@ from datetime import datetime
from typing import Collection
from sqlalchemy import and_
@mutation.field("createCollection")
@login_required
async def create_collection(_, info, input):
auth = info.context["request"].auth
user_id = auth.user_id
collection = Collection.create(
slug = input.get('slug', ''),
title = input.get('title', ''),
desc = input.get('desc', ''),
pic = input.get('pic', '')
slug=input.get("slug", ""),
title=input.get("title", ""),
desc=input.get("desc", ""),
pic=input.get("pic", ""),
)
return {"collection": collection}
@mutation.field("updateCollection")
@login_required
async def update_collection(_, info, input):
auth = info.context["request"].auth
user_id = auth.user_id
collection_slug = input.get('slug', '')
collection_slug = input.get("slug", "")
with local_session() as session:
owner = session.query(User).filter(User.id == user_id) # note list here
collection = session.query(Collection).filter(Collection.slug == collection_slug).first()
collection = (
session.query(Collection).filter(Collection.slug == collection_slug).first()
)
if not collection:
return {"error": "invalid collection id"}
editors = [e.slug for e in collection.editors]
if collection.createdBy not in (owner + editors):
return {"error": "access denied"}
collection.title = input.get('title', '')
collection.desc = input.get('desc', '')
collection.pic = input.get('pic', '')
collection.title = input.get("title", "")
collection.desc = input.get("desc", "")
collection.pic = input.get("pic", "")
collection.updatedAt = datetime.now()
session.commit()
@mutation.field("deleteCollection")
@login_required
async def delete_collection(_, info, slug):
@ -57,6 +62,7 @@ async def delete_collection(_, info, slug):
return {}
@query.field("getUserCollections")
async def get_user_collections(_, info, userslug):
collections = []
@ -64,15 +70,25 @@ async def get_user_collections(_, info, userslug):
user = session.query(User).filter(User.slug == userslug).first()
if user:
# TODO: check rights here
collections = session.\
query(Collection).\
where(and_(Collection.createdBy == userslug, Collection.publishedAt != None)).\
all()
collections = (
session.query(Collection)
.where(
and_(
Collection.createdBy == userslug, Collection.publishedAt != None
)
)
.all()
)
for c in collections:
shouts = session.query(ShoutCollection).filter(ShoutCollection.collection == c.id).all()
shouts = (
session.query(ShoutCollection)
.filter(ShoutCollection.collection == c.id)
.all()
)
c.amount = len(shouts)
return collections
@query.field("getMyCollections")
async def get_user_collections(_, info, userslug):
collections = []
@ -80,20 +96,32 @@ async def get_user_collections(_, info, userslug):
user = session.query(User).filter(User.slug == userslug).first()
if user:
# TODO: check rights here
collections = session.\
query(Collection).\
where(and_(Collection.createdBy == userslug, Collection.publishedAt != None)).\
all()
collections = (
session.query(Collection)
.where(
and_(
Collection.createdBy == userslug, Collection.publishedAt != None
)
)
.all()
)
for c in collections:
shouts = session.query(ShoutCollection).filter(ShoutCollection.collection == c.id).all()
shouts = (
session.query(ShoutCollection)
.filter(ShoutCollection.collection == c.id)
.all()
)
c.amount = len(shouts)
return collections
@query.field("getMyColelctions")
@login_required
async def get_my_collections(_, info):
auth = info.context["request"].auth
user_id = auth.user_id
with local_session() as session:
collections = session.query(Collection).when(Collection.createdBy == user_id).all()
collections = (
session.query(Collection).where(Collection.createdBy == user_id).all()
)
return collections

View File

@ -7,6 +7,7 @@ from datetime import datetime
from typing import List
from sqlalchemy import and_
@mutation.field("createCommunity")
@login_required
async def create_community(_, info, input):
@ -14,35 +15,39 @@ async def create_community(_, info, input):
user_id = auth.user_id
community = Community.create(
slug = input.get('slug', ''),
title = input.get('title', ''),
desc = input.get('desc', ''),
pic = input.get('pic', '')
slug=input.get("slug", ""),
title=input.get("title", ""),
desc=input.get("desc", ""),
pic=input.get("pic", ""),
)
return {"community": community}
@mutation.field("updateCommunity")
@login_required
async def update_community(_, info, input):
auth = info.context["request"].auth
user_id = auth.user_id
community_slug = input.get('slug', '')
community_slug = input.get("slug", "")
with local_session() as session:
owner = session.query(User).filter(User.id == user_id) # note list here
community = session.query(Community).filter(Community.slug == community_slug).first()
community = (
session.query(Community).filter(Community.slug == community_slug).first()
)
if not community:
return {"error": "invalid community id"}
editors = [e.slug for e in community.editors]
if community.createdBy not in (owner + editors):
return {"error": "access denied"}
community.title = input.get('title', '')
community.desc = input.get('desc', '')
community.pic = input.get('pic', '')
community.title = input.get("title", "")
community.desc = input.get("desc", "")
community.pic = input.get("pic", "")
community.updatedAt = datetime.now()
session.commit()
@mutation.field("deleteCommunity")
@login_required
async def delete_community(_, info, slug):
@ -60,6 +65,7 @@ async def delete_community(_, info, slug):
return {}
@query.field("getCommunity")
async def get_community(_, info, slug):
with local_session() as session:
@ -69,34 +75,44 @@ async def get_community(_, info, slug):
return community
@query.field("getCommunities")
async def get_communities(_, info):
with local_session() as session:
communities = session.query(Community)
return communities
def community_follow(user, slug):
CommunityFollower.create(
follower = user.slug,
community = slug
)
CommunityFollower.create(follower=user.slug, community=slug)
def community_unfollow(user, slug):
with local_session() as session:
following = session.query(CommunityFollower).\
filter(and_(CommunityFollower.follower == user.slug, CommunityFollower.community == slug)).\
first()
following = (
session.query(CommunityFollower)
.filter(
and_(
CommunityFollower.follower == user.slug,
CommunityFollower.community == slug,
)
)
.first()
)
if not following:
raise Exception("[orm.community] following was not exist")
session.delete(following)
session.commit()
@query.field("userFollowedCommunities")
def get_followed_communities(_, user_slug) -> List[Community]:
ccc = []
with local_session() as session:
ccc = session.query(Community.slug).\
join(CommunityFollower).\
where(CommunityFollower.follower == user_slug).\
all()
ccc = (
session.query(Community.slug)
.join(CommunityFollower)
.where(CommunityFollower.follower == user_slug)
.all()
)
return ccc

View File

@ -20,10 +20,7 @@ async def create_shout(_, info, input):
del input["topic_slugs"]
new_shout = Shout.create(**input)
ShoutAuthor.create(
shout = new_shout.slug,
user = user.slug
)
ShoutAuthor.create(shout=new_shout.slug, user=user.slug)
reactions_follow(user, new_shout.slug, True)
@ -31,23 +28,15 @@ async def create_shout(_, info, input):
topic_slugs.append(input["mainTopic"])
for slug in topic_slugs:
topic = ShoutTopic.create(
shout = new_shout.slug,
topic = slug)
topic = ShoutTopic.create(shout=new_shout.slug, topic=slug)
new_shout.topic_slugs = topic_slugs
task = GitTask(
input,
user.username,
user.email,
"new shout %s" % (new_shout.slug)
)
task = GitTask(input, user.username, user.email, "new shout %s" % (new_shout.slug))
# await ShoutCommentsStorage.send_shout(new_shout)
return {
"shout" : new_shout
}
return {"shout": new_shout}
@mutation.field("updateShout")
@login_required
@ -62,18 +51,14 @@ async def update_shout(_, info, input):
shout = session.query(Shout).filter(Shout.slug == slug).first()
if not shout:
return {
"error" : "shout not found"
}
return {"error": "shout not found"}
authors = [author.id for author in shout.authors]
if user_id not in authors:
scopes = auth.scopes
print(scopes)
if Resource.shout_id not in scopes:
return {
"error" : "access denied"
}
return {"error": "access denied"}
shout.update(input)
shout.updatedAt = datetime.now()
@ -81,20 +66,12 @@ async def update_shout(_, info, input):
session.close()
for topic in input.get("topic_slugs", []):
ShoutTopic.create(
shout = slug,
topic = topic)
ShoutTopic.create(shout=slug, topic=topic)
task = GitTask(
input,
user.username,
user.email,
"update shout %s" % (slug)
)
task = GitTask(input, user.username, user.email, "update shout %s" % (slug))
return {"shout": shout}
return {
"shout" : shout
}
@mutation.field("deleteShout")
@login_required
@ -114,5 +91,4 @@ async def delete_shout(_, info, slug):
shout.deletedAt = datetime.now()
session.commit()
return {}

View File

@ -7,36 +7,44 @@ from orm.topic import TopicFollower
from orm.user import AuthorFollower
from typing import List
@query.field("shoutsForFeed")
@login_required
def get_user_feed(_, info, page, size) -> List[Shout]:
user = info.context["request"].user
shouts = []
with local_session() as session:
shouts = session.query(Shout).\
join(ShoutAuthor).\
join(AuthorFollower).\
where(AuthorFollower.follower == user.slug).\
order_by(desc(Shout.createdAt))
topicrows = session.query(Shout).\
join(ShoutTopic).\
join(TopicFollower).\
where(TopicFollower.follower == user.slug).\
order_by(desc(Shout.createdAt))
shouts = (
session.query(Shout)
.join(ShoutAuthor)
.join(AuthorFollower)
.where(AuthorFollower.follower == user.slug)
.order_by(desc(Shout.createdAt))
)
topicrows = (
session.query(Shout)
.join(ShoutTopic)
.join(TopicFollower)
.where(TopicFollower.follower == user.slug)
.order_by(desc(Shout.createdAt))
)
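        # Merge author-followed and topic-followed shouts, then paginate the combined set.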
shouts = shouts.union(topicrows).limit(size).offset(page * size).all()
return shouts
@query.field("myCandidates")
@login_required
async def user_unpublished_shouts(_, info, page=1, size=10) -> List[Shout]:
user = info.context["request"].user
shouts = []
with local_session() as session:
shouts = session.query(Shout).\
join(ShoutAuthor).\
where(and_(Shout.publishedAt == None, ShoutAuthor.user == user.slug)).\
order_by(desc(Shout.createdAt)).\
limit(size).\
offset( page * size).\
all()
shouts = (
session.query(Shout)
.join(ShoutAuthor)
.where(and_(Shout.publishedAt == None, ShoutAuthor.user == user.slug))
.order_by(desc(Shout.createdAt))
.limit(size)
.offset(page * size)
.all()
)
return shouts

View File

@ -4,12 +4,14 @@ import asyncio, uuid, json
from datetime import datetime
from base.redis import redis
class ChatFollowing:
queue = asyncio.Queue()
def __init__(self, chat_id):
self.chat_id = chat_id
class MessagesStorage:
lock = asyncio.Lock()
chats = []
@ -31,11 +33,13 @@ class MessagesStorage:
if message_result.message["chatId"] == chat.chat_id:
chat.queue.put_nowait(message_result)
class MessageResult:
def __init__(self, status, message):
self.status = status
self.message = message
async def get_unread_counter(user_slug):
chats = await redis.execute("GET", f"chats_by_user/{user_slug}")
if not chats:
@ -49,6 +53,7 @@ async def get_unread_counter(user_slug):
return unread
async def add_user_to_chat(user_slug, chat_id, chat=None):
chats = await redis.execute("GET", f"chats_by_user/{user_slug}")
if not chats:
@ -65,6 +70,7 @@ async def add_user_to_chat(user_slug, chat_id, chat = None):
chat["users"] = list(users)
await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
@mutation.field("createChat")
@login_required
async def create_chat(_, info, description):
@ -76,7 +82,7 @@ async def create_chat(_, info, description):
"createdAt": str(datetime.now),
"createdBy": user.slug,
"id": str(chat_id),
"users" : [user.slug]
"users": [user.slug],
}
await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
@ -86,16 +92,21 @@ async def create_chat(_, info, description):
return {"chatId": chat_id}
async def load_messages(chatId, size, page):
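    # Message ids live in a per-chat Redis list; LRANGE slices out one page (page numbering is 1-based here).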
message_ids = await redis.lrange(f"chats/{chatId}/message_ids",
size * (page -1), size * page - 1)
message_ids = await redis.lrange(
f"chats/{chatId}/message_ids", size * (page - 1), size * page - 1
)
messages = []
if message_ids:
message_keys = [f"chats/{chatId}/messages/{id.decode('UTF-8')}" for id in message_ids]
message_keys = [
f"chats/{chatId}/messages/{id.decode('UTF-8')}" for id in message_ids
]
messages = await redis.mget(*message_keys)
messages = [json.loads(msg) for msg in messages]
return messages
@query.field("userChats")
@login_required
async def user_chats(_, info):
@ -109,6 +120,7 @@ async def user_chats(_, info):
return {"chats": chats}
@query.field("enterChat")
@login_required
async def enter_chat(_, info, chatId, size):
@ -123,10 +135,8 @@ async def enter_chat(_, info, chatId, size):
await add_user_to_chat(user.slug, chatId, chat)
return {
"chat" : chat,
"messages" : messages
}
return {"chat": chat, "messages": messages}
@mutation.field("createMessage")
@login_required
@ -146,23 +156,28 @@ async def create_message(_, info, chatId, body, replyTo = None):
"author": user.slug,
"body": body,
"replyTo": replyTo,
"createdAt" : datetime.now().isoformat()
"createdAt": datetime.now().isoformat(),
}
await redis.execute("SET", f"chats/{chatId}/messages/{message_id}", json.dumps(new_message))
await redis.execute(
"SET", f"chats/{chatId}/messages/{message_id}", json.dumps(new_message)
)
await redis.execute("LPUSH", f"chats/{chatId}/message_ids", str(message_id))
await redis.execute("SET", f"chats/{chatId}/next_message_id", str(message_id + 1))
chat = json.loads(chat)
users = chat["users"]
for user_slug in users:
await redis.execute("LPUSH", f"chats/{chatId}/unread/{user_slug}", str(message_id))
await redis.execute(
"LPUSH", f"chats/{chatId}/unread/{user_slug}", str(message_id)
)
result = MessageResult("NEW", new_message)
await MessagesStorage.put(result)
return {"message": new_message}
@query.field("getMessages")
@login_required
async def get_messages(_, info, chatId, size, page):
@ -174,6 +189,7 @@ async def get_messages(_, info, chatId, size, page):
return messages
@mutation.field("updateMessage")
@login_required
async def update_message(_, info, chatId, id, body):
@ -201,6 +217,7 @@ async def update_message(_, info, chatId, id, body):
return {"message": message}
@mutation.field("deleteMessage")
@login_required
async def delete_message(_, info, chatId, id):
@ -230,6 +247,7 @@ async def delete_message(_, info, chatId, id):
return {}
@mutation.field("markAsRead")
@login_required
async def mark_as_read(_, info, chatId, ids):
@ -249,6 +267,7 @@ async def mark_as_read(_, info, chatId, ids):
return {}
@subscription.source("chatUpdated")
@login_required
async def message_generator(obj, info, chatId):
@ -261,6 +280,7 @@ async def message_generator(obj, info, chatId):
finally:
await MessagesStorage.remove_chat(following_chat)
@subscription.field("chatUpdated")
def message_resolver(message, info, chatId):
return message

View File

@ -14,17 +14,22 @@ from sqlalchemy import and_, desc
from sqlalchemy.orm import selectinload
from typing import List
@query.field("userReactedShouts")
async def get_user_reacted_shouts(_, info, slug, page, size) -> List[Shout]:
user = await UserStorage.get_user_by_slug(slug)
if not user: return {}
if not user:
return []
with local_session() as session:
shouts = session.query(Shout).\
join(Reaction).\
where(Reaction.createdBy == user.slug).\
order_by(desc(Reaction.createdAt)).\
limit(size).\
offset(page * size).all()
shouts = (
session.query(Shout)
.join(Reaction)
.where(Reaction.createdBy == user.slug)
.order_by(desc(Reaction.createdAt))
.limit(size)
.offset(page * size)
.all()
)
return shouts
@ -33,10 +38,12 @@ async def get_user_reacted_shouts(_, info, slug, page, size) -> List[Shout]:
def get_followed_topics(_, slug) -> List[Topic]:
rows = []
with local_session() as session:
rows = session.query(Topic).\
join(TopicFollower).\
where(TopicFollower.follower == slug).\
all()
rows = (
session.query(Topic)
.join(TopicFollower)
.where(TopicFollower.follower == slug)
.all()
)
return rows
@ -44,22 +51,27 @@ def get_followed_topics(_, slug) -> List[Topic]:
def get_followed_authors(_, slug) -> List[User]:
authors = []
with local_session() as session:
authors = session.query(User).\
join(AuthorFollower, User.slug == AuthorFollower.author).\
where(AuthorFollower.follower == slug).\
all()
authors = (
session.query(User)
.join(AuthorFollower, User.slug == AuthorFollower.author)
.where(AuthorFollower.follower == slug)
.all()
)
return authors
@query.field("userFollowers")
async def user_followers(_, slug) -> List[User]:
with local_session() as session:
users = session.query(User).\
join(AuthorFollower, User.slug == AuthorFollower.follower).\
where(AuthorFollower.author == slug).\
all()
users = (
session.query(User)
.join(AuthorFollower, User.slug == AuthorFollower.follower)
.where(AuthorFollower.author == slug)
.all()
)
return users
# for mutation.field("refreshSession")
async def get_user_info(slug):
return {
@ -67,7 +79,7 @@ async def get_user_info(slug):
"topics": [t.slug for t in get_followed_topics(0, slug)],
"authors": [a.slug for a in get_followed_authors(0, slug)],
"reactions": [r.shout for r in get_shout_reactions(0, slug)],
"communities": [c.slug for c in get_followed_communities(0, slug)]
"communities": [c.slug for c in get_followed_communities(0, slug)],
}
@ -82,16 +94,19 @@ async def get_current_user(_, info):
return {
"token": "", # same token?
"user": user,
"info": await get_user_info(user.slug)
"info": await get_user_info(user.slug),
}
@query.field("getUsersBySlugs")
async def get_users_by_slugs(_, info, slugs):
with local_session() as session:
users = session.query(User).\
options(selectinload(User.ratings)).\
filter(User.slug.in_(slugs)).all()
users = (
session.query(User)
.options(selectinload(User.ratings))
            .filter(User.slug.in_(slugs))
.all()
)
return users
@ -99,10 +114,13 @@ async def get_users_by_slugs(_, info, slugs):
async def get_user_roles(_, info, slug):
with local_session() as session:
user = session.query(User).where(User.slug == slug).first()
roles = session.query(Role).\
options(selectinload(Role.permissions)).\
join(UserRole).\
where(UserRole.user_id == user.id).all()
roles = (
session.query(Role)
.options(selectinload(Role.permissions))
.join(UserRole)
.where(UserRole.user_id == user.id)
.all()
)
return roles
@ -113,7 +131,8 @@ async def update_profile(_, info, profile):
user_id = auth.user_id
with local_session() as session:
user = session.query(User).filter(User.id == user_id).first()
user.update(profile)
if user:
User.update(user, **profile)
session.commit()
return {}
@ -123,45 +142,48 @@ async def update_profile(_, info, profile):
async def rate_user(_, info, slug, value):
user = info.context["request"].user
with local_session() as session:
rating = session.query(UserRating).\
filter(and_(UserRating.rater == user.slug, UserRating.user == slug)).\
first()
rating = (
session.query(UserRating)
.filter(and_(UserRating.rater == user.slug, UserRating.user == slug))
.first()
)
if rating:
rating.value = value
session.commit()
return {}
try:
UserRating.create(
rater=user.slug,
user=slug,
value=value
)
UserRating.create(rater=user.slug, user=slug, value=value)
except Exception as err:
return {"error": err}
return {}
# for mutation.field("follow")
def author_follow(user, slug):
AuthorFollower.create(
follower=user.slug,
author=slug
)
AuthorFollower.create(follower=user.slug, author=slug)
# for mutation.field("unfollow")
def author_unfollow(user, slug):
with local_session() as session:
flw = session.query(AuthorFollower).\
filter(and_(AuthorFollower.follower == user.slug, AuthorFollower.author == slug)).\
first()
flw = (
session.query(AuthorFollower)
.filter(
and_(
AuthorFollower.follower == user.slug, AuthorFollower.author == slug
)
)
.first()
)
if not flw:
raise Exception("[resolvers.profile] follower not exist, cant unfollow")
else:
session.delete(flw)
session.commit()
@query.field("authorsAll")
def get_authors_all(_, info, page, size):
end = page * size
start = end - size
return UserStorage.get_all_users()[start:end]
return list(UserStorage.get_all_users())[start:end] # type: ignore

View File

@ -10,11 +10,17 @@ from datetime import datetime
from services.auth.users import UserStorage
from services.stat.reacted import ReactedStorage
def reactions_follow(user, slug, auto=False):
with local_session() as session:
fw = session.query(ShoutReactionsFollower).\
filter(ShoutReactionsFollower.follower == user.slug, ShoutReactionsFollower.shout == slug).\
first()
fw = (
session.query(ShoutReactionsFollower)
.filter(
ShoutReactionsFollower.follower == user.slug,
ShoutReactionsFollower.shout == slug,
)
.first()
)
if auto and fw:
return
elif not auto and fw:
@ -25,17 +31,19 @@ def reactions_follow(user, slug, auto=False):
return
# print("[resolvers.reactions] was followed before")
ShoutReactionsFollower.create(
follower=user.slug,
shout=slug,
auto=auto)
ShoutReactionsFollower.create(follower=user.slug, shout=slug, auto=auto)
def reactions_unfollow(user, slug):
with local_session() as session:
following = session.query(ShoutReactionsFollower).\
filter(ShoutReactionsFollower.follower == user.slug, ShoutReactionsFollower.shout == slug).\
first()
following = (
session.query(ShoutReactionsFollower)
.filter(
ShoutReactionsFollower.follower == user.slug,
ShoutReactionsFollower.shout == slug,
)
.first()
)
if not following:
# print("[resolvers.reactions] was not followed", slug)
return
@ -56,7 +64,7 @@ async def create_reaction(_, info, inp):
reaction = Reaction.create(**inp)
ReactedStorage.increment(reaction.shout, reaction.replyTo)
try:
reactions_follow(user, inp['shout'], True)
reactions_follow(user, inp["shout"], True)
except Exception as e:
print(f"[resolvers.reactions] error on reactions autofollowing: {e}")
@ -76,13 +84,13 @@ async def update_reaction(_, info, inp):
return {"error": "invalid reaction id"}
if reaction.createdBy != user.slug:
return {"error": "access denied"}
reaction.body = inp['body']
reaction.body = inp["body"]
reaction.updatedAt = datetime.now()
if reaction.kind != inp['kind']:
if reaction.kind != inp["kind"]:
# NOTE: change mind detection can be here
pass
if inp.get('range'):
reaction.range = inp.get('range')
if inp.get("range"):
reaction.range = inp.get("range")
session.commit()
return {"reaction": reaction}
@ -104,29 +112,39 @@ async def delete_reaction(_, info, id):
session.commit()
return {}
@query.field("reactionsByShout")
async def get_shout_reactions(_, info, slug, page, size):
offset = page * size
reactions = []
with local_session() as session:
reactions = session.query(Reaction).\
filter(Reaction.shout == slug).\
limit(size).offset(offset).all()
reactions = (
session.query(Reaction)
.filter(Reaction.shout == slug)
.limit(size)
.offset(offset)
.all()
)
for r in reactions:
r.createdBy = await UserStorage.get_user(r.createdBy or 'discours')
r.createdBy = await UserStorage.get_user(r.createdBy or "discours")
return reactions
@query.field("reactionsForSlugs")
async def get_reactions_for_slugs(_, info, slugs, page, size):
offset = page * size
reactions = []
with local_session() as session:
for slug in slugs:
reactions += session.query(Reaction).\
filter(Reaction.shout == slug).\
limit(size).offset(offset).all()
reactions += (
session.query(Reaction)
.filter(Reaction.shout == slug)
.limit(size)
.offset(offset)
.all()
)
for r in reactions:
r.createdBy = await UserStorage.get_user(r.createdBy or 'discours')
r.createdBy = await UserStorage.get_user(r.createdBy or "discours")
return reactions
@ -135,22 +153,31 @@ async def get_all_reactions(_, info, page=1, size=10):
offset = page * size
reactions = []
with local_session() as session:
reactions = session.query(Reaction).\
filter(Reaction.deletedAt == None).\
order_by(desc("createdAt")).\
offset(offset).limit(size)
reactions = (
session.query(Reaction)
.filter(Reaction.deletedAt == None)
.order_by(desc("createdAt"))
.offset(offset)
.limit(size)
)
for r in reactions:
r.createdBy = await UserStorage.get_user(r.createdBy or 'discours')
r.createdBy = await UserStorage.get_user(r.createdBy or "discours")
reactions = list(reactions)
reactions.sort(key=lambda x: x.createdAt, reverse=True)
return reactions
@query.field("reactionsByAuthor")
async def get_reactions_by_author(_, info, slug, page=1, size=50):
offset = page * size
reactions = []
with local_session() as session:
reactions = session.query(Reaction).filter(Reaction.createdBy == slug).limit(size).offset(offset)
reactions = (
session.query(Reaction)
.filter(Reaction.createdBy == slug)
.limit(size)
.offset(offset)
)
for r in reactions:
r.createdBy = await UserStorage.get_user(r.createdBy or 'discours')
r.createdBy = await UserStorage.get_user(r.createdBy or "discours")
return reactions

View File

@ -8,6 +8,7 @@ from base.resolvers import mutation, query
from auth.authenticate import login_required
from sqlalchemy import and_
@query.field("topicsAll")
async def topics_all(_, info, page=1, size=50):
topics = await TopicStorage.get_topics_all(page, size)
@ -15,6 +16,7 @@ async def topics_all(_, info, page = 1, size = 50):
topic.stat = await TopicStat.get_stat(topic.slug)
return topics
@query.field("topicsByCommunity")
async def topics_by_community(_, info, community):
topics = await TopicStorage.get_topics_by_community(community)
@ -22,16 +24,17 @@ async def topics_by_community(_, info, community):
topic.stat = await TopicStat.get_stat(topic.slug)
return topics
@query.field("topicsByAuthor")
async def topics_by_author(_, info, author):
slugs = set()
with local_session() as session:
shouts = session.query(Shout).\
filter(Shout.authors.any(User.slug == author))
shouts = session.query(Shout).filter(Shout.authors.any(User.slug == author))
for shout in shouts:
slugs.update([topic.slug for topic in shout.topics])
return await TopicStorage.get_topics(slugs)
@mutation.field("createTopic")
@login_required
async def create_topic(_, info, input):
@ -40,6 +43,7 @@ async def create_topic(_, info, input):
return {"topic": new_topic}
@mutation.field("updateTopic")
@login_required
async def update_topic(_, info, input):
@ -59,16 +63,20 @@ async def update_topic(_, info, input):
return {"topic": topic}
def topic_follow(user, slug):
TopicFollower.create(
follower = user.slug,
topic = slug)
TopicFollower.create(follower=user.slug, topic=slug)
def topic_unfollow(user, slug):
with local_session() as session:
sub = session.query(TopicFollower).\
filter(and_(TopicFollower.follower == user.slug, TopicFollower.topic == slug)).\
first()
sub = (
session.query(TopicFollower)
.filter(
and_(TopicFollower.follower == user.slug, TopicFollower.topic == slug)
)
.first()
)
if not sub:
raise Exception("[resolvers.topics] follower not exist")
session.delete(sub)

View File

@ -13,55 +13,69 @@ from resolvers.reactions import reactions_follow, reactions_unfollow
from auth.authenticate import login_required
from sqlalchemy import select, desc, and_, text
from sqlalchemy.orm import selectinload
from sqlalchemy.dialects import postgresql
@query.field("topViewed")
async def top_viewed(_, info, page, size):
async with ShoutsCache.lock:
return ShoutsCache.top_viewed[(page - 1) * size : page * size]
@query.field("topMonth")
async def top_month(_, info, page, size):
async with ShoutsCache.lock:
return ShoutsCache.top_month[(page - 1) * size : page * size]
@query.field("topOverall")
async def top_overall(_, info, page, size):
async with ShoutsCache.lock:
return ShoutsCache.top_overall[(page - 1) * size : page * size]
@query.field("recentPublished")
async def recent_published(_, info, page, size):
async with ShoutsCache.lock:
return ShoutsCache.recent_published[(page - 1) * size : page * size]
@query.field("recentAll")
async def recent_all(_, info, page, size):
async with ShoutsCache.lock:
return ShoutsCache.recent_all[(page - 1) * size : page * size]
@query.field("recentReacted")
async def recent_reacted(_, info, page, size):
async with ShoutsCache.lock:
return ShoutsCache.recent_reacted[(page - 1) * size : page * size]
@mutation.field("viewShout")
async def view_shout(_, info, slug):
await ViewedStorage.inc_shout(slug)
return {"error": ""}
@query.field("getShoutBySlug")
async def get_shout_by_slug(_, info, slug):
all_fields = [node.name.value for node in info.field_nodes[0].selection_set.selections]
all_fields = [
node.name.value for node in info.field_nodes[0].selection_set.selections
]
selected_fields = set(["authors", "topics"]).intersection(all_fields)
select_options = [selectinload(getattr(Shout, field)) for field in selected_fields]
shout = {}
with local_session() as session:
try: s = text(open('src/queries/shout-by-slug.sql', 'r').read() % slug)
except: pass
shout = session.query(Shout).\
options(select_options).\
filter(Shout.slug == slug).first()
try:
s = text(open("src/queries/shout-by-slug.sql", "r").read() % slug)
        except Exception:
pass
shout = (
session.query(Shout)
.options(select_options)
.filter(Shout.slug == slug)
.first()
)
if not shout:
print(f"shout with slug {slug} not exist")
@ -71,57 +85,68 @@ async def get_shout_by_slug(_, info, slug):
a.caption = await ShoutAuthorStorage.get_author_caption(slug, a.slug)
return shout
@query.field("shoutsByTopics")
async def shouts_by_topics(_, info, slugs, page, size):
page = page - 1
with local_session() as session:
shouts = session.query(Shout).\
join(ShoutTopic).\
where(and_(ShoutTopic.topic.in_(slugs), Shout.publishedAt != None)).\
order_by(desc(Shout.publishedAt)).\
limit(size).\
offset(page * size)
shouts = (
session.query(Shout)
.join(ShoutTopic)
.where(and_(ShoutTopic.topic.in_(slugs), Shout.publishedAt != None))
.order_by(desc(Shout.publishedAt))
.limit(size)
.offset(page * size)
)
for s in shouts:
for a in s.authors:
a.caption = await ShoutAuthorStorage.get_author_caption(s.slug, a.slug)
return shouts
@query.field("shoutsByCollection")
async def shouts_by_collection(_, info, collection, page, size):
page = page - 1
shouts = []
with local_session() as session:
shouts = session.query(Shout).\
join(ShoutCollection, ShoutCollection.collection == collection).\
where(and_(ShoutCollection.shout == Shout.slug, Shout.publishedAt != None)).\
order_by(desc(Shout.publishedAt)).\
limit(size).\
offset(page * size)
shouts = (
session.query(Shout)
.join(ShoutCollection, ShoutCollection.collection == collection)
.where(and_(ShoutCollection.shout == Shout.slug, Shout.publishedAt != None))
.order_by(desc(Shout.publishedAt))
.limit(size)
.offset(page * size)
)
for s in shouts:
for a in s.authors:
a.caption = await ShoutAuthorStorage.get_author_caption(s.slug, a.slug)
return shouts
@query.field("shoutsByAuthors")
async def shouts_by_authors(_, info, slugs, page, size):
page = page - 1
with local_session() as session:
shouts = session.query(Shout).\
join(ShoutAuthor).\
where(and_(ShoutAuthor.user.in_(slugs), Shout.publishedAt != None)).\
order_by(desc(Shout.publishedAt)).\
limit(size).\
offset(page * size)
shouts = (
session.query(Shout)
.join(ShoutAuthor)
.where(and_(ShoutAuthor.user.in_(slugs), Shout.publishedAt != None))
.order_by(desc(Shout.publishedAt))
.limit(size)
.offset(page * size)
)
for s in shouts:
for a in s.authors:
a.caption = await ShoutAuthorStorage.get_author_caption(s.slug, a.slug)
return shouts
SINGLE_COMMUNITY = True
@query.field("shoutsByCommunities")
async def shouts_by_communities(_, info, slugs, page, size):
if SINGLE_COMMUNITY:
@ -130,21 +155,29 @@ async def shouts_by_communities(_, info, slugs, page, size):
page = page - 1
with local_session() as session:
# TODO fix postgres high load
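        # Distinct published shouts whose topics belong to any of the requested communities.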
shouts = session.query(Shout).distinct().\
join(ShoutTopic).\
where(and_(Shout.publishedAt != None,\
ShoutTopic.topic.in_(\
select(Topic.slug).where(Topic.community.in_(slugs))\
))).\
order_by(desc(Shout.publishedAt)).\
limit(size).\
offset(page * size)
shouts = (
session.query(Shout)
.distinct()
.join(ShoutTopic)
.where(
and_(
Shout.publishedAt != None,
ShoutTopic.topic.in_(
select(Topic.slug).where(Topic.community.in_(slugs))
),
)
)
.order_by(desc(Shout.publishedAt))
.limit(size)
.offset(page * size)
)
for s in shouts:
for a in s.authors:
a.caption = await ShoutAuthorStorage.get_author_caption(s.slug, a.slug)
return shouts
@mutation.field("follow")
@login_required
async def follow(_, info, what, slug):
@ -163,6 +196,7 @@ async def follow(_, info, what, slug):
return {}
@mutation.field("unfollow")
@login_required
async def unfollow(_, info, what, slug):

View File

@ -1,8 +1,8 @@
import asyncio
from sqlalchemy.orm import selectinload
from orm.rbac import Role
class RoleStorage:
roles = {}
lock = asyncio.Lock()
@ -10,11 +10,9 @@ class RoleStorage:
@staticmethod
def init(session):
self = RoleStorage
roles = session.query(Role).\
options(selectinload(Role.permissions)).all()
roles = session.query(Role).options(selectinload(Role.permissions)).all()
self.roles = dict([(role.id, role) for role in roles])
print('[auth.roles] %d precached' % len(roles))
print("[auth.roles] %d precached" % len(roles))
@staticmethod
async def get_role(id):

View File

@ -10,10 +10,13 @@ class UserStorage:
@staticmethod
def init(session):
self = UserStorage
users = session.query(User).\
options(selectinload(User.roles), selectinload(User.ratings)).all()
users = (
session.query(User)
.options(selectinload(User.roles), selectinload(User.ratings))
.all()
)
self.users = dict([(user.id, user) for user in users])
print('[auth.users] %d precached' % len(self.users))
print("[auth.users] %d precached" % len(self.users))
@staticmethod
async def get_user(id):

View File

@ -2,12 +2,14 @@ import asyncio
from datetime import datetime
from sqlalchemy.types import Enum
from sqlalchemy import Column, DateTime, ForeignKey, Boolean
# from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy import Enum
import enum
from base.orm import Base, local_session
from orm.topic import ShoutTopic
class ReactionKind(enum.Enum):
AGREE = 1 # +1
DISAGREE = 2 # -1
@ -23,44 +25,41 @@ class ReactionKind(enum.Enum):
DISLIKE = 12 # -1
# TYPE = <reaction index> # rating diff
def kind_to_rate(kind) -> int:
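    # Collapse a reaction kind into its rating delta: approving kinds give +1, disapproving kinds give -1, the rest give 0.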
if kind in [
ReactionKind.AGREE,
ReactionKind.LIKE,
ReactionKind.PROOF,
ReactionKind.ACCEPT
]: return 1
ReactionKind.ACCEPT,
]:
return 1
elif kind in [
ReactionKind.DISAGREE,
ReactionKind.DISLIKE,
ReactionKind.DISPROOF,
ReactionKind.REJECT
]: return -1
else: return 0
ReactionKind.REJECT,
]:
return -1
else:
return 0
class ReactedByDay(Base):
__tablename__ = "reacted_by_day"
id = None
reaction = Column(ForeignKey("reaction.id"), primary_key=True)
shout = Column(ForeignKey('shout.slug'), primary_key=True)
replyTo = Column(ForeignKey('reaction.id'), nullable=True)
shout = Column(ForeignKey("shout.slug"), primary_key=True)
replyTo = Column(ForeignKey("reaction.id"), nullable=True)
kind: int = Column(Enum(ReactionKind), nullable=False, comment="Reaction kind")
day = Column(DateTime, primary_key=True, default=datetime.now)
comment = Column(Boolean, default=False)
class ReactedStorage:
reacted = {
'shouts': {},
'topics': {},
'reactions': {}
}
rating = {
'shouts': {},
'topics': {},
'reactions': {}
}
reacted = {"shouts": {}, "topics": {}, "reactions": {}}
rating = {"shouts": {}, "topics": {}, "reactions": {}}
reactions = []
to_flush = []
period = 30 * 60 # sec
@ -70,44 +69,50 @@ class ReactedStorage:
async def get_shout(shout_slug):
self = ReactedStorage
async with self.lock:
return self.reacted['shouts'].get(shout_slug, [])
return self.reacted["shouts"].get(shout_slug, [])
@staticmethod
async def get_topic(topic_slug):
self = ReactedStorage
async with self.lock:
return self.reacted['topics'].get(topic_slug, [])
return self.reacted["topics"].get(topic_slug, [])
@staticmethod
async def get_comments(shout_slug):
self = ReactedStorage
async with self.lock:
return list(filter(lambda r: r.comment, self.reacted['shouts'].get(shout_slug, [])))
return list(
filter(lambda r: r.comment, self.reacted["shouts"].get(shout_slug, []))
)
@staticmethod
async def get_topic_comments(topic_slug):
self = ReactedStorage
async with self.lock:
return list(filter(lambda r: r.comment, self.reacted['topics'].get(topic_slug, [])))
return list(
filter(lambda r: r.comment, self.reacted["topics"].get(topic_slug, []))
)
@staticmethod
async def get_reaction_comments(reaction_id):
self = ReactedStorage
async with self.lock:
return list(filter(lambda r: r.comment, self.reacted['reactions'].get(reaction_id)))
            return list(
                filter(lambda r: r.comment, self.reacted["reactions"].get(reaction_id, []))
            )
@staticmethod
async def get_reaction(reaction_id):
self = ReactedStorage
async with self.lock:
return self.reacted['reactions'].get(reaction_id, [])
return self.reacted["reactions"].get(reaction_id, [])
@staticmethod
async def get_rating(shout_slug):
self = ReactedStorage
rating = 0
async with self.lock:
for r in self.reacted['shouts'].get(shout_slug, []):
for r in self.reacted["shouts"].get(shout_slug, []):
rating = rating + kind_to_rate(r.kind)
return rating
@ -116,7 +121,7 @@ class ReactedStorage:
self = ReactedStorage
rating = 0
async with self.lock:
for r in self.reacted['topics'].get(topic_slug, []):
for r in self.reacted["topics"].get(topic_slug, []):
rating = rating + kind_to_rate(r.kind)
return rating
@ -125,7 +130,7 @@ class ReactedStorage:
self = ReactedStorage
rating = 0
async with self.lock:
for r in self.reacted['reactions'].get(reaction_id, []):
for r in self.reacted["reactions"].get(reaction_id, []):
rating = rating + kind_to_rate(r.kind)
return rating
@ -135,47 +140,71 @@ class ReactedStorage:
async with self.lock:
with local_session() as session:
r = {
"day": datetime.now().replace(hour=0, minute=0, second=0, microsecond=0),
"day": datetime.now().replace(
hour=0, minute=0, second=0, microsecond=0
),
"reaction": reaction.id,
"kind": reaction.kind,
"shout": reaction.shout
"shout": reaction.shout,
}
if reaction.replyTo: r['replyTo'] = reaction.replyTo
if reaction.body: r['comment'] = True
reaction = ReactedByDay.create(**r)
self.reacted['shouts'][reaction.shout] = self.reacted['shouts'].get(reaction.shout, [])
self.reacted['shouts'][reaction.shout].append(reaction)
if reaction.replyTo:
self.reacted['reaction'][reaction.replyTo] = self.reacted['reactions'].get(reaction.shout, [])
self.reacted['reaction'][reaction.replyTo].append(reaction)
self.rating['reactions'][reaction.replyTo] = self.rating['reactions'].get(reaction.replyTo, 0) + kind_to_rate(reaction.kind)
r["replyTo"] = reaction.replyTo
if reaction.body:
r["comment"] = True
reaction = ReactedByDay.create(**r)
self.reacted["shouts"][reaction.shout] = self.reacted["shouts"].get(
reaction.shout, []
)
self.reacted["shouts"][reaction.shout].append(reaction)
if reaction.replyTo:
self.reacted["reaction"][reaction.replyTo] = self.reacted[
"reactions"
].get(reaction.shout, [])
self.reacted["reaction"][reaction.replyTo].append(reaction)
self.rating["reactions"][reaction.replyTo] = self.rating[
"reactions"
].get(reaction.replyTo, 0) + kind_to_rate(reaction.kind)
else:
self.rating['shouts'][reaction.replyTo] = self.rating['shouts'].get(reaction.shout, 0) + kind_to_rate(reaction.kind)
self.rating["shouts"][reaction.replyTo] = self.rating["shouts"].get(
reaction.shout, 0
) + kind_to_rate(reaction.kind)
@staticmethod
def init(session):
self = ReactedStorage
all_reactions = session.query(ReactedByDay).all()
print('[stat.reacted] %d reactions total' % len(all_reactions))
print("[stat.reacted] %d reactions total" % len(all_reactions))
for reaction in all_reactions:
shout = reaction.shout
topics = session.query(ShoutTopic.topic).where(ShoutTopic.shout == shout).all()
topics = (
session.query(ShoutTopic.topic).where(ShoutTopic.shout == shout).all()
)
kind = reaction.kind
self.reacted['shouts'][shout] = self.reacted['shouts'].get(shout, [])
self.reacted['shouts'][shout].append(reaction)
self.rating['shouts'][shout] = self.rating['shouts'].get(shout, 0) + kind_to_rate(kind)
self.reacted["shouts"][shout] = self.reacted["shouts"].get(shout, [])
self.reacted["shouts"][shout].append(reaction)
self.rating["shouts"][shout] = self.rating["shouts"].get(
shout, 0
) + kind_to_rate(kind)
for t in topics:
self.reacted['topics'][t] = self.reacted['topics'].get(t, [])
self.reacted['topics'][t].append(reaction)
self.rating['topics'][t] = self.rating['topics'].get(t, 0) + kind_to_rate(kind) # rating
self.reacted["topics"][t] = self.reacted["topics"].get(t, [])
self.reacted["topics"][t].append(reaction)
self.rating["topics"][t] = self.rating["topics"].get(
t, 0
) + kind_to_rate(
kind
) # rating
if reaction.replyTo:
self.reacted['reactions'][reaction.replyTo] = self.reacted['reactions'].get(reaction.replyTo, [])
self.reacted['reactions'][reaction.replyTo].append(reaction)
self.rating['reactions'][reaction.replyTo] = self.rating['reactions'].get(reaction.replyTo, 0) + kind_to_rate(reaction.kind)
ttt = self.reacted['topics'].values()
print('[stat.reacted] %d topics reacted' % len(ttt))
print('[stat.reacted] %d shouts reacted' % len(self.reacted['shouts']))
print('[stat.reacted] %d reactions reacted' % len(self.reacted['reactions']))
self.reacted["reactions"][reaction.replyTo] = self.reacted[
"reactions"
].get(reaction.replyTo, [])
self.reacted["reactions"][reaction.replyTo].append(reaction)
self.rating["reactions"][reaction.replyTo] = self.rating[
"reactions"
].get(reaction.replyTo, 0) + kind_to_rate(reaction.kind)
ttt = self.reacted["topics"].values()
print("[stat.reacted] %d topics reacted" % len(ttt))
print("[stat.reacted] %d shouts reacted" % len(self.reacted["shouts"]))
print("[stat.reacted] %d reactions reacted" % len(self.reacted["reactions"]))

View File

@ -5,6 +5,8 @@ from services.stat.viewed import ViewedStorage
from services.zine.shoutauthor import ShoutAuthorStorage
from orm.topic import ShoutTopic, TopicFollower
from typing import Dict
class TopicStat:
shouts_by_topic = {}
authors_by_topic = {}
@ -24,7 +26,9 @@ class TopicStat:
if topic in self.shouts_by_topic:
self.shouts_by_topic[topic].append(shout)
else:
self.shouts_by_topic[topic] = [shout, ]
self.shouts_by_topic[topic] = [
shout,
]
authors = await ShoutAuthorStorage.get_authors(shout)
if topic in self.authors_by_topic:
@ -32,8 +36,8 @@ class TopicStat:
else:
self.authors_by_topic[topic] = set(authors)
print('[stat.topics] authors sorted')
print('[stat.topics] shouts sorted')
print("[stat.topics] authors sorted")
print("[stat.topics] shouts sorted")
self.followers_by_topic = {}
followings = session.query(TopicFollower)
@ -44,7 +48,7 @@ class TopicStat:
self.followers_by_topic[topic].append(user)
else:
self.followers_by_topic[topic] = [user]
print('[stat.topics] followers sorted')
print("[stat.topics] followers sorted")
@staticmethod
async def get_shouts(topic):
@ -82,4 +86,3 @@ class TopicStat:
except Exception as err:
print("[stat.topics] errror: %s" % (err))
await asyncio.sleep(self.period)

View File

@ -10,17 +10,13 @@ class ViewedByDay(Base):
__tablename__ = "viewed_by_day"
id = None
shout = Column(ForeignKey('shout.slug'), primary_key=True)
shout = Column(ForeignKey("shout.slug"), primary_key=True)
day = Column(DateTime, primary_key=True, default=datetime.now)
value = Column(Integer)
class ViewedStorage:
viewed = {
'shouts': {},
'topics': {},
'reactions': {}
}
viewed = {"shouts": {}, "topics": {}, "reactions": {}}
this_day_views = {}
to_flush = []
period = 30 * 60 # sec
@ -33,39 +29,41 @@ class ViewedStorage:
for view in views:
shout = view.shout
topics = session.query(ShoutTopic.topic).filter(ShoutTopic.shout == shout).all()
topics = (
session.query(ShoutTopic.topic).filter(ShoutTopic.shout == shout).all()
)
value = view.value
if shout:
old_value = self.viewed['shouts'].get(shout, 0)
self.viewed['shouts'][shout] = old_value + value
old_value = self.viewed["shouts"].get(shout, 0)
self.viewed["shouts"][shout] = old_value + value
for t in topics:
old_topic_value = self.viewed['topics'].get(t, 0)
self.viewed['topics'][t] = old_topic_value + value
old_topic_value = self.viewed["topics"].get(t, 0)
self.viewed["topics"][t] = old_topic_value + value
                if shout not in self.this_day_views:
self.this_day_views[shout] = view
this_day_view = self.this_day_views[shout]
if this_day_view.day < view.day:
self.this_day_views[shout] = view
print('[stat.viewed] %d shouts viewed' % len(views))
print("[stat.viewed] %d shouts viewed" % len(views))
@staticmethod
async def get_shout(shout_slug):
self = ViewedStorage
async with self.lock:
return self.viewed['shouts'].get(shout_slug, 0)
return self.viewed["shouts"].get(shout_slug, 0)
@staticmethod
async def get_topic(topic_slug):
self = ViewedStorage
async with self.lock:
return self.viewed['topics'].get(topic_slug, 0)
return self.viewed["topics"].get(topic_slug, 0)
@staticmethod
async def get_reaction(reaction_id):
self = ViewedStorage
async with self.lock:
return self.viewed['reactions'].get(reaction_id, 0)
return self.viewed["reactions"].get(reaction_id, 0)
@staticmethod
async def increment(shout_slug):
@ -81,14 +79,19 @@ class ViewedStorage:
else:
this_day_view.value = this_day_view.value + 1
this_day_view.modified = True
self.viewed['shouts'][shout_slug] = self.viewed['shouts'].get(shout_slug, 0) + 1
self.viewed["shouts"][shout_slug] = (
self.viewed["shouts"].get(shout_slug, 0) + 1
)
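            # A view on a shout also increments the counters of all its topics.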
with local_session() as session:
topics = session.query(ShoutTopic.topic).where(ShoutTopic.shout == shout_slug).all()
topics = (
session.query(ShoutTopic.topic)
.where(ShoutTopic.shout == shout_slug)
.all()
)
for t in topics:
self.viewed['topics'][t] = self.viewed['topics'].get(t, 0) + 1
self.viewed["topics"][t] = self.viewed["topics"].get(t, 0) + 1
flag_modified(this_day_view, "value")
@staticmethod
async def flush_changes(session):
self = ViewedStorage

View File

@ -3,8 +3,10 @@ from pathlib import Path
import asyncio
from settings import SHOUTS_REPO
class GitTask:
''' every shout update use a new task '''
"""every shout update use a new task"""
queue = asyncio.Queue()
def __init__(self, input, username, user_email, comment):
@ -21,12 +23,13 @@ class GitTask:
Path(repo_path).mkdir()
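        # Bootstrap the repo: git init, set a committer identity, and create an initial commit so later commits have a parent.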
cmd = "cd %s && git init && " \
"git config user.name 'discours' && " \
"git config user.email 'discours@discours.io' && " \
"touch initial && git add initial && " \
"git commit -m 'init repo'" \
% (repo_path)
cmd = (
"cd %s && git init && "
"git config user.name 'discours' && "
"git config user.email 'discours@discours.io' && "
"touch initial && git add initial && "
"git commit -m 'init repo'" % (repo_path)
)
output = subprocess.check_output(cmd, shell=True)
print(output)
@ -42,12 +45,16 @@ class GitTask:
shout_filename = "%s.mdx" % (self.slug)
shout_full_filename = "%s/%s" % (repo_path, shout_filename)
with open(shout_full_filename, mode='w', encoding='utf-8') as shout_file:
shout_file.write(bytes(self.shout_body,'utf-8').decode('utf-8','ignore'))
with open(shout_full_filename, mode="w", encoding="utf-8") as shout_file:
shout_file.write(bytes(self.shout_body, "utf-8").decode("utf-8", "ignore"))
author = "%s <%s>" % (self.username, self.user_email)
cmd = "cd %s && git add %s && git commit -m '%s' --author='%s'" % \
(repo_path, shout_filename, self.comment, author)
cmd = "cd %s && git add %s && git commit -m '%s' --author='%s'" % (
repo_path,
shout_filename,
self.comment,
author,
)
output = subprocess.check_output(cmd, shell=True)
print(output)

View File

@ -1,4 +1,3 @@
import asyncio
from base.orm import local_session
from orm.shout import ShoutAuthor
@ -16,7 +15,7 @@ class ShoutAuthorStorage:
for sa in sas:
self.authors_by_shout[sa.shout] = self.authors_by_shout.get(sa.shout, [])
self.authors_by_shout[sa.shout].append([sa.user, sa.caption])
print('[zine.authors] %d shouts preprocessed' % len(self.authors_by_shout))
print("[zine.authors] %d shouts preprocessed" % len(self.authors_by_shout))
@staticmethod
async def get_authors(shout):

View File

@ -1,4 +1,3 @@
import asyncio
from datetime import datetime, timedelta
from sqlalchemy import and_, desc, func, select
@ -18,11 +17,13 @@ class ShoutsCache:
@staticmethod
async def prepare_recent_published():
with local_session() as session:
stmt = select(Shout).\
options(selectinload(Shout.authors), selectinload(Shout.topics)).\
where(Shout.publishedAt != None).\
order_by(desc("publishedAt")).\
limit(ShoutsCache.limit)
stmt = (
select(Shout)
.options(selectinload(Shout.authors), selectinload(Shout.topics))
.where(Shout.publishedAt != None)
.order_by(desc("publishedAt"))
.limit(ShoutsCache.limit)
)
shouts = []
for row in session.execute(stmt):
shout = row.Shout
@ -35,13 +36,12 @@ class ShoutsCache:
@staticmethod
async def prepare_recent_all():
with local_session() as session:
stmt = select(Shout).\
options(
selectinload(Shout.authors),
selectinload(Shout.topics)
).\
order_by(desc("createdAt")).\
limit(ShoutsCache.limit)
stmt = (
select(Shout)
.options(selectinload(Shout.authors), selectinload(Shout.topics))
.order_by(desc("createdAt"))
.limit(ShoutsCache.limit)
)
shouts = []
for row in session.execute(stmt):
shout = row.Shout
@ -55,16 +55,18 @@ class ShoutsCache:
@staticmethod
async def prepare_recent_reacted():
with local_session() as session:
stmt = select(Shout, func.max(Reaction.createdAt).label("reactionCreatedAt")).\
options(
stmt = (
select(Shout, func.max(Reaction.createdAt).label("reactionCreatedAt"))
.options(
selectinload(Shout.authors),
selectinload(Shout.topics),
).\
join(Reaction, Reaction.shout == Shout.slug).\
where(and_(Shout.publishedAt != None, Reaction.deletedAt == None)).\
group_by(Shout.slug).\
order_by(desc("reactionCreatedAt")).\
limit(ShoutsCache.limit)
)
.join(Reaction, Reaction.shout == Shout.slug)
.where(and_(Shout.publishedAt != None, Reaction.deletedAt == None))
.group_by(Shout.slug)
.order_by(desc("reactionCreatedAt"))
.limit(ShoutsCache.limit)
)
shouts = []
for row in session.execute(stmt):
shout = row.Shout
@ -75,19 +77,23 @@ class ShoutsCache:
ShoutsCache.recent_reacted = shouts
print("[zine.cache] %d recently reacted shouts " % len(shouts))
@staticmethod
async def prepare_top_overall():
with local_session() as session:
# with reacted times counter
stmt = select(Shout,
func.count(Reaction.id).label("reacted")).\
options(selectinload(Shout.authors), selectinload(Shout.topics), selectinload(Shout.reactions)).\
join(Reaction).\
where(and_(Shout.publishedAt != None, Reaction.deletedAt == None)).\
group_by(Shout.slug).\
order_by(desc("reacted")).\
limit(ShoutsCache.limit)
stmt = (
select(Shout, func.count(Reaction.id).label("reacted"))
.options(
selectinload(Shout.authors),
selectinload(Shout.topics),
selectinload(Shout.reactions),
)
.join(Reaction)
.where(and_(Shout.publishedAt != None, Reaction.deletedAt == None))
.group_by(Shout.slug)
.order_by(desc("reacted"))
.limit(ShoutsCache.limit)
)
shouts = []
# with rating synthetic counter
for row in session.execute(stmt):
@ -103,13 +109,15 @@ class ShoutsCache:
async def prepare_top_month():
month_ago = datetime.now() - timedelta(days=30)
with local_session() as session:
stmt = select(Shout, func.count(Reaction.id).label("reacted")).\
options(selectinload(Shout.authors), selectinload(Shout.topics)).\
join(Reaction).\
where(and_(Shout.createdAt > month_ago, Shout.publishedAt != None)).\
group_by(Shout.slug).\
order_by(desc("reacted")).\
limit(ShoutsCache.limit)
stmt = (
select(Shout, func.count(Reaction.id).label("reacted"))
.options(selectinload(Shout.authors), selectinload(Shout.topics))
.join(Reaction)
.where(and_(Shout.createdAt > month_ago, Shout.publishedAt != None))
.group_by(Shout.slug)
.order_by(desc("reacted"))
.limit(ShoutsCache.limit)
)
shouts = []
for row in session.execute(stmt):
shout = row.Shout
@ -124,13 +132,15 @@ class ShoutsCache:
async def prepare_top_viewed():
month_ago = datetime.now() - timedelta(days=30)
with local_session() as session:
stmt = select(Shout, func.sum(ViewedByDay.value).label("viewed")).\
options(selectinload(Shout.authors), selectinload(Shout.topics)).\
join(ViewedByDay).\
where(and_(ViewedByDay.day > month_ago, Shout.publishedAt != None)).\
group_by(Shout.slug).\
order_by(desc("viewed")).\
limit(ShoutsCache.limit)
stmt = (
select(Shout, func.sum(ViewedByDay.value).label("viewed"))
.options(selectinload(Shout.authors), selectinload(Shout.topics))
.join(ViewedByDay)
.where(and_(ViewedByDay.day > month_ago, Shout.publishedAt != None))
.group_by(Shout.slug)
.order_by(desc("viewed"))
.limit(ShoutsCache.limit)
)
shouts = []
for row in session.execute(stmt):
shout = row.Shout

View File

@ -14,7 +14,7 @@ class TopicStorage:
for topic in self.topics.values():
self.load_parents(topic)
print('[zine.topics] %d precached' % len(self.topics.keys()))
print("[zine.topics] %d precached" % len(self.topics.keys()))
@staticmethod
def load_parents(topic):
@ -47,7 +47,9 @@ class TopicStorage:
async def get_topics_by_community(community):
self = TopicStorage
async with self.lock:
topics = filter(lambda topic: topic.community == community, self.topics.values())
topics = filter(
lambda topic: topic.community == community, self.topics.values()
)
return list(topics)
@staticmethod

View File

@ -1,4 +1,3 @@
from pathlib import Path
from os import environ
PORT = 8080
@ -8,9 +7,16 @@ BACKEND_URL = environ.get("BACKEND_URL") or "https://localhost:8080"
OAUTH_CALLBACK_URL = environ.get("OAUTH_CALLBACK_URL") or "https://localhost:8080"
RESET_PWD_URL = environ.get("RESET_PWD_URL") or "https://localhost:8080/reset_pwd"
CONFIRM_EMAIL_URL = environ.get("CONFIRM_EMAIL_URL") or "https://new.discours.io"
ERROR_URL_ON_FRONTEND = environ.get("ERROR_URL_ON_FRONTEND") or "https://new.discours.io"
ERROR_URL_ON_FRONTEND = (
environ.get("ERROR_URL_ON_FRONTEND") or "https://new.discours.io"
)
DB_URL = environ.get("DATABASE_URL") or environ.get("DB_URL") or "postgresql://postgres@localhost:5432/discoursio" or "sqlite:///db.sqlite3"
DB_URL = (
environ.get("DATABASE_URL")
or environ.get("DB_URL")
or "postgresql://postgres@localhost:5432/discoursio"
or "sqlite:///db.sqlite3"
)
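# DB_URL falls back from DATABASE_URL to DB_URL to the local postgres default.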
JWT_ALGORITHM = "HS256"
JWT_SECRET_KEY = "8f1bd7696ffb482d8486dfbc6e7d16dd-secret-key"
JWT_LIFE_SPAN = 24 * 60 * 60 # seconds
@ -26,7 +32,7 @@ OAUTH_CLIENTS = {}
for provider in OAUTH_PROVIDERS:
OAUTH_CLIENTS[provider] = {
"id": environ.get(provider + "_OAUTH_ID"),
"key" : environ.get(provider + "_OAUTH_KEY")
"key": environ.get(provider + "_OAUTH_KEY"),
}
SHOUTS_REPO = "content"