Merge remote-tracking branch 'origin/main' into storages-to-qeuries
commit 70744966fa

README.md
@@ -24,10 +24,8 @@ apt install redis nginx

First, install Postgres. Then you'll need some data:

```
-psql -U postgres
-> create database discoursio;
-> \q
+brew install postgres
+createdb discoursio
python server.py migrate
```

@@ -42,3 +40,7 @@ python3 server.py dev

Put the 'Authorization' header with the token from the signIn query or the registerUser mutation.
+
+# How to debug Ackee
+
+Set the ACKEE_TOKEN var
@@ -9,7 +9,7 @@ from auth.credentials import AuthCredentials, AuthUser
from services.auth.users import UserStorage
from settings import SESSION_TOKEN_HEADER
from auth.tokenstorage import SessionToken
-from base.exceptions import InvalidToken
+from base.exceptions import InvalidToken, OperationNotAllowed, Unauthorized


class JWTAuthenticate(AuthenticationBackend):

@@ -30,27 +30,26 @@ class JWTAuthenticate(AuthenticationBackend):
        try:
-            if len(token.split('.')) > 1:
-                payload = await SessionToken.verify(token)
-                if payload is None:
-                    return AuthCredentials(scopes=[]), AuthUser(user_id=None)
-                user = await UserStorage.get_user(payload.user_id)
-                if not user:
-                    return AuthCredentials(scopes=[]), AuthUser(user_id=None)
-                scopes = await user.get_permission()
-                return (
-                    AuthCredentials(
-                        user_id=payload.user_id,
-                        scopes=scopes,
-                        logged_in=True
-                    ),
-                    user,
-                )
-            else:
-                InvalidToken("please try again")
+            payload = await SessionToken.verify(token)
        except Exception as exc:
            print("[auth.authenticate] session token verify error")
            print(exc)
-            return AuthCredentials(scopes=[], error_message=str(exc)), AuthUser(
-                user_id=None
-            )
+            return AuthCredentials(scopes=[], error_message=str(exc)), AuthUser(user_id=None)
+
+        if payload is None:
+            return AuthCredentials(scopes=[]), AuthUser(user_id=None)
+
+        user = await UserStorage.get_user(payload.user_id)
+        if not user:
+            return AuthCredentials(scopes=[]), AuthUser(user_id=None)
+
+        scopes = await user.get_permission()
+        return (
+            AuthCredentials(user_id=payload.user_id, scopes=scopes, logged_in=True),
+            user,
+        )

def login_required(func):

@@ -58,10 +57,9 @@ def login_required(func):
    async def wrap(parent, info: GraphQLResolveInfo, *args, **kwargs):
        # print('[auth.authenticate] login required for %r with info %r' % (func, info))  # debug only
        auth: AuthCredentials = info.context["request"].auth
-        if auth and auth.user_id:
-            print(auth)  # debug only
+        # print(auth)
        if not auth.logged_in:
-            return {"error": auth.error_message or "Please login"}
+            raise OperationNotAllowed(auth.error_message or "Please login")
        return await func(parent, info, *args, **kwargs)

    return wrap

@@ -73,9 +71,9 @@ def permission_required(resource, operation, func):
        print('[auth.authenticate] permission_required for %r with info %r' % (func, info))  # debug only
        auth: AuthCredentials = info.context["request"].auth
        if not auth.logged_in:
-            return {"error": auth.error_message or "Please login"}
+            raise Unauthorized(auth.error_message or "Please login")

-        # TODO: add check permission logic
+        # TODO: add actual check permission logic here

        return await func(parent, info, *args, **kwargs)
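With this change a failed check raises instead of returning an error dict, so resolvers no longer need to inspect the decorator's return value. A minimal usage sketch (the resolver name and return shape are illustrative, not from the diff):

```python
from auth.authenticate import login_required


@login_required
async def my_resolver(_, info):
    # only reached when info.context["request"].auth.logged_in is True;
    # otherwise the decorator raises OperationNotAllowed
    auth = info.context["request"].auth
    return {"user_id": auth.user_id}
```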
@@ -2,7 +2,7 @@ from typing import List, Optional, Text

from pydantic import BaseModel

-from base.exceptions import OperationNotAllowed
+from base.exceptions import Unauthorized


class Permission(BaseModel):

@@ -17,11 +17,13 @@ class AuthCredentials(BaseModel):

    @property
    def is_admin(self):
        # TODO: check admin logic
        return True

    async def permissions(self) -> List[Permission]:
        if self.user_id is None:
-            raise OperationNotAllowed("Please login first")
+            raise Unauthorized("Please login first")
        # TODO: implement permissions logic
        return NotImplemented
@@ -10,13 +10,13 @@ lang_subject = {
}


-async def send_auth_email(user, token, lang="ru"):
+async def send_auth_email(user, token, template="email_confirmation", lang="ru"):
    try:
        to = "%s <%s>" % (user.name, user.email)
        if lang not in ['ru', 'en']:
            lang = 'ru'
        subject = lang_subject.get(lang, lang_subject["en"])
-        template = "email_confirmation_" + lang
+        template = template + "_" + lang
        payload = {
            "from": noreply,
            "to": to,
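The template name is now composed from the new `template` argument plus the language suffix, so one mailer can serve several kinds of email. A small sketch of the naming rule (the "email_reset" value is a hypothetical example, not from the diff):

```python
def template_name(template="email_confirmation", lang="ru"):
    # mirrors the diff: fall back to 'ru', then append the language suffix
    if lang not in ['ru', 'en']:
        lang = 'ru'
    return template + "_" + lang

assert template_name() == "email_confirmation_ru"
assert template_name("email_reset", "en") == "email_reset_en"
```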
@@ -34,7 +34,7 @@ class JWTCodec:
                issuer="discours"
            )
            r = TokenPayload(**payload)
-            print('[auth.jwtcodec] debug payload %r' % r)
+            # print('[auth.jwtcodec] debug payload %r' % r)
            return r
        except jwt.InvalidIssuedAtError:
            print('[auth.jwtcodec] invalid issued at: %r' % r)
@@ -2,7 +2,7 @@ from authlib.integrations.starlette_client import OAuth
from starlette.responses import RedirectResponse
from auth.identity import Identity
from auth.tokenstorage import TokenStorage
-from settings import OAUTH_CLIENTS
+from settings import OAUTH_CLIENTS, FRONTEND_URL

oauth = OAuth()


@@ -84,6 +84,6 @@ async def oauth_authorize(request):
    }
    user = Identity.oauth(user_input)
    session_token = await TokenStorage.create_session(user)
-    response = RedirectResponse(url="https://new.discours.io/confirm")
+    response = RedirectResponse(url=FRONTEND_URL + "/confirm")
    response.set_cookie("token", session_token)
    return response
@@ -12,6 +12,7 @@ class RedisCache:
        if self._instance is not None:
            return
        self._instance = await from_url(self._uri, encoding="utf-8")
+        # print(self._instance)

    async def disconnect(self):
        if self._instance is None:

@@ -23,10 +24,11 @@ class RedisCache:
    async def execute(self, command, *args, **kwargs):
        while not self._instance:
            await sleep(1)
-        try:
-            await self._instance.execute_command(command, *args, **kwargs)
-        except Exception:
-            pass
+        try:
+            print("[redis] " + command + ' ' + ' '.join(args))
+            return await self._instance.execute_command(command, *args, **kwargs)
+        except Exception:
+            pass

    async def lrange(self, key, start, stop):
        return await self._instance.lrange(key, start, stop)
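`execute` now logs every command and returns the reply instead of swallowing it, which the inbox resolvers below rely on. A usage sketch, assuming `redis` is the module-level RedisCache instance exported by base.redis:

```python
import json

from base.redis import redis


async def demo(chat_id: str):
    # both calls go through execute(), which prints "[redis] SET ..." and
    # returns the reply; errors are still silently swallowed by the except
    await redis.execute("SET", f"chats/{chat_id}", json.dumps({"id": chat_id}))
    raw = await redis.execute("GET", f"chats/{chat_id}")
    return json.loads(raw) if raw else None
```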
@@ -314,9 +314,6 @@ async def handle_auto():

async def main():
    if len(sys.argv) > 1:
        cmd = sys.argv[1]
        if type(cmd) == str:
            print("[migration] command: " + cmd)
        init_tables()
        await handle_auto()
    else:
@@ -4,7 +4,7 @@ from datetime import datetime, timezone

import frontmatter

-from .extract import extract_html, prepare_html_body
+from .extract import extract_html, extract_media
from .utils import DateTimeEncoder

OLD_DATE = "2016-03-05 22:22:00.350000"

@@ -50,11 +50,12 @@ def export_mdx(r):
def export_body(shout, storage):
    entry = storage["content_items"]["by_oid"][shout["oid"]]
    if entry:
-        shout["body"], media = prepare_html_body(entry)  # prepare_md_body(entry)
+        body = extract_html(entry)
+        media = extract_media(entry)
+        shout["body"] = body  # prepare_html_body(entry) # prepare_md_body(entry)
+        shout["media"] = media
        export_mdx(shout)
        print("[export] html for %s" % shout["slug"])
-        body, _media = extract_html(entry)
        open(contentDir + shout["slug"] + ".html", "w").write(body)
    else:
        raise Exception("no content_items entry found")
@@ -3,7 +3,8 @@ import os
import re
import uuid

from .html2text import html2text
+from bs4 import BeautifulSoup


TOOLTIP_REGEX = r"(\/\/\/(.+)\/\/\/)"
contentDir = os.path.join(

@@ -258,47 +259,44 @@ def extract_md(body, oid=""):
    return newbody


-def prepare_md_body(entry):
-    # body modifications
-    body = ""
-    kind = entry.get("type")
-    addon = ""
-    if kind == "Video":
-        addon = ""
-        for m in entry.get("media", []):
-            if "youtubeId" in m:
-                addon += "<VideoPlayer youtubeId='" + m["youtubeId"] + "' />\n"
-            elif "vimeoId" in m:
-                addon += "<VideoPlayer vimeoId='" + m["vimeoId"] + "' />\n"
-            else:
-                print("[extract] media is not supported")
-                print(m)
-        body = "import VideoPlayer from '$/components/Article/VideoPlayer'\n\n" + addon
-
-    elif kind == "Music":
-        addon = ""
-        for m in entry.get("media", []):
-            artist = m.get("performer")
-            trackname = ""
-            if artist:
-                trackname += artist + " - "
-            if "title" in m:
-                trackname += m.get("title", "")
-            addon += (
-                '<AudioPlayer src="'
-                + m.get("fileUrl", "")
-                + '" title="'
-                + trackname
-                + '" />\n'
-            )
-        body = "import AudioPlayer from '$/components/Article/AudioPlayer'\n\n" + addon
-
-    body_orig, media = extract_html(entry)
-    if body_orig:
-        body += extract_md(html2text(body_orig), entry["_id"])
-    if not body:
-        print("[extract] empty MDX body")
-    return body, media
+def extract_media(entry):
+    ''' normalized media extraction method '''
+    # media [ { title pic url body } ]
+    kind = entry.get("type")
+    if not kind:
+        print(entry)
+        raise Exception("shout no layout")
+    media = []
+    for m in entry.get("media") or []:
+        # title
+        title = m.get("title", "").replace("\n", " ").replace(" ", " ")
+        artist = m.get("performer") or m.get("artist")
+        if artist:
+            title = artist + " - " + title
+
+        # pic
+        url = m.get("fileUrl") or m.get("url", "")
+        pic = ""
+        if m.get("thumborId"):
+            pic = cdn + "/unsafe/1600x/" + m["thumborId"]
+
+        # url
+        if not url:
+            if kind == "Image":
+                url = pic
+            elif "youtubeId" in m:
+                url = "https://youtube.com/?watch=" + m["youtubeId"]
+            elif "vimeoId" in m:
+                url = "https://vimeo.com/" + m["vimeoId"]
+            else:
+                print("[extract] media is not supported")
+                print(m)
+        # body
+        body = m.get("body") or m.get("literatureBody") or ""
+        media.append({
+            "url": url,
+            "pic": pic,
+            "title": title,
+            "body": body
+        })
+    return media


def prepare_html_body(entry):

@@ -308,7 +306,7 @@ def prepare_html_body(entry):
    addon = ""
    if kind == "Video":
        addon = ""
-        for m in entry.get("media", []):
+        for m in entry.get("media") or []:
            if "youtubeId" in m:
                addon += '<iframe width="420" height="345" src="http://www.youtube.com/embed/'
                addon += m["youtubeId"]

@@ -325,7 +323,7 @@ def prepare_html_body(entry):

    elif kind == "Music":
        addon = ""
-        for m in entry.get("media", []):
+        for m in entry.get("media") or []:
            artist = m.get("performer")
            trackname = ""
            if artist:

@@ -339,68 +337,12 @@ def prepare_html_body(entry):
            addon += '"></audio></figure>'
        body += addon

-    body, media = extract_html(entry)
+    body = extract_html(entry)
    # if body_orig: body += extract_md(html2text(body_orig), entry['_id'])
    if not body:
        print("[extract] empty HTML body")
-    return body, media
+    return body


def extract_html(entry):
    body_orig = (entry.get("body") or "").replace('\(', '(').replace('\)', ')')
-    media = entry.get("media", [])
-    kind = entry.get("type") or ""
-    print("[extract] kind: " + kind)
-    mbodies = set([])
-    if media:
-        # print('[extract] media is found')
-        for m in media:
-            mbody = m.get("body", "")
-            addon = ""
-            if kind == "Literature":
-                mbody = m.get("literatureBody") or m.get("body", "")
-            elif kind == "Image":
-                cover = ""
-                if "thumborId" in entry:
-                    cover = cdn + "/unsafe/1600x/" + entry["thumborId"]
-                if not cover:
-                    if "image" in entry:
-                        cover = entry["image"].get("url", "")
-                    if "cloudinary" in cover:
-                        cover = ""
-                # else: print('[extract] cover: ' + cover)
-                title = m.get("title", "").replace("\n", " ").replace(" ", " ")
-                u = m.get("thumborId") or cover or ""
-                if title:
-                    addon += "<h4>" + title + "</h4>\n"
-                if not u.startswith("http"):
-                    u = s3 + u
-                if not u:
-                    print("[extract] no image url for " + str(m))
-                if "cloudinary" in u:
-                    u = "img/lost.svg"
-                if u != cover or (u == cover and media.index(m) == 0):
-                    addon += '<img src="' + u + '" alt="' + title + '" />\n'
-            if addon:
-                body_orig += addon
-                # print('[extract] item addon: ' + addon)
-            # if addon: print('[extract] addon: %s' % addon)
-            if mbody and mbody not in mbodies:
-                mbodies.add(mbody)
-                body_orig += mbody
-        if len(list(mbodies)) != len(media):
-            print(
-                "[extract] %d/%d media item bodies appended"
-                % (len(list(mbodies)), len(media))
-            )
-        # print('[extract] media items body: \n' + body_orig)
    if not body_orig:
        for up in entry.get("bodyHistory", []) or []:
            body_orig = up.get("text", "") or ""
            if body_orig:
                print("[extract] got html body from history")
                break
    if not body_orig:
        print("[extract] empty HTML body")
-    # body_html = str(BeautifulSoup(body_orig, features="html.parser"))
-    return body_orig, media
+    body_html = str(BeautifulSoup(body_orig, features="html.parser"))
+    return body_html
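`extract_media` normalizes every media item to the same four keys regardless of layout, while `extract_html` now returns only the cleaned body. A sketch of what `extract_media` returns for a Music entry (the entry values are made up for illustration):

```python
entry = {
    "type": "Music",
    "media": [{
        "title": "Track",
        "performer": "Artist",
        "fileUrl": "https://cdn.example/track.mp3",
        "body": "<p>liner notes</p>",
    }],
}

# extract_media(entry) would yield:
# [{"url": "https://cdn.example/track.mp3",
#   "pic": "",
#   "title": "Artist - Track",
#   "body": "<p>liner notes</p>"}]
```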
@@ -4,7 +4,7 @@ from dateutil.parser import parse as date_parse
from sqlalchemy.exc import IntegrityError
from transliterate import translit
from base.orm import local_session
-from migration.extract import prepare_html_body
+from migration.extract import extract_html, extract_media
from orm.reaction import Reaction, ReactionKind
from orm.shout import Shout, ShoutTopic, ShoutReactionsFollower
from orm.user import User

@@ -103,11 +103,11 @@ async def migrate(entry, storage):
        "authors": [],
        "topics": set([])
    }
-    topics_by_oid = storage["topics"]["by_oid"]
-    users_by_oid = storage["users"]["by_oid"]
-
-    # author
-    oid = entry.get("createdBy", entry.get("_id", entry.get("oid")))
-    userdata = users_by_oid.get(oid)
+    users_by_oid = storage["users"]["by_oid"]
+    user_oid = entry.get("createdBy", "")
+    userdata = users_by_oid.get(user_oid)
    user = None
    if not userdata:
        app = entry.get("application")

@@ -139,6 +139,8 @@ async def migrate(entry, storage):
    # timestamps
    r["createdAt"] = date_parse(entry.get("createdAt", OLD_DATE))
    r["updatedAt"] = date_parse(entry["updatedAt"]) if "updatedAt" in entry else ts
+
+    # visibility
    if entry.get("published"):
        r["publishedAt"] = date_parse(entry.get("publishedAt", OLD_DATE))
        r["visibility"] = "public"

@@ -150,25 +152,67 @@ async def migrate(entry, storage):
            session.commit()
    else:
        r["visibility"] = "authors"

    if "deletedAt" in entry:
        r["deletedAt"] = date_parse(entry["deletedAt"])

-    # topics
-    category = entry.get("category")
-    for oid in [category, ] + entry.get("tags", []):
-        t = storage["topics"]["by_oid"].get(oid)
-        if t:
-            tslug = storage["topics"]["by_oid"][oid]["slug"]
-            r["topics"].add(tslug)
-    r["topics"] = list(r["topics"])
-    # main topic
-    mt = topics_by_oid.get(category)
-    if mt and mt.get("slug"):
-        r["mainTopic"] = storage["replacements"].get(mt["slug"]) or r["topics"][0]
+    r['topics'] = await add_topics_follower(entry, storage, userslug)
+    r['mainTopic'] = r['topics'][0]
+
+    entry["topics"] = r["topics"]
+    entry["cover"] = r["cover"]
+
+    # body
+    r["body"] = extract_html(entry)
+    media = extract_media(entry)
+    if media:
+        r["media"] = json.dumps(media, ensure_ascii=True)
+
+    shout_dict = r.copy()
+
+    # user
+    user = await get_user(userslug, userdata, storage, user_oid)
+    shout_dict["authors"] = [user, ]
+    del shout_dict["topics"]
+    try:
+        # save shout to db
+        await create_shout(shout_dict, userslug)
+    except IntegrityError as e:
+        print(e)
+        await resolve_create_shout(shout_dict, userslug)
+    except Exception as e:
+        raise Exception(e)
+
+    # shout topics aftermath
+    shout_dict["topics"] = await topics_aftermath(r, storage)
+
+    # content_item ratings to reactions
+    await content_ratings_to_reactions(entry, shout_dict["slug"])
+
+    # shout views
+    await ViewedStorage.increment(shout_dict["slug"], amount=entry.get("views", 1))
+    # del shout_dict['ratings']
+
+    shout_dict["oid"] = entry.get("_id", "")
+    storage["shouts"]["by_oid"][entry["_id"]] = shout_dict
+    storage["shouts"]["by_slug"][slug] = shout_dict
+    return shout_dict
+
+
+async def add_topics_follower(entry, storage, userslug):
+    topics = set([])
+    category = entry.get("category")
+    topics_by_oid = storage["topics"]["by_oid"]
+    oids = [category, ] + entry.get("tags", [])
+    for toid in oids:
+        tslug = topics_by_oid.get(toid, {}).get("slug")
+        if tslug:
+            topics.add(tslug)
+    ttt = list(topics)
+    # add author as TopicFollower
+    with local_session() as session:
-        for tpc in r['topics']:
+        for tpc in topics:
            try:
                tf = session.query(
                    TopicFollower

@@ -184,24 +228,19 @@ async def migrate(entry, storage):
                    auto=True
                )
                session.add(tf)
                session.commit()
            except IntegrityError:
                print('[migration.shout] hidden by topic ' + tpc)
-                r["visibility"] = "authors"
-                r["publishedAt"] = None
-                r["topics"].remove(tpc)
+    # main topic
+    maintopic = storage["replacements"].get(topics_by_oid.get(category, {}).get("slug"))
+    if maintopic in ttt:
+        ttt.remove(maintopic)
+    ttt.insert(0, maintopic)
+    return ttt

-    entry["topics"] = r["topics"]
-    entry["cover"] = r["cover"]
-
-    # body
-    r["body"], media = prepare_html_body(entry)
-    if media:
-        r["media"] = json.dumps(media, ensure_ascii=True)
-    # save shout to db
-    s = object()
-    shout_dict = r.copy()
-    del shout_dict["topics"]
+async def get_user(userslug, userdata, storage, oid):
+    user = None
+    with local_session() as session:
+        if not user and userslug:
+            user = session.query(User).filter(User.slug == userslug).first()

@@ -216,60 +255,56 @@ async def migrate(entry, storage):
            userdata["id"] = user.id
            userdata["createdAt"] = user.createdAt
            storage["users"]["by_slug"][userdata["slug"]] = userdata
-            storage["users"]["by_oid"][entry["_id"]] = userdata
+            storage["users"]["by_oid"][oid] = userdata
    if not user:
        raise Exception("could not get a user")
-    shout_dict["authors"] = [user, ]
-    try:
-        await create_shout(shout_dict, userslug)
-    except IntegrityError as e:
-        with local_session() as session:
-            s = session.query(Shout).filter(Shout.slug == shout_dict["slug"]).first()
-            bump = False
-            if s:
-                if s.authors[0] != userslug:
-                    # create new with different slug
-                    shout_dict["slug"] += '-' + shout_dict["layout"]
-                    try:
-                        await create_shout(shout_dict, userslug)
-                    except IntegrityError as e:
-                        print(e)
-                        bump = True
-                else:
-                    # update old
-                    for key in shout_dict:
-                        if key in s.__dict__:
-                            if s.__dict__[key] != shout_dict[key]:
-                                print(
-                                    "[migration] shout already exists, but differs in %s"
-                                    % key
-                                )
-                                bump = True
-                        else:
-                            print("[migration] shout already exists, but lacks %s" % key)
-                            bump = True
-                    if bump:
-                        s.update(shout_dict)
-            else:
-                print("[migration] something went wrong with shout: \n%r" % shout_dict)
-                raise e
-            session.commit()
-    except Exception as e:
-        print(e)
-        print(s)
-        raise Exception
+    return user

-    # shout topics aftermath
-    shout_dict["topics"] = []
-    for tpc in r["topics"]:

+async def resolve_create_shout(shout_dict, userslug):
+    with local_session() as session:
+        s = session.query(Shout).filter(Shout.slug == shout_dict["slug"]).first()
+        bump = False
+        if s:
+            if s.authors[0] != userslug:
+                # create new with different slug
+                shout_dict["slug"] += '-' + shout_dict["layout"]
+                try:
+                    await create_shout(shout_dict, userslug)
+                except IntegrityError as e:
+                    print(e)
+                    bump = True
+            else:
+                # update old
+                for key in shout_dict:
+                    if key in s.__dict__:
+                        if s.__dict__[key] != shout_dict[key]:
+                            print(
+                                "[migration] shout already exists, but differs in %s"
+                                % key
+                            )
+                            bump = True
+                    else:
+                        print("[migration] shout already exists, but lacks %s" % key)
+                        bump = True
+            if bump:
+                s.update(shout_dict)
+        else:
+            print("[migration] something went wrong with shout: \n%r" % shout_dict)
+            raise Exception("")
+        session.commit()


+async def topics_aftermath(entry, storage):
+    r = []
+    for tpc in filter(lambda x: bool(x), entry["topics"]):
        oldslug = tpc
        newslug = storage["replacements"].get(oldslug, oldslug)
        if newslug:
            with local_session() as session:
                shout_topic_old = (
                    session.query(ShoutTopic)
-                    .filter(ShoutTopic.shout == shout_dict["slug"])
+                    .filter(ShoutTopic.shout == entry["slug"])
                    .filter(ShoutTopic.topic == oldslug)
                    .first()
                )

@@ -278,25 +313,27 @@ async def migrate(entry, storage):
                else:
                    shout_topic_new = (
                        session.query(ShoutTopic)
-                        .filter(ShoutTopic.shout == shout_dict["slug"])
+                        .filter(ShoutTopic.shout == entry["slug"])
                        .filter(ShoutTopic.topic == newslug)
                        .first()
                    )
                    if not shout_topic_new:
                        try:
                            ShoutTopic.create(
-                                **{"shout": shout_dict["slug"], "topic": newslug}
+                                **{"shout": entry["slug"], "topic": newslug}
                            )
                        except Exception:
                            print("[migration] shout topic error: " + newslug)
                session.commit()
-            if newslug not in shout_dict["topics"]:
-                shout_dict["topics"].append(newslug)
+            if newslug not in r:
+                r.append(newslug)
        else:
            print("[migration] ignored topic slug: \n%r" % tpc["slug"])
            # raise Exception
+    return r


-    # content_item ratings to reactions
+async def content_ratings_to_reactions(entry, slug):
    try:
        with local_session() as session:
            for content_rating in entry.get("ratings", []):

@@ -316,7 +353,7 @@ async def migrate(entry, storage):
                    if content_rating["value"] > 0
                    else ReactionKind.DISLIKE,
                    "createdBy": reactedBy.slug,
-                    "shout": shout_dict["slug"],
+                    "shout": slug,
                }
                cts = content_rating.get("createdAt")
                if cts:

@@ -340,11 +377,3 @@ async def migrate(entry, storage):
            session.commit()
    except Exception:
        raise Exception("[migration] content_item.ratings error: \n%r" % content_rating)
-
-    # shout views
-    await ViewedStorage.increment(shout_dict["slug"], amount=entry.get("views", 1))
-    # del shout_dict['ratings']
-    shout_dict["oid"] = entry.get("_id")
-    storage["shouts"]["by_oid"][entry["_id"]] = shout_dict
-    storage["shouts"]["by_slug"][slug] = shout_dict
-    return shout_dict
@@ -547,6 +547,7 @@
    "poetry-slam": "poetry-slam",
    "pokoy": "peace",
    "police": "police",
+    "politicheskoe-fentezi": "political-fantasy",
    "politics": "politics",
    "politzaklyuchennye": "political-prisoners",
    "polsha": "poland",
@@ -1,5 +1,6 @@
from base.orm import local_session
-from migration.extract import extract_md, html2text
+from migration.extract import extract_md
+from migration.html2text import html2text
from orm import Topic
@@ -17,7 +17,7 @@ def migrate(entry):
        "username": email,
        "email": email,
        "createdAt": parse(entry["createdAt"]),
-        "emailConfirmed": bool(entry["emails"][0]["verified"]),
+        "emailConfirmed": ("@discours.io" in email) or bool(entry["emails"][0]["verified"]),
        "muted": False,  # amnesty
        "bio": entry["profile"].get("bio", ""),
        "notifications": [],
@@ -5,7 +5,7 @@
{{ $upstream_port := index $port_map_list 2 }}

map $http_origin $allow_origin {
-    ~^https?:\/\/((.*\.)?localhost(:\d+)?|discoursio-webapp(-(.*))?\.vercel\.app|(.*\.)?discours\.io)$ $http_origin;
+    ~^https?:\/\/((.*\.)?localhost(:\d+)?|discoursio-webapp-git(.*)?\.vercel\.app|(.*\.)?discours\.io(:\d+)?)$ $http_origin;
    default "";
}

@@ -113,8 +113,8 @@ server {
    proxy_set_header X-Request-Start $msec;
    {{ if $.PROXY_X_FORWARDED_SSL }}proxy_set_header X-Forwarded-Ssl {{ $.PROXY_X_FORWARDED_SSL }};{{ end }}

-    if ($request_method = 'OPTIONS') {
-        add_header 'Access-Control-Allow-Origin' '$allow_origin' always;
+    if ($request_method = 'OPTIONS') {
+        add_header 'Access-Control-Allow-Origin' $allow_origin always;
        add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS';
        #
        # Custom headers and headers various browsers *should* be OK with but aren't

@@ -131,7 +131,7 @@ server {
    }

    if ($request_method = 'POST') {
-        add_header 'Access-Control-Allow-Origin' '$allow_origin' always;
+        add_header 'Access-Control-Allow-Origin' $allow_origin always;
        add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always;
        add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization' always;
        add_header 'Access-Control-Expose-Headers' 'Content-Length,Content-Range' always;

@@ -139,7 +139,7 @@ server {
    }

    if ($request_method = 'GET') {
-        add_header 'Access-Control-Allow-Origin' '$allow_origin' always;
+        add_header 'Access-Control-Allow-Origin' $allow_origin always;
        add_header 'Access-Control-Allow-Methods' 'GET, POST, OPTIONS' always;
        add_header 'Access-Control-Allow-Headers' 'DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range,Authorization' always;
        add_header 'Access-Control-Expose-Headers' 'Content-Length,Content-Range' always;
@@ -49,8 +49,8 @@ from resolvers.zine.load import (
from resolvers.inbox.chats import (
    create_chat,
    delete_chat,
-    update_chat,
-    invite_to_chat
+    update_chat
)
from resolvers.inbox.messages import (
    create_message,

@@ -112,7 +112,6 @@ __all__ = [
    # inbox
    "load_chats",
    "load_messages_by",
-    "invite_to_chat",
    "create_chat",
    "delete_chat",
    "update_chat",
@@ -13,12 +13,12 @@ from auth.identity import Identity, Password
from auth.jwtcodec import JWTCodec
from auth.tokenstorage import TokenStorage
from base.exceptions import (BaseHttpException, InvalidPassword, InvalidToken,
-                             ObjectNotExist, OperationNotAllowed)
+                             ObjectNotExist, OperationNotAllowed, Unauthorized)
from base.orm import local_session
from base.resolvers import mutation, query
from orm import Role, User
from resolvers.zine.profile import user_subscriptions
-from settings import SESSION_TOKEN_HEADER
+from settings import SESSION_TOKEN_HEADER, FRONTEND_URL


@mutation.field("getSession")

@@ -37,7 +37,7 @@ async def get_current_user(_, info):
            "news": await user_subscriptions(user.slug),
        }
    else:
-        raise OperationNotAllowed("No session token present in request, try to login")
+        raise Unauthorized("No session token present in request, try to login")


@mutation.field("confirmEmail")

@@ -75,7 +75,7 @@ async def confirm_email_handler(request):
    if "error" in res:
        raise BaseHttpException(res['error'])
    else:
-        response = RedirectResponse(url="https://new.discours.io")
+        response = RedirectResponse(url=FRONTEND_URL)
        response.set_cookie("token", res["token"])  # session token
        return response

@@ -133,7 +133,7 @@ async def register_by_email(_, _info, email: str, password: str = "", name: str


@mutation.field("sendLink")
-async def auth_send_link(_, _info, email, lang="ru"):
+async def auth_send_link(_, _info, email, lang="ru", template="email_confirmation"):
    """send link with confirm code to email"""
    with local_session() as session:
        user = session.query(User).filter(User.email == email).first()

@@ -141,7 +141,7 @@ async def auth_send_link(_, _info, email, lang="ru"):
        raise ObjectNotExist("User not found")
    else:
        token = await TokenStorage.create_onetime(user)
-        await send_auth_email(user, token, lang)
+        await send_auth_email(user, token, lang, template)
        return user
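The `sendLink` mutation now threads an optional template name through to the mailer. A calling sketch mirroring the resolver signature in the diff (the email address and the "email_reset" template name are hypothetical placeholders):

```python
# inside an async context; the first two positional args are the usual
# GraphQL root/info parameters, unused here
user = await auth_send_link(None, None, "reader@example.com",
                            lang="en", template="email_reset")
```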
@@ -7,43 +7,6 @@ from base.redis import redis
from base.resolvers import mutation


-async def add_user_to_chat(user_slug: str, chat_id: str, chat=None):
-    for member in chat["users"]:
-        chats_ids = await redis.execute("GET", f"chats_by_user/{member}")
-        if chats_ids:
-            chats_ids = list(json.loads(chats_ids))
-        else:
-            chats_ids = []
-        if chat_id not in chats_ids:
-            chats_ids.append(chat_id)
-            await redis.execute("SET", f"chats_by_user/{member}", json.dumps(chats_ids))
-
-
-@mutation.field("inviteChat")
-async def invite_to_chat(_, info, invited: str, chat_id: str):
-    ''' invite user with :slug to chat with :chat_id '''
-    user = info.context["request"].user
-    chat = await redis.execute("GET", f"chats/{chat_id}")
-    if not chat:
-        return {
-            "error": "chat not exist"
-        }
-    chat = dict(json.loads(chat))
-    if not chat['private'] and user.slug not in chat['admins']:
-        return {
-            "error": "only admins can invite to private chat",
-            "chat": chat
-        }
-    else:
-        chat["users"].append(invited)
-        await add_user_to_chat(user.slug, chat_id, chat)
-        await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
-        return {
-            "error": None,
-            "chat": chat
-        }


@mutation.field("updateChat")
@login_required
async def update_chat(_, info, chat_new: dict):

@@ -68,12 +31,11 @@ async def update_chat(_, info, chat_new: dict):
        "title": chat_new.get("title", chat["title"]),
        "description": chat_new.get("description", chat["description"]),
        "updatedAt": int(datetime.now(tz=timezone.utc).timestamp()),
-        "admins": chat_new.get("admins", chat["admins"]),
+        "admins": chat_new.get("admins", chat.get("admins") or []),
        "users": chat_new.get("users", chat["users"])
    })
-    await add_user_to_chat(user.slug, chat_id, chat)
    await redis.execute("SET", f"chats/{chat.id}", json.dumps(chat))
-    await redis.execute("SET", f"chats/{chat.id}/next_message_id", 0)
+    await redis.execute("COMMIT")

    return {
        "error": None,

@@ -85,23 +47,43 @@ async def update_chat(_, info, chat_new: dict):
@login_required
async def create_chat(_, info, title="", members=[]):
    user = info.context["request"].user
-    chat_id = str(uuid.uuid4())
-    chat = {}
+    if user.slug not in members:
+        members.append(user.slug)
+
+    # reuse chat created before if exists
+    if len(members) == 2 and title == "":
+        chats1 = await redis.execute("SMEMBERS", f"chats_by_user/{members[0].slug}")
+        chats2 = await redis.execute("SMEMBERS", f"chats_by_user/{members[1].slug}")
+        chat = None
+        for c in chats1.intersection(chats2):
+            chat = await redis.execute("GET", f"chats/{c.decode('utf-8')}")
+            if chat:
+                chat = json.loads(chat)
+                if chat.title == "":
+                    break
+        if chat:
+            return {
+                "chat": chat,
+                "error": "existed"
+            }
+
+    chat_id = str(uuid.uuid4())
    chat = {
-        "title": title,
-        "createdAt": int(datetime.now(tz=timezone.utc).timestamp()),
-        "updatedAt": int(datetime.now(tz=timezone.utc).timestamp()),
-        "createdBy": user.slug,
        "id": chat_id,
        "users": members,
-        "admins": [user.slug, ]
+        "title": title,
+        "createdBy": user.slug,
+        "createdAt": int(datetime.now(tz=timezone.utc).timestamp()),
+        "updatedAt": int(datetime.now(tz=timezone.utc).timestamp()),
+        "admins": []
    }

-    await add_user_to_chat(user.slug, chat_id, chat)
+    for m in members:
+        await redis.execute("SADD", f"chats_by_user/{m}", chat_id)
    await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
    await redis.execute("SET", f"chats/{chat_id}/next_message_id", str(0))
+    await redis.execute("COMMIT")
    return {
        "error": None,
        "chat": chat

@@ -117,6 +99,8 @@ async def delete_chat(_, info, chat_id: str):
    chat = dict(json.loads(chat))
    if user.slug in chat['admins']:
        await redis.execute("DEL", f"chats/{chat_id}")
+        await redis.execute("SREM", "chats_by_user/" + user, chat_id)
+        await redis.execute("COMMIT")
    else:
        return {
            "error": "chat not exist"
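The chat storage now keeps one JSON blob per chat plus a per-user Redis set of chat ids, replacing the old JSON-list bookkeeping in the removed `add_user_to_chat`. A sketch of the key layout these mutations agree on (the slug value is an illustrative placeholder):

```python
# chats/{chat_id}                  -> JSON-encoded chat dict
# chats/{chat_id}/next_message_id  -> integer counter, starts at "0"
# chats_by_user/{user_slug}        -> Redis SET of chat ids

await redis.execute("SADD", "chats_by_user/some-user", chat_id)
await redis.execute("SET", f"chats/{chat_id}", json.dumps(chat))
await redis.execute("SET", f"chats/{chat_id}/next_message_id", str(0))
```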
@@ -5,89 +5,120 @@ from auth.authenticate import login_required
from base.redis import redis
+from base.orm import local_session
from base.resolvers import query
+from base.exceptions import ObjectNotExist, Unauthorized
+from orm.user import User
from resolvers.zine.profile import followed_authors
from .unread import get_unread_counter


-async def load_messages(chatId: str, limit: int, offset: int):
-    ''' load :limit messages for :chatId with :offset '''
+async def load_messages(chat_id: str, limit: int, offset: int):
+    ''' load :limit messages for :chat_id with :offset '''
    messages = []
    message_ids = await redis.lrange(
-        f"chats/{chatId}/message_ids", 0 - offset - limit, 0 - offset
+        f"chats/{chat_id}/message_ids", offset + limit, offset
    )
    if message_ids:
        message_keys = [
-            f"chats/{chatId}/messages/{mid}" for mid in message_ids
+            f"chats/{chat_id}/messages/{mid}" for mid in message_ids
        ]
        messages = await redis.mget(*message_keys)
        messages = [json.loads(msg) for msg in messages]
-    return {
-        "messages": messages,
-        "error": None
-    }
+    return messages


@query.field("loadChats")
@login_required
-async def load_chats(_, info, limit: int, offset: int):
+async def load_chats(_, info, limit: int = 50, offset: int = 0):
    """ load :limit chats of current user with :offset """
    user = info.context["request"].user
-    if user:
-        chats = await redis.execute("GET", f"chats_by_user/{user.slug}")
-        if chats:
-            chats = list(json.loads(chats))[offset:offset + limit]
-        if not chats:
-            chats = []
-        for c in chats:
-            c['messages'] = await load_messages(c['id'], limit, offset)
-            c['unread'] = await get_unread_counter(c['id'], user.slug)
-        return {
-            "chats": chats,
-            "error": None
-        }
-        print('[inbox] load user\'s chats %s' % user.slug)
-    else:
-        return {
-            "error": "please login",
-            "chats": []
-        }
+    if not user:
+        raise Unauthorized("Please login to load chats")
+    cids = await redis.execute("SMEMBERS", "chats_by_user/" + user.slug)
+    if cids:
+        cids = list(cids)[offset:offset + limit]
+    if not cids:
+        print('[inbox.load] no chats were found')
+        cids = []
+    chats = []
+    for cid in cids:
+        c = await redis.execute("GET", "chats/" + cid.decode("utf-8"))
+        if c:
+            c = dict(json.loads(c))
+            c['messages'] = await load_messages(cid, 5, 0)
+            c['unread'] = await get_unread_counter(cid, user.slug)
+            with local_session() as session:
+                c['members'] = []
+                for userslug in c["users"]:
+                    a = session.query(User).where(User.slug == userslug).first().dict()
+                    c['members'].append({
+                        "slug": userslug,
+                        "userpic": a["userpic"],
+                        "name": a["name"],
+                        "lastSeen": a["lastSeen"],
+                    })
+                chats.append(c)
+    return {
+        "chats": chats,
+        "error": None
+    }


+async def search_user_chats(by, messages: set, slug: str, limit, offset):
+    cids = set([])
+    by_author = by.get('author')
+    body_like = by.get('body')
+    cids = cids.union(set(await redis.execute("SMEMBERS", "chats_by_user/" + slug)))
+    if by_author:
+        # all author's messages
+        cids.union(set(await redis.execute("SMEMBERS", f"chats_by_user/{by_author}")))
+        # author's messages in filtered chat
+        messages.union(set(filter(lambda m: m["author"] == by_author, list(messages))))
+        for c in cids:
+            messages.union(set(await load_messages(c, limit, offset)))
+    if body_like:
+        # search in all messages in all user's chats
+        for c in cids:
+            # FIXME: use redis scan here
+            mmm = set(await load_messages(c, limit, offset))
+            for m in mmm:
+                if body_like in m["body"]:
+                    messages.add(m)
+        else:
+            # search in chat's messages
+            messages.union(set(filter(lambda m: body_like in m["body"], list(messages))))
+    return messages


@query.field("loadMessagesBy")
@login_required
-async def load_messages_by(_, info, by, limit: int = 50, offset: int = 0):
-    ''' load :amolimitunt messages of :chat_id with :offset '''
-    user = info.context["request"].user
-    my_chats = await redis.execute("GET", f"chats_by_user/{user.slug}")
-    chat_id = by.get('chat')
-    if chat_id:
-        chat = await redis.execute("GET", f"chats/{chat_id}")
+async def load_messages_by(_, info, by, limit: int = 10, offset: int = 0):
+    ''' load :limit messages of :chat_id with :offset '''
+    messages = set([])
+    by_chat = by.get('chat')
+    if by_chat:
+        chat = await redis.execute("GET", f"chats/{by_chat}")
        if not chat:
-            return {
-                "error": "chat not exist"
-            }
-        messages = await load_messages(chat_id, limit, offset)
-    user_id = by.get('author')
-    if user_id:
-        chats = await redis.execute("GET", f"chats_by_user/{user_id}")
-        our_chats = list(set(chats) & set(my_chats))
-        for c in our_chats:
-            messages += await load_messages(c, limit, offset)
-    body_like = by.get('body')
-    if body_like:
-        for c in my_chats:
-            mmm = await load_messages(c, limit, offset)
-            for m in mmm:
-                if body_like in m["body"]:
-                    messages.append(m)
+            raise ObjectNotExist("Chat not exists")
+        # everyone's messages in filtered chat
+        messages.union(set(await load_messages(by_chat, limit, offset)))

+    user = info.context["request"].user
+    if user and len(messages) == 0:
+        messages.union(search_user_chats(by, messages, user.slug, limit, offset))

    days = by.get("days")
    if days:
-        messages = filter(
-            lambda m: datetime.now(tz=timezone.utc) - int(m["createdAt"]) < timedelta(days=by.get("days")),
-            messages
-        )
+        messages.union(set(filter(
+            lambda m: datetime.now(tz=timezone.utc) - int(m["createdAt"]) < timedelta(days=by.get("days")),
+            list(messages)
+        )))
    return {
-        "messages": messages,
+        "messages": sorted(
+            list(messages),
+            key=lambda m: m.createdAt
+        ),
        "error": None
    }
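`loadMessagesBy` accepts a `by` filter dict that may combine a chat id, an author slug, a body substring, and a day window. A sketch of the filter shapes the resolver inspects (field values are illustrative):

```python
by_examples = [
    {"chat": "chat-uuid"},              # everyone's messages in one chat
    {"author": "some-author"},          # author's messages, via search_user_chats
    {"body": "hello"},                  # substring match on message bodies
    {"chat": "chat-uuid", "days": 30},  # additionally drop messages older than 30 days
]
```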
@@ -17,6 +17,6 @@ async def get_total_unread_counter(user_slug: str):
    if chats:
        chats = json.loads(chats)
        for chat_id in chats:
-            n = await get_unread_counter(chat_id, user_slug)
+            n = await get_unread_counter(chat_id.decode('utf-8'), user_slug)
            unread += n
    return unread
@@ -44,8 +44,11 @@ def apply_filters(q, filters, user=None):
    filters = {} if filters is None else filters
    if filters.get("reacted") and user:
        q.join(Reaction, Reaction.createdBy == user.slug)
-    if filters.get("visibility"):
+    v = filters.get("visibility")
+    if v == "public":
        q = q.filter(Shout.visibility == filters.get("visibility"))
+    if v == "community":
+        q = q.filter(Shout.visibility.in_(["public", "community"]))
    if filters.get("layout"):
        q = q.filter(Shout.layout == filters.get("layout"))
    if filters.get("author"):

@@ -74,7 +77,6 @@ def add_stat_columns(q):
async def load_shout(_, info, slug):
    with local_session() as session:
        q = select(Shout).options(
-            # TODO add caption
            joinedload(Shout.authors),
            joinedload(Shout.topics),
        )
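The visibility filter is now tiered instead of a straight equality match: "public" selects only public shouts, while "community" also admits public ones. A sketch of the resulting predicates, assuming the `q` and `Shout` names from the diff:

```python
filters = {"visibility": "community", "layout": "article"}
# apply_filters(q, filters) would then add, in SQLAlchemy terms:
#   Shout.visibility.in_(["public", "community"])
#   Shout.layout == "article"
```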
@@ -13,21 +13,18 @@ from orm.user import AuthorFollower, Role, User, UserRating, UserRole

# from .community import followed_communities
from resolvers.inbox.unread import get_total_unread_counter
from .topics import get_topic_stat


async def user_subscriptions(slug: str):
    return {
        "unread": await get_total_unread_counter(slug),  # unread inbox messages counter
        "topics": [t.slug for t in await followed_topics(slug)],  # followed topics slugs
        "authors": [a.slug for a in await followed_authors(slug)],  # followed authors slugs
-        "reactions": await ReactedStorage.get_shouts_by_author(slug),
+        "reactions": await followed_reactions(slug)
        # "communities": [c.slug for c in followed_communities(slug)],  # communities
    }


async def get_author_stat(slug):
    # TODO: implement author stat
    with local_session() as session:
        return {
            "shouts": session.query(ShoutAuthor).where(ShoutAuthor.user == slug).count(),

@@ -39,11 +36,29 @@ async def get_author_stat(slug):
        ).where(
            Reaction.createdBy == slug
        ).filter(
-            func.length(Reaction.body) > 0
+            Reaction.body.is_not(None)
        ).count()
    }


+# @query.field("userFollowedDiscussions")
+@login_required
+async def followed_discussions(_, info, slug) -> List[Topic]:
+    return await followed_reactions(slug)


+async def followed_reactions(slug):
+    with local_session() as session:
+        user = session.query(User).where(User.slug == slug).first()
+        return session.query(
+            Reaction.shout
+        ).where(
+            Reaction.createdBy == slug
+        ).filter(
+            Reaction.createdAt > user.lastSeen
+        ).all()


@query.field("userFollowedTopics")
@login_required
async def get_followed_topics(_, info, slug) -> List[Topic]:
@@ -146,7 +146,11 @@ async def create_reaction(_, info, inp):
    except Exception as e:
        print(f"[resolvers.reactions] error on reactions autofollowing: {e}")

-    reaction.stat = await get_reaction_stat(reaction.id)
+    reaction.stat = {
+        "commented": 0,
+        "reacted": 0,
+        "rating": 0
+    }
    return {"reaction": reaction}


@@ -158,11 +162,16 @@ async def update_reaction(_, info, inp):

    with local_session() as session:
        user = session.query(User).where(User.id == user_id).first()
-        reaction = session.query(Reaction).filter(Reaction.id == inp.id).first()
+        q = select(Reaction).filter(Reaction.id == inp.id)
+        q = calc_reactions(q)
+
+        [reaction, rating, commented, reacted] = session.execute(q).unique().one()
+
        if not reaction:
            return {"error": "invalid reaction id"}
        if reaction.createdBy != user.slug:
            return {"error": "access denied"}

        reaction.body = inp["body"]
        reaction.updatedAt = datetime.now(tz=timezone.utc)
        if reaction.kind != inp["kind"]:

@@ -171,8 +180,11 @@ async def update_reaction(_, info, inp):
        if inp.get("range"):
            reaction.range = inp.get("range")
        session.commit()

-        reaction.stat = await get_reaction_stat(reaction.id)
+        reaction.stat = {
+            "commented": commented,
+            "reacted": reacted,
+            "rating": rating
+        }

    return {"reaction": reaction}


@@ -195,9 +207,11 @@ async def delete_reaction(_, info, rid):


def map_result_item(result_item):
-    reaction = result_item[0]
-    user = result_item[1]
+    [user, shout, reaction] = result_item
+    print(reaction)
    reaction.createdBy = user
+    reaction.shout = shout
+    reaction.replyTo = reaction
    return reaction


@@ -220,10 +234,17 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
    """

    CreatedByUser = aliased(User)
+    ReactedShout = aliased(Shout)
+    RepliedReaction = aliased(Reaction)
    q = select(
-        Reaction, CreatedByUser
-    ).join(CreatedByUser, Reaction.createdBy == CreatedByUser.slug)
+        Reaction, CreatedByUser, ReactedShout, RepliedReaction
+    ).join(
+        CreatedByUser, Reaction.createdBy == CreatedByUser.slug
+    ).join(
+        ReactedShout, Reaction.shout == ReactedShout.slug
+    ).join(
+        RepliedReaction, Reaction.replyTo == RepliedReaction.id
+    )

    if by.get("shout"):
        q = q.filter(Reaction.shout == by["shout"])

@@ -243,20 +264,28 @@ async def load_reactions_by(_, _info, by, limit=50, offset=0):
    order_way = asc if by.get("sort", "").startswith("-") else desc
    order_field = by.get("sort") or Reaction.createdAt
    q = q.group_by(
-        Reaction.id, CreatedByUser.id
+        Reaction.id, CreatedByUser.id, ReactedShout.id
    ).order_by(
        order_way(order_field)
    )

    q = calc_reactions(q)
    q = q.where(Reaction.deletedAt.is_(None))
    q = q.limit(limit).offset(offset)

    reactions = []
    with local_session() as session:
-        reactions = list(map(map_result_item, session.execute(q)))
-        for reaction in reactions:
-            reaction.stat = await get_reaction_stat(reaction.id)
+        for [
+            [reaction, rating, commented, reacted], shout, reply
+        ] in list(map(map_result_item, session.execute(q))):
+            reaction.shout = shout
+            reaction.replyTo = reply
+            reaction.stat = {
+                "rating": rating,
+                "commented": commented,
+                "reacted": reacted
+            }
+            reactions.append(reaction)

-        if by.get("stat"):
-            reactions.sort(key=lambda r: r.stat.get(by["stat"]) or r.createdAt)
+    if by.get("stat"):
+        reactions.sort(key=lambda r: r.stat.get(by["stat"]) or r.createdAt)

    return reactions
@@ -3,23 +3,14 @@ from sqlalchemy import and_, select
from auth.authenticate import login_required
from base.orm import local_session
from base.resolvers import mutation, query
-from orm import Shout
from orm.topic import Topic, TopicFollower
-# from services.stat.reacted import ReactedStorage
-
-
-# from services.stat.viewed import ViewedStorage
-
+from orm import Shout

async def get_topic_stat(slug):
    return {
        "shouts": len(TopicStat.shouts_by_topic.get(slug, {}).keys()),
        "authors": len(TopicStat.authors_by_topic.get(slug, {}).keys()),
-        "followers": len(TopicStat.followers_by_topic.get(slug, {}).keys()),
-        # "viewed": await ViewedStorage.get_topic(slug),
-        # "reacted": len(await ReactedStorage.get_topic(slug)),
-        # "commented": len(await ReactedStorage.get_topic_comments(slug)),
-        # "rating": await ReactedStorage.get_topic_rating(slug)
+        "followers": len(TopicStat.followers_by_topic.get(slug, {}).keys())
    }

@@ -96,11 +87,12 @@ async def topic_follow(user, slug):
async def topic_unfollow(user, slug):
    with local_session() as session:
        sub = (
-            session.query(TopicFollower)
-            .filter(
-                and_(TopicFollower.follower == user.slug, TopicFollower.topic == slug)
-            )
-            .first()
+            session.query(TopicFollower).filter(
+                and_(
+                    TopicFollower.follower == user.slug,
+                    TopicFollower.topic == slug
+                )
+            ).first()
        )
        if not sub:
            raise Exception("[resolvers.topics] follower not exist")
@@ -29,9 +29,9 @@ type ChatMember {
    name: String!
    userpic: String
    lastSeen: DateTime
-    invitedAt: DateTime
-    invitedBy: String # user slug
-    # TODO: add more
+    # invitedAt: DateTime
+    # invitedBy: String # user slug
+    # TODO: keep invite databit
}

type AuthorStat {

@@ -151,7 +151,6 @@ type Mutation {
    createChat(title: String, members: [String]!): Result!
    updateChat(chat: ChatInput!): Result!
    deleteChat(chatId: String!): Result!
-    inviteChat(chatId: String!, userslug: String!): Result!

    createMessage(chat: String!, body: String!, replyTo: String): Result!
    updateMessage(chatId: String!, id: Int!, body: String!): Result!

@@ -161,7 +160,7 @@ type Mutation {
    # auth
    getSession: AuthResult!
    registerUser(email: String!, password: String, name: String): AuthResult!
-    sendLink(email: String!, lang: String): Result!
+    sendLink(email: String!, lang: String, template: String): Result!
    confirmEmail(token: String!): AuthResult!

    # shout

@@ -440,7 +439,7 @@ type Shout {
    deletedBy: User
    publishedBy: User
    publishedAt: DateTime
-    media: String
+    media: String # json [ { title pic url body }, .. ]
    stat: Stat
}

@@ -515,13 +514,14 @@ type Message {
type Chat {
    id: String!
    createdAt: Int!
-    createdBy: User!
+    createdBy: String!
    updatedAt: Int!
    title: String
    description: String
-    users: [User]!
-    admins: [User]
-    messages: [Message]!
+    users: [String]
+    members: [ChatMember]
+    admins: [String]
+    messages: [Message]
    unread: Int
    private: Boolean
}
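The Chat type now stores plain slugs for `createdBy`, `users`, and `admins`, with resolved profiles delivered separately through `members: [ChatMember]`. A sketch of a chat payload matching the updated schema (values are illustrative):

```python
chat = {
    "id": "chat-uuid",
    "createdAt": 1650000000,
    "createdBy": "author-slug",     # a slug string, no longer a User object
    "updatedAt": 1650000000,
    "title": "",
    "users": ["author-slug", "reader-slug"],
    "admins": [],
    "members": [],                  # filled in by load_chats from the DB
    "unread": 0,
    "private": False,
}
```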
@@ -20,11 +20,13 @@ class SearchService:
        cached = await redis.execute("GET", text)
        if not cached:
            async with SearchService.lock:
-                by = {
+                options = {
                    "title": text,
-                    "body": text
+                    "body": text,
+                    "limit": limit,
+                    "offset": offset
                }
-                payload = await load_shouts_by(None, None, by, limit, offset)
+                payload = await load_shouts_by(None, None, options)
                await redis.execute("SET", text, json.dumps(payload))
                return payload
        else:
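The search service now passes paging inside a single options dict, matching the reworked `load_shouts_by` signature. A sketch of the call, assuming the names from the diff:

```python
options = {
    "title": "culture",   # the search text is matched against title and body
    "body": "culture",
    "limit": 50,
    "offset": 0,
}
payload = await load_shouts_by(None, None, options)
```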
@@ -76,9 +76,9 @@ class ViewedStorage:
            self.client = create_client({
                "Authorization": "Bearer %s" % str(token)
            }, schema=schema_str)
-            print("[stat.viewed] authorized permanently by ackee.discours.io: %s" % token)
+            print("[stat.viewed] * authorized permanently by ackee.discours.io: %s" % token)
        else:
-            print("[stat.viewed] please set ACKEE_TOKEN")
+            print("[stat.viewed] * please set ACKEE_TOKEN")
            self.disabled = True

    @staticmethod

@@ -86,27 +86,26 @@ class ViewedStorage:
        """ query all the pages from ackee sorted by views count """
        start = time.time()
        self = ViewedStorage
        async with self.lock:
-            try:
-                self.pages = await self.client.execute_async(load_pages)
-                self.pages = self.pages["domains"][0]["statistics"]["pages"]
-                print("[stat.viewed] ackee pages updated")
-                shouts = {}
-                try:
-                    for page in self.pages:
-                        p = page["value"].split("?")[0]
-                        slug = p.split('discours.io/')[-1]
-                        shouts[slug] = page["count"]
-                    for slug, v in shouts:
-                        await ViewedStorage.increment(slug, v)
-                except Exception:
-                    pass
-                print("[stat.viewed] %d pages collected " % len(shouts.keys()))
-            except Exception as e:
-                raise e
+            try:
+                self.pages = await self.client.execute_async(load_pages)
+                self.pages = self.pages["domains"][0]["statistics"]["pages"]
+                print("[stat.viewed] ⎪ ackee pages updated")
+                shouts = {}
+                try:
+                    for page in self.pages:
+                        p = page["value"].split("?")[0]
+                        slug = p.split('discours.io/')[-1]
+                        shouts[slug] = page["count"]
+                    for slug, v in shouts:
+                        await ViewedStorage.increment(slug, v)
+                except Exception:
+                    pass
+                print("[stat.viewed] ⎪ %d pages collected " % len(shouts.keys()))
+            except Exception as e:
+                raise e

        end = time.time()
-        print("[stat.viewed] update_pages took %fs " % (end - start))
+        print("[stat.viewed] ⎪ update_pages took %fs " % (end - start))

    @staticmethod
    async def get_facts():

@@ -180,21 +179,22 @@ class ViewedStorage:
        async with self.lock:
            while True:
                try:
+                    print("[stat.viewed] ⎧ updating views...")
                    await self.update_pages()
                    failed = 0
                except Exception:
                    failed += 1
-                    print("[stat.viewed] update failed #%d, wait 10 seconds" % failed)
+                    print("[stat.viewed] ⎩ update failed #%d, wait 10 seconds" % failed)
                    if failed > 3:
-                        print("[stat.viewed] not trying to update anymore")
+                        print("[stat.viewed] ⎩ not trying to update anymore")
                        break
                if failed == 0:
                    when = datetime.now(timezone.utc) + timedelta(seconds=self.period)
                    t = format(when.astimezone().isoformat())
-                    print("[stat.viewed] next update: %s" % (
+                    print("[stat.viewed] ⎩ next update: %s" % (
                        t.split("T")[0] + " " + t.split("T")[1].split(".")[0]
                    ))
                    await asyncio.sleep(self.period)
                else:
                    await asyncio.sleep(10)
-                    print("[stat.viewed] trying to update data again...")
+                    print("[stat.viewed] ⎧ trying to update data again...")
services/zine/shoutauthor.py (new file, +49)

@@ -0,0 +1,49 @@
import asyncio
import time
from base.orm import local_session
from orm.shout import ShoutAuthor


class ShoutAuthorStorage:
    authors_by_shout = {}
    lock = asyncio.Lock()
    # period = 30 * 60  # sec

    @staticmethod
    async def load_captions(session):
        self = ShoutAuthorStorage
        sas = session.query(ShoutAuthor).all()
        for sa in sas:
            self.authors_by_shout[sa.shout] = self.authors_by_shout.get(sa.shout, {})
            self.authors_by_shout[sa.shout][sa.user] = sa.caption
        print("[zine.authors] ⎧ %d shouts indexed by authors" % len(self.authors_by_shout))

    @staticmethod
    async def get_author_caption(shout, author):
        self = ShoutAuthorStorage
        async with self.lock:
            return self.authors_by_shout.get(shout, {}).get(author)

    @staticmethod
    async def set_author_caption(shout, author, caption):
        self = ShoutAuthorStorage
        async with self.lock:
            self.authors_by_shout[shout] = self.authors_by_shout.get(shout, {})
            self.authors_by_shout[shout][author] = caption
            return {
                "error": None,
            }

    @staticmethod
    async def worker():
        self = ShoutAuthorStorage
        async with self.lock:
            # while True:
            try:
                with local_session() as session:
                    ts = time.time()
                    await self.load_captions(session)
                    print("[zine.authors] ⎩ load_captions took %fs " % (time.time() - ts))
            except Exception as err:
                print("[zine.authors] ⎩ error indexing by author: %s" % (err))
            # await asyncio.sleep(self.period)
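The new storage indexes author captions by shout slug so resolvers can look them up without a DB query. A usage sketch (the slug values are illustrative placeholders):

```python
# warm the cache once at startup, then read captions cheaply
await ShoutAuthorStorage.worker()
caption = await ShoutAuthorStorage.get_author_caption("some-shout", "author-slug")
```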
services/zine/topics.py (new file, +97)

@@ -0,0 +1,97 @@
import asyncio
from base.orm import local_session
from orm.topic import Topic
from orm.shout import Shout
import sqlalchemy as sa
from sqlalchemy import select


class TopicStorage:
    topics = {}
    lock = asyncio.Lock()
    random_topics = []

    @staticmethod
    def init(session):
        self = TopicStorage
        topics = session.query(Topic)
        self.topics = dict([(topic.slug, topic) for topic in topics])
        for tpc in self.topics.values():
            # self.load_parents(tpc)
            pass

        print("[zine.topics] %d precached" % len(self.topics.keys()))

    # @staticmethod
    # def load_parents(topic):
    #     self = TopicStorage
    #     parents = []
    #     for parent in self.topics.values():
    #         if topic.slug in parent.children:
    #             parents.append(parent.slug)
    #     topic.parents = parents
    #     return topic

    @staticmethod
    def get_random_topics(amount):
        return TopicStorage.random_topics[0:amount]

    @staticmethod
    def renew_topics_random():
        with local_session() as session:
            q = select(Topic).join(Shout).group_by(Topic.id).having(sa.func.count(Shout.id) > 2).order_by(
                sa.func.random()).limit(50)
            TopicStorage.random_topics = list(map(
                lambda result_item: result_item.Topic, session.execute(q)
            ))

    @staticmethod
    async def worker():
        self = TopicStorage
        async with self.lock:
            while True:
                try:
                    self.renew_topics_random()
                except Exception as err:
                    print("[zine.topics] error %s" % (err))
                await asyncio.sleep(300)  # 5 mins

    @staticmethod
    async def get_topics_all():
        self = TopicStorage
        async with self.lock:
            return list(self.topics.values())

    @staticmethod
    async def get_topics_by_slugs(slugs):
        self = TopicStorage
        async with self.lock:
            if not slugs:
                return self.topics.values()
            topics = filter(lambda topic: topic.slug in slugs, self.topics.values())
            return list(topics)

    @staticmethod
    async def get_topics_by_community(community):
        self = TopicStorage
        async with self.lock:
            topics = filter(
                lambda topic: topic.community == community, self.topics.values()
            )
            return list(topics)

    @staticmethod
    async def get_topics_by_author(author):
        self = TopicStorage
        async with self.lock:
            topics = filter(
                lambda topic: topic.community == author, self.topics.values()
            )
            return list(topics)

    @staticmethod
    async def update_topic(topic):
        self = TopicStorage
        async with self.lock:
            self.topics[topic.slug] = topic
            # self.load_parents(topic)
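`TopicStorage` precaches all topics at boot and refreshes a random selection every five minutes. A startup sketch (the session wiring is assumed, not shown in the diff):

```python
with local_session() as session:
    TopicStorage.init(session)               # precache all topics once
asyncio.create_task(TopicStorage.worker())   # refresh random_topics every 5 min
featured = TopicStorage.get_random_topics(5)
```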
@@ -22,7 +22,7 @@ for provider in OAUTH_PROVIDERS:
        "id": environ.get(provider + "_OAUTH_ID"),
        "key": environ.get(provider + "_OAUTH_KEY"),
    }

+FRONTEND_URL = environ.get("FRONTEND_URL") or "http://localhost:3000"
SHOUTS_REPO = "content"
SESSION_TOKEN_HEADER = "Authorization"