fmt2
@@ -1,57 +1,60 @@
+import logging
+
 from aiohttp import ClientSession
 from strawberry.extensions import Extension

-from settings import AUTH_URL
-from services.db import local_session
 from orm.author import Author
+from services.db import local_session
+from settings import AUTH_URL

-import logging

-logger = logging.getLogger("\t[services.auth]\t")
+logger = logging.getLogger('\t[services.auth]\t')
 logger.setLevel(logging.DEBUG)


 async def check_auth(req) -> str | None:
-    token = req.headers.get("Authorization")
-    user_id = ""
+    token = req.headers.get('Authorization')
+    user_id = ''
     if token:
-        query_name = "validate_jwt_token"
-        operation = "ValidateToken"
+        query_name = 'validate_jwt_token'
+        operation = 'ValidateToken'
         headers = {
-            "Content-Type": "application/json",
+            'Content-Type': 'application/json',
         }

         variables = {
-            "params": {
-                "token_type": "access_token",
-                "token": token,
+            'params': {
+                'token_type': 'access_token',
+                'token': token,
             }
         }

         gql = {
-            "query": f"query {operation}($params: ValidateJWTTokenInput!) {{ {query_name}(params: $params) {{ is_valid claims }} }}",
-            "variables": variables,
-            "operationName": operation,
+            'query': f'query {operation}($params: ValidateJWTTokenInput!) {{ {query_name}(params: $params) {{ is_valid claims }} }}',
+            'variables': variables,
+            'operationName': operation,
         }
         try:
             # Asynchronous HTTP request to the authentication server
             async with ClientSession() as session:
                 async with session.post(AUTH_URL, json=gql, headers=headers) as response:
-                    print(f"[services.auth] HTTP Response {response.status} {await response.text()}")
+                    print(f'[services.auth] HTTP Response {response.status} {await response.text()}')
                     if response.status == 200:
                         data = await response.json()
-                        errors = data.get("errors")
+                        errors = data.get('errors')
                         if errors:
-                            print(f"[services.auth] errors: {errors}")
+                            print(f'[services.auth] errors: {errors}')
                         else:
-                            user_id = data.get("data", {}).get(query_name, {}).get("claims", {}).get("sub")
+                            user_id = data.get('data', {}).get(query_name, {}).get('claims', {}).get('sub')
                             if user_id:
-                                print(f"[services.auth] got user_id: {user_id}")
+                                print(f'[services.auth] got user_id: {user_id}')
                                 return user_id
         except Exception as e:
             import traceback

             traceback.print_exc()
             # Handling and logging exceptions during authentication check
-            print(f"[services.auth] Error {e}")
+            print(f'[services.auth] Error {e}')

     return None

@@ -59,14 +62,14 @@ async def check_auth(req) -> str | None:
 class LoginRequiredMiddleware(Extension):
     async def on_request_start(self):
         context = self.execution_context.context
-        req = context.get("request")
+        req = context.get('request')
         user_id = await check_auth(req)
         if user_id:
-            context["user_id"] = user_id.strip()
+            context['user_id'] = user_id.strip()
             with local_session() as session:
                 author = session.query(Author).filter(Author.user == user_id).first()
                 if author:
-                    context["author_id"] = author.id
-                    context["user_id"] = user_id or None
+                    context['author_id'] = author.id
+                    context['user_id'] = user_id or None

         self.execution_context.context = context
@@ -4,47 +4,49 @@ import aiohttp

 from settings import API_BASE

-headers = {"Content-Type": "application/json"}
-
+headers = {'Content-Type': 'application/json'}
+
+
 # TODO: rewrite to orm usage?


 async def _request_endpoint(query_name, body) -> Any:
     async with aiohttp.ClientSession() as session:
         async with session.post(API_BASE, headers=headers, json=body) as response:
-            print(f"[services.core] {query_name} HTTP Response {response.status} {await response.text()}")
+            print(f'[services.core] {query_name} HTTP Response {response.status} {await response.text()}')
             if response.status == 200:
                 r = await response.json()
                 if r:
-                    return r.get("data", {}).get(query_name, {})
+                    return r.get('data', {}).get(query_name, {})
     return []


 async def get_followed_shouts(author_id: int):
-    query_name = "load_shouts_followed"
-    operation = "GetFollowedShouts"
+    query_name = 'load_shouts_followed'
+    operation = 'GetFollowedShouts'

     query = f"""query {operation}($author_id: Int!, limit: Int, offset: Int) {{
         {query_name}(author_id: $author_id, limit: $limit, offset: $offset) {{ id slug title }}
     }}"""

     gql = {
-        "query": query,
-        "operationName": operation,
-        "variables": {"author_id": author_id, "limit": 1000, "offset": 0},  # FIXME: too big limit
+        'query': query,
+        'operationName': operation,
+        'variables': {'author_id': author_id, 'limit': 1000, 'offset': 0},  # FIXME: too big limit
     }

     return await _request_endpoint(query_name, gql)


 async def get_shout(shout_id):
-    query_name = "get_shout"
-    operation = "GetShout"
+    query_name = 'get_shout'
+    operation = 'GetShout'

     query = f"""query {operation}($slug: String, $shout_id: Int) {{
         {query_name}(slug: $slug, shout_id: $shout_id) {{ id slug title authors {{ id slug name pic }} }}
     }}"""

-    gql = {"query": query, "operationName": operation, "variables": {"slug": None, "shout_id": shout_id}}
+    gql = {'query': query, 'operationName': operation, 'variables': {'slug': None, 'shout_id': shout_id}}

     return await _request_endpoint(query_name, gql)

@@ -9,15 +9,16 @@ from sqlalchemy.sql.schema import Table

 from settings import DB_URL

+
 engine = create_engine(DB_URL, echo=False, pool_size=10, max_overflow=20)

-T = TypeVar("T")
+T = TypeVar('T')

 REGISTRY: Dict[str, type] = {}


 # @contextmanager
-def local_session(src=""):
+def local_session(src=''):
     return Session(bind=engine, expire_on_commit=False)

 # try:
@@ -45,7 +46,7 @@ class Base(declarative_base()):
     __init__: Callable
     __allow_unmapped__ = True
     __abstract__ = True
-    __table_args__ = {"extend_existing": True}
+    __table_args__ = {'extend_existing': True}

     id = Column(Integer, primary_key=True)

@@ -54,12 +55,12 @@ class Base(declarative_base()):

     def dict(self) -> Dict[str, Any]:
         column_names = self.__table__.columns.keys()
-        if "_sa_instance_state" in column_names:
-            column_names.remove("_sa_instance_state")
+        if '_sa_instance_state' in column_names:
+            column_names.remove('_sa_instance_state')
         try:
             return {c: getattr(self, c) for c in column_names}
         except Exception as e:
-            print(f"[services.db] Error dict: {e}")
+            print(f'[services.db] Error dict: {e}')
             return {}

     def update(self, values: Dict[str, Any]) -> None:
@@ -1,14 +1,16 @@
-import json
-
-import redis.asyncio as aredis
 import asyncio
-from settings import REDIS_URL
+
+import json
 import logging
-
-logger = logging.getLogger("\t[services.redis]\t")
+import redis.asyncio as aredis
+
+from settings import REDIS_URL
+
+
+logger = logging.getLogger('\t[services.redis]\t')
 logger.setLevel(logging.DEBUG)


 class RedisCache:
     def __init__(self, uri=REDIS_URL):
         self._uri: str = uri
@@ -25,11 +27,11 @@ class RedisCache:
     async def execute(self, command, *args, **kwargs):
         if self._client:
             try:
-                logger.debug(command + " " + " ".join(args))
+                logger.debug(command + ' ' + ' '.join(args))
                 r = await self._client.execute_command(command, *args, **kwargs)
                 return r
             except Exception as e:
-                logger.error(f"{e}")
+                logger.error(f'{e}')
         return None

     async def subscribe(self, *channels):
@@ -59,15 +61,15 @@ class RedisCache:

         while True:
             message = await pubsub.get_message()
-            if message and isinstance(message["data"], (str, bytes, bytearray)):
-                logger.debug("pubsub got msg")
+            if message and isinstance(message['data'], (str, bytes, bytearray)):
+                logger.debug('pubsub got msg')
                 try:
-                    yield json.loads(message["data"]), message.get("channel")
+                    yield json.loads(message['data']), message.get('channel')
                 except Exception as e:
-                    logger.error(f"{e}")
+                    logger.error(f'{e}')
             await asyncio.sleep(1)


 redis = RedisCache()

-__all__ = ["redis"]
+__all__ = ['redis']