fmt
@@ -9,67 +9,67 @@ from settings import ADMIN_SECRET, AUTH_URL

 async def request_data(gql, headers=None):
     if headers is None:
-        headers = {'Content-Type': 'application/json'}
+        headers = {"Content-Type": "application/json"}
     try:
         async with httpx.AsyncClient() as client:
             response = await client.post(AUTH_URL, json=gql, headers=headers)
             if response.status_code == 200:
                 data = response.json()
-                errors = data.get('errors')
+                errors = data.get("errors")
                 if errors:
-                    logger.error(f'HTTP Errors: {errors}')
+                    logger.error(f"HTTP Errors: {errors}")
                 else:
                     return data
     except Exception as e:
         # Handling and logging exceptions during authentication check
-        logger.error(f'request_data error: {e}')
+        logger.error(f"request_data error: {e}")
     return None


 async def check_auth(req):
-    token = req.headers.get('Authorization')
-    user_id = ''
+    token = req.headers.get("Authorization")
+    user_id = ""
     user_roles = []
     if token:
         # Logging the authentication token
-        logger.debug(f'{token}')
-        query_name = 'validate_jwt_token'
-        operation = 'ValidateToken'
-        variables = {'params': {'token_type': 'access_token', 'token': token}}
+        logger.debug(f"{token}")
+        query_name = "validate_jwt_token"
+        operation = "ValidateToken"
+        variables = {"params": {"token_type": "access_token", "token": token}}

         gql = {
-            'query': f'query {operation}($params: ValidateJWTTokenInput!) {{'
-            + f'{query_name}(params: $params) {{ is_valid claims }} '
-            + '}',
-            'variables': variables,
-            'operationName': operation,
+            "query": f"query {operation}($params: ValidateJWTTokenInput!) {{"
+            + f"{query_name}(params: $params) {{ is_valid claims }} "
+            + "}",
+            "variables": variables,
+            "operationName": operation,
         }
         data = await request_data(gql)
         if data:
             logger.debug(data)
-            user_data = data.get('data', {}).get(query_name, {}).get('claims', {})
-            user_id = user_data.get('sub', '')
-            user_roles = user_data.get('allowed_roles', [])
+            user_data = data.get("data", {}).get(query_name, {}).get("claims", {})
+            user_id = user_data.get("sub", "")
+            user_roles = user_data.get("allowed_roles", [])
     return user_id, user_roles


 async def add_user_role(user_id):
-    logger.info(f'add author role for user_id: {user_id}')
-    query_name = '_update_user'
-    operation = 'UpdateUserRoles'
+    logger.info(f"add author role for user_id: {user_id}")
+    query_name = "_update_user"
+    operation = "UpdateUserRoles"
     headers = {
-        'Content-Type': 'application/json',
-        'x-authorizer-admin-secret': ADMIN_SECRET,
+        "Content-Type": "application/json",
+        "x-authorizer-admin-secret": ADMIN_SECRET,
     }
-    variables = {'params': {'roles': 'author, reader', 'id': user_id}}
+    variables = {"params": {"roles": "author, reader", "id": user_id}}
     gql = {
-        'query': f'mutation {operation}($params: UpdateUserInput!) {{ {query_name}(params: $params) {{ id roles }} }}',
-        'variables': variables,
-        'operationName': operation,
+        "query": f"mutation {operation}($params: UpdateUserInput!) {{ {query_name}(params: $params) {{ id roles }} }}",
+        "variables": variables,
+        "operationName": operation,
     }
    data = await request_data(gql, headers)
     if data:
-        user_id = data.get('data', {}).get(query_name, {}).get('id')
+        user_id = data.get("data", {}).get(query_name, {}).get("id")
     return user_id

@@ -77,15 +77,15 @@ def login_required(f):
     @wraps(f)
     async def decorated_function(*args, **kwargs):
         info = args[1]
-        req = info.context.get('request')
+        req = info.context.get("request")
         authorized = await check_auth(req)
         if authorized:
             logger.info(authorized)
             user_id, user_roles = authorized
             if user_id and user_roles:
-                logger.info(f' got {user_id} roles: {user_roles}')
-                info.context['user_id'] = user_id.strip()
-                info.context['roles'] = user_roles
+                logger.info(f" got {user_id} roles: {user_roles}")
+                info.context["user_id"] = user_id.strip()
+                info.context["roles"] = user_roles
         return await f(*args, **kwargs)

     return decorated_function
@@ -99,11 +99,11 @@ def auth_request(f):
         if authorized:
             user_id, user_roles = authorized
             if user_id and user_roles:
-                logger.info(f' got {user_id} roles: {user_roles}')
-                req['user_id'] = user_id.strip()
-                req['roles'] = user_roles
+                logger.info(f" got {user_id} roles: {user_roles}")
+                req["user_id"] = user_id.strip()
+                req["roles"] = user_roles
                 return await f(*args, **kwargs)
         else:
-            raise HTTPException(status_code=401, detail='Unauthorized')
+            raise HTTPException(status_code=401, detail="Unauthorized")

     return decorated_function
@@ -7,31 +7,33 @@ from services.encoders import CustomJSONEncoder
 from services.rediscache import redis

 DEFAULT_FOLLOWS = {
-    'topics': [],
-    'authors': [],
-    'communities': [{'id': 1, 'name': 'Дискурс', 'slug': 'discours', 'pic': ''}],
+    "topics": [],
+    "authors": [],
+    "communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
 }


 async def cache_author(author: dict):
     payload = json.dumps(author, cls=CustomJSONEncoder)
-    await redis.execute('SET', f'user:{author.get("user")}', payload)
-    await redis.execute('SET', f'author:{author.get("id")}', payload)
+    await redis.execute("SET", f'user:{author.get("user")}', payload)
+    await redis.execute("SET", f'author:{author.get("id")}', payload)

     # update stat all field for followers' caches in <authors> list
-    followers_str = await redis.execute('GET', f'author:{author.get("id")}:followers')
+    followers_str = await redis.execute("GET", f'author:{author.get("id")}:followers')
     followers = []
     if followers_str:
         followers = json.loads(followers_str)
     if isinstance(followers, list):
         for follower in followers:
             follower_follows_authors = []
-            follower_follows_authors_str = await redis.execute('GET', f'author:{author.get("id")}:follows-authors')
+            follower_follows_authors_str = await redis.execute(
+                "GET", f'author:{author.get("id")}:follows-authors'
+            )
             if follower_follows_authors_str:
                 follower_follows_authors = json.loads(follower_follows_authors_str)
                 c = 0
                 for old_author in follower_follows_authors:
-                    if int(old_author.get('id')) == int(author.get('id', 0)):
+                    if int(old_author.get("id")) == int(author.get("id", 0)):
                         follower_follows_authors[c] = author
                         break # exit the loop since we found and updated the author
                     c += 1
@@ -40,19 +42,23 @@ async def cache_author(author: dict):
                 follower_follows_authors.append(author)

     # update stat field for all authors' caches in <followers> list
-    follows_str = await redis.execute('GET', f'author:{author.get("id")}:follows-authors')
+    follows_str = await redis.execute(
+        "GET", f'author:{author.get("id")}:follows-authors'
+    )
     follows_authors = []
     if follows_str:
         follows_authors = json.loads(follows_str)
     if isinstance(follows_authors, list):
         for followed_author in follows_authors:
             followed_author_followers = []
-            followed_author_followers_str = await redis.execute('GET', f'author:{author.get("id")}:followers')
+            followed_author_followers_str = await redis.execute(
+                "GET", f'author:{author.get("id")}:followers'
+            )
             if followed_author_followers_str:
                 followed_author_followers = json.loads(followed_author_followers_str)
                 c = 0
                 for old_follower in followed_author_followers:
-                    if int(old_follower.get('id')) == int(author.get('id', 0)):
+                    if int(old_follower.get("id")) == int(author.get("id", 0)):
                         followed_author_followers[c] = author
                         break # exit the loop since we found and updated the author
                     c += 1
@@ -64,50 +70,52 @@ async def cache_author(author: dict):
 async def cache_follows(follower: Author, entity_type: str, entity, is_insert=True):
     # prepare
     follows = []
-    redis_key = f'author:{follower.id}:follows-{entity_type}s'
-    follows_str = await redis.execute('GET', redis_key)
+    redis_key = f"author:{follower.id}:follows-{entity_type}s"
+    follows_str = await redis.execute("GET", redis_key)
     if isinstance(follows_str, str):
         follows = json.loads(follows_str)
     if is_insert:
         follows.append(entity)
     else:
-        entity_id = entity.get('id')
+        entity_id = entity.get("id")
         if not entity_id:
-            raise Exception('wrong entity')
+            raise Exception("wrong entity")
         # Remove the entity from follows
-        follows = [e for e in follows if e['id'] != entity_id]
+        follows = [e for e in follows if e["id"] != entity_id]

     # update follows cache
     updated_data = [t.dict() if isinstance(t, Topic) else t for t in follows]
     payload = json.dumps(updated_data, cls=CustomJSONEncoder)
-    await redis.execute('SET', redis_key, payload)
+    await redis.execute("SET", redis_key, payload)

     # update follower's stats everywhere
-    author_str = await redis.execute('GET', f'author:{follower.id}')
+    author_str = await redis.execute("GET", f"author:{follower.id}")
     if author_str:
         author = json.loads(author_str)
-        author['stat'][f'{entity_type}s'] = len(updated_data)
+        author["stat"][f"{entity_type}s"] = len(updated_data)
         await cache_author(author)
     return follows


 async def cache_follower(follower: Author, author: Author, is_insert=True):
-    redis_key = f'author:{author.id}:followers'
-    followers_str = await redis.execute('GET', redis_key)
+    redis_key = f"author:{author.id}:followers"
+    followers_str = await redis.execute("GET", redis_key)
     followers = []
     if isinstance(followers_str, str):
         followers = json.loads(followers_str)
     if is_insert:
         # Remove the entity from followers
-        followers = [e for e in followers if e['id'] != author.id]
+        followers = [e for e in followers if e["id"] != author.id]
     else:
         followers.append(follower)
-    updated_followers = [f.dict() if isinstance(f, Author) else f for f in followers]
+    updated_followers = [
+        f.dict() if isinstance(f, Author) else f for f in followers
+    ]
     payload = json.dumps(updated_followers, cls=CustomJSONEncoder)
-    await redis.execute('SET', redis_key, payload)
-    author_str = await redis.execute('GET', f'author:{follower.id}')
+    await redis.execute("SET", redis_key, payload)
+    author_str = await redis.execute("GET", f"author:{follower.id}")
     if author_str:
         author = json.loads(author_str)
-        author['stat']['followers'] = len(updated_followers)
+        author["stat"]["followers"] = len(updated_followers)
         await cache_author(author)
     return followers
@@ -5,8 +5,7 @@ import traceback
 import warnings
 from typing import Any, Callable, Dict, TypeVar

-from sqlalchemy import (JSON, Column, Engine, Integer, create_engine, event,
-                        exc, inspect)
+from sqlalchemy import JSON, Column, Engine, Integer, create_engine, event, exc, inspect
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import Session, configure_mappers
 from sqlalchemy.sql.schema import Table
@@ -19,13 +18,13 @@ from settings import DB_URL
 engine = create_engine(DB_URL, echo=False, pool_size=10, max_overflow=20)
 inspector = inspect(engine)
 configure_mappers()
-T = TypeVar('T')
+T = TypeVar("T")
 REGISTRY: Dict[str, type] = {}
-FILTERED_FIELDS = ['_sa_instance_state', 'search_vector']
+FILTERED_FIELDS = ["_sa_instance_state", "search_vector"]


 # noinspection PyUnusedLocal
-def local_session(src=''):
+def local_session(src=""):
     return Session(bind=engine, expire_on_commit=False)


@@ -36,7 +35,7 @@ class Base(declarative_base()):
     __init__: Callable
     __allow_unmapped__ = True
     __abstract__ = True
-    __table_args__ = {'extend_existing': True}
+    __table_args__ = {"extend_existing": True}

     id = Column(Integer, primary_key=True)

@@ -57,11 +56,11 @@ class Base(declarative_base()):
             else:
                 data[c] = value
             # Add synthetic field .stat
-            if hasattr(self, 'stat'):
-                data['stat'] = self.stat
+            if hasattr(self, "stat"):
+                data["stat"] = self.stat
             return data
         except Exception as e:
-            logger.error(f'Error occurred while converting object to dictionary: {e}')
+            logger.error(f"Error occurred while converting object to dictionary: {e}")
             return {}

     def update(self, values: Dict[str, Any]) -> None:
@@ -79,22 +78,22 @@ def warning_with_traceback(
     message: Warning | str, category, filename: str, lineno: int, file=None, line=None
 ):
     tb = traceback.format_stack()
-    tb_str = ''.join(tb)
-    return f'{message} ({filename}, {lineno}): {category.__name__}\n{tb_str}'
+    tb_str = "".join(tb)
+    return f"{message} ({filename}, {lineno}): {category.__name__}\n{tb_str}"


 # Установка функции вывода трейсбека для предупреждений SQLAlchemy
 warnings.showwarning = warning_with_traceback
-warnings.simplefilter('always', exc.SAWarning)
+warnings.simplefilter("always", exc.SAWarning)


-@event.listens_for(Engine, 'before_cursor_execute')
+@event.listens_for(Engine, "before_cursor_execute")
 def before_cursor_execute(conn, cursor, statement, parameters, context, executemany):
     conn.query_start_time = time.time()
-    conn.last_statement = ''
+    conn.last_statement = ""


-@event.listens_for(Engine, 'after_cursor_execute')
+@event.listens_for(Engine, "after_cursor_execute")
 def after_cursor_execute(conn, cursor, statement, parameters, context, executemany):
     compiled_statement = context.compiled.string
     compiled_parameters = context.compiled.params
@@ -105,7 +104,6 @@ def after_cursor_execute(conn, cursor, statement, parameters, context, executema
     else:
         query = compiled_statement # or handle this case in a way that makes sense for your application

-
     if elapsed > 1 and conn.last_statement != query:
         conn.last_statement = query
         logger.debug(f"\n{query}\n{'*' * math.floor(elapsed)} {elapsed:.3f} s\n")
@@ -29,19 +29,19 @@ def apply_diff(original, diff):
         The modified string.
     """
     result = []
-    pattern = re.compile(r'^(\+|-) ')
+    pattern = re.compile(r"^(\+|-) ")

     for line in diff:
         match = pattern.match(line)
         if match:
             op = match.group(1)
             content = line[2:]
-            if op == '+':
+            if op == "+":
                 result.append(content)
-            elif op == '-':
+            elif op == "-":
                 # Ignore deleted lines
                 pass
         else:
             result.append(line)

-    return ' '.join(result)
+    return " ".join(result)
@@ -4,51 +4,51 @@ import colorlog

 # Define the color scheme
 color_scheme = {
-    'DEBUG': 'light_black',
-    'INFO': 'green',
-    'WARNING': 'yellow',
-    'ERROR': 'red',
-    'CRITICAL': 'red,bg_white',
+    "DEBUG": "light_black",
+    "INFO": "green",
+    "WARNING": "yellow",
+    "ERROR": "red",
+    "CRITICAL": "red,bg_white",
 }

 # Define secondary log colors
 secondary_colors = {
-    'log_name': {'DEBUG': 'blue'},
-    'asctime': {'DEBUG': 'cyan'},
-    'process': {'DEBUG': 'purple'},
-    'module': {'DEBUG': 'light_black,bg_blue'},
-    'funcName': {'DEBUG': 'light_white,bg_blue'}, # Add this line
+    "log_name": {"DEBUG": "blue"},
+    "asctime": {"DEBUG": "cyan"},
+    "process": {"DEBUG": "purple"},
+    "module": {"DEBUG": "light_black,bg_blue"},
+    "funcName": {"DEBUG": "light_white,bg_blue"}, # Add this line
 }

 # Define the log format string
-fmt_string = '%(log_color)s%(levelname)s: %(log_color)s[%(module)s.%(funcName)s]%(reset)s %(white)s%(message)s'
+fmt_string = "%(log_color)s%(levelname)s: %(log_color)s[%(module)s.%(funcName)s]%(reset)s %(white)s%(message)s"

 # Define formatting configuration
 fmt_config = {
-    'log_colors': color_scheme,
-    'secondary_log_colors': secondary_colors,
-    'style': '%',
-    'reset': True,
+    "log_colors": color_scheme,
+    "secondary_log_colors": secondary_colors,
+    "style": "%",
+    "reset": True,
 }


 class MultilineColoredFormatter(colorlog.ColoredFormatter):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self.log_colors = kwargs.pop('log_colors', {})
-        self.secondary_log_colors = kwargs.pop('secondary_log_colors', {})
+        self.log_colors = kwargs.pop("log_colors", {})
+        self.secondary_log_colors = kwargs.pop("secondary_log_colors", {})

     def format(self, record):
         message = record.getMessage()
-        if '\n' in message:
-            lines = message.split('\n')
+        if "\n" in message:
+            lines = message.split("\n")
             first_line = lines[0]
             record.message = first_line
             formatted_first_line = super().format(record)
             formatted_lines = [formatted_first_line]
             for line in lines[1:]:
                 formatted_lines.append(line)
-            return '\n'.join(formatted_lines)
+            return "\n".join(formatted_lines)
         else:
             return super().format(record)

@@ -61,7 +61,7 @@ stream = logging.StreamHandler()
 stream.setFormatter(formatter)


-def get_colorful_logger(name='main'):
+def get_colorful_logger(name="main"):
     # Create and configure the logger
     logger = logging.getLogger(name)
     logger.setLevel(logging.DEBUG)
@@ -75,7 +75,7 @@ root_logger = logging.getLogger()
 root_logger.setLevel(logging.DEBUG)
 root_logger.addHandler(stream)

-ignore_logs = ['_trace', 'httpx', '_client', '_trace.atrace', 'aiohttp', '_client']
+ignore_logs = ["_trace", "httpx", "_client", "_trace.atrace", "aiohttp", "_client"]
 for lgr in ignore_logs:
     loggr = logging.getLogger(lgr)
     loggr.setLevel(logging.INFO)
@@ -5,7 +5,7 @@ from settings import REDIS_URL
 # Создание региона кэша с TTL
 cache_region = make_region()
 cache_region.configure(
-    'dogpile.cache.redis',
-    arguments={'url': f'{REDIS_URL}/1'},
+    "dogpile.cache.redis",
+    arguments={"url": f"{REDIS_URL}/1"},
     expiration_time=3600, # Cache expiration time in seconds
 )
@@ -5,6 +5,7 @@ from services.db import local_session
 from services.rediscache import redis
+from services.logger import root_logger as logger


 def save_notification(action: str, entity: str, payload):
     with local_session() as session:
         n = Notification(action=action, entity=entity, payload=payload)
@@ -12,44 +13,43 @@ def save_notification(action: str, entity: str, payload):
         session.commit()


-async def notify_reaction(reaction, action: str = 'create'):
-    channel_name = 'reaction'
-    data = {'payload': reaction, 'action': action}
+async def notify_reaction(reaction, action: str = "create"):
+    channel_name = "reaction"
+    data = {"payload": reaction, "action": action}
     try:
-        save_notification(action, channel_name, data.get('payload'))
+        save_notification(action, channel_name, data.get("payload"))
         await redis.publish(channel_name, json.dumps(data))
     except Exception as e:
-        logger.error(f'Failed to publish to channel {channel_name}: {e}')
+        logger.error(f"Failed to publish to channel {channel_name}: {e}")


-async def notify_shout(shout, action: str = 'update'):
-    channel_name = 'shout'
-    data = {'payload': shout, 'action': action}
+async def notify_shout(shout, action: str = "update"):
+    channel_name = "shout"
+    data = {"payload": shout, "action": action}
     try:
-        save_notification(action, channel_name, data.get('payload'))
+        save_notification(action, channel_name, data.get("payload"))
         await redis.publish(channel_name, json.dumps(data))
     except Exception as e:
-        logger.error(f'Failed to publish to channel {channel_name}: {e}')
+        logger.error(f"Failed to publish to channel {channel_name}: {e}")


-async def notify_follower(follower: dict, author_id: int, action: str = 'follow'):
-    channel_name = f'follower:{author_id}'
+async def notify_follower(follower: dict, author_id: int, action: str = "follow"):
+    channel_name = f"follower:{author_id}"
     try:
         # Simplify dictionary before publishing
-        simplified_follower = {k: follower[k] for k in ['id', 'name', 'slug', 'pic']}
-        data = {'payload': simplified_follower, 'action': action}
+        simplified_follower = {k: follower[k] for k in ["id", "name", "slug", "pic"]}
+        data = {"payload": simplified_follower, "action": action}
         # save in channel
-        save_notification(action, channel_name, data.get('payload'))
+        save_notification(action, channel_name, data.get("payload"))

         # Convert data to JSON string
         json_data = json.dumps(data)

         # Ensure the data is not empty before publishing
         if json_data:
             # Use the 'await' keyword when publishing
             await redis.publish(channel_name, json_data)

-
     except Exception as e:
         # Log the error and re-raise it
-        logger.error(f'Failed to publish to channel {channel_name}: {e}')
+        logger.error(f"Failed to publish to channel {channel_name}: {e}")
@@ -5,7 +5,7 @@ import redis.asyncio as aredis
 from settings import REDIS_URL

 # Set redis logging level to suppress DEBUG messages
-logger = logging.getLogger('redis')
+logger = logging.getLogger("redis")
 logger.setLevel(logging.WARNING)

@@ -25,11 +25,11 @@ class RedisCache:
     async def execute(self, command, *args, **kwargs):
         if self._client:
             try:
-                logger.debug(f'{command} {args} {kwargs}')
+                logger.debug(f"{command} {args} {kwargs}")
                 for arg in args:
                     if isinstance(arg, dict):
-                        if arg.get('_sa_instance_state'):
-                            del arg['_sa_instance_state']
+                        if arg.get("_sa_instance_state"):
+                            del arg["_sa_instance_state"]
                 r = await self._client.execute_command(command, *args, **kwargs)
                 logger.debug(type(r))
                 logger.debug(r)
@@ -60,4 +60,4 @@ class RedisCache:

 redis = RedisCache()

-__all__ = ['redis']
+__all__ = ["redis"]
@@ -8,51 +8,51 @@ from services.encoders import CustomJSONEncoder
 from services.logger import root_logger as logger
 from services.rediscache import redis

-ELASTIC_HOST = os.environ.get('ELASTIC_HOST', '').replace('https://', '')
-ELASTIC_USER = os.environ.get('ELASTIC_USER', '')
-ELASTIC_PASSWORD = os.environ.get('ELASTIC_PASSWORD', '')
-ELASTIC_PORT = os.environ.get('ELASTIC_PORT', 9200)
-ELASTIC_AUTH = f'{ELASTIC_USER}:{ELASTIC_PASSWORD}' if ELASTIC_USER else ''
+ELASTIC_HOST = os.environ.get("ELASTIC_HOST", "").replace("https://", "")
+ELASTIC_USER = os.environ.get("ELASTIC_USER", "")
+ELASTIC_PASSWORD = os.environ.get("ELASTIC_PASSWORD", "")
+ELASTIC_PORT = os.environ.get("ELASTIC_PORT", 9200)
+ELASTIC_AUTH = f"{ELASTIC_USER}:{ELASTIC_PASSWORD}" if ELASTIC_USER else ""
 ELASTIC_URL = os.environ.get(
-    'ELASTIC_URL', f'https://{ELASTIC_AUTH}@{ELASTIC_HOST}:{ELASTIC_PORT}'
+    "ELASTIC_URL", f"https://{ELASTIC_AUTH}@{ELASTIC_HOST}:{ELASTIC_PORT}"
 )
 REDIS_TTL = 86400 # 1 day in seconds

 index_settings = {
-    'settings': {
-        'index': {'number_of_shards': 1, 'auto_expand_replicas': '0-all'},
-        'analysis': {
-            'analyzer': {
-                'ru': {
-                    'tokenizer': 'standard',
-                    'filter': ['lowercase', 'ru_stop', 'ru_stemmer'],
+    "settings": {
+        "index": {"number_of_shards": 1, "auto_expand_replicas": "0-all"},
+        "analysis": {
+            "analyzer": {
+                "ru": {
+                    "tokenizer": "standard",
+                    "filter": ["lowercase", "ru_stop", "ru_stemmer"],
                 }
             },
-            'filter': {
-                'ru_stemmer': {'type': 'stemmer', 'language': 'russian'},
-                'ru_stop': {'type': 'stop', 'stopwords': '_russian_'},
+            "filter": {
+                "ru_stemmer": {"type": "stemmer", "language": "russian"},
+                "ru_stop": {"type": "stop", "stopwords": "_russian_"},
             },
         },
     },
-    'mappings': {
-        'properties': {
-            'body': {'type': 'text', 'analyzer': 'ru'},
-            'title': {'type': 'text', 'analyzer': 'ru'},
-            'subtitle': {'type': 'text', 'analyzer': 'ru'},
-            'lead': {'type': 'text', 'analyzer': 'ru'},
+    "mappings": {
+        "properties": {
+            "body": {"type": "text", "analyzer": "ru"},
+            "title": {"type": "text", "analyzer": "ru"},
+            "subtitle": {"type": "text", "analyzer": "ru"},
+            "lead": {"type": "text", "analyzer": "ru"},
             # 'author': {'type': 'text'},
         }
     },
 }

-expected_mapping = index_settings['mappings']
+expected_mapping = index_settings["mappings"]

 # Create an event loop
 search_loop = asyncio.get_event_loop()


 class SearchService:
-    def __init__(self, index_name='search_index'):
+    def __init__(self, index_name="search_index"):
         self.index_name = index_name
         self.client = None
         self.lock = asyncio.Lock() # Create an asyncio lock
@@ -61,7 +61,7 @@ class SearchService:
         if ELASTIC_HOST:
             try:
                 self.client = OpenSearch(
-                    hosts=[{'host': ELASTIC_HOST, 'port': ELASTIC_PORT}],
+                    hosts=[{"host": ELASTIC_HOST, "port": ELASTIC_PORT}],
                     http_compress=True,
                     http_auth=(ELASTIC_USER, ELASTIC_PASSWORD),
                     use_ssl=True,
@@ -70,52 +70,52 @@ class SearchService:
                     ssl_show_warn=False,
                     # ca_certs = ca_certs_path
                 )
-                logger.info(' Клиент OpenSearch.org подключен')
+                logger.info(" Клиент OpenSearch.org подключен")

                 # Create a task and run it in the event loop
                 search_loop.create_task(self.check_index())
             except Exception as exc:
-                logger.error(f' {exc}')
+                logger.error(f" {exc}")
                 self.client = None

     def info(self):
         if isinstance(self.client, OpenSearch):
-            logger.info(' Поиск подключен') # : {self.client.info()}')
+            logger.info(" Поиск подключен") # : {self.client.info()}')
         else:
-            logger.info(' * Задайте переменные среды для подключения к серверу поиска')
+            logger.info(" * Задайте переменные среды для подключения к серверу поиска")

     def delete_index(self):
         if self.client:
-            logger.debug(f' Удаляем индекс {self.index_name}')
+            logger.debug(f" Удаляем индекс {self.index_name}")
             self.client.indices.delete(index=self.index_name, ignore_unavailable=True)

     def create_index(self):
         if self.client:
-            logger.debug(f'Создается индекс: {self.index_name}')
+            logger.debug(f"Создается индекс: {self.index_name}")
             self.delete_index()
             self.client.indices.create(index=self.index_name, body=index_settings)
-            logger.debug(f'Индекс {self.index_name} создан')
+            logger.debug(f"Индекс {self.index_name} создан")

     async def check_index(self):
         if self.client:
-            logger.debug(f' Проверяем индекс {self.index_name}...')
+            logger.debug(f" Проверяем индекс {self.index_name}...")
             if not self.client.indices.exists(index=self.index_name):
                 self.create_index()
                 self.client.indices.put_mapping(
                     index=self.index_name, body=expected_mapping
                 )
             else:
-                logger.info(f'найден существующий индекс {self.index_name}')
+                logger.info(f"найден существующий индекс {self.index_name}")
                 # Check if the mapping is correct, and recreate the index if needed
                 result = self.client.indices.get_mapping(index=self.index_name)
                 if isinstance(result, str):
                     result = json.loads(result)
                 if isinstance(result, dict):
-                    mapping = result.get('mapping')
+                    mapping = result.get("mapping")
                     if mapping and mapping != expected_mapping:
-                        logger.debug(f' найдена структура индексации: {mapping}')
+                        logger.debug(f" найдена структура индексации: {mapping}")
                         logger.warn(
-                            ' требуется другая структура индексации, переиндексация'
+                            " требуется другая структура индексации, переиндексация"
                         )
                         await self.recreate_index()

@@ -130,28 +130,30 @@ class SearchService:
     def index(self, shout):
         if self.client:
             id_ = str(shout.id)
-            logger.debug(f' Индексируем пост {id_}')
+            logger.debug(f" Индексируем пост {id_}")
             asyncio.create_task(self.perform_index(shout))

     async def perform_index(self, shout):
         if self.client:
-            self.client.index(index=self.index_name, id=str(shout.id), body=shout.dict())
+            self.client.index(
+                index=self.index_name, id=str(shout.id), body=shout.dict()
+            )

     async def search(self, text, limit, offset):
-        logger.debug(f' Ищем: {text}')
-        search_body = {'query': {'match': {'_all': text}}}
+        logger.debug(f" Ищем: {text}")
+        search_body = {"query": {"match": {"_all": text}}}
         if self.client:
             search_response = self.client.search(
                 index=self.index_name, body=search_body, size=limit, from_=offset
             )
-            hits = search_response['hits']['hits']
+            hits = search_response["hits"]["hits"]

-            results = [{**hit['_source'], 'score': hit['_score']} for hit in hits]
+            results = [{**hit["_source"], "score": hit["_score"]} for hit in hits]

             # Use Redis as cache with TTL
-            redis_key = f'search:{text}'
+            redis_key = f"search:{text}"
             await redis.execute(
-                'SETEX',
+                "SETEX",
                 redis_key,
                 REDIS_TTL,
                 json.dumps(results, cls=CustomJSONEncoder),
@@ -26,5 +26,5 @@ def start_sentry():
             ],
         )
     except Exception as e:
-        print('[services.sentry] init error')
+        print("[services.sentry] init error")
         print(e)
@@ -14,13 +14,15 @@ from services.logger import root_logger as logger
 from services.cache import cache_author, cache_follows, cache_follower

 DEFAULT_FOLLOWS = {
-    'topics': [],
-    'authors': [],
-    'communities': [{'id': 1, 'name': 'Дискурс', 'slug': 'discours', 'pic': ''}],
+    "topics": [],
+    "authors": [],
+    "communities": [{"id": 1, "name": "Дискурс", "slug": "discours", "pic": ""}],
 }


-async def handle_author_follower_change(author_id: int, follower_id: int, is_insert: bool):
+async def handle_author_follower_change(
+    author_id: int, follower_id: int, is_insert: bool
+):
     logger.info(author_id)
     author_query = select(Author).select_from(Author).filter(Author.id == author_id)
     [author] = get_with_stat(author_query)
@@ -29,11 +31,13 @@ async def handle_author_follower_change(author_id: int, follower_id: int, is_ins
     if follower and author:
         await cache_author(author.dict())
         await cache_author(follower.dict())
-        await cache_follows(follower, 'author', author.dict(), is_insert)
+        await cache_follows(follower, "author", author.dict(), is_insert)
         await cache_follower(follower, author, is_insert)


-async def handle_topic_follower_change(topic_id: int, follower_id: int, is_insert: bool):
+async def handle_topic_follower_change(
+    topic_id: int, follower_id: int, is_insert: bool
+):
     logger.info(topic_id)
     topic_query = select(Topic).filter(Topic.id == topic_id)
     [topic] = get_with_stat(topic_query)
@@ -41,15 +45,17 @@ async def handle_topic_follower_change(topic_id: int, follower_id: int, is_inser
     [follower] = get_with_stat(follower_query)
     if follower and topic:
         await cache_author(follower.dict())
-        await redis.execute('SET', f'topic:{topic.id}', json.dumps(topic.dict(), cls=CustomJSONEncoder))
-        await cache_follows(follower, 'topic', topic.dict(), is_insert)
+        await redis.execute(
+            "SET", f"topic:{topic.id}", json.dumps(topic.dict(), cls=CustomJSONEncoder)
+        )
+        await cache_follows(follower, "topic", topic.dict(), is_insert)


 # handle_author_follow and handle_topic_follow -> cache_author, cache_follows, cache_followers


 def after_shout_update(_mapper, _connection, shout: Shout):
-    logger.info('after shout update')
+    logger.info("after shout update")
     # Main query to get authors associated with the shout through ShoutAuthor
     authors_query = (
         select(Author)
@@ -63,7 +69,7 @@ def after_shout_update(_mapper, _connection, shout: Shout):


 def after_reaction_update(mapper, connection, reaction: Reaction):
-    logger.info('after reaction update')
+    logger.info("after reaction update")
     try:
         author_subquery = select(Author).where(Author.id == reaction.created_by)
         replied_author_subquery = (
@@ -98,7 +104,7 @@ def after_reaction_update(mapper, connection, reaction: Reaction):


 def after_author_update(_mapper, _connection, author: Author):
-    logger.info('after author update')
+    logger.info("after author update")
     q = select(Author).where(Author.id == author.id)
     result = get_with_stat(q)
     if result:
@@ -135,19 +141,19 @@ def after_author_follower_delete(_mapper, _connection, target: AuthorFollower):


 def events_register():
-    event.listen(Shout, 'after_insert', after_shout_update)
-    event.listen(Shout, 'after_update', after_shout_update)
+    event.listen(Shout, "after_insert", after_shout_update)
+    event.listen(Shout, "after_update", after_shout_update)

-    event.listen(Reaction, 'after_insert', after_reaction_update)
-    event.listen(Reaction, 'after_update', after_reaction_update)
+    event.listen(Reaction, "after_insert", after_reaction_update)
+    event.listen(Reaction, "after_update", after_reaction_update)

-    event.listen(Author, 'after_insert', after_author_update)
-    event.listen(Author, 'after_update', after_author_update)
+    event.listen(Author, "after_insert", after_author_update)
+    event.listen(Author, "after_update", after_author_update)

-    event.listen(AuthorFollower, 'after_insert', after_author_follower_insert)
-    event.listen(AuthorFollower, 'after_delete', after_author_follower_delete)
+    event.listen(AuthorFollower, "after_insert", after_author_follower_insert)
+    event.listen(AuthorFollower, "after_delete", after_author_follower_delete)

-    event.listen(TopicFollower, 'after_insert', after_topic_follower_insert)
-    event.listen(TopicFollower, 'after_delete', after_topic_follower_delete)
+    event.listen(TopicFollower, "after_insert", after_topic_follower_insert)
+    event.listen(TopicFollower, "after_delete", after_topic_follower_delete)

-    logger.info('cache events were registered!')
+    logger.info("cache events were registered!")
@@ -4,7 +4,7 @@ from services.rediscache import redis


 async def get_unread_counter(chat_id: str, author_id: int) -> int:
-    r = await redis.execute('LLEN', f'chats/{chat_id}/unread/{author_id}')
+    r = await redis.execute("LLEN", f"chats/{chat_id}/unread/{author_id}")
     if isinstance(r, str):
         return int(r)
     elif isinstance(r, int):
@@ -14,7 +14,7 @@ async def get_unread_counter(chat_id: str, author_id: int) -> int:


 async def get_total_unread_counter(author_id: int) -> int:
-    chats_set = await redis.execute('SMEMBERS', f'chats_by_author/{author_id}')
+    chats_set = await redis.execute("SMEMBERS", f"chats_by_author/{author_id}")
     s = 0
     if isinstance(chats_set, str):
         chats_set = json.loads(chats_set)
@@ -7,8 +7,12 @@ from typing import Dict

 # ga
 from google.analytics.data_v1beta import BetaAnalyticsDataClient
-from google.analytics.data_v1beta.types import (DateRange, Dimension, Metric,
-                                                RunReportRequest)
+from google.analytics.data_v1beta.types import (
+    DateRange,
+    Dimension,
+    Metric,
+    RunReportRequest,
+)

 from orm.author import Author
 from orm.shout import Shout, ShoutAuthor, ShoutTopic
@@ -16,9 +20,9 @@ from orm.topic import Topic
 from services.db import local_session
 from services.logger import root_logger as logger

-GOOGLE_KEYFILE_PATH = os.environ.get('GOOGLE_KEYFILE_PATH', '/dump/google-service.json')
-GOOGLE_PROPERTY_ID = os.environ.get('GOOGLE_PROPERTY_ID', '')
-VIEWS_FILEPATH = '/dump/views.json'
+GOOGLE_KEYFILE_PATH = os.environ.get("GOOGLE_KEYFILE_PATH", "/dump/google-service.json")
+GOOGLE_PROPERTY_ID = os.environ.get("GOOGLE_PROPERTY_ID", "")
+VIEWS_FILEPATH = "/dump/views.json"


 class ViewedStorage:
@@ -41,17 +45,17 @@ class ViewedStorage:
         # Загрузка предварительно подсчитанных просмотров из файла JSON
         self.load_precounted_views()

-        os.environ.setdefault('GOOGLE_APPLICATION_CREDENTIALS', GOOGLE_KEYFILE_PATH)
+        os.environ.setdefault("GOOGLE_APPLICATION_CREDENTIALS", GOOGLE_KEYFILE_PATH)
         if GOOGLE_KEYFILE_PATH and os.path.isfile(GOOGLE_KEYFILE_PATH):
             # Using a default constructor instructs the client to use the credentials
             # specified in GOOGLE_APPLICATION_CREDENTIALS environment variable.
             self.analytics_client = BetaAnalyticsDataClient()
-            logger.info(' * Клиент Google Analytics успешно авторизован')
+            logger.info(" * Клиент Google Analytics успешно авторизован")

             # Запуск фоновой задачи
             _task = asyncio.create_task(self.worker())
         else:
-            logger.info(' * Пожалуйста, добавьте ключевой файл Google Analytics')
+            logger.info(" * Пожалуйста, добавьте ключевой файл Google Analytics")
             self.disabled = True

     @staticmethod
@@ -63,44 +67,44 @@ class ViewedStorage:
                 self.file_modification_timestamp = os.path.getmtime(VIEWS_FILEPATH)
                 self.start_date = datetime.fromtimestamp(
                     self.file_modification_timestamp
-                ).strftime('%Y-%m-%d')
-                now_date = datetime.now().strftime('%Y-%m-%d')
+                ).strftime("%Y-%m-%d")
+                now_date = datetime.now().strftime("%Y-%m-%d")

                 if now_date == self.start_date:
-                    logger.info(' * Данные актуализованы!')
+                    logger.info(" * Данные актуализованы!")
                 else:
                     logger.warn(
-                        f' * Файл просмотров {VIEWS_FILEPATH} устарел: {self.start_date}'
+                        f" * Файл просмотров {VIEWS_FILEPATH} устарел: {self.start_date}"
                     )

-                with open(VIEWS_FILEPATH, 'r') as file:
+                with open(VIEWS_FILEPATH, "r") as file:
                     precounted_views = json.load(file)
                     self.views_by_shout.update(precounted_views)
                     logger.info(
-                        f' * {len(precounted_views)} публикаций с просмотрами успешно загружены.'
+                        f" * {len(precounted_views)} публикаций с просмотрами успешно загружены."
                     )
             else:
-                logger.info(' * Файл просмотров не найден.')
+                logger.info(" * Файл просмотров не найден.")
         except Exception as e:
-            logger.error(f'Ошибка загрузки предварительно подсчитанных просмотров: {e}')
+            logger.error(f"Ошибка загрузки предварительно подсчитанных просмотров: {e}")

     # noinspection PyTypeChecker
     @staticmethod
     async def update_pages():
         """Запрос всех страниц от Google Analytics, отсортированных по количеству просмотров"""
         self = ViewedStorage
-        logger.info(' ⎧ Обновление данных просмотров от Google Analytics ---')
+        logger.info(" ⎧ Обновление данных просмотров от Google Analytics ---")
         if not self.disabled:
             try:
                 start = time.time()
                 async with self.lock:
                     if self.analytics_client:
                         request = RunReportRequest(
-                            property=f'properties/{GOOGLE_PROPERTY_ID}',
-                            dimensions=[Dimension(name='pagePath')],
-                            metrics=[Metric(name='screenPageViews')],
+                            property=f"properties/{GOOGLE_PROPERTY_ID}",
+                            dimensions=[Dimension(name="pagePath")],
+                            metrics=[Metric(name="screenPageViews")],
                             date_ranges=[
-                                DateRange(start_date=self.start_date, end_date='today')
+                                DateRange(start_date=self.start_date, end_date="today")
                             ],
                         )
                         response = self.analytics_client.run_report(request)
@@ -114,7 +118,7 @@ class ViewedStorage:
                             # Извлечение путей страниц из ответа Google Analytics
                             if isinstance(row.dimension_values, list):
                                 page_path = row.dimension_values[0].value
-                                slug = page_path.split('discours.io/')[-1]
+                                slug = page_path.split("discours.io/")[-1]
                                 views_count = int(row.metric_values[0].value)

                                 # Обновление данных в хранилище
@@ -127,10 +131,10 @@ class ViewedStorage:
                                 # Запись путей страниц для логирования
                                 slugs.add(slug)

-                logger.info(f' ⎪ Собрано страниц: {len(slugs)} ')
+                logger.info(f" ⎪ Собрано страниц: {len(slugs)} ")

                 end = time.time()
-                logger.info(' ⎪ Обновление страниц заняло %fs ' % (end - start))
+                logger.info(" ⎪ Обновление страниц заняло %fs " % (end - start))
             except Exception as error:
                 logger.error(error)

@@ -211,18 +215,18 @@ class ViewedStorage:
             except Exception as exc:
                 failed += 1
                 logger.debug(exc)
-                logger.info(' - Обновление не удалось #%d, ожидание 10 секунд' % failed)
+                logger.info(" - Обновление не удалось #%d, ожидание 10 секунд" % failed)
                 if failed > 3:
-                    logger.info(' - Больше не пытаемся обновить')
+                    logger.info(" - Больше не пытаемся обновить")
                     break
             if failed == 0:
                 when = datetime.now(timezone.utc) + timedelta(seconds=self.period)
                 t = format(when.astimezone().isoformat())
                 logger.info(
-                    ' ⎩ Следующее обновление: %s'
-                    % (t.split('T')[0] + ' ' + t.split('T')[1].split('.')[0])
+                    " ⎩ Следующее обновление: %s"
+                    % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0])
                 )
                 await asyncio.sleep(self.period)
             else:
                 await asyncio.sleep(10)
-                logger.info(' - Попытка снова обновить данные')
+                logger.info(" - Попытка снова обновить данные")
@@ -15,50 +15,50 @@ class WebhookEndpoint(HTTPEndpoint):
         try:
             data = await request.json()
             if not data:
-                raise HTTPException(status_code=400, detail='Request body is empty')
-            auth = request.headers.get('Authorization')
-            if not auth or auth != os.environ.get('WEBHOOK_SECRET'):
+                raise HTTPException(status_code=400, detail="Request body is empty")
+            auth = request.headers.get("Authorization")
+            if not auth or auth != os.environ.get("WEBHOOK_SECRET"):
                 raise HTTPException(
-                    status_code=401, detail='Invalid Authorization header'
+                    status_code=401, detail="Invalid Authorization header"
                 )
             # logger.debug(data)
-            user = data.get('user')
+            user = data.get("user")
             if not isinstance(user, dict):
                 raise HTTPException(
-                    status_code=400, detail='User data is not a dictionary'
+                    status_code=400, detail="User data is not a dictionary"
                 )
-            user_id: str = user.get('id', '')
+            user_id: str = user.get("id", "")
             name: str = (
                 f"{user.get('given_name', user.get('slug'))} {user.get('middle_name', '')}"
                 + f"{user.get('family_name', '')}".strip()
-            ) or 'Аноним'
-            email: str = user.get('email', '')
-            pic: str = user.get('picture', '')
+            ) or "Аноним"
+            email: str = user.get("email", "")
+            pic: str = user.get("picture", "")

             with local_session() as session:
                 author = session.query(Author).filter(Author.user == user_id).first()
                 if not author:
                     # If the author does not exist, create a new one
-                    slug: str = email.split('@')[0].replace('.', '-').lower()
-                    slug: str = re.sub('[^0-9a-z]+', '-', slug)
+                    slug: str = email.split("@")[0].replace(".", "-").lower()
+                    slug: str = re.sub("[^0-9a-z]+", "-", slug)
                     while True:
                         author = (
                             session.query(Author).filter(Author.slug == slug).first()
                         )
                         if not author:
                             break
-                        slug = f'{slug}-{len(session.query(Author).filter(Author.email == email).all()) + 1}'
+                        slug = f"{slug}-{len(session.query(Author).filter(Author.email == email).all()) + 1}"
                     author = Author(user=user_id, slug=slug, name=name, pic=pic)
                     session.add(author)
                     session.commit()

-            return JSONResponse({'status': 'success'})
+            return JSONResponse({"status": "success"})
         except HTTPException as e:
             return JSONResponse(
-                {'status': 'error', 'message': str(e.detail)}, status_code=e.status_code
+                {"status": "error", "message": str(e.detail)}, status_code=e.status_code
             )
         except Exception as e:
             import traceback

             traceback.print_exc()
-            return JSONResponse({'status': 'error', 'message': str(e)}, status_code=500)
+            return JSONResponse({"status": "error", "message": str(e)}, status_code=500)