# inbox/services/core.py
import asyncio
import logging
from datetime import datetime, timedelta, timezone
from typing import List

import requests

from models.member import ChatMember
from settings import API_BASE

logger = logging.getLogger("[services.core] ")
logger.setLevel(logging.DEBUG)

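# Assumption (not stated in this file): API_BASE points at a GraphQL endpoint that
# answers POSTed queries with the usual {"data": {<query_name>: ...}} envelope.
# _request_endpoint() below unwraps that envelope and returns {} on any failure
# (non-200 status, undecodable JSON, or a missing key).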
def _request_endpoint(query_name, body) -> dict:
    logger.debug(f"requesting {query_name}...")
    response = requests.post(
        API_BASE, headers={"Content-Type": "application/json"}, json=body, timeout=30.0
    )

    if response.status_code == 200:
        try:
            r = response.json()
            result = r.get("data", {}).get(query_name, {})
            if result:
                logger.info(f"entries amount in result: {len(result)}")
            return result
        except ValueError as e:
            logger.error(f"Error decoding JSON response: {e}")

    return {}

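# The field names in the queries below define the shape of each returned author
# entry; assuming the backend honours the selection set, a single entry looks
# roughly like {"id": 1, "slug": "some-author", "pic": "...", "name": "...", "user": "..."}
# (values are illustrative only).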
def get_all_authors():
    query_name = "get_authors_all"

    gql = {
        "query": "query { " + query_name + "{ id slug pic name user } }",
        "variables": None,
    }

    return _request_endpoint(query_name, gql)

def get_author_by_user(user: str):
    operation = "GetAuthorId"
    query_name = "get_author_id"
    gql = {
        "query": f"query {operation}($user: String!) {{ {query_name}(user: $user){{ id }} }}",  # noqa: E201, E202
        "operationName": operation,
        "variables": {"user": user.strip()},
    }
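    # For user="some-user" (a hypothetical value), the f-string above renders to roughly:
    #   query GetAuthorId($user: String!) { get_author_id(user: $user){ id } }
    # with the actual value sent separately via "variables".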

    return _request_endpoint(query_name, gql)

def get_my_followed() -> List[ChatMember]:
    query_name = "get_my_followed"
    gql = {
        "query": "query { " + query_name + " { authors { id slug pic name } } }",
        "variables": None,
    }

    result = _request_endpoint(query_name, gql)
    return result.get("authors", [])

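# In-process cache of author profiles, refreshed by a background asyncio task.
# Consumers presumably read the class-level dicts (authors_by_user, authors_by_id)
# directly; there is no per-instance state, which is why the methods rebind
# self = CacheStorage instead of working with instances.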
class CacheStorage:
    lock = asyncio.Lock()
    period = 5 * 60  # refresh every 5 minutes
    client = None
    authors = []
    authors_by_user = {}
    authors_by_id = {}

    @staticmethod
    async def init():
        """Start the background task that keeps the author cache warm."""
        self = CacheStorage
        async with self.lock:
            task = asyncio.create_task(self.worker())
            logger.info(task)

    @staticmethod
    async def update_authors():
        self = CacheStorage
        async with self.lock:
            result = get_all_authors()
            logger.info(f"cache loaded {len(result)}")
            if result:
                CacheStorage.authors = result
                for a in result:
                    user_id = a.get("user")
                    author_id = str(a.get("id"))
                    self.authors_by_user[user_id] = a
                    self.authors_by_id[author_id] = a

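    # Scheduling note: worker() refreshes every `period` seconds on success,
    # retries after 10 seconds on failure, and gives up entirely once four
    # consecutive attempts have failed.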
    @staticmethod
    async def worker():
        """async task worker"""
        failed = 0
        self = CacheStorage
        while True:
            try:
                logger.info(" - updating profiles data...")
                await self.update_authors()
                failed = 0
            except Exception as er:
                failed += 1
                logger.error(f"{er} - update failed #{failed}, wait 10 seconds")
                if failed > 3:
                    logger.error(" - not trying to update anymore")
                    import traceback

                    traceback.print_exc()
                    break
            if failed == 0:
                when = datetime.now(timezone.utc) + timedelta(seconds=self.period)
                t = when.astimezone().isoformat()
                logger.info(
                    " ⎩ next update: %s"
                    % (t.split("T")[0] + " " + t.split("T")[1].split(".")[0])
                )
                await asyncio.sleep(self.period)
            else:
                await asyncio.sleep(10)
                logger.info(" - trying to update data again")
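

# Usage sketch (an assumption, not shown in this module): the cache is expected
# to be started once from the application's async startup hook, after which
# request handlers read the populated dictionaries directly, e.g.:
#
#   await CacheStorage.init()
#   author = CacheStorage.authors_by_user.get(some_user_id)  # some_user_id is a placeholder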