diff --git a/Dockerfile b/Dockerfile index 2c4874b1..ad15f9b4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,4 +13,7 @@ RUN pip install -r requirements.txt COPY . . -CMD ["python", "-m", "granian", "main:app", "--interface", "asgi", "--host", "0.0.0.0", "--port", "8000"] \ No newline at end of file +ENV PORT=8000 +EXPOSE $PORT + +CMD python -m granian main:app --interface asgi --host 0.0.0.0 --port $PORT \ No newline at end of file diff --git a/cache/cache.py b/cache/cache.py index 75f7ec68..6057300a 100644 --- a/cache/cache.py +++ b/cache/cache.py @@ -1,6 +1,8 @@ import asyncio +import json from typing import List +import jsonschema import orjson from sqlalchemy import and_, join, select @@ -35,7 +37,7 @@ CACHE_KEYS = { # Cache topic data async def cache_topic(topic: dict): - payload = orjson.dumps(topic, cls=CustomJSONEncoder) + payload = json.dumps(topic, cls=CustomJSONEncoder) await asyncio.gather( redis_operation("SET", f"topic:id:{topic['id']}", payload), redis_operation("SET", f"topic:slug:{topic['slug']}", payload), @@ -44,7 +46,7 @@ # Cache author data async def cache_author(author: dict): - payload = orjson.dumps(author, cls=CustomJSONEncoder) + payload = json.dumps(author, cls=CustomJSONEncoder) await asyncio.gather( redis_operation("SET", f"author:user:{author['user'].strip()}", str(author["id"])), redis_operation("SET", f"author:id:{author['id']}", payload), @@ -61,7 +63,7 @@ async def cache_follows(follower_id: int, entity_type: str, entity_id: int, is_i follows.append(entity_id) else: follows = [eid for eid in follows if eid != entity_id] - await redis_operation("SET", key, orjson.dumps(follows, cls=CustomJSONEncoder)) + await redis_operation("SET", key, json.dumps(follows, cls=CustomJSONEncoder)) await update_follower_stat(follower_id, entity_type, len(follows)) @@ -112,7 +114,7 @@ async def get_cached_topic(topic_id: int): topic = session.execute(select(Topic).where(Topic.id == 
topic_id)).scalar_one_or_none() if topic: topic_dict = topic.dict() - await redis_operation("SET", topic_key, orjson.dumps(topic_dict, cls=CustomJSONEncoder)) + await redis_operation("SET", topic_key, json.dumps(topic_dict, cls=CustomJSONEncoder)) return topic_dict return None @@ -378,7 +380,7 @@ async def invalidate_shouts_cache(cache_keys: List[str]): async def cache_topic_shouts(topic_id: int, shouts: List[dict]): """Кэширует список публикаций для темы""" key = f"topic_shouts_{topic_id}" - payload = orjson.dumps(shouts, cls=CustomJSONEncoder) + payload = json.dumps(shouts, cls=CustomJSONEncoder) await redis_operation("SETEX", key, value=payload, ttl=CACHE_TTL) diff --git a/cache/memorycache.py b/cache/memorycache.py index 7cfc94eb..d2b666d9 100644 --- a/cache/memorycache.py +++ b/cache/memorycache.py @@ -6,6 +6,7 @@ import functools import hashlib import inspect +import json import logging import pickle from typing import Callable, Optional @@ -77,7 +78,7 @@ class RedisCache: # Сохранение результата в кеш try: # Пытаемся сериализовать как JSON - serialized = orjson.dumps(result, cls=CustomJSONEncoder) + serialized = json.dumps(result, cls=CustomJSONEncoder) except (TypeError, ValueError): # Если не удалось, используем pickle serialized = pickle.dumps(result).decode() @@ -98,7 +99,7 @@ class RedisCache: try: import asyncio - serialized = orjson.dumps(result, cls=CustomJSONEncoder) + serialized = json.dumps(result, cls=CustomJSONEncoder) asyncio.create_task(redis.set(key, serialized, ex=self.ttl)) except Exception as e: logger.error(f"Ошибка при кешировании результата: {e}") diff --git a/cache/precache.py b/cache/precache.py index b0faec5f..5df91f2d 100644 --- a/cache/precache.py +++ b/cache/precache.py @@ -1,4 +1,5 @@ import asyncio +import json import orjson from sqlalchemy import and_, join, select @@ -21,7 +22,7 @@ async def precache_authors_followers(author_id, session): result = session.execute(followers_query) authors_followers.update(row[0] for row in 
result if row[0]) - followers_payload = orjson.dumps(list(authors_followers), cls=CustomJSONEncoder) + followers_payload = json.dumps(list(authors_followers), cls=CustomJSONEncoder) await redis.execute("SET", f"author:followers:{author_id}", followers_payload) @@ -35,9 +36,9 @@ async def precache_authors_follows(author_id, session): follows_authors = {row[0] for row in session.execute(follows_authors_query) if row[0]} follows_shouts = {row[0] for row in session.execute(follows_shouts_query) if row[0]} - topics_payload = orjson.dumps(list(follows_topics), cls=CustomJSONEncoder) - authors_payload = orjson.dumps(list(follows_authors), cls=CustomJSONEncoder) - shouts_payload = orjson.dumps(list(follows_shouts), cls=CustomJSONEncoder) + topics_payload = json.dumps(list(follows_topics), cls=CustomJSONEncoder) + authors_payload = json.dumps(list(follows_authors), cls=CustomJSONEncoder) + shouts_payload = json.dumps(list(follows_shouts), cls=CustomJSONEncoder) await asyncio.gather( redis.execute("SET", f"author:follows-topics:{author_id}", topics_payload), @@ -62,7 +63,7 @@ async def precache_topics_authors(topic_id: int, session): ) topic_authors = {row[0] for row in session.execute(topic_authors_query) if row[0]} - authors_payload = orjson.dumps(list(topic_authors), cls=CustomJSONEncoder) + authors_payload = json.dumps(list(topic_authors), cls=CustomJSONEncoder) await redis.execute("SET", f"topic:authors:{topic_id}", authors_payload) @@ -71,7 +72,7 @@ async def precache_topics_followers(topic_id: int, session): followers_query = select(TopicFollower.follower).where(TopicFollower.topic == topic_id) topic_followers = {row[0] for row in session.execute(followers_query) if row[0]} - followers_payload = orjson.dumps(list(topic_followers), cls=CustomJSONEncoder) + followers_payload = json.dumps(list(topic_followers), cls=CustomJSONEncoder) await redis.execute("SET", f"topic:followers:{topic_id}", followers_payload) diff --git a/services/search.py b/services/search.py index 
e92c387b..e9257436 100644 --- a/services/search.py +++ b/services/search.py @@ -1,4 +1,5 @@ import asyncio +import json import logging import os @@ -210,7 +211,7 @@ class SearchService: "SETEX", redis_key, REDIS_TTL, - orjson.dumps(results, cls=CustomJSONEncoder), + json.dumps(results, cls=CustomJSONEncoder), ) return results return [] diff --git a/settings.py b/settings.py index 5567e60e..6453b9e3 100644 --- a/settings.py +++ b/settings.py @@ -1,18 +1,24 @@ import sys from os import environ -PORT = 8000 +MODE = "development" if "dev" in sys.argv else "production" +DEV_SERVER_PID_FILE_NAME = "dev-server.pid" + +PORT = int(environ.get("PORT") or 8000) + +# storages DB_URL = ( environ.get("DATABASE_URL", "").replace("postgres://", "postgresql://") or environ.get("DB_URL", "").replace("postgres://", "postgresql://") or "sqlite:///discoursio.db" ) REDIS_URL = environ.get("REDIS_URL") or "redis://127.0.0.1" -AUTH_URL = environ.get("AUTH_URL") or "" -GLITCHTIP_DSN = environ.get("GLITCHTIP_DSN") -DEV_SERVER_PID_FILE_NAME = "dev-server.pid" -MODE = "development" if "dev" in sys.argv else "production" +# debug +GLITCHTIP_DSN = environ.get("GLITCHTIP_DSN") + +# authorizer.dev +AUTH_URL = environ.get("AUTH_URL") or "https://auth.discours.io/graphql" ADMIN_SECRET = environ.get("AUTH_SECRET") or "nothing" WEBHOOK_SECRET = environ.get("WEBHOOK_SECRET") or "nothing-else"