custom-encoder-fix

This commit is contained in:
Untone 2024-03-06 21:57:04 +03:00
parent 4aa4303a59
commit 2b89ab7c78
4 changed files with 22 additions and 10 deletions

View File

@ -13,6 +13,7 @@ from resolvers.stat import get_with_stat, author_follows_authors, author_follows
from services.cache import set_author_cache, update_author_followers_cache from services.cache import set_author_cache, update_author_followers_cache
from services.auth import login_required from services.auth import login_required
from services.db import local_session from services.db import local_session
from services.encoders import CustomJSONEncoder
from services.rediscache import redis from services.rediscache import redis
from services.schema import mutation, query from services.schema import mutation, query
from services.logger import root_logger as logger from services.logger import root_logger as logger
@ -161,14 +162,14 @@ async def get_author_follows(_, _info, slug='', user=None, author_id=None):
) )
if not cached: if not cached:
prepared = [author.dict() for author in authors] prepared = [author.dict() for author in authors]
await redis.execute('SETEX', rkey, 24 * 60 * 60, json.dumps(prepared)) await redis.execute('SET', rkey, 24 * 60 * 60, json.dumps(prepared), cls=CustomJSONEncoder)
rkey = f'author:{author_id}:follows-topics' rkey = f'author:{author_id}:follows-topics'
cached = await redis.execute('GET', rkey) cached = await redis.execute('GET', rkey)
topics = json.loads(cached) if cached else author_follows_topics(author_id) topics = json.loads(cached) if cached else author_follows_topics(author_id)
if not cached: if not cached:
prepared = [topic.dict() for topic in topics] prepared = [topic.dict() for topic in topics]
await redis.execute('SETEX', rkey, 24 * 60 * 60, json.dumps(prepared)) await redis.execute('SET', rkey, 24 * 60 * 60, json.dumps(prepared), cls=CustomJSONEncoder)
return { return {
'topics': topics, 'topics': topics,
'authors': authors, 'authors': authors,
@ -197,7 +198,7 @@ async def get_author_follows_topics(_, _info, slug='', user=None, author_id=None
topics = json.loads(cached) if cached else author_follows_topics(author_id) topics = json.loads(cached) if cached else author_follows_topics(author_id)
if not cached: if not cached:
prepared = [topic.dict() for topic in topics] prepared = [topic.dict() for topic in topics]
await redis.execute('SETEX', rkey, 24 * 60 * 60, json.dumps(prepared)) await redis.execute('SET', rkey, 24 * 60 * 60, json.dumps(prepared), cls=CustomJSONEncoder)
return topics return topics
else: else:
raise ValueError('Author not found') raise ValueError('Author not found')
@ -222,7 +223,7 @@ async def get_author_follows_authors(_, _info, slug='', user=None, author_id=Non
) )
if not cached: if not cached:
prepared = [author.dict() for author in authors] prepared = [author.dict() for author in authors]
await redis.execute('SETEX', rkey, 24 * 60 * 60, json.dumps(prepared)) await redis.execute('SET', rkey, 24 * 60 * 60, json.dumps(prepared), cls=CustomJSONEncoder)
return authors return authors
else: else:
raise ValueError('Author not found') raise ValueError('Author not found')

View File

@ -8,6 +8,7 @@ from orm.reaction import Reaction
from orm.shout import ShoutAuthor, Shout from orm.shout import ShoutAuthor, Shout
from orm.topic import Topic, TopicFollower from orm.topic import Topic, TopicFollower
from resolvers.stat import get_with_stat from resolvers.stat import get_with_stat
from services.encoders import CustomJSONEncoder
from services.rediscache import redis from services.rediscache import redis
from services.logger import root_logger as logger from services.logger import root_logger as logger
@ -21,19 +22,19 @@ DEFAULT_FOLLOWS = {
async def set_author_cache(author: dict, ttl=25 * 60 * 60): async def set_author_cache(author: dict, ttl=25 * 60 * 60):
payload = json.dumps(author) payload = json.dumps(author)
await redis.execute('SETEX', f'user:{author.get("user")}:author', ttl, payload) await redis.execute('SET', f'user:{author.get("user")}:author', ttl, payload, cls=CustomJSONEncoder)
await redis.execute('SETEX', f'id:{author.get("id")}:author', ttl, payload) await redis.execute('SET', f'id:{author.get("id")}:author', ttl, payload, cls=CustomJSONEncoder)
async def update_author_followers_cache(author_id: int, followers, ttl=25 * 60 * 60): async def update_author_followers_cache(author_id: int, followers, ttl=25 * 60 * 60):
payload = json.dumps(followers) payload = json.dumps(followers)
await redis.execute('SETEX', f'author:{author_id}:followers', ttl, payload) await redis.execute('SET', f'author:{author_id}:followers', ttl, payload, cls=CustomJSONEncoder)
async def set_follows_topics_cache(follows, author_id: int, ttl=25 * 60 * 60): async def set_follows_topics_cache(follows, author_id: int, ttl=25 * 60 * 60):
try: try:
payload = json.dumps(follows) payload = json.dumps(follows)
await redis.execute('SETEX', f'author:{author_id}:follows-topics', ttl, payload) await redis.execute('SET', f'author:{author_id}:follows-topics', ttl, payload, cls=CustomJSONEncoder)
except Exception as exc: except Exception as exc:
logger.error(exc) logger.error(exc)
import traceback import traceback
@ -46,7 +47,7 @@ async def set_follows_authors_cache(follows, author_id: int, ttl=25 * 60 * 60):
try: try:
payload = json.dumps(follows) payload = json.dumps(follows)
await redis.execute( await redis.execute(
'SETEX', f'author:{author_id}:follows-authors', ttl, payload 'SET', f'author:{author_id}:follows-authors', ttl, payload, cls=CustomJSONEncoder
) )
except Exception: except Exception:
import traceback import traceback

9
services/encoders.py Normal file
View File

@ -0,0 +1,9 @@
import json
from decimal import Decimal
class CustomJSONEncoder(json.JSONEncoder):
    """JSON encoder that serializes ``Decimal`` values as strings.

    Every other type falls back to the stock :class:`json.JSONEncoder`
    behavior, so objects the standard encoder cannot handle still raise
    ``TypeError`` exactly as before.
    """

    def default(self, obj):
        # Emit Decimals as strings to avoid binary-float precision loss.
        if not isinstance(obj, Decimal):
            return super().default(obj)
        return str(obj)

View File

@ -4,6 +4,7 @@ import os
from opensearchpy import OpenSearch from opensearchpy import OpenSearch
from services.encoders import CustomJSONEncoder
from services.logger import root_logger as logger from services.logger import root_logger as logger
from services.rediscache import redis from services.rediscache import redis
@ -144,7 +145,7 @@ class SearchService:
# Use Redis as cache with TTL # Use Redis as cache with TTL
redis_key = f'search:{text}' redis_key = f'search:{text}'
await redis.execute('SETEX', redis_key, REDIS_TTL, json.dumps(results)) await redis.execute('SETEX', redis_key, REDIS_TTL, json.dumps(results), cls=CustomJSONEncoder)
return [] return []