region-cache-fix

parent e90d5aefb2
commit 5dbb0ccb12
@@ -15,7 +15,6 @@ from services.cache import cache_author, cache_follower
 from services.db import local_session
 from services.encoders import CustomJSONEncoder
 from services.logger import root_logger as logger
-from services.memorycache import cache_region
 from services.rediscache import redis
 from services.schema import mutation, query
 
@@ -126,10 +125,6 @@ async def get_author_id(_, _info, user: str):
 
 @query.field("load_authors_by")
 def load_authors_by(_, _info, by, limit, offset):
-    cache_key = f"{json.dumps(by)}_{limit}_{offset}"
-
-    @cache_region.cache_on_arguments(cache_key)
-    def _load_authors_by():
-        logger.debug(f"loading authors by {by}")
-        q = select(Author)
-        if by.get("slug"):
+    logger.debug(f"loading authors by {by}")
+    q = select(Author)
+    if by.get("slug"):
@@ -155,15 +150,12 @@ def load_authors_by(_, _info, by, limit, offset):
-        if order in ["likes", "shouts", "followers"]:
-            q = q.order_by(desc(text(f"{order}_stat")))
-
-        # q = q.distinct()
-        q = q.limit(limit).offset(offset)
-
-        authors = get_with_stat(q)
-
-        return authors
-
-    return _load_authors_by()
-
+    if order in ["likes", "shouts", "followers"]:
+        q = q.order_by(desc(text(f"{order}_stat")))
+
+    # q = q.distinct()
+    q = q.limit(limit).offset(offset)
+
+    authors = get_with_stat(q)
+
+    return authors
 
 @query.field("get_author_follows")
 async def get_author_follows(_, _info, slug="", user=None, author_id=0):
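The two hunks above drop the per-request caching wrapper: a cache_key built from the query arguments, a zero-argument _load_authors_by closure, and the @cache_region.cache_on_arguments(cache_key) decorator around it. For readers unfamiliar with that pattern, here is a minimal, self-contained illustration of what such a wrapper does, assuming cache_region is a dogpile.cache-style in-memory region (the actual services.memorycache module is not shown in this diff); load_things and its arguments are hypothetical, and the snippet requires the dogpile.cache package.

import json

from dogpile.cache import make_region

# An in-memory region comparable to what a cache_region object typically is;
# this configuration is an assumption for illustration, not project code.
region = make_region().configure("dogpile.cache.memory", expiration_time=60)


def load_things(by: dict, limit: int, offset: int):
    cache_key = f"{json.dumps(by)}_{limit}_{offset}"

    # cache_on_arguments() derives its key from the wrapped function's
    # arguments; with a zero-argument closure, only the namespace string
    # (cache_key) keeps results for different inputs apart.
    @region.cache_on_arguments(cache_key)
    def _load_things():
        return {"by": by, "limit": limit, "offset": offset}

    return _load_things()


print(load_things({"slug": "a"}, 10, 0))  # computed, then cached in-process
print(load_things({"slug": "a"}, 10, 0))  # served from the memory region

Because the region is process-local, every worker keeps its own copy of such entries, which is the general trade-off of in-memory regions compared with the shared Redis cache used elsewhere in this file.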
@@ -185,10 +177,12 @@ async def get_author_follows(_, _info, slug="", user=None, author_id=0):
     if author and isinstance(author, Author):
         # logger.debug(author.dict())
         author_id = author.id if not author_id else author_id
+    topics = []
+    authors = []
+    if author_id:
         rkey = f"author:{author_id}:follows-authors"
         logger.debug(f"getting {author_id} follows authors")
         cached = await redis.execute("GET", rkey)
-        authors = []
         if not cached:
             authors = author_follows_authors(author_id)
             prepared = [author.dict() for author in authors]
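The hunk above initializes topics and authors before any lookup and gates the Redis and database work on author_id. The read path it guards is a cache-aside lookup: try redis.execute("GET", rkey), and only on a miss call author_follows_authors and serialize the results. Below is a minimal, self-contained sketch of that flow; FakeRedis, the fetch stand-in, and the SET write-back are assumptions for illustration, since the actual write-back (if any) sits outside the lines shown in this diff.

import asyncio
import json


class FakeRedis:
    """In-memory stand-in for the project's redis wrapper (assumption)."""

    def __init__(self):
        self._data: dict[str, str] = {}

    async def execute(self, command: str, *args):
        if command == "GET":
            return self._data.get(args[0])
        if command == "SET":
            self._data[args[0]] = args[1]
        return None


redis = FakeRedis()


async def get_follows_authors(author_id: int, fetch=lambda _id: [{"id": 2, "slug": "bob"}]):
    rkey = f"author:{author_id}:follows-authors"
    cached = await redis.execute("GET", rkey)
    if cached and isinstance(cached, str):
        return json.loads(cached)            # cache hit: decode and return
    prepared = fetch(author_id)              # stands in for author_follows_authors(...)
    await redis.execute("SET", rkey, json.dumps(prepared))  # assumed write-back
    return prepared


print(asyncio.run(get_follows_authors(1)))  # miss: fetched, then cached
print(asyncio.run(get_follows_authors(1)))  # hit: served from the fake cache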
@@ -200,7 +194,6 @@ async def get_author_follows(_, _info, slug="", user=None, author_id=0):
         rkey = f"author:{author_id}:follows-topics"
         cached = await redis.execute("GET", rkey)
-        topics = []
         if cached and isinstance(cached, str):
             topics = json.loads(cached)
         if not cached:
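This hunk can drop the local topics = [] because the previous hunk now binds both topics and authors before the if author_id: guard. Initializing up front matters whenever the guard is skipped; the toy functions below sketch the kind of failure the new ordering avoids (the names are hypothetical, and how get_author_follows ultimately uses topics and authors is outside the hunks shown here).

def follows_without_init(author_id: int):
    if author_id:
        topics = ["tech"]          # only bound when the guard runs
    return {"topics": topics}      # UnboundLocalError when author_id is falsy


def follows_with_init(author_id: int):
    topics = []                    # bound up front, as in the new version
    if author_id:
        topics = ["tech"]
    return {"topics": topics}


print(follows_with_init(0))   # {'topics': []}
print(follows_with_init(1))   # {'topics': ['tech']}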