search-result-schema-fix-3
All checks were successful
deploy / deploy (push) Successful in 1m21s

This commit is contained in:
Untone 2023-12-19 15:28:55 +03:00
parent 5aa8258f16
commit d14f0c2f95

View File

@@ -16,25 +16,25 @@ class SearchService:
@staticmethod
async def init(session):
    """Initialise the search service.

    Acquisition of the class-level lock serialises initialisation so
    concurrent callers cannot race; currently only logs start-up.
    """
    async with SearchService.lock:
        logging.info("[services.search] Initializing SearchService")
@staticmethod
async def search(text: str, limit: int = 50, offset: int = 0) -> List[Shout]:
    """Search for *text*, using redis as a cache in front of ElasticSearch.

    :param text: query string (also used verbatim as the redis cache key)
    :param limit: maximum number of results to return
    :param offset: number of leading results to skip
    :returns: a page of search results (empty list on any error)
    """
    try:
        payload = await redis.execute("GET", text)
        if payload:
            # BUG FIX: redis returns the cached value as a JSON string/bytes;
            # it must be decoded before slicing, otherwise
            # payload[offset : offset + limit] slices characters, not results.
            payload = json.loads(payload)
        else:
            async with SearchService.lock:
                # Use aiohttp to send a request to ElasticSearch
                async with aiohttp.ClientSession() as session:
                    search_url = f"https://search.discours.io/search?q={text}"
                    async with session.get(search_url) as response:
                        if response.status == 200:
                            payload = await response.json()
                            await redis.execute("SET", text, json.dumps(payload))  # use redis as cache
                        else:
                            logging.error(f"[services.search] response: {response.status} {await response.text()}")
                            # BUG FIX: without this, payload stays None and the
                            # final slice raises TypeError outside the try block.
                            payload = []
    except Exception as e:
        logging.error(f"[services.search] Error during search: {e}")
        payload = []
    return payload[offset : offset + limit]