fix(search.py): store all results in cache, not only the first offset
All checks were successful
Deploy on push / deploy (push) Successful in 52s
All checks were successful
Deploy on push / deploy (push) Successful in 52s
This commit is contained in:
parent
a0db5707c4
commit
bc4ec79240
|
@ -23,6 +23,8 @@ SEARCH_MIN_SCORE = float(os.environ.get("SEARCH_MIN_SCORE", "0.1"))
|
||||||
SEARCH_PREFETCH_SIZE = int(os.environ.get("SEARCH_PREFETCH_SIZE", "200"))
|
SEARCH_PREFETCH_SIZE = int(os.environ.get("SEARCH_PREFETCH_SIZE", "200"))
|
||||||
SEARCH_USE_REDIS = bool(os.environ.get("SEARCH_USE_REDIS", "true").lower() in ["true", "1", "yes"])
|
SEARCH_USE_REDIS = bool(os.environ.get("SEARCH_USE_REDIS", "true").lower() in ["true", "1", "yes"])
|
||||||
|
|
||||||
|
search_offset = 0
|
||||||
|
|
||||||
# Import Redis client if Redis caching is enabled
|
# Import Redis client if Redis caching is enabled
|
||||||
if SEARCH_USE_REDIS:
|
if SEARCH_USE_REDIS:
|
||||||
try:
|
try:
|
||||||
|
@ -573,10 +575,13 @@ class SearchService:
|
||||||
search_limit = limit
|
search_limit = limit
|
||||||
search_offset = offset
|
search_offset = offset
|
||||||
|
|
||||||
# If cache is enabled, prefetch more results to store in cache
|
# Always prefetch full results when caching is enabled
|
||||||
if SEARCH_CACHE_ENABLED and offset == 0:
|
if SEARCH_CACHE_ENABLED:
|
||||||
search_limit = SEARCH_PREFETCH_SIZE # Fetch more results to cache
|
search_limit = SEARCH_PREFETCH_SIZE # Always fetch a large set
|
||||||
search_offset = 0 # Always start from beginning for cache
|
search_offset = 0 # Always start from beginning
|
||||||
|
else:
|
||||||
|
search_limit = limit
|
||||||
|
search_offset = offset
|
||||||
|
|
||||||
logger.info(f"Sending search request: text='{text}', limit={search_limit}, offset={search_offset}")
|
logger.info(f"Sending search request: text='{text}', limit={search_limit}, offset={search_offset}")
|
||||||
response = await self.client.post(
|
response = await self.client.post(
|
||||||
|
@ -585,7 +590,7 @@ class SearchService:
|
||||||
)
|
)
|
||||||
response.raise_for_status()
|
response.raise_for_status()
|
||||||
|
|
||||||
logger.info(f"Raw search response: {response.text}")
|
# logger.info(f"Raw search response: {response.text}")
|
||||||
result = response.json()
|
result = response.json()
|
||||||
logger.info(f"Parsed search response: {result}")
|
logger.info(f"Parsed search response: {result}")
|
||||||
|
|
||||||
|
@ -617,18 +622,15 @@ class SearchService:
|
||||||
logger.info(f"Sample result: {formatted_results[0]}")
|
logger.info(f"Sample result: {formatted_results[0]}")
|
||||||
else:
|
else:
|
||||||
logger.warning(f"No results found for '{text}'")
|
logger.warning(f"No results found for '{text}'")
|
||||||
|
|
||||||
|
|
||||||
# Store full results in cache if caching is enabled
|
if SEARCH_CACHE_ENABLED:
|
||||||
if SEARCH_CACHE_ENABLED and offset == 0:
|
logger.info(f"Storing {len(formatted_results)} results in cache for query '{text}'")
|
||||||
# Store normal sorted results
|
await self.cache.store(text, formatted_results) # Return the proper page slice from the full results stored in cache end_idx = offset + limit
|
||||||
await self.cache.store(text, formatted_results)
|
end_idx = offset + limit
|
||||||
|
page_results = formatted_results[offset:end_idx]
|
||||||
# Return only the requested page
|
logger.info(f"Returning results from {offset} to {end_idx} (of {len(formatted_results)} total)")
|
||||||
if limit < len(formatted_results):
|
return page_results
|
||||||
page_results = formatted_results[:limit]
|
|
||||||
logger.info(f"Returning first page of {len(page_results)} results " +
|
|
||||||
f"(out of {len(formatted_results)} total)")
|
|
||||||
return page_results
|
|
||||||
|
|
||||||
return formatted_results
|
return formatted_results
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
|
Loading…
Reference in New Issue
Block a user