search-debug
Some checks failed
Deploy on push / type-check (push) Failing after 6s
Deploy on push / deploy (push) Has been skipped

Untone 2025-06-02 22:40:10 +03:00
parent 63c96ef965
commit 903065fdb3
3 changed files with 72 additions and 23 deletions


@@ -89,9 +89,14 @@ ignore = [
"TRY301", # Abstract `raise` to an inner function - sometimes more convenient
"TRY300", # return/break in a try block - sometimes more convenient
"ARG001", # unused arguments - often needed for API compatibility
"PLR0911", #
"PLR0913", # too many arguments - sometimes unavoidable
"PLR0912", # too many branches - sometimes unavoidable
"PLR0915", # too many statements - sometimes unavoidable
"PLR0911", # too many return statements - sometimes unavoidable when handling different cases
"FBT001", # boolean positional arguments - sometimes convenient for API compatibility
"FBT002", # boolean default arguments - sometimes convenient for API compatibility
"PERF203", # try-except in loop - sometimes needed to handle individual items
# Ignore some strict rules for developer convenience
"ANN401", # Dynamically typed expressions (Any) - sometimes needed
"S101", # assert statements - needed in tests


@@ -338,7 +338,7 @@ def get_shouts_with_links(info: GraphQLResolveInfo, q: select, limit: int = 20,
except Exception as e:
logger.error(f"Fatal error in get_shouts_with_links: {e}", exc_info=True)
raise
finally:
logger.info(f"Returning {len(shouts)} shouts from get_shouts_with_links")
return shouts
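
A side note on the try/finally shape above (indentation is flattened in this view, so this is a caution rather than a claim about the actual code): in Python, a return executed inside a finally block suppresses any exception raised or re-raised before it. A tiny self-contained illustration:

def swallow() -> list:
    # Illustration only: the re-raised error never reaches the caller,
    # because the return inside finally replaces it.
    shouts: list = []
    try:
        raise RuntimeError("fatal error")
    except RuntimeError:
        raise
    finally:
        return shouts


print(swallow())  # prints [] and no traceback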
@@ -444,8 +444,7 @@ async def load_shouts_by(_: None, info: GraphQLResolveInfo, options: dict[str, A
q, limit, offset = apply_options(q, options)
# Pass the constructed query to the shout-fetching helper, with sorting and pagination applied
shouts_dicts = get_shouts_with_links(info, q, limit, offset)
return shouts_dicts
return get_shouts_with_links(info, q, limit, offset)
@query.field("load_shouts_search")
@@ -461,16 +460,32 @@ async def load_shouts_search(_: None, info: GraphQLResolveInfo, text: str, optio
"""
limit = options.get("limit", 10)
offset = options.get("offset", 0)
logger.info(f"[load_shouts_search] Starting search for '{text}' with limit={limit}, offset={offset}")
if isinstance(text, str) and len(text) > 2:
logger.debug(f"[load_shouts_search] Calling search_text service for '{text}'")
results = await search_text(text, limit, offset)
logger.debug(f"[load_shouts_search] Search service returned {len(results)} results for '{text}'")
scores = {}
hits_ids = []
for sr in results:
for i, sr in enumerate(results):
shout_id = sr.get("id")
if shout_id:
shout_id = str(shout_id)
scores[shout_id] = sr.get("score")
scores[shout_id] = sr.get("score", 0.0)
hits_ids.append(shout_id)
logger.debug(f"[load_shouts_search] Result {i}: id={shout_id}, score={scores[shout_id]}")
else:
logger.warning(f"[load_shouts_search] Result {i} missing id: {sr}")
logger.debug(f"[load_shouts_search] Extracted {len(hits_ids)} shout IDs: {hits_ids}")
if not hits_ids:
logger.warning(f"[load_shouts_search] No valid shout IDs found for query '{text}'")
return []
q = (
query_with_stat(info)
@@ -480,11 +495,22 @@ async def load_shouts_search(_: None, info: GraphQLResolveInfo, text: str, optio
q = q.filter(Shout.id.in_(hits_ids))
q = apply_filters(q, options)
q = apply_sorting(q, options)
logger.debug(f"[load_shouts_search] Executing database query for {len(hits_ids)} shout IDs")
shouts_dicts = get_shouts_with_links(info, q, limit, offset)
logger.debug(f"[load_shouts_search] Database returned {len(shouts_dicts)} shouts")
for shout_dict in shouts_dicts:
shout_dict["score"] = scores[f"{shout_dict['id']}"]
shouts_dicts.sort(key=lambda x: x["score"], reverse=True)
shout_id_str = f"{shout_dict['id']}"
shout_dict["score"] = scores.get(shout_id_str, 0.0)
shouts_dicts.sort(key=lambda x: x.get("score", 0.0), reverse=True)
logger.info(f"[load_shouts_search] Returning {len(shouts_dicts)} sorted shouts for '{text}'")
return shouts_dicts
logger.warning(f"[load_shouts_search] Invalid search query: '{text}' (length={len(text) if text else 0})")
return []
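
As a standalone sketch of the score-mapping change above (the data here is made up; only the pattern mirrors the resolver): keys are stringified ids and lookups fall back to 0.0, so a shout that is somehow missing from the hit map no longer raises a KeyError during enrichment or sorting.

# Hypothetical data; the pattern mirrors load_shouts_search above.
search_hits = [{"id": 7, "score": 0.92}, {"id": 3, "score": 0.41}, {"score": 0.1}]

scores: dict[str, float] = {}
hits_ids: list[str] = []
for hit in search_hits:
    hit_id = hit.get("id")
    if hit_id:
        scores[str(hit_id)] = hit.get("score", 0.0)
        hits_ids.append(str(hit_id))
# the third hit has no id, so it is skipped -> hits_ids == ["7", "3"]

shouts_dicts = [{"id": 3, "title": "b"}, {"id": 7, "title": "a"}, {"id": 9, "title": "c"}]
for shout_dict in shouts_dicts:
    shout_dict["score"] = scores.get(str(shout_dict["id"]), 0.0)  # 0.0 instead of KeyError

shouts_dicts.sort(key=lambda x: x.get("score", 0.0), reverse=True)
print([s["id"] for s in shouts_dicts])  # [7, 3, 9]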
@@ -524,8 +550,7 @@ async def load_shouts_unrated(_: None, info: GraphQLResolveInfo, options: dict[s
limit = options.get("limit", 5)
offset = options.get("offset", 0)
shouts_dicts = get_shouts_with_links(info, q, limit, offset)
return shouts_dicts
return get_shouts_with_links(info, q, limit, offset)
@query.field("load_shouts_random_top")
@@ -565,5 +590,4 @@ async def load_shouts_random_top(_: None, info: GraphQLResolveInfo, options: dic
q = q.filter(Shout.id.in_(subquery))
q = q.order_by(func.random())
limit = options.get("limit", 10)
shouts_dicts = get_shouts_with_links(info, q, limit)
return shouts_dicts
return get_shouts_with_links(info, q, limit)
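
For reference, a hedged sketch of the query shape this resolver builds, assuming standard SQLAlchemy; Shout and subquery stand in for the ORM model and the pre-built top-rated subquery, and the limit is applied downstream in get_shouts_with_links.

from sqlalchemy import func, select


def build_random_top_query(Shout, subquery):
    """Sketch only: restrict to pre-selected top ids, then shuffle."""
    q = select(Shout)
    q = q.filter(Shout.id.in_(subquery))  # keep only ids from the top-rated subquery
    return q.order_by(func.random())      # random order on every execution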


@@ -635,9 +635,11 @@ class SearchService:
async def search(self, text: str, limit: int, offset: int) -> list[dict]:
"""Search documents"""
if not self.available:
logger.warning("Search service not available")
return []
if not text or not text.strip():
logger.warning("Empty search query provided")
return []
# Set the overall search fetch size
@@ -645,38 +647,56 @@
logger.info("Searching for: '%s' (limit=%d, offset=%d, search_limit=%d)", text, limit, offset, search_limit)
try:
response = await self.client.post(
"/search",
json={"text": text, "limit": search_limit},
)
try:
logger.debug(f"Search service response status: {response.status_code}")
if response.status_code != 200:
logger.error(f"Search service returned status {response.status_code}: {response.text}")
return []
results = response.json()
logger.debug(f"Raw search results: {len(results) if results else 0} items")
if not results or not isinstance(results, list):
logger.warning(f"No search results or invalid format for query '{text}'")
return []
# Process each result
formatted_results = []
for item in results:
for i, item in enumerate(results):
if isinstance(item, dict):
formatted_result = self._format_search_result(item)
formatted_results.append(formatted_result)
logger.debug(
f"Formatted result {i}: id={formatted_result.get('id')}, title={formatted_result.get('title', '')[:50]}..."
)
else:
logger.warning(f"Invalid search result item {i}: {type(item)}")
logger.info(f"Successfully formatted {len(formatted_results)} search results for '{text}'")
# Store the results in the cache
if SEARCH_CACHE_ENABLED and self.cache:
await self.cache.store(text, formatted_results)
logger.debug(f"Stored {len(formatted_results)} results in cache for '{text}'")
# If caching is enabled and extra results were stored there
if SEARCH_CACHE_ENABLED and self.cache and await self.cache.has_query(text):
cached_result = await self.cache.get(text, limit, offset)
logger.debug(f"Retrieved {len(cached_result) if cached_result else 0} results from cache for '{text}'")
return cached_result or []
except Exception:
logger.exception("Search error for '%s'", text)
return []
else:
return formatted_results
except Exception as e:
logger.error(f"Search error for '{text}': {e}", exc_info=True)
return []
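
Finally, a rough self-contained sketch of the cache interaction the new code assumes; this SimpleSearchCache is hypothetical and only mirrors the calls visible in the diff (store, has_query, and get with limit/offset slicing).

import asyncio


class SimpleSearchCache:
    """Hypothetical in-memory stand-in for the cache used by SearchService.search."""

    def __init__(self) -> None:
        self._data: dict[str, list[dict]] = {}

    async def store(self, query: str, results: list[dict]) -> None:
        self._data[query] = results

    async def has_query(self, query: str) -> bool:
        return query in self._data

    async def get(self, query: str, limit: int, offset: int) -> list[dict]:
        return self._data.get(query, [])[offset : offset + limit]


async def demo() -> None:
    cache = SimpleSearchCache()
    formatted = [{"id": i, "score": 1.0 - i / 100} for i in range(30)]
    await cache.store("test", formatted)
    if await cache.has_query("test"):
        page = await cache.get("test", limit=10, offset=20)
        print(len(page))  # 10 - the offset/limit slice, mirroring pagination in search()


asyncio.run(demo())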
async def search_authors(self, text: str, limit: int = 10, offset: int = 0) -> list[dict]:
"""Search only for authors using the specialized endpoint"""
if not self.available or not text.strip():