toxic-debug3

2024-09-26 20:28:16 +03:00
parent 21a366abe1
commit a51e936f67
3 changed files with 22 additions and 10 deletions

@@ -1,6 +1,6 @@
 import logging
 import math
-from store import redis
+from store import redis, get_average_toxic
 from bot.api import telegram_api
 from bot.config import FEEDBACK_CHAT_ID
 from nlp.toxicity_detector import detector
@@ -33,15 +33,7 @@ async def messages_routing(msg, state):
     mid = msg.get("message_id")
     if text == '/toxic@welcomecenter_bot':
         latest_toxic_message_id = await redis.get(f"toxic:{cid}")
-        pattern = f"toxic:{cid}:{uid}:*"
-        scores = []
-        scoring_msg_id = 0
-        async for key in redis.scan_iter(pattern):
-            scr = await redis.get(key)
-            if isinstance(scr, int):
-                scores.append(scr)
-        logger.debug(f'found {len(scores)} messages')
-        toxic_score = math.floor(sum(scores)/len(scores)) if scores else 0
+        toxic_score = get_average_toxic(msg)
         if reply_msg:
             scoring_msg_id = reply_msg.get("message_id")
         if not scoring_msg_id and latest_toxic_message_id:
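
For reference, a minimal sketch of what the new store.get_average_toxic helper might look like, reconstructed from the inline logic removed above; the real implementation lives in store and is not part of this diff. The Redis key layout (toxic:{cid}:{uid}:*), the module-level async client, and the async signature are assumptions carried over from the removed code (the call site above does not await the helper, so the actual function may well be synchronous).

import math

from redis.asyncio import Redis

# In store.py an async client already exists at module level; one is created
# here only so the sketch stands alone.
redis = Redis()

async def get_average_toxic(msg):
    """Floor of the mean toxicity score stored for this user in this chat."""
    cid = msg["chat"]["id"]
    uid = msg["from"]["id"]
    scores = []
    # Same key layout the removed inline code scanned: toxic:{cid}:{uid}:*
    async for key in redis.scan_iter(f"toxic:{cid}:{uid}:*"):
        scr = await redis.get(key)
        if isinstance(scr, int):  # mirrors the original guard on the stored value
            scores.append(scr)
    # 0 when the user has no scored messages yet, as in the removed code.
    return math.floor(sum(scores) / len(scores)) if scores else 0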