toxic-debug3

This commit is contained in:
Untone 2024-09-26 20:28:16 +03:00
parent 21a366abe1
commit a51e936f67
3 changed files with 22 additions and 10 deletions

View File

@@ -26,6 +26,11 @@ async def handle_private(msg, state):
elif text.startswith('/message'):
await edit_announce(msg)
return
elif text.startswith('/toxic'):
toxic_score = get_average_toxic(msg)
text = f"Средняя токсичность сообщений: {toxic_score}%"
await telegram_api("sendMessage", chat_id=uid, reply_to_message_id=msg.get("message_id"), text=text)
return
elif text == '/removed':
removed_messages = await get_all_removed(uid)
if removed_messages:

View File

@@ -1,6 +1,6 @@
import logging
import math
from store import redis
from store import redis, get_average_toxic
from bot.api import telegram_api
from bot.config import FEEDBACK_CHAT_ID
from nlp.toxicity_detector import detector
@@ -33,15 +33,7 @@ async def messages_routing(msg, state):
mid = msg.get("message_id")
if text == '/toxic@welcomecenter_bot':
latest_toxic_message_id = await redis.get(f"toxic:{cid}")
pattern = f"toxic:{cid}:{uid}:*"
scores = []
scoring_msg_id = 0
async for key in redis.scan_iter(pattern):
scr = await redis.get(key)
if isinstance(scr, int):
scores.append(scr)
logger.debug(f'found {len(scores)} messages')
toxic_score = math.floor(sum(scores)/len(scores)) if scores else 0
toxic_score = get_average_toxic(msg)
if reply_msg:
scoring_msg_id = reply_msg.get("message_id")
if not scoring_msg_id and latest_toxic_message_id:

View File

@@ -19,3 +19,18 @@ async def get_all_removed(uid):
texts.append(value.encode('utf-8'))
return texts
async def get_average_toxic(msg):
    """Return the floored average toxicity score of a user's messages in a chat.

    Scans all per-message toxicity keys matching ``toxic:{cid}:{uid}:*`` in
    Redis, averages the stored integer scores, and floors the result.

    :param msg: Telegram message dict; reads ``msg['from']['id']`` and
        ``msg['chat']['id']``.
    :return: floored average score as an int, or 0 when no scores are stored.
    """
    uid = msg['from']['id']
    cid = msg['chat']['id']
    pattern = f"toxic:{cid}:{uid}:*"
    scores = []
    async for key in redis.scan_iter(pattern):
        scr = await redis.get(key)
        # NOTE(review): redis clients usually return bytes/str unless
        # decode_responses/conversion is configured — confirm, otherwise this
        # isinstance filter silently drops every score and the average is 0.
        if isinstance(scr, int):
            scores.append(scr)
    logger.debug(f'found {len(scores)} messages')
    # Guard against ZeroDivisionError when the user has no scored messages.
    return math.floor(sum(scores) / len(scores)) if scores else 0