toxic-debug3
parent 21a366abe1
commit a51e936f67
@@ -26,6 +26,11 @@ async def handle_private(msg, state):
     elif text.startswith('/message'):
         await edit_announce(msg)
         return
+    elif text.startswith('/toxic'):
+        toxic_score = get_average_toxic(msg)
+        text = f"Средняя токсичность сообщений: {toxic_score}%"
+        await telegram_api("sendMessage", chat_id=uid, reply_to_message_id=msg.get("message_id"), text=text)
+        return
     elif text == '/removed':
         removed_messages = await get_all_removed(uid)
         if removed_messages:
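
The new branch (the reply text translates to "Average message toxicity: {toxic_score}%") delegates the computation to get_average_toxic and answers the sender directly. A minimal sketch of the private-chat update this handler would receive; all field values below are hypothetical:

# Hypothetical Telegram update for the new /toxic command;
# only the fields this handler reads are shown.
msg = {
    "message_id": 101,
    "from": {"id": 123456789},
    "chat": {"id": 123456789, "type": "private"},
    "text": "/toxic",
}
# handle_private would reply with e.g. "Средняя токсичность сообщений: 25%".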

@@ -1,6 +1,6 @@
 import logging
 import math
-from store import redis
+from store import redis, get_average_toxic
 from bot.api import telegram_api
 from bot.config import FEEDBACK_CHAT_ID
 from nlp.toxicity_detector import detector
@@ -33,15 +33,7 @@ async def messages_routing(msg, state):
     mid = msg.get("message_id")
     if text == '/toxic@welcomecenter_bot':
         latest_toxic_message_id = await redis.get(f"toxic:{cid}")
-        pattern = f"toxic:{cid}:{uid}:*"
-        scores = []
-        scoring_msg_id = 0
-        async for key in redis.scan_iter(pattern):
-            scr = await redis.get(key)
-            if isinstance(scr, int):
-                scores.append(scr)
-        logger.debug(f'found {len(scores)} messages')
-        toxic_score = math.floor(sum(scores)/len(scores)) if scores else 0
+        toxic_score = get_average_toxic(msg)
         if reply_msg:
             scoring_msg_id = reply_msg.get("message_id")
         if not scoring_msg_id and latest_toxic_message_id:
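
The inline scan over toxic:{cid}:{uid}:* moves into store.get_average_toxic (see the store.py hunk below). The writer side of those per-message keys is not part of this diff; a sketch of the assumed producer, where store_toxic_score is a hypothetical helper name:

# Assumed producer (not in this commit): one integer toxicity
# percentage per scored message, keyed so that get_average_toxic
# can scan the scores back with the toxic:{cid}:{uid}:* pattern.
async def store_toxic_score(redis, cid, uid, mid, percent: int):
    await redis.set(f"toxic:{cid}:{uid}:{mid}", percent)

Note that Redis clients typically return bytes or str from get, so the isinstance(scr, int) check in get_average_toxic only matches if the client layer converts values back to int; that conversion is assumed to live in the store module.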

store.py | 15 +++++++++++++++

@@ -19,3 +19,18 @@ async def get_all_removed(uid):
             texts.append(value.encode('utf-8'))
 
     return texts
+
+
+async def get_average_toxic(msg):
+    uid = msg['from']['id']
+    cid = msg['chat']['id']
+    pattern = f"toxic:{cid}:{uid}:*"
+    scores = []
+    scoring_msg_id = 0
+    async for key in redis.scan_iter(pattern):
+        scr = await redis.get(key)
+        if isinstance(scr, int):
+            scores.append(scr)
+    logger.debug(f'found {len(scores)} messages')
+    toxic_score = math.floor(sum(scores)/len(scores)) if scores else 0
+    return toxic_score
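
The averaging itself is just the floor of the mean over whatever scores the scan found; a self-contained sketch with made-up values showing the same rounding:

import math

# Hypothetical per-message toxicity percentages, as would be
# collected from the toxic:{cid}:{uid}:* keys.
scores = [10, 25, 40]

# Same expression as get_average_toxic: floor of the mean,
# falling back to 0 when no messages have been scored yet.
toxic_score = math.floor(sum(scores) / len(scores)) if scores else 0
print(f"Average message toxicity: {toxic_score}%")  # -> 25%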