# welcomecenterbot/handlers/messages_routing.py
import logging
import math
from store import redis
from bot.api import telegram_api
from bot.config import FEEDBACK_CHAT_ID
from nlp.toxicity_detector import detector
from handlers.handle_private import handle_private
# Module-level logger for this handler module.
logger = logging.getLogger('handlers.messages_routing')
# NOTE(review): calling basicConfig at import time configures the root logger
# as a module side effect; consider moving this to the application entry
# point — confirm nothing else relies on it happening here.
logging.basicConfig(level=logging.DEBUG)
async def messages_routing(msg, state):
    """Route an incoming Telegram message update.

    Dispatches by chat:
      * private chat (chat id == sender id) -> handle_private
      * feedback group (FEEDBACK_CHAT_ID)   -> relay admin replies back to
        the originating chat
      * any other chat with text            -> toxicity scoring / moderation,
        plus the ``/toxic@welcomecenter_bot`` reporting command

    Args:
        msg: Telegram "message" update object (dict-like).
        state: opaque conversation state, passed through to handle_private.
    """
    cid = msg["chat"]["id"]
    uid = msg["from"]["id"]
    text = msg.get("text")
    reply_msg = msg.get("reply_to_message")

    if cid == uid:
        # Direct messages with the bot are handled by the private-chat flow.
        logger.info("private chat message")
        await handle_private(msg, state)

    elif str(cid) == FEEDBACK_CHAT_ID:
        # Feedback group: when someone replies to a message that was
        # forwarded from another chat, relay the reply text back there.
        logger.info("feedback chat message")
        logger.debug(msg)
        if reply_msg:
            reply_chat_id = reply_msg.get("chat", {}).get("id")
            if reply_chat_id != FEEDBACK_CHAT_ID:
                await telegram_api(
                    "sendMessage",
                    chat_id=reply_chat_id,
                    text=text,
                    reply_to_message_id=reply_msg.get("message_id"),
                )

    elif text:
        mid = msg.get("message_id")

        if text == '/toxic@welcomecenter_bot':
            # /toxic command: without a reply, report the sender's average
            # toxicity; with a reply, report that message's stored score.
            text = ''
            toxic_score = 0
            if not reply_msg:
                logger.debug('scoring average for %s', uid)
                scoring_msg_id = mid
                pattern = f"toxic:{cid}:{uid}:*"
                scores = []
                async for key in redis.scan_iter(pattern):
                    raw = await redis.get(key)
                    # A key can expire between scan_iter and get; the old
                    # code did int(None) and crashed — skip missing values.
                    if raw is not None:
                        scores.append(int(raw))
                toxic_score = math.floor(sum(scores) / len(scores)) if scores else 0
                text = f"Доброе утро! Средняя токсичность твоих сообщений: {toxic_score}%"
            else:
                latest_toxic_message_id = await redis.get(f"toxic:{cid}")
                # FIX: previously int(latest_toxic_message_id) raised
                # TypeError when the "toxic:{cid}" key was absent and the
                # reply had no message_id; fall back to this message's id.
                scoring_msg_id = reply_msg.get("message_id") or (
                    int(latest_toxic_message_id) if latest_toxic_message_id else mid
                )
                toxic_score = await redis.get(f"toxic:{cid}:{uid}:{scoring_msg_id}")
                x = int(toxic_score) if toxic_score else 0
                text = f"{x}% токсичности"

            # Delete the command message itself to keep the chat clean.
            await telegram_api(
                "deleteMessage",
                chat_id=cid,
                message_id=mid
            )
            if text:
                await telegram_api(
                    "sendMessage",
                    chat_id=cid,
                    reply_to_message_id=scoring_msg_id,
                    text=text
                )
        else:
            # Ordinary message: score it (detector presumably returns a
            # float in [0, 1] — thresholds below assume so) and persist the
            # percentage for 3 days so /toxic can report on it later.
            toxic_score = detector(text)
            toxic_perc = math.floor(toxic_score * 100)
            # Remember the latest scored message id per chat.
            await redis.set(f"toxic:{cid}", mid)
            await redis.set(f"toxic:{cid}:{uid}:{mid}", toxic_perc, ex=60 * 60 * 24 * 3)
            logger.info(f'\ntext: {text}\ntoxic: {toxic_perc}%')
            if toxic_score > 0.81:
                if toxic_score > 0.90:
                    # Highly toxic: archive the text, then delete the message.
                    await redis.set(f"removed:{uid}:{cid}:{mid}", text)
                    await telegram_api(
                        "deleteMessage",
                        chat_id=cid,
                        message_id=mid
                    )
                else:
                    # Borderline toxic: react with an emoji instead of
                    # deleting. (Plain literal — the old f-string had no
                    # placeholders and only forced {{ }} escaping.)
                    await telegram_api(
                        "setMessageReaction",
                        chat_id=cid,
                        is_big=True,
                        message_id=mid,
                        reaction='[{"type":"emoji", "emoji":"🙉"}]'
                    )

    else:
        # Non-text updates (stickers, service messages, etc.) are ignored.
        pass