toxic-debug
parent 7feca1579a
commit 60589c53ad
@@ -17,12 +17,12 @@ async def messages_routing(msg, state):
     if cid == uid:
         # messages in a private chat with the bot
-        logger.info("private chat message", msg)
+        logger.info("private chat message: ", msg)
         await handle_private(msg, state)
 
     elif str(cid) == FEEDBACK_CHAT_ID:
         # messages from the feedback group
-        logger.info("feedback chat message")
+        logger.info("feedback chat message: ", msg)
         logger.debug(msg)
         if reply_msg:
             reply_chat_id = reply_msg.get("chat", {}).get("id")

@@ -44,9 +44,9 @@ async def messages_routing(msg, state):
         toxic_score = math.floor(sum(scores)/len(scores)) if scores else 0
         scoring_msg_id = reply_msg.get("message_id", int(latest_toxic_message_id)) if reply_msg else int(latest_toxic_message_id)
         msg_toxic_key = f"toxic:{cid}:{uid}:{scoring_msg_id}"
-        logger.debug(msg_toxic_key)
+        logger.debug('msg_toxic_key: ', msg_toxic_key)
         one_score = await redis.get(msg_toxic_key)
-        logger.debug(one_score)
+        logger.debug('one_score: ', one_score)
         emoji = '😳' if toxic_score > 90 else '😟' if toxic_score > 80 else '😏' if toxic_score > 60 else '🙂' if toxic_score > 20 else '😇'
         text = f"{int(one_score or 0)}% токсичности\nСредняя токсичность сообщений: {toxic_score}% {emoji}"
         await telegram_api(
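For readers skimming the second hunk: the values the new debug lines sit between come from averaging the stored per-message toxicity scores and bucketing the average into an emoji. A minimal standalone sketch of that arithmetic, with the thresholds and emoji copied from the diff; the function wrapper is added only for illustration (the original computes this inline inside messages_routing):

import math

def average_toxicity(scores: list[int]) -> tuple[int, str]:
    # Floor of the mean, 0 when there are no stored scores (as in the diff).
    toxic_score = math.floor(sum(scores) / len(scores)) if scores else 0
    # Emoji buckets copied verbatim from the diff's chained conditional.
    emoji = (
        '😳' if toxic_score > 90 else
        '😟' if toxic_score > 80 else
        '😏' if toxic_score > 60 else
        '🙂' if toxic_score > 20 else
        '😇'
    )
    return toxic_score, emoji

print(average_toxicity([55, 70, 80]))  # -> (68, '😏')

This average is what the reply text reports as "Средняя токсичность сообщений" (average message toxicity).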
Loading…
Reference in New Issue
Block a user
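The per-message figure that the new 'one_score: ' debug line prints is read back from Redis under a toxic:<chat_id>:<user_id>:<message_id> key and then fed into the reply text with a fallback of 0. A hedged sketch of that lookup, assuming an asyncio Redis client such as redis.asyncio (the diff does not show how `redis` is constructed, and the ids below are hypothetical):

import asyncio
import redis.asyncio as aioredis  # assumed client; the diff only shows `await redis.get(...)`

async def read_one_score(cid: int, uid: int, message_id: int) -> int:
    r = aioredis.Redis()  # connection details are an assumption (localhost defaults)
    # Key layout copied from the diff: toxic:<chat_id>:<user_id>:<message_id>
    msg_toxic_key = f"toxic:{cid}:{uid}:{message_id}"
    one_score = await r.get(msg_toxic_key)
    # Same fallback as the diff's reply text: a missing score counts as 0.
    return int(one_score or 0)

# asyncio.run(read_one_score(-1001234567890, 42, 1007))  # hypothetical ids, requires a running Redis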