store-toxic-scores-3-days

Untone 2024-09-26 16:28:29 +03:00
parent 6d1372fd7f
commit 43845713c2


@@ -9,8 +9,6 @@ from handlers.handle_private import handle_private
 logger = logging.getLogger('handlers.messages_routing')
 logging.basicConfig(level=logging.DEBUG)
-latest_toxic = {}
 async def messages_routing(msg, state):
     cid = msg["chat"]["id"]
     uid = msg["from"]["id"]
@@ -33,8 +31,10 @@ async def messages_routing(msg, state):
     elif bool(text):
         mid = msg.get("message_id")
-        if text == '/score@welcomecenter_bot':
-            rmsg = reply_msg.get("message_id", latest_toxic[cid])
+        if text == '/toxic@welcomecenter_bot':
+            latest_toxic_message_id = await redis.get(f"toxic:{cid}")
+            if latest_toxic_message_id:
+                rmsg = reply_msg.get("message_id", latest_toxic_message_id)
             await telegram_api(
                 "sendMessage",
                 chat_id=cid,
@@ -49,8 +49,8 @@ async def messages_routing(msg, state):
         else:
             toxic_score = detector(text)
             toxic_perc = math.floor(toxic_score*100)
-            latest_toxic[cid] = mid
-            latest_toxic[f"{cid}:{mid}"] = toxic_perc
+            await redis.set(f"toxic:{cid}", mid)
+            await redis.set(f"toxic:{cid}:{mid}", toxic_perc, ex=60*60*24*3)
             logger.info(f'\ntext: {text}\ntoxic: {toxic_perc}%')
-            if toxic_score > 0.81:
+            if toxic_score > 0.90:
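
For context, the commit replaces the in-memory latest_toxic dict with Redis keys: a per-chat pointer to the last scored message and a per-message percentage that expires after three days (ex=60*60*24*3 seconds). Below is a minimal sketch of that key scheme, assuming a redis.asyncio client; the helper names store_toxic_score and latest_toxic_message are hypothetical, since the real handler inlines these calls inside messages_routing().

# Minimal sketch of the new storage scheme, assuming a redis.asyncio client.
# Helper names (store_toxic_score, latest_toxic_message) are hypothetical;
# the actual code performs these calls inline in messages_routing().
import math

import redis.asyncio as aioredis

redis = aioredis.Redis()  # assumption: default local Redis instance

THREE_DAYS = 60 * 60 * 24 * 3  # matches ex=60*60*24*3 in the diff


async def store_toxic_score(cid: int, mid: int, toxic_score: float) -> None:
    toxic_perc = math.floor(toxic_score * 100)
    # pointer to the most recently scored message in this chat
    await redis.set(f"toxic:{cid}", mid)
    # per-message score, kept for three days only
    await redis.set(f"toxic:{cid}:{mid}", toxic_perc, ex=THREE_DAYS)


async def latest_toxic_message(cid: int):
    # what the /toxic@welcomecenter_bot branch reads back
    return await redis.get(f"toxic:{cid}")

The per-message key carries the TTL, so old scores age out on their own; the per-chat pointer is overwritten on every new scored message, which is presumably why it is stored without an expiry here.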