welcomecenterbot/nlp/toxycity_detector.py
from transformers import BertTokenizer, BertForSequenceClassification
import torch
import torch.nn.functional as F
# Load tokenizer and model weights
tokenizer = BertTokenizer.from_pretrained('SkolkovoInstitute/russian_toxicity_classifier')
model = BertForSequenceClassification.from_pretrained('SkolkovoInstitute/russian_toxicity_classifier')

def detector(text):
    """Return the probability that `text` belongs to the toxic class (index 1) of the model."""
    # Tokenize the input; truncate to the model's 512-token limit to avoid errors on long texts
    batch = tokenizer.encode(text, return_tensors='pt', truncation=True, max_length=512)
    # Inference without gradient tracking
    with torch.no_grad():
        result = model(batch)
    # Get logits
    logits = result.logits
    # Convert logits to probabilities using softmax
    probabilities = F.softmax(logits, dim=1)
    # Index 1 corresponds to the toxic class
    return probabilities[0][1].item()

if __name__ == "__main__":
    import sys
    if len(sys.argv) > 1:
        p = detector(sys.argv[1])
        toxicity_percentage = p * 100  # index 1 is assumed to be the toxic class
        print(f"Toxicity Probability: {toxicity_percentage:.2f}%")
    else:
        print("Usage: python toxycity_detector.py <text>")
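
# A hedged sketch of how another bot module might call this detector; the import
# path below and the 0.5 decision threshold are assumptions, not taken from this file:
#
#     from welcomecenterbot.nlp.toxycity_detector import detector
#
#     score = detector("some chat message")
#     if score > 0.5:
#         ...  # treat the message as toxic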