from transformers import BertTokenizer, BertForSequenceClassification
import torch
model = BertForSequenceClassification.from_pretrained('./test_model')
tokenizer = BertTokenizer.from_pretrained('./test_tokenizer')

def predict_relevance(question, answer):
    # Treat empty or whitespace-only answers as irrelevant without running the model
    if not answer.strip():
        return "Irrelevant"
    inputs = tokenizer(question, answer, return_tensors="pt", padding=True, truncation=True)
    model.eval()
    with torch.no_grad():
        outputs = model(**inputs)
    logits = outputs.logits
    probabilities = torch.softmax(logits, dim=-1)
    # Decision threshold on the probability of the "relevant" class (index 1)
    threshold = 0.5
    relevant_prob = probabilities[0, 1]
    if relevant_prob > threshold:
        return "Relevant"
    else:
        return "Irrelevant"

# Example
question = "What is your experience with Python?"
answer = "I have minimal experience with java, mostly for small automation tasks."
result = predict_relevance(question, answer)
print(f"Relevance: {result}")