-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmodel.py
More file actions
49 lines (37 loc) · 1.66 KB
/
model.py
File metadata and controls
49 lines (37 loc) · 1.66 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
import tensorflow as tf
import torch
from transformers import BertTokenizer, TFBertForSequenceClassification
from sklearn.metrics import classification_report
from IMDBmovieset import X_test_encoded, y_test
import torch.nn.functional as F
# Evaluate a saved fine-tuned BERT sentiment classifier on the IMDB test split:
# load tokenizer/model from a local checkpoint, run inference on the pre-encoded
# test set, convert logits to probabilities, then print sample predictions, a
# classification report, and the highest softmax probability observed.

# NOTE(review): hard-coded absolute Windows path — consider making this
# configurable (env var / CLI argument) before sharing the script.
path = "C:\\Users\\lenovo\\Desktop\\on-going projects\\BERTmodel2"
bert_tokenizer = BertTokenizer.from_pretrained(path + '\\Tokenizer')
bert_model = TFBertForSequenceClassification.from_pretrained(path + '\\Model')

# Predict the sentiment of the test dataset. The model expects the three
# standard BERT inputs in this order: ids, token types, attention mask.
pred = bert_model.predict(
    [X_test_encoded['input_ids'], X_test_encoded['token_type_ids'], X_test_encoded['attention_mask']])

# pred is a TFSequenceClassifierOutput; .logits is a NumPy array of shape
# (num_examples, num_classes) — presumably num_classes == 2 here (see `label`
# dict below); verify against the training script.
logits_np = pred.logits

# Convert the NumPy logits to a PyTorch tensor so torch's softmax can be used.
logits = torch.from_numpy(logits_np)

# Softmax over the class axis yields per-example class probabilities.
probabilities = F.softmax(logits, dim=-1)

# Single highest probability anywhere in the test set — a *global* confidence
# figure, not a per-example one.
confidence_score = probabilities.max().item()

# Argmax over the class axis gives the predicted class index per example.
# BUGFIX: the original passed the torch tensor `logits` to tf.argmax, relying
# on implicit torch->TF conversion that can fail across versions; use the
# plain NumPy array instead (argmax over logits == argmax over softmax).
pred_labels = tf.argmax(logits_np, axis=1)

# Convert the TF tensor of predicted indices to a NumPy array.
pred_labels = pred_labels.numpy()

# Integer class id -> human-readable label string.
label = {
    1: 'positive',
    0: 'Negative'
}

# Map predicted and ground-truth class ids to their label strings.
pred_labels = [label[i] for i in pred_labels]
Actual = [label[i] for i in y_test]

# BUGFIX: the original printed these two lines twice (accidental duplication).
print('Predicted Label :', pred_labels[:10])
print('Actual Label :', Actual[:10])
print("Classification Report: \n", classification_report(Actual, pred_labels))
print("Confidence score: ", confidence_score)