Upload again
dinstilBert.py CHANGED (+7 -6)
@@ -1,17 +1,18 @@
-from transformers import
+from transformers import DistilBertModel, AutoTokenizer
 import torch.nn as nn
 
 class MultiTaskBERT(nn.Module):
-    def __init__(self, num_lang_classes=4, num_sentiment_classes=
-
+    def __init__(self, num_lang_classes=4, num_sentiment_classes=3):
         super().__init__()
-
+        # Use DistilBertModel directly instead of AutoModel
+        self.bert = DistilBertModel.from_pretrained("distilbert-base-multilingual-cased")
         self.dropout = nn.Dropout(0.3)
-        self.sentiment_head = nn.Linear(
-        self.lang_head = nn.Linear(
+        self.sentiment_head = nn.Linear(self.bert.config.hidden_size, num_sentiment_classes)
+        self.lang_head = nn.Linear(self.bert.config.hidden_size, num_lang_classes)
 
     def forward(self, input_ids, attention_mask):
         outputs = self.bert(input_ids=input_ids, attention_mask=attention_mask)
+        # Use the hidden state of the first token (CLS token) for pooling
         pooled_output = outputs.last_hidden_state[:, 0]
         pooled_output = self.dropout(pooled_output)
         sentiment_logits = self.sentiment_head(pooled_output)
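For context, a minimal sketch of how the updated class could be exercised end to end. The example sentence, the eval/no-grad usage, and the assumption that forward() goes on to compute lang_logits and return both heads' outputs (the hunk ends before the return statement) are illustrative, not part of the commit:

```python
import torch
from transformers import AutoTokenizer

from dinstilBert import MultiTaskBERT  # assumes the file is importable as-is

# The tokenizer must match the backbone checkpoint loaded in __init__.
tokenizer = AutoTokenizer.from_pretrained("distilbert-base-multilingual-cased")
model = MultiTaskBERT(num_lang_classes=4, num_sentiment_classes=3)
model.eval()

batch = tokenizer(
    ["Saya suka produk ini."],  # Indonesian: "I like this product."
    padding=True,
    truncation=True,
    return_tensors="pt",
)

with torch.no_grad():
    # Assumption: forward() also computes lang_logits and returns both
    # heads' outputs; the diff hunk ends before the return statement.
    sentiment_logits, lang_logits = model(batch["input_ids"], batch["attention_mask"])

print(sentiment_logits.shape)  # torch.Size([1, 3])
print(lang_logits.shape)       # torch.Size([1, 4])
```

One note on the pooling comment in the diff: unlike BertModel, DistilBertModel returns no pooler_output, so slicing last_hidden_state[:, 0] (the position of the [CLS] token) is the standard way to get a sentence-level representation for the classification heads.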