from transformers import DistilBertModel, AutoTokenizer
import torch.nn as nn

class MultiTaskBERT(nn.Module):
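    """Shared DistilBERT encoder with two linear classification heads.

    The same pooled [CLS] representation feeds a sentiment head
    (num_sentiment_classes logits) and a language-identification head
    (num_lang_classes logits), so both tasks share all encoder weights.
    """
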
    def __init__(self, num_lang_classes=4, num_sentiment_classes=3):
        super().__init__()
        # Use DistilBertModel directly instead of AutoModel
        self.bert = DistilBertModel.from_pretrained("distilbert-base-multilingual-cased")
        self.dropout = nn.Dropout(0.3)
        self.sentiment_head = nn.Linear(self.bert.config.hidden_size, num_sentiment_classes)
        self.lang_head = nn.Linear(self.bert.config.hidden_size, num_lang_classes)

    def forward(self, input_ids, attention_mask):
        outputs = self.bert(input_ids=input_ids, attention_mask=attention_mask)
        # DistilBERT has no pooler layer, so pool by taking the
        # hidden state of the first token ([CLS])
        pooled_output = outputs.last_hidden_state[:, 0]
        pooled_output = self.dropout(pooled_output)
        sentiment_logits = self.sentiment_head(pooled_output)
        lang_logits = self.lang_head(pooled_output)
        return sentiment_logits, lang_logits
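

# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the model definition): one forward
# pass with the matching multilingual tokenizer, using the checkpoint name
# from above. The example sentences and batch setup here are assumptions.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import torch

    tokenizer = AutoTokenizer.from_pretrained("distilbert-base-multilingual-cased")
    model = MultiTaskBERT()
    model.eval()  # disable dropout for a deterministic forward pass

    batch = tokenizer(
        ["Das Essen war ausgezeichnet!", "The service was slow."],
        padding=True,
        truncation=True,
        return_tensors="pt",
    )
    with torch.no_grad():
        sentiment_logits, lang_logits = model(
            batch["input_ids"], batch["attention_mask"]
        )

    # Each head returns one row of logits per input sentence.
    print(sentiment_logits.shape)  # torch.Size([2, 3])
    print(lang_logits.shape)       # torch.Size([2, 4])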