from transformers import DistilBertModel, AutoTokenizer

import torch.nn as nn


class MultiTaskBERT(nn.Module):
    """Multilingual DistilBERT encoder shared by two task heads:
    sentiment classification and language identification."""

    def __init__(self, num_lang_classes=4, num_sentiment_classes=3):
        super().__init__()

        # Shared encoder: one multilingual DistilBERT backbone feeds both tasks
        self.bert = DistilBertModel.from_pretrained("distilbert-base-multilingual-cased")
        self.dropout = nn.Dropout(0.3)

        # Task-specific linear heads on top of the shared [CLS] representation
        self.sentiment_head = nn.Linear(self.bert.config.hidden_size, num_sentiment_classes)
        self.lang_head = nn.Linear(self.bert.config.hidden_size, num_lang_classes)

    def forward(self, input_ids, attention_mask):
        outputs = self.bert(input_ids=input_ids, attention_mask=attention_mask)

        # DistilBERT has no pooler layer, so take the hidden state of the
        # first ([CLS]) token as the sequence-level representation.
        pooled_output = outputs.last_hidden_state[:, 0]
        pooled_output = self.dropout(pooled_output)

        sentiment_logits = self.sentiment_head(pooled_output)
        lang_logits = self.lang_head(pooled_output)
        return sentiment_logits, lang_logits
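

# A minimal smoke test and one illustrative training step, assuming the
# checkpoint above downloads cleanly. The example texts and the random
# labels are placeholders, not values from a real dataset.
if __name__ == "__main__":
    import torch
    import torch.nn.functional as F

    tokenizer = AutoTokenizer.from_pretrained("distilbert-base-multilingual-cased")
    model = MultiTaskBERT(num_lang_classes=4, num_sentiment_classes=3)

    batch = tokenizer(
        ["This movie was great!", "Ce film était terrible."],
        padding=True,
        truncation=True,
        return_tensors="pt",
    )

    sentiment_logits, lang_logits = model(batch["input_ids"], batch["attention_mask"])
    print(sentiment_logits.shape)  # torch.Size([2, 3])
    print(lang_logits.shape)       # torch.Size([2, 4])

    # Multi-task objective: a simple unweighted sum of the per-task
    # cross-entropy losses (label tensors are random placeholders).
    sentiment_labels = torch.randint(0, 3, (2,))
    lang_labels = torch.randint(0, 4, (2,))
    loss = F.cross_entropy(sentiment_logits, sentiment_labels) \
        + F.cross_entropy(lang_logits, lang_labels)
    loss.backward()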