import torch
import torch.nn as nn
from transformers import AutoModel


class EmotionClassifier(nn.Module):
    def __init__(self, model_name="microsoft/deberta-v3-base"):
        super().__init__()
        # IMPORTANT: use the SAME NAME you used during training
        self.transformer = AutoModel.from_pretrained(model_name)
        hidden = self.transformer.config.hidden_size
        # IMPORTANT: your saved checkpoint uses out.weight & out.bias
        self.out = nn.Linear(hidden, 5)

    def forward(self, input_ids, attention_mask):
        outputs = self.transformer(
            input_ids=input_ids,
            attention_mask=attention_mask,
        )
        # Pool the [CLS] token representation and project to 5 emotion logits
        cls_rep = outputs.last_hidden_state[:, 0, :]
        logits = self.out(cls_rep)
        return logits
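
# A minimal usage sketch for loading the saved weights into this class and
# running inference. The checkpoint path ("emotion_classifier.pt") and the
# assumption that the file holds a plain state_dict are illustrative; adjust
# them to your setup. Because the attribute names above (transformer, out)
# match the keys in the saved state_dict, load_state_dict resolves them
# without any renaming.

from transformers import AutoTokenizer

model = EmotionClassifier()
state_dict = torch.load("emotion_classifier.pt", map_location="cpu")
model.load_state_dict(state_dict)
model.eval()

# Example forward pass (tokenizer name assumed to match the backbone)
tokenizer = AutoTokenizer.from_pretrained("microsoft/deberta-v3-base")
enc = tokenizer("I am thrilled about this!", return_tensors="pt")
with torch.no_grad():
    logits = model(enc["input_ids"], enc["attention_mask"])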