Update model.py
model.py CHANGED
@@ -5,17 +5,20 @@ from transformers import AutoModel
 class EmotionClassifier(nn.Module):
     def __init__(self, model_name="microsoft/deberta-v3-base"):
         super().__init__()
-
-
-
-
+        # IMPORTANT: use the SAME NAME you used during training
+        self.transformer = AutoModel.from_pretrained(model_name)
+
+        hidden = self.transformer.config.hidden_size
+
+        # IMPORTANT: your saved checkpoint uses out.weight & out.bias
+        self.out = nn.Linear(hidden, 5)
 
     def forward(self, input_ids, attention_mask):
-        outputs = self.
+        outputs = self.transformer(
             input_ids=input_ids,
             attention_mask=attention_mask
         )
-
-
-        logits = self.out(
+
+        cls_rep = outputs.last_hidden_state[:, 0, :]
+        logits = self.out(cls_rep)
         return logits
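With the attribute names restored to `transformer` and `out`, the module's state_dict keys match the keys saved at training time, so the checkpoint loads without missing/unexpected key errors, and the forward pass feeds the [CLS] token representation through the 5-way linear head. Below is a minimal loading-and-inference sketch; the checkpoint filename "emotion_classifier.pt", the example input text, and CPU inference are illustrative assumptions, not taken from this commit.

# Minimal sketch: load the trained weights into the fixed class and run one prediction.
# The checkpoint path "emotion_classifier.pt" is an assumption for illustration.
import torch
from transformers import AutoTokenizer
from model import EmotionClassifier

model = EmotionClassifier()  # defaults to microsoft/deberta-v3-base
state_dict = torch.load("emotion_classifier.pt", map_location="cpu")
model.load_state_dict(state_dict)  # expects keys: transformer.*, out.weight, out.bias
model.eval()

tokenizer = AutoTokenizer.from_pretrained("microsoft/deberta-v3-base")
enc = tokenizer("I can't believe how well this turned out!", return_tensors="pt")
with torch.no_grad():
    logits = model(enc["input_ids"], enc["attention_mask"])
predicted_class = logits.argmax(dim=-1).item()  # index into the 5 emotion labels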