import torch
import gradio as gr
from transformers import AutoTokenizer

from model import EmotionClassifier

MODEL_NAME = "microsoft/deberta-v3-base"

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)

# Build the model with the same architecture used during training
model = EmotionClassifier(MODEL_NAME)

# Load the fine-tuned checkpoint; the state-dict keys must match the model definition
model.load_state_dict(torch.load("best_deberta_base.bin", map_location="cpu"))
model.eval()

EMOTIONS = ["anger", "fear", "joy", "sadness", "surprise"]
def predict(text):
    """Return a label -> probability mapping for a single input text."""
    encoded = tokenizer(
        text,
        padding=True,
        truncation=True,
        max_length=128,
        return_tensors="pt",
    )
    with torch.no_grad():
        logits = model(
            encoded["input_ids"],
            encoded["attention_mask"],
        )
    # Multi-label setup: an independent sigmoid per emotion, not a softmax
    probs = torch.sigmoid(logits).numpy()[0]
    return {emo: float(p) for emo, p in zip(EMOTIONS, probs)}
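# Illustrative call (the probabilities below are placeholders, not actual model output):
#   predict("I can't believe this happened!")
#   -> {"anger": 0.12, "fear": 0.08, "joy": 0.03, "sadness": 0.21, "surprise": 0.85}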
demo = gr.Interface(
    fn=predict,
    inputs="text",
    outputs="label",
    title="Emotion Classifier - DeBERTa",
)

demo.launch()
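# For reference, a minimal sketch of what model.py's EmotionClassifier might look like,
# inferred only from how it is used above (called as model(input_ids, attention_mask)
# and producing one logit per emotion). This is an assumption, not the actual training
# code; the real definition may pool the encoder output or structure the head differently.
#
# import torch.nn as nn
# from transformers import AutoModel
#
# class EmotionClassifier(nn.Module):
#     def __init__(self, model_name, num_labels=5):
#         super().__init__()
#         self.backbone = AutoModel.from_pretrained(model_name)
#         self.classifier = nn.Linear(self.backbone.config.hidden_size, num_labels)
#
#     def forward(self, input_ids, attention_mask):
#         out = self.backbone(input_ids=input_ids, attention_mask=attention_mask)
#         pooled = out.last_hidden_state[:, 0]  # first-token ([CLS]-style) pooling
#         return self.classifier(pooled)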