from transformers import AutoModel
from huggingface_hub import hf_hub_download
from safetensors.torch import load_file
import torch.nn as nn
import torch

# Number of output classes for the classification head.
NUM_LABELS = 4

class DistilBertClassificationModel(nn.Module):
    def __init__(
        self,
        model_path="distilbert/distilbert-base-uncased",
        freeze_weights=True,
    ):
        super().__init__()
        self.freeze_weights = freeze_weights
        if model_path == "distilbert/distilbert-base-uncased":
            # Load the stock pretrained backbone directly.
            self.base_model = AutoModel.from_pretrained(model_path)
        else:
            # Load a fine-tuned checkpoint that was saved from this wrapper:
            # fetch its safetensors file, strip the "base_model." prefix, and
            # drop the classifier head so the keys match the bare backbone.
            pytorch_model_path = hf_hub_download(
                repo_id=model_path,
                repo_type="model",
                filename="model.safetensors",
            )
            state_dict = load_file(pytorch_model_path)
            filtered_state_dict = {
                k.replace("base_model.", ""): v
                for k, v in state_dict.items()
                if not k.startswith("classifier.")
            }
            self.base_model = AutoModel.from_pretrained(
                "distilbert/distilbert-base-uncased",
                state_dict=filtered_state_dict,
            )

        # Expose the backbone config on the wrapper.
        self.config = self.base_model.config

        # Optionally freeze the backbone so only the classifier head trains.
        if freeze_weights:
            for param in self.base_model.parameters():
                param.requires_grad = False

        self.classifier = nn.Linear(self.base_model.config.hidden_size, NUM_LABELS)

    def forward(self, input_ids, attention_mask, labels=None):
        # Only skip gradient tracking when the backbone is frozen; an
        # unconditional torch.no_grad() here would also block gradients
        # when freeze_weights=False.
        if self.freeze_weights:
            with torch.no_grad():
                outputs = self.base_model(input_ids=input_ids, attention_mask=attention_mask)
        else:
            outputs = self.base_model(input_ids=input_ids, attention_mask=attention_mask)

        # Sum-pool token states over the sequence dimension. Note that this
        # includes padding positions; masking with attention_mask before
        # summing is a common alternative.
        summed_representation = outputs.last_hidden_state.sum(dim=1)

        logits = self.classifier(summed_representation)
        loss = None
        if labels is not None:
            # BCEWithLogitsLoss treats each of the NUM_LABELS outputs as an
            # independent binary target (multi-label classification).
            loss_fn = nn.BCEWithLogitsLoss()
            loss = loss_fn(logits, labels.float())
        return {"loss": loss, "logits": logits}
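
# Hedged sketch of producing a checkpoint that the custom loading branch in
# __init__ can consume: that branch expects a hub repo whose model.safetensors
# was saved from this wrapper (keys prefixed "base_model." plus a
# "classifier." head). The repo id below is a hypothetical placeholder.
def push_checkpoint(model, repo_id="your-username/distilbert-finetuned"):
    from huggingface_hub import upload_file
    from safetensors.torch import save_file

    # Saving the wrapper's full state_dict yields exactly the key layout
    # that the filtering logic in __init__ undoes on load.
    save_file(model.state_dict(), "model.safetensors")
    upload_file(
        path_or_fileobj="model.safetensors",
        path_in_repo="model.safetensors",
        repo_id=repo_id,
        repo_type="model",
    )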
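
# Minimal usage sketch (illustrative, not from the original source): assumes
# the matching distilbert tokenizer and a [batch, NUM_LABELS] multi-label
# target tensor of 0/1 values.
if __name__ == "__main__":
    from transformers import AutoTokenizer

    tokenizer = AutoTokenizer.from_pretrained("distilbert/distilbert-base-uncased")
    model = DistilBertClassificationModel()

    batch = tokenizer(
        ["an example sentence", "another one"],
        padding=True,
        truncation=True,
        return_tensors="pt",
    )
    labels = torch.zeros(2, NUM_LABELS)  # hypothetical targets

    out = model(
        input_ids=batch["input_ids"],
        attention_mask=batch["attention_mask"],
        labels=labels,
    )
    print(out["loss"].item(), out["logits"].shape)  # logits: torch.Size([2, 4])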