Instructions for using IEETA/BioNExt-Tagger with libraries, inference providers, notebooks, and local apps. Follow the links below to get started.
- Libraries
- Transformers
How to use IEETA/BioNExt-Tagger with Transformers:
# Use a pipeline as a high-level helper
from transformers import pipeline
pipe = pipeline("feature-extraction", model="IEETA/BioNExt-Tagger", trust_remote_code=True)

# Load the model directly
from transformers import AutoModel
model = AutoModel.from_pretrained("IEETA/BioNExt-Tagger", trust_remote_code=True, dtype="auto")

- Notebooks
- Google Colab
- Kaggle
Upload model
Browse files
configuration_bionexttager.py
CHANGED
|
@@ -25,10 +25,12 @@ class BioNextTaggerConfig(PretrainedConfig):
|
|
| 25 |
super().__init__(**kwargs)
|
| 26 |
|
| 27 |
def get_backbonemodel_config(self):
|
| 28 |
-
|
|
|
|
|
|
|
| 29 |
for k in backbonemodel_cfg.to_dict():
|
| 30 |
if hasattr(self, k):
|
| 31 |
setattr(backbonemodel_cfg,k, getattr(self,k))
|
| 32 |
-
|
| 33 |
return backbonemodel_cfg
|
| 34 |
|
|
|
|
| 25 |
super().__init__(**kwargs)
|
| 26 |
|
| 27 |
def get_backbonemodel_config(self):
|
| 28 |
+
print("BACKend config start?", flush=True)
|
| 29 |
+
backbonemodel_cfg = AutoConfig.from_pretrained(self._name_or_path,
|
| 30 |
+
trust_remote_code=True)#.to_dict()
|
| 31 |
for k in backbonemodel_cfg.to_dict():
|
| 32 |
if hasattr(self, k):
|
| 33 |
setattr(backbonemodel_cfg,k, getattr(self,k))
|
| 34 |
+
print("BACKend config end?", flush=True)
|
| 35 |
return backbonemodel_cfg
|
| 36 |
|