## Transformers

How to use casehold/bert-double with Transformers:
```python
# Use a pipeline as a high-level helper
from transformers import pipeline

pipe = pipeline("fill-mask", model="casehold/bert-double")
```
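Once loaded, the pipeline predicts the most likely tokens for a masked position. Below is a minimal sketch; the example sentence is purely illustrative, and it assumes the model uses BERT's default `[MASK]` token:

```python
# Ask the pipeline to fill in the masked token.
# The sentence is a made-up example; any text containing the mask token works.
predictions = pipe("The court granted the defendant's [MASK] to dismiss.")
for p in predictions:
    print(p["token_str"], p["score"])
```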
```python
# Load model directly
from transformers import AutoTokenizer, AutoModelForPreTraining

tokenizer = AutoTokenizer.from_pretrained("casehold/bert-double")
model = AutoModelForPreTraining.from_pretrained("casehold/bert-double")
```
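For finer-grained control, you can run the tokenizer and model yourself. This is a minimal sketch, assuming a BERT-style pretraining head whose outputs expose `prediction_logits` for masked-token prediction; the example sentence is again only illustrative:

```python
import torch

# Tokenize an illustrative sentence containing the mask token.
inputs = tokenizer("The court granted the defendant's [MASK] to dismiss.", return_tensors="pt")

with torch.no_grad():
    outputs = model(**inputs)

# Take the highest-scoring token at the masked position.
mask_index = (inputs["input_ids"] == tokenizer.mask_token_id).nonzero(as_tuple=True)[1]
predicted_id = outputs.prediction_logits[0, mask_index].argmax(dim=-1)
print(tokenizer.decode(predicted_id))
```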