Energy-Intelligence / check_model.py
savantripathi's picture
added more files
c3fc3ae verified
raw
history blame contribute delete
290 Bytes
from transformers import AutoModelForCausalLM
def _probe_local_cache(repo_id):
    """Best-effort check for a locally cached copy of *repo_id*.

    Attempts to load the model with ``local_files_only=True`` so no network
    download is triggered.  On success, prints the first five parameter
    names as evidence the weights are present; on any failure, prints the
    error instead of raising (this is a diagnostic script, not a library).
    """
    try:
        # local_files_only=True raises if the weights are not already cached.
        model = AutoModelForCausalLM.from_pretrained(repo_id, local_files_only=True)
    except Exception as err:
        # Broad catch is intentional at this top-level boundary: any load
        # failure is reported the same way, and the message is printed.
        print('Model not cached:', str(err))
    else:
        param_names = list(model.state_dict().keys())
        print('Local model files:', param_names[:5])

_probe_local_cache('Qwen/Qwen2.5-7B-Instruct')