File size: 290 Bytes
c3fc3ae
 
 
 
 
 
 
1
2
3
4
5
6
7
8
from transformers import AutoModelForCausalLM

# Probe whether the Qwen2.5-7B-Instruct checkpoint already exists in the local
# Hugging Face cache: local_files_only=True forbids any network download, so
# from_pretrained succeeds only when every required file is present on disk.
try:
    model = AutoModelForCausalLM.from_pretrained(
        'Qwen/Qwen2.5-7B-Instruct', local_files_only=True
    )
except Exception as e:
    # Any failure (missing cache entry, partial download, ...) means the model
    # is not usable offline; report the reason instead of crashing.
    print('Model not cached:', str(e))
else:
    # Show the first few parameter names as evidence the weights loaded.
    sample_keys = list(model.state_dict())[:5]
    print('Local model files:', sample_keys)