from transformers import AutoModelForCausalLM

# Probe whether the Qwen2.5-7B-Instruct weights are already in the local
# Hugging Face cache. `local_files_only=True` forbids any network download,
# so from_pretrained succeeds only when the files are cached.
try:
    model = AutoModelForCausalLM.from_pretrained(
        'Qwen/Qwen2.5-7B-Instruct', local_files_only=True
    )
    # Print a small sample of parameter names as evidence the weights loaded.
    print('Local model files:', list(model.state_dict().keys())[:5])
except OSError as e:
    # transformers raises OSError (EnvironmentError) on a cache miss when
    # local_files_only=True — the expected "not downloaded yet" path.
    print('Model not cached:', str(e))
except Exception as e:
    # Best-effort probe: any other failure (corrupt cache, version mismatch)
    # is reported rather than raised, matching the original behavior.
    print('Model not cached:', str(e))