ariG23498 (HF Staff) committed on
Commit e4c5656 · verified · 1 Parent(s): 6f24eb1

Upload MiniMaxAI_MiniMax-M2_1.py with huggingface_hub
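The commit message says the file was pushed with the `huggingface_hub` client. As a minimal sketch of how such an upload is typically done (the repo id below is a placeholder, since the target repo is not shown on this page):

```python
# Hedged sketch: push a local script to a Hub repo with huggingface_hub.
from huggingface_hub import HfApi

api = HfApi()  # uses a saved token (e.g. from `huggingface-cli login`) or HF_TOKEN
api.upload_file(
    path_or_fileobj="MiniMaxAI_MiniMax-M2_1.py",   # local file to push
    path_in_repo="MiniMaxAI_MiniMax-M2_1.py",      # destination path in the repo
    repo_id="<namespace>/<repo>",                  # placeholder, not taken from this commit
    commit_message="Upload MiniMaxAI_MiniMax-M2_1.py with huggingface_hub",
)
```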

Files changed (1)
  1. MiniMaxAI_MiniMax-M2_1.py +9 -34
MiniMaxAI_MiniMax-M2_1.py CHANGED
@@ -1,9 +1,14 @@
  # /// script
  # requires-python = ">=3.12"
  # dependencies = [
+ # "numpy",
+ # "einops",
+ # "pandas",
+ # "protobuf",
  # "torch",
  # "torchvision",
  # "transformers",
+ # "timm",
  # "diffusers",
  # "sentence-transformers",
  # "accelerate",
@@ -14,23 +19,8 @@
  
  try:
      # Load model directly
-     from transformers import AutoTokenizer, AutoModelForCausalLM
- 
-     tokenizer = AutoTokenizer.from_pretrained("MiniMaxAI/MiniMax-M2")
-     model = AutoModelForCausalLM.from_pretrained("MiniMaxAI/MiniMax-M2")
-     messages = [
-         {"role": "user", "content": "Who are you?"},
-     ]
-     inputs = tokenizer.apply_chat_template(
-         messages,
-         add_generation_prompt=True,
-         tokenize=True,
-         return_dict=True,
-         return_tensors="pt",
-     ).to(model.device)
- 
-     outputs = model.generate(**inputs, max_new_tokens=40)
-     print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
+     from transformers import AutoModelForCausalLM
+     model = AutoModelForCausalLM.from_pretrained("MiniMaxAI/MiniMax-M2", trust_remote_code=True, torch_dtype="auto")
      with open('MiniMaxAI_MiniMax-M2_1.txt', 'w', encoding='utf-8') as f:
          f.write('Everything was good in MiniMaxAI_MiniMax-M2_1.txt')
  except Exception as e:
@@ -46,23 +36,8 @@ except Exception as e:
      import traceback
      f.write('''```CODE:
      # Load model directly
-     from transformers import AutoTokenizer, AutoModelForCausalLM
- 
-     tokenizer = AutoTokenizer.from_pretrained("MiniMaxAI/MiniMax-M2")
-     model = AutoModelForCausalLM.from_pretrained("MiniMaxAI/MiniMax-M2")
-     messages = [
-         {"role": "user", "content": "Who are you?"},
-     ]
-     inputs = tokenizer.apply_chat_template(
-         messages,
-         add_generation_prompt=True,
-         tokenize=True,
-         return_dict=True,
-         return_tensors="pt",
-     ).to(model.device)
- 
-     outputs = model.generate(**inputs, max_new_tokens=40)
-     print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
+     from transformers import AutoModelForCausalLM
+     model = AutoModelForCausalLM.from_pretrained("MiniMaxAI/MiniMax-M2", trust_remote_code=True, torch_dtype="auto")
      ```
  
      ERROR:
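For context: the script keeps its dependencies in a PEP 723 `# /// script` block, so a runner such as `uv run MiniMaxAI_MiniMax-M2_1.py` can resolve them before executing. The sketch below mirrors the simplified load introduced in this commit and adds back a short generation round-trip based on the code the old version of the script contained; the tokenizer's `trust_remote_code` flag and the hardware notes are assumptions, not part of the commit.

```python
# Hedged sketch of the updated loading path, plus an optional generation step.
# MiniMax-M2 is a large checkpoint; device placement/offloading is not handled here.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "MiniMaxAI/MiniMax-M2"
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)  # flag assumed, mirrors the model call
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    trust_remote_code=True,   # the checkpoint ships custom modeling code on the Hub
    torch_dtype="auto",       # use the checkpoint's native dtype
)

messages = [{"role": "user", "content": "Who are you?"}]
inputs = tokenizer.apply_chat_template(
    messages,
    add_generation_prompt=True,
    tokenize=True,
    return_dict=True,
    return_tensors="pt",
).to(model.device)

outputs = model.generate(**inputs, max_new_tokens=40)
# Decode only the newly generated tokens, not the prompt.
print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:]))
```

Dropping the generation round-trip in favor of a bare `from_pretrained` call presumably keeps the smoke test focused on whether the checkpoint loads at all, which matches the added `trust_remote_code=True` and `torch_dtype="auto"` arguments.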