Upload GptOssForCausalLM
#2 by albertvillanova

Files changed (2):
  1. config.json +1 -2
  2. model.safetensors +2 -2
config.json CHANGED
@@ -17,11 +17,10 @@
   "max_position_embeddings": 131072,
   "model_type": "gpt_oss",
   "num_attention_heads": 4,
-  "num_experts": 4,
   "num_experts_per_tok": 2,
   "num_hidden_layers": 2,
   "num_key_value_heads": 2,
-  "num_local_experts": 128,
+  "num_local_experts": 4,
   "output_router_logits": false,
   "rms_norm_eps": 1e-05,
   "rope_scaling": {
model.safetensors CHANGED
@@ -1,3 +1,3 @@
   version https://git-lfs.github.com/spec/v1
-  oid sha256:015ad2fae9f683669e23746fec4260638970a906453a9a299f6619393c1d9254
-  size 6865928
+  oid sha256:e04d3d94d39a433e473b19e7552a78dc58aa616dd083d881912c3ec52aa9a985
+  size 6444800
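
The weights file was presumably regenerated to match the corrected config, so the Git LFS pointer's object id and size change (6,865,928 to 6,444,800 bytes). A minimal sketch (not part of this PR; assumes the resolved model.safetensors has been downloaded locally, not just the pointer) that recomputes the pointer fields:

import hashlib
from pathlib import Path

# Read the resolved weights file (the LFS pointer above only references it).
data = Path("model.safetensors").read_bytes()

# The pointer's oid is the SHA-256 of the object; size is its byte length.
assert hashlib.sha256(data).hexdigest() == "e04d3d94d39a433e473b19e7552a78dc58aa616dd083d881912c3ec52aa9a985"
assert len(data) == 6444800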