mlboydaisuke committed on
Commit 0c8ae9b · verified · 1 Parent(s): a5ca589

upload model_config.json

Files changed (1)
  1. model_config.json +58 -0
model_config.json ADDED
@@ -0,0 +1,58 @@
+ {
+   "model_name": "functiongemma-270m",
+   "architecture": "gemma3",
+   "hidden_size": 640,
+   "num_hidden_layers": 18,
+   "num_layers": 18,
+   "num_attention_heads": 4,
+   "num_key_value_heads": 1,
+   "head_dim": 256,
+   "intermediate_size": 2048,
+   "vocab_size": 262144,
+   "context_length": 2048,
+   "sliding_window": 512,
+   "sliding_window_pattern": 6,
+   "layer_types": [
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "full_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "sliding_attention",
+     "full_attention"
+   ],
+   "embed_scale": 25.298221281347036,
+   "rope_theta_global": 1000000.0,
+   "rope_theta_local": 10000.0,
+   "query_pre_attn_scalar": 256.0,
+   "rms_norm_eps": 1e-06,
+   "bos_token_id": 2,
+   "eos_token_id": [
+     1,
+     50
+   ],
+   "tie_word_embeddings": true,
+   "final_logit_softcapping": 0.0,
+   "parts": {
+     "model": "model.mlpackage"
+   },
+   "quantization": "fp16",
+   "compute_units": "CPU_AND_NE",
+   "tokenizer_repo": "google/functiongemma-270m-it",
+   "chat_format": "functiongemma",
+   "function_call_markers": {
+     "start": "<start_function_call>",
+     "end": "<end_function_call>"
+   }
+ }
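
The `layer_types` list encodes Gemma 3's interleaved attention schedule: every `sliding_window_pattern`-th layer (here every 6th of 18) uses full attention, and the rest attend over a 512-token sliding window. A minimal consumer-side sanity check (a hypothetical script, not part of this commit) that derives the schedule from the scalar fields and also confirms that `embed_scale` is √`hidden_size` (√640 ≈ 25.2982):

```python
import json
import math

# Hypothetical sanity check, not part of this commit: verify that the
# explicit layer_types list matches the schedule implied by
# sliding_window_pattern, and that embed_scale equals sqrt(hidden_size).
with open("model_config.json") as f:
    cfg = json.load(f)

pattern = cfg["sliding_window_pattern"]  # 6
derived = [
    "full_attention" if (i + 1) % pattern == 0 else "sliding_attention"
    for i in range(cfg["num_hidden_layers"])  # 18 layers
]
assert derived == cfg["layer_types"]  # full attention at layers 6, 12, 18

# 25.298221281347036 == sqrt(640): Gemma-family models scale the token
# embeddings by sqrt(hidden_size).
assert math.isclose(cfg["embed_scale"], math.sqrt(cfg["hidden_size"]))
```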
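
The `function_call_markers` block tells a client how to locate tool calls in decoded output. A minimal extraction sketch, assuming the model wraps each call between the configured start and end markers; the payload shown inside the markers is invented for illustration:

```python
import re

START = "<start_function_call>"  # from function_call_markers.start
END = "<end_function_call>"      # from function_call_markers.end

def extract_function_calls(text: str) -> list[str]:
    """Return the raw payloads found between the configured markers."""
    return re.findall(
        re.escape(START) + r"(.*?)" + re.escape(END), text, re.DOTALL
    )

# Hypothetical model output; the call syntax inside the markers is an
# assumption for illustration only.
sample = f"Sure, checking.{START}get_weather(city='Tokyo'){END}"
print(extract_function_calls(sample))  # ["get_weather(city='Tokyo')"]
```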