Upload Gemma3ForConditionalGeneration
#10
by albertvillanova (HF Staff) - opened
- config.json +3 -3
- model.safetensors +2 -2
- preprocessor_config.json +2 -2
config.json
CHANGED
@@ -23,7 +23,7 @@
     "hidden_activation": "gelu_pytorch_tanh",
     "hidden_size": 16,
     "initializer_range": 0.02,
-    "intermediate_size":
+    "intermediate_size": 32,
     "layer_types": [
       "sliding_attention",
       "sliding_attention"
@@ -52,8 +52,8 @@
     "embed_dim": 64,
     "hidden_act": "gelu_pytorch_tanh",
     "hidden_size": 16,
-    "image_size":
-    "intermediate_size":
+    "image_size": 224,
+    "intermediate_size": 32,
     "layer_norm_eps": 1e-06,
     "model_type": "siglip_vision_model",
     "num_attention_heads": 4,
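For reference, a minimal sketch of how to check the filled-in values on this revision. The repo id below is a placeholder (the PR page does not show the full repo path), refs/pr/10 is the Hub ref for PR #10, and the text_config/vision_config attribute access assumes the usual nested Gemma3 config layout:

from transformers import AutoConfig

# Placeholder repo id; substitute the actual "<org>/<repo>".
config = AutoConfig.from_pretrained("<org>/<repo>", revision="refs/pr/10")

# Text config: intermediate_size is now set explicitly.
print(config.text_config.intermediate_size)    # expected: 32

# Vision config ("siglip_vision_model"): image_size and
# intermediate_size are now set as well.
print(config.vision_config.image_size)         # expected: 224
print(config.vision_config.intermediate_size)  # expected: 32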
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:866c91b1e2d7f40146f4a679ff31dc7cf900afdf2fc8a880c94bbf68392fb952
+size 8640680
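The safetensors file is stored through Git LFS, so the diff above is against the LFS pointer rather than the weights themselves: oid is the SHA-256 of the actual file and size is its byte count. A minimal sketch for verifying a downloaded copy against the new pointer (placeholder repo id as above):

import hashlib
import os

from huggingface_hub import hf_hub_download

# Placeholder repo id; hf_hub_download resolves the LFS pointer
# to the real file.
path = hf_hub_download("<org>/<repo>", "model.safetensors", revision="refs/pr/10")

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

# Both expected values come from the updated pointer file.
assert sha.hexdigest() == "866c91b1e2d7f40146f4a679ff31dc7cf900afdf2fc8a880c94bbf68392fb952"
assert os.path.getsize(path) == 8640680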
preprocessor_config.json
CHANGED
@@ -23,7 +23,7 @@
   "resample": 2,
   "rescale_factor": 0.00392156862745098,
   "size": {
-    "height":
-    "width":
+    "height": 224,
+    "width": 224
   }
 }
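The 224x224 target size matches image_size in the vision config above (resample 2 is PIL's bilinear filter, and rescale_factor 0.00392156862745098 is 1/255). A minimal sketch of the resulting preprocessing, assuming the processor loads via AutoImageProcessor and using the same placeholder repo id:

from PIL import Image
from transformers import AutoImageProcessor

processor = AutoImageProcessor.from_pretrained("<org>/<repo>", revision="refs/pr/10")

# Any input resolution should come out resized to 224x224.
image = Image.new("RGB", (640, 480))
pixel_values = processor(images=image, return_tensors="pt").pixel_values
print(pixel_values.shape)  # expected: torch.Size([1, 3, 224, 224])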