Upload folder using huggingface_hub

- config.json +2 -6
- generation_config.json +1 -1
- model-00001-of-00003.safetensors +1 -1
- preprocessor_config.json +1 -1
config.json CHANGED

@@ -10,7 +10,6 @@
     "conf_conv_kernel_size": 5,
     "conf_num_attention_heads": 8,
     "conf_num_hidden_layers": 12,
-    "conf_positional_bias_size": 256,
     "conf_reduction_factor": 4,
     "conf_residual_weight": 0.5,
     "gradient_clipping": 10000000000.0,
@@ -22,7 +21,6 @@
       128,
       32
     ],
-    "sscp_conv_eps": 0.001,
     "sscp_conv_group_norm_eps": 0.001,
     "sscp_conv_kernel_size": [
       [
@@ -129,7 +127,6 @@
     "altup_active_idx": 0,
     "altup_coef_clip": 120.0,
     "altup_correct_scale": true,
-    "altup_lr_multiplier": 1.0,
     "altup_num_inputs": 4,
     "attention_bias": false,
     "attention_dropout": 0.0,
@@ -210,7 +207,6 @@
     "num_hidden_layers": 30,
     "num_key_value_heads": 2,
     "num_kv_shared_layers": 10,
-    "query_pre_attn_scalar": 256,
     "rms_norm_eps": 1e-06,
     "rope_local_base_freq": 10000.0,
     "rope_scaling": null,
@@ -222,11 +218,11 @@
     "vocab_size_per_layer_input": 262144
   },
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.53.
+  "transformers_version": "4.53.1",
   "unsloth_fixed": true,
   "vision_config": {
     "architecture": "mobilenetv5_300m_enc",
-    "do_pooling":
+    "do_pooling": false,
     "hidden_size": 2048,
     "initializer_range": 0.02,
     "label_names": [
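The config changes can be sanity-checked after download with transformers' `AutoConfig`. A minimal sketch; the repo id `unsloth/gemma-3n-E4B-it` is an assumption for illustration, since the commit itself does not name the repository:

```python
from transformers import AutoConfig

# Hypothetical repo id -- the commit itself does not name the repository.
config = AutoConfig.from_pretrained("unsloth/gemma-3n-E4B-it")

print(config.torch_dtype)               # bfloat16
print(config.transformers_version)      # "4.53.1" after this commit
print(config.vision_config.do_pooling)  # False, added by this commit
```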
generation_config.json CHANGED

@@ -6,5 +6,5 @@
   "pad_token_id": 0,
   "top_k": 64,
   "top_p": 0.95,
-  "transformers_version": "4.53.
+  "transformers_version": "4.53.1"
 }
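The sampling parameters can be checked the same way; a minimal sketch using `GenerationConfig.from_pretrained`, with the same hypothetical repo id:

```python
from transformers import GenerationConfig

# Hypothetical repo id -- substitute the actual repository.
gen = GenerationConfig.from_pretrained("unsloth/gemma-3n-E4B-it")

print(gen.pad_token_id, gen.top_k, gen.top_p)  # 0 64 0.95
```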
model-00001-of-00003.safetensors CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:dcb67227b2b0f92e9c47eed6cad816fcc226a6b0cf9cd8f6bd1e3b834419ef70
 size 2650152952
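The `oid` in a Git LFS pointer is the SHA-256 of the actual file, so a downloaded shard can be verified against it with nothing but the standard library. A minimal sketch, assuming the shard sits in the current directory:

```python
import hashlib

# SHA-256 recorded in the LFS pointer above.
EXPECTED = "dcb67227b2b0f92e9c47eed6cad816fcc226a6b0cf9cd8f6bd1e3b834419ef70"

h = hashlib.sha256()
with open("model-00001-of-00003.safetensors", "rb") as f:
    # Hash in 1 MiB chunks to keep memory flat on a 2.6 GB file.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert h.hexdigest() == EXPECTED, "shard does not match its LFS pointer"
```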
preprocessor_config.json CHANGED

@@ -41,7 +41,7 @@
   "processor_class": "Gemma3nProcessor",
   "resample": 2,
   "rescale_factor": 0.00392156862745098,
-  "return_attention_mask":
+  "return_attention_mask": true,
   "return_tensors": null,
   "sampling_rate": 16000,
   "size": {
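The processor change can be confirmed after loading; a minimal sketch, where both the repo id and the `feature_extractor` attribute as the audio sub-component of `Gemma3nProcessor` are assumptions:

```python
from transformers import AutoProcessor

# Hypothetical repo id; `feature_extractor` as the audio sub-component
# of Gemma3nProcessor is also an assumption.
processor = AutoProcessor.from_pretrained("unsloth/gemma-3n-E4B-it")

print(processor.feature_extractor.return_attention_mask)  # True after this commit
print(processor.feature_extractor.sampling_rate)          # 16000
```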