suayptalha committed on
Commit
bf53d71
·
verified ·
1 Parent(s): 6e31c2b

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +3 -3
config.json CHANGED
@@ -6,8 +6,8 @@
6
  "attention_dropout": 0.0,
7
  "attn_logit_soft_capping": 50.0,
8
  "auto_map": {
9
- "AutoConfig": "suayptalha/turkish-gemmoe-cosmos-base-router-tuned--configuration_gemma2moe.Gemma2MoeConfig",
10
- "AutoModelForCausalLM": "suayptalha/turkish-gemmoe-cosmos-base-router-tuned--modeling_gemma2moe.Gemma2MoeForCausalLM"
11
  },
12
  "bos_token_id": 2,
13
  "dtype": "bfloat16",
@@ -27,7 +27,7 @@
27
  "num_local_experts": 3,
28
  "output_router_logits": false,
29
  "pad_token_id": 0,
30
- "path": "suayptalha/turkish-gemmoe-cosmos-base-router-tuned",
31
  "query_pre_attn_scalar": 256,
32
  "rms_norm_eps": 1e-06,
33
  "rope_theta": 10000.0,
 
6
  "attention_dropout": 0.0,
7
  "attn_logit_soft_capping": 50.0,
8
  "auto_map": {
9
+ "AutoConfig": "suayptalha/Sungur-3x9B-Cosmos--configuration_gemma2moe.Gemma2MoeConfig",
10
+ "AutoModelForCausalLM": "suayptalha/Sungur-3x9B-Cosmos--modeling_gemma2moe.Gemma2MoeForCausalLM"
11
  },
12
  "bos_token_id": 2,
13
  "dtype": "bfloat16",
 
27
  "num_local_experts": 3,
28
  "output_router_logits": false,
29
  "pad_token_id": 0,
30
+ "path": "suayptalha/Sungur-3x9B-Cosmos",
31
  "query_pre_attn_scalar": 256,
32
  "rms_norm_eps": 1e-06,
33
  "rope_theta": 10000.0,