{ "activation_func": "swiglu", "activation_func_fp8_input_store": false, "add_bias_linear": false, "architectures": [ "ZayaForCausalLM" ], "attention_bias": false, "attention_dropout": 0.0, "bias_activation_fusion": true, "bos_token_id": 2, "cca": true, "cca_num_q_heads": [ 2, 0 ], "dtype": "bfloat16", "eos_token_id": 1, "ffn_hidden_size_list": [ 0, 32 ], "gated_linear_unit": true, "hidden_size": 512, "kv_channels": 128, "lm_head_bias": false, "max_position_embeddings": 32768, "model_type": "zaya", "moe_router_topk": 1, "norm_epsilon": 1e-05, "num_attention_heads": 4, "num_hidden_layers": 2, "num_key_value_heads": 1, "num_query_groups_list": [ 1, 0 ], "pad_token_id": 0, "partial_rotary_factor": 0.5, "residual_in_fp32": false, "rope_scaling": false, "rope_theta": 1000000, "scale_residual_merge": true, "sliding_window": null, "tie_word_embeddings": true, "transformers_version": "4.57.1", "use_cache": true, "vocab_size": 262272, "zaya_layers": [ "a", 16 ], "zaya_mlp_expansion": [ 0, 8 ], "zaya_use_eda": true, "zaya_use_mod": true }