{
  "model_type": "lizard",
  "architectures": [
    "LizardForCausalLM"
  ],
  "vocab_size": 24005,
  "d_model": 256,
  "n_heads": 8,
  "n_layers": 6,
  "max_length": 128,
  "pad_token_id": 0
}