{ "architectures": [ "JetNemotronForCausalLM" ], "attention_dropout": 0.0, "auto_map": { "AutoConfig": "configuration_jet_nemotron.JetNemotronConfig", "AutoModelForCausalLM": "modeling_jet_nemotron.JetNemotronForCausalLM" }, "bos_token_id": 151643, "efficient_attention_config": { "jet": { "conv_size": 4, "dconv_generator_reduction": 8, "dconv_implementation": "triton", "expand_v": 2, "head_dim": 128, "mode": "chunk", "norm_eps": "1e-5", "num_heads": 16 }, "swa": { "window_size": 2048 } }, "eos_token_id": 151643, "hidden_act": "silu", "hidden_size": 2048, "initializer_range": 0.02, "intermediate_size": 11008, "layer_types": [ "jet", "jet", "jet", "jet", "jet", "swa", "jet", "jet", "jet", "jet", "jet", "jet", "jet", "jet", "jet", "jet", "swa", "attn", "jet", "swa", "attn", "swa", "swa", "jet", "jet", "swa", "jet", "swa", "jet", "jet", "jet", "jet", "attn", "jet", "jet", "jet" ], "max_position_embeddings": 32768, "max_window_layers": 36, "model_type": "jet_nemotron", "num_attention_heads": 16, "num_hidden_layers": 36, "num_key_value_heads": 2, "rms_norm_eps": 1e-06, "rope_scaling": null, "rope_theta": 1000000.0, "tie_word_embeddings": true, "torch_dtype": "bfloat16", "transformers_version": "4.51.3", "use_cache": true, "use_mrope": false, "vocab_size": 151936 }