{ "_name_or_path": "databricks/dbrx-instruct", "architectures": [ "DbrxForCausalLM" ], "attn_config": { "clip_qkv": 8, "kv_n_heads": 8, "model_type": "", "rope_theta": 500000 }, "d_model": 6144, "emb_pdrop": 0.0, "ffn_config": { "ffn_hidden_size": 10752, "model_type": "", "moe_jitter_eps": 0, "moe_loss_weight": 0.05, "moe_num_experts": 16, "moe_top_k": 4 }, "initializer_range": 0.02, "max_seq_len": 32768, "model_type": "dbrx", "n_heads": 48, "n_layers": 40, "output_router_logits": false, "quantization_config": { "activation_scheme": "static", "ignored_layers": [ "transformer.blocks.26.ffn.router.layer", "transformer.blocks.1.ffn.router.layer", "transformer.blocks.32.ffn.router.layer", "transformer.blocks.35.ffn.router.layer", "transformer.blocks.16.ffn.router.layer", "transformer.blocks.15.ffn.router.layer", "transformer.blocks.10.ffn.router.layer", "transformer.blocks.18.ffn.router.layer", "transformer.blocks.34.ffn.router.layer", "transformer.blocks.38.ffn.router.layer", "transformer.blocks.13.ffn.router.layer", "transformer.blocks.29.ffn.router.layer", "transformer.blocks.3.ffn.router.layer", "transformer.blocks.33.ffn.router.layer", "transformer.blocks.21.ffn.router.layer", "transformer.blocks.19.ffn.router.layer", "transformer.blocks.8.ffn.router.layer", "transformer.blocks.0.ffn.router.layer", "transformer.blocks.37.ffn.router.layer", "transformer.blocks.39.ffn.router.layer", "transformer.blocks.4.ffn.router.layer", "transformer.blocks.28.ffn.router.layer", "transformer.blocks.7.ffn.router.layer", "transformer.blocks.5.ffn.router.layer", "transformer.blocks.9.ffn.router.layer", "transformer.blocks.24.ffn.router.layer", "transformer.blocks.25.ffn.router.layer", "transformer.blocks.20.ffn.router.layer", "transformer.blocks.27.ffn.router.layer", "transformer.blocks.2.ffn.router.layer", "transformer.blocks.31.ffn.router.layer", "transformer.blocks.6.ffn.router.layer", "transformer.blocks.30.ffn.router.layer", "transformer.blocks.36.ffn.router.layer", "transformer.blocks.12.ffn.router.layer", "transformer.blocks.14.ffn.router.layer", "transformer.blocks.23.ffn.router.layer", "transformer.blocks.22.ffn.router.layer", "transformer.blocks.11.ffn.router.layer", "lm_head", "transformer.blocks.17.ffn.router.layer" ], "quant_method": "fp8" }, "resid_pdrop": 0.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "transformers_version": "4.42.1", "use_cache": true, "vocab_size": 100352 }