{ "_name_or_path": "mistralai/Mistral-7B-v0.1", "architectures": [ "ZettHypernet" ], "attention_dropout": 0.0, "auto_map": { "AutoConfig": "configuration_hypernet.ZettHypernetConfig", "AutoModel": "modeling_hypernet.ZettHypernet" }, "bos_token_id": 1, "eos_token_id": 2, "hidden_act": "silu", "hidden_size": 4096, "hn_add_inter_token_attention": false, "hn_concat_last_hidden_state": false, "hn_embed_lang_id": true, "hn_embed_target_priors": false, "hn_embed_using_source_embeddings": true, "hn_hidden_size": 4096, "hn_inter_token_attention_bias_by_priors": true, "hn_inter_token_attention_bias_scaler": 1.0, "hn_intermediate_size": 8192, "hn_language_adapter_bottleneck_dim": 0, "hn_model_name_or_path": "roberta-base", "hn_model_type": "roberta", "hn_n_extra_tokens": 522, "hn_n_inter_token_blocks": 16, "hn_n_layers": 3, "hn_num_attention_heads": 32, "hn_predict_bias": true, "hn_rescale_embeddings": true, "hn_single_head": false, "hn_surface_maxlen": 15, "initializer_range": 0.02, "intermediate_size": 14336, "langs": [ "en", "ru", "de", "es", "fr", "it", "pt", "el", "ko", "fi", "id", "tr", "ar", "vi", "bg", "ca", "hi", "et", "bn", "ta", "ur", "sw", "te", "eu", "ht", "qu" ], "max_position_embeddings": 32768, "n_embd": 4096, "n_langs": 26, "name": "v7:mistral7b_multilingual:lw=0.5_long_length=15_alpha=0.1_26l_resume", "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "original_vocab_size": 32000, "pad_token_id": 2, "rms_norm_eps": 1e-05, "rope_theta": 10000.0, "separate_out_embeddings": true, "sliding_window": 4096, "tie_word_embeddings": false, "torch_dtype": "float32", "transformers_version": "4.39.0.dev0", "use_cache": true, "use_unigram_bias": true, "vocab_size": 32896 }