mjschock committed
Commit 6ee5a33
1 Parent(s): d182f09

Upload config

Files changed (1):
  configuration_mobilevlm.py  +2 -2
configuration_mobilevlm.py CHANGED
@@ -78,7 +78,7 @@ class MobileVLMConfig(PretrainedConfig):
         rope_scaling: Optional[float] = config["rope_scaling"],
         rope_theta: Optional[float] = config["rope_theta"],
         tie_word_embeddings: Optional[bool] = config["tie_word_embeddings"],
-        # torch_dtype: Optional[str] = config["torch_dtype"],
+        torch_dtype: Optional[str] = config["torch_dtype"],
         transformers_version: Optional[str] = config["transformers_version"],
         tune_mm_mlp_adapter: Optional[bool] = config["tune_mm_mlp_adapter"],
         use_cache: Optional[bool] = config["use_cache"],
@@ -117,7 +117,7 @@ class MobileVLMConfig(PretrainedConfig):
         self.rope_scaling = rope_scaling
         self.rope_theta = rope_theta
         self.tie_word_embeddings = tie_word_embeddings
-        # self.torch_dtype = torch_dtype
+        self.torch_dtype = torch_dtype
         self.transformers_version = transformers_version
         self.tune_mm_mlp_adapter = tune_mm_mlp_adapter
         self.use_cache = use_cache
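
For context, this commit restores the torch_dtype field that an earlier revision had commented out: the value is read from a module-level config dict as a keyword default and then stored on the instance. Below is a minimal, hypothetical sketch of that pattern; the stand-in config dict, the "bfloat16" value, and the trimmed parameter list are assumptions for illustration, not the repo's actual defaults.

from typing import Optional

from transformers import PretrainedConfig

# Hypothetical stand-in for the module-level defaults dict the real file reads.
config = {"torch_dtype": "bfloat16", "use_cache": True}


class MobileVLMConfig(PretrainedConfig):
    def __init__(
        self,
        torch_dtype: Optional[str] = config["torch_dtype"],
        use_cache: Optional[bool] = config["use_cache"],
        **kwargs,
    ):
        super().__init__(**kwargs)
        # Assign after super().__init__(), which would otherwise reset
        # torch_dtype to None (PretrainedConfig pops it from kwargs).
        self.torch_dtype = torch_dtype
        self.use_cache = use_cache


cfg = MobileVLMConfig()
print(cfg.torch_dtype)  # -> "bfloat16"

One caveat this sketch hedges around: PretrainedConfig.__init__ already manages a torch_dtype attribute of its own, so the order of the self.torch_dtype assignment relative to the super().__init__() call determines which value wins.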