{"model": "mistral-community/Mixtral-8x22B-v0.1", "base_model": null, "revision": "ab1e8c1950cf359e2a25de9b274ab836adb6dbab", "precision": "bfloat16", "params": 140.621, "architectures": "MixtralForCausalLM", "weight_type": "Original", "status": "PENDING", "submitted_time": "2024-06-08T15:04:42Z", "model_type": "mixtral", "job_id": -1, "job_start_time": null, "use_chat_template": false}