IlyasMoutawwakil (HF staff) committed
Commit dbfb103
1 parent: af2fcf6

Upload save/path/special_config_name2.json with huggingface_hub

Files changed (1)
  1. save/path/special_config_name2.json +109 -0
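For reference, a commit message like this one usually comes from huggingface_hub's upload_file API. A minimal sketch of how the file was likely pushed; the repo_id, repo_type, and token handling below are assumptions, since the target repo is not shown on this page:

from huggingface_hub import HfApi

# Sketch of the upload, assuming the benchmark config was first dumped
# locally to save/path/special_config_name2.json. The repo_id is a
# hypothetical placeholder, not taken from this page.
api = HfApi()  # picks up the token from the local HF cache or HF_TOKEN
api.upload_file(
    path_or_fileobj="save/path/special_config_name2.json",  # local file
    path_in_repo="save/path/special_config_name2.json",     # path inside the repo
    repo_id="IlyasMoutawwakil/benchmarks",                  # hypothetical repo id
    repo_type="dataset",                                    # assumption: a dataset repo
    commit_message="Upload save/path/special_config_name2.json with huggingface_hub",
)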
save/path/special_config_name2.json ADDED
@@ -0,0 +1,109 @@
+ {
+     "backend": {
+         "name": "pytorch",
+         "version": "2.1.0+rocm5.6",
+         "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
+         "model": "TheBloke/Mistral-7B-Instruct-v0.1-AWQ",
+         "task": "text-generation",
+         "library": "transformers",
+         "device": "cuda",
+         "device_ids": "0",
+         "seed": 42,
+         "inter_op_num_threads": null,
+         "intra_op_num_threads": null,
+         "hub_kwargs": {
+             "revision": "main",
+             "force_download": false,
+             "local_files_only": false,
+             "trust_remote_code": false
+         },
+         "no_weights": true,
+         "device_map": null,
+         "torch_dtype": null,
+         "amp_autocast": false,
+         "amp_dtype": null,
+         "eval_mode": true,
+         "to_bettertransformer": false,
+         "low_cpu_mem_usage": null,
+         "attn_implementation": null,
+         "cache_implementation": null,
+         "torch_compile": false,
+         "torch_compile_config": {},
+         "quantization_scheme": "awq",
+         "quantization_config": {
+             "version": "exllama"
+         },
+         "deepspeed_inference": false,
+         "deepspeed_inference_config": {},
+         "peft_type": null,
+         "peft_config": {}
+     },
+     "launcher": {
+         "name": "process",
+         "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
+         "device_isolation": false,
+         "start_method": "spawn"
+     },
+     "benchmark": {
+         "name": "inference",
+         "_target_": "optimum_benchmark.benchmarks.inference.benchmark.InferenceBenchmark",
+         "duration": 10,
+         "warmup_runs": 10,
+         "input_shapes": {
+             "batch_size": 4,
+             "num_choices": 2,
+             "sequence_length": 128
+         },
+         "new_tokens": null,
+         "energy": false,
+         "memory": true,
+         "latency": true,
+         "forward_kwargs": {},
+         "generate_kwargs": {
+             "max_new_tokens": 128,
+             "min_new_tokens": 128
+         },
+         "call_kwargs": {}
+     },
+     "experiment_name": "awq-exllamav2",
+     "task": null,
+     "model": null,
+     "device": null,
+     "library": null,
+     "environment": {
+         "cpu": " AMD EPYC 7763 64-Core Processor",
+         "cpu_count": 128,
+         "cpu_ram_mb": 1082015.236096,
+         "system": "Linux",
+         "machine": "x86_64",
+         "platform": "Linux-5.15.0-84-generic-x86_64-with-glibc2.35",
+         "processor": "x86_64",
+         "python_version": "3.9.18",
+         "gpu": [
+             "AMD INSTINCT MI250 (MCM) OAM AC MBA",
+             "AMD INSTINCT MI250 (MCM) OAM AC MBA",
+             "AMD INSTINCT MI250 (MCM) OAM AC MBA",
+             "AMD INSTINCT MI250 (MCM) OAM AC MBA",
+             "AMD INSTINCT MI250 (MCM) OAM AC MBA",
+             "AMD INSTINCT MI250 (MCM) OAM AC MBA",
+             "AMD INSTINCT MI250 (MCM) OAM AC MBA",
+             "AMD INSTINCT MI250 (MCM) OAM AC MBA"
+         ],
+         "gpu_count": 8,
+         "gpu_vram_mb": 549621596160,
+         "optimum_benchmark_version": "0.2.0",
+         "optimum_benchmark_commit": "09f95ce7707eb32c7880f72cdd9e14e8b7554315",
+         "transformers_version": "4.37.2",
+         "transformers_commit": null,
+         "accelerate_version": "0.24.1",
+         "accelerate_commit": null,
+         "diffusers_version": "0.26.3",
+         "diffusers_commit": null,
+         "optimum_version": "1.14.1",
+         "optimum_commit": null,
+         "timm_version": "0.9.16",
+         "timm_commit": null,
+         "peft_version": "0.8.2",
+         "peft_commit": null
+     }
+ }
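Once uploaded, the config can be fetched back and inspected as plain JSON with hf_hub_download. A sketch under the same assumed repo id as above:

import json

from huggingface_hub import hf_hub_download

# Fetch the committed config and check the benchmark settings; repo_id
# and repo_type are the same hypothetical placeholders as in the upload
# sketch above.
config_path = hf_hub_download(
    repo_id="IlyasMoutawwakil/benchmarks",  # hypothetical repo id
    repo_type="dataset",                    # assumption: a dataset repo
    filename="save/path/special_config_name2.json",
)

with open(config_path) as f:
    config = json.load(f)

# e.g. confirm the quantization setup used for the "awq-exllamav2" run
print(config["experiment_name"])                 # -> awq-exllamav2
print(config["backend"]["quantization_scheme"])  # -> awq
print(config["backend"]["quantization_config"])  # -> {'version': 'exllama'}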