DewiBrynJones committed
Commit 85716f0
1 Parent(s): 723677d

Training in progress, step 1000

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "openai/whisper-large-v3",
+  "_name_or_path": "openai/whisper-large-v3-turbo",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
   "apply_spec_augment": false,
@@ -9,7 +9,7 @@
   "attention_dropout": 0.0,
   "begin_suppress_tokens": [
     220,
-    50257
+    50256
   ],
   "bos_token_id": 50257,
   "classifier_proj_size": 256,
@@ -17,7 +17,7 @@
   "decoder_attention_heads": 20,
   "decoder_ffn_dim": 5120,
   "decoder_layerdrop": 0.0,
-  "decoder_layers": 32,
+  "decoder_layers": 4,
   "decoder_start_token_id": 50258,
   "dropout": 0.0,
   "encoder_attention_heads": 20,
@@ -34,14 +34,13 @@
   "mask_time_length": 10,
   "mask_time_min_masks": 2,
   "mask_time_prob": 0.05,
-  "max_length": 448,
   "max_source_positions": 1500,
   "max_target_positions": 448,
   "median_filter_width": 7,
   "model_type": "whisper",
   "num_hidden_layers": 32,
   "num_mel_bins": 128,
-  "pad_token_id": 50256,
+  "pad_token_id": 50257,
   "scale_embedding": false,
   "torch_dtype": "float32",
   "transformers_version": "4.44.0",
model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a9b709588929882bc7f2624ed86a69aa052cd0560af188208db1ee8e66cdf9b7
+size 3235581408
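As a rough sanity check (a back-of-the-envelope estimate, not something the commit itself asserts): with "torch_dtype" set to float32 in the config above, the 3,235,581,408-byte weight file implies roughly 809M parameters, which is in the expected range for the large-v3-turbo architecture.

```python
# Back-of-the-envelope: parameter count implied by the safetensors size,
# assuming essentially all bytes are float32 weights (4 bytes each) and
# ignoring the small safetensors header/metadata overhead.
size_bytes = 3_235_581_408
approx_params = size_bytes / 4
print(f"~{approx_params / 1e6:.0f}M parameters")  # ~809M
```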
runs/Oct10_16-21-13_4fc0a42a8f03/events.out.tfevents.1728574116.4fc0a42a8f03.181.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cc3ead6bc4adc4aa56fd2d17f56c7701bfcaf812ee5b72702f5be86dc97628c9
+size 14666
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b8359142f7dafd586e2338214dc70deac6b5d4f462f6cc3d5f66be79b6f344fa
+oid sha256:296db6290032b52652011c63f427ccc4392b1b4cdf871dfa9310056225a10ffa
 size 5496
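The large binaries in this commit are stored as Git LFS pointers: each pointer records only a sha256 oid and a byte size, with the actual object fetched separately (e.g. via `git lfs pull`). A small sketch of verifying a downloaded file against its pointer, using the oid and size from the model.safetensors pointer in this commit:

```python
# Sketch: check a downloaded LFS object against the oid/size in its pointer.
# Assumes model.safetensors has already been fetched into the working tree.
import hashlib
from pathlib import Path

EXPECTED_OID = "a9b709588929882bc7f2624ed86a69aa052cd0560af188208db1ee8e66cdf9b7"
EXPECTED_SIZE = 3235581408

path = Path("model.safetensors")
digest = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        digest.update(chunk)

assert path.stat().st_size == EXPECTED_SIZE, "size mismatch with LFS pointer"
assert digest.hexdigest() == EXPECTED_OID, "sha256 mismatch with LFS pointer"
print("model.safetensors matches its LFS pointer")
```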