quazim committed
Commit 1de1039 · verified · 1 Parent(s): 37788d9

Upload config

Files changed (1)
config.json +1 -3
config.json CHANGED
@@ -19,13 +19,11 @@
   "decoder_layers": 4,
   "decoder_start_token_id": 50258,
   "dropout": 0.0,
-  "dtype": "float32",
   "encoder_attention_heads": 20,
   "encoder_ffn_dim": 5120,
   "encoder_layerdrop": 0.0,
   "encoder_layers": 32,
   "eos_token_id": 50257,
-  "forced_decoder_ids": null,
   "init_std": 0.02,
   "is_encoder_decoder": true,
   "mask_feature_length": 10,
@@ -42,7 +40,7 @@
   "num_mel_bins": 128,
   "pad_token_id": 50257,
   "scale_embedding": false,
-  "torch_dtype": "float32",
+  "torch_dtype": "float16",
   "transformers_version": "4.52.3",
   "use_cache": true,
   "use_weighted_layer_sum": false,