valkiscute committed on
Commit 0a52acb · verified · 1 Parent(s): 2d869f2

Remove extra folder
Muyan-TTS/config.json DELETED
@@ -1,40 +0,0 @@
- {
- "_name_or_path": "saves/llama3.2-3b/full/pretrain-10wh/checkpoint-110000",
- "architectures": [
- "LlamaForCausalLM"
- ],
- "attention_bias": false,
- "attention_dropout": 0.0,
- "bos_token_id": 128000,
- "eos_token_id": [
- 128001,
- 128008,
- 128009
- ],
- "head_dim": 128,
- "hidden_act": "silu",
- "hidden_size": 3072,
- "initializer_range": 0.02,
- "intermediate_size": 8192,
- "max_position_embeddings": 131072,
- "mlp_bias": false,
- "model_type": "llama",
- "num_attention_heads": 24,
- "num_hidden_layers": 28,
- "num_key_value_heads": 8,
- "pretraining_tp": 1,
- "rms_norm_eps": 1e-05,
- "rope_scaling": {
- "factor": 32.0,
- "high_freq_factor": 4.0,
- "low_freq_factor": 1.0,
- "original_max_position_embeddings": 8192,
- "rope_type": "llama3"
- },
- "rope_theta": 500000.0,
- "tie_word_embeddings": true,
- "torch_dtype": "bfloat16",
- "transformers_version": "4.46.1",
- "use_cache": false,
- "vocab_size": 129344
- }
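For reference (not part of the commit): the deleted config.json is a standard transformers Llama config, so a local copy of it could be loaded as sketched below. The file path is illustrative.

```python
# Minimal sketch: load a Llama-style config like the deleted config.json.
# The path is an assumption; any local copy of the file would do.
from transformers import LlamaConfig

config = LlamaConfig.from_json_file("Muyan-TTS/config.json")
print(config.num_hidden_layers, config.hidden_size, config.vocab_size)  # 28 3072 129344
```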
 
Muyan-TTS/generation_config.json DELETED
@@ -1,12 +0,0 @@
- {
- "bos_token_id": 128000,
- "do_sample": true,
- "eos_token_id": [
- 128001,
- 128008,
- 128009
- ],
- "temperature": 0.6,
- "top_p": 0.9,
- "transformers_version": "4.46.1"
- }
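For reference, the deleted generation_config.json maps directly onto transformers' GenerationConfig; a minimal sketch of rebuilding the same sampling defaults (the generate call is hypothetical usage):

```python
# Sketch: the same sampling defaults as the deleted generation_config.json.
from transformers import GenerationConfig

gen_cfg = GenerationConfig(
    bos_token_id=128000,
    eos_token_id=[128001, 128008, 128009],
    do_sample=True,   # sample with temperature/top-p instead of greedy decoding
    temperature=0.6,
    top_p=0.9,
)
# outputs = model.generate(**inputs, generation_config=gen_cfg)  # hypothetical usage
```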
 
Muyan-TTS/model-00001-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:eca8bc3e195817e3f470a839aa8cbcc4520da6e245700f51a86279ea90946e5a
- size 4972483768
 
Muyan-TTS/model-00002-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:1466b06fdbd6a9741046987bf5aef5cb94f85646085e48bbdcc267a93640ff9f
- size 2254419664
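The two .safetensors entries above are Git LFS pointer files: they record only the object's sha256 and byte size, not the weights themselves. A sketch (assuming the shard has been downloaded locally) of checking a file against its pointer:

```python
# Sketch: verify a downloaded file against the sha256 oid and size from an
# LFS pointer. The values below are taken from the pointer for shard 1 of 2.
import hashlib
import os

def verify_lfs_object(path: str, expected_sha256: str, expected_size: int) -> bool:
    if os.path.getsize(path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_sha256

print(verify_lfs_object(
    "model-00001-of-00002.safetensors",
    "eca8bc3e195817e3f470a839aa8cbcc4520da6e245700f51a86279ea90946e5a",
    4972483768,
))
```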
 
Muyan-TTS/model.safetensors.index.json DELETED
@@ -1,262 +0,0 @@
1
- {
2
- "metadata": {
3
- "total_size": 7226873856
4
- },
5
- "weight_map": {
6
- "lm_head.weight": "model-00002-of-00002.safetensors",
7
- "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
8
- "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
9
- "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
10
- "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
11
- "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
12
- "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
13
- "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
14
- "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
15
- "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
16
- "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
17
- "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
18
- "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
19
- "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
20
- "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
21
- "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
22
- "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
23
- "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
24
- "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
25
- "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
26
- "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
27
- "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
28
- "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
29
- "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
30
- "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
31
- "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
32
- "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
33
- "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
34
- "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
35
- "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
36
- "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
37
- "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
38
- "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
39
- "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
40
- "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
41
- "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
42
- "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
43
- "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
44
- "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
45
- "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
46
- "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
47
- "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
48
- "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
49
- "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
50
- "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
51
- "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
52
- "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
53
- "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
54
- "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
55
- "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
56
- "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
57
- "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
58
- "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
59
- "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
60
- "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
61
- "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
62
- "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
63
- "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
64
- "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
65
- "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
66
- "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
67
- "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
68
- "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
69
- "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
70
- "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
71
- "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
72
- "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
73
- "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
74
- "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
75
- "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
76
- "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
77
- "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
78
- "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
79
- "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
80
- "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
81
- "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
82
- "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
83
- "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
84
- "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
85
- "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
86
- "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
87
- "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
88
- "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
89
- "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
90
- "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
91
- "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
92
- "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
93
- "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
94
- "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
95
- "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
96
- "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
97
- "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
98
- "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
99
- "model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
100
- "model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
101
- "model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
102
- "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
103
- "model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
104
- "model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
105
- "model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
106
- "model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
107
- "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
108
- "model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
109
- "model.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
110
- "model.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
111
- "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
112
- "model.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
113
- "model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
114
- "model.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
115
- "model.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
116
- "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
117
- "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
118
- "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
119
- "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
120
- "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
121
- "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
122
- "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
123
- "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
124
- "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
125
- "model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors",
126
- "model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
127
- "model.layers.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
128
- "model.layers.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
129
- "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
130
- "model.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
131
- "model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
132
- "model.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
133
- "model.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
134
- "model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
135
- "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
136
- "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
137
- "model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
138
- "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
139
- "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
140
- "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
141
- "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
142
- "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
143
- "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
144
- "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
145
- "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
146
- "model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
147
- "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
148
- "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
149
- "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
150
- "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
151
- "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
152
- "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
153
- "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
154
- "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
155
- "model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
156
- "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
157
- "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
158
- "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
159
- "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
160
- "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
161
- "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
162
- "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
163
- "model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
164
- "model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
165
- "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
166
- "model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
167
- "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
168
- "model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
169
- "model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
170
- "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
171
- "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
172
- "model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
173
- "model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
174
- "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
175
- "model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
176
- "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
177
- "model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
178
- "model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
179
- "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
180
- "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
181
- "model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
182
- "model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
183
- "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
184
- "model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
185
- "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
186
- "model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
187
- "model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
188
- "model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
189
- "model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
190
- "model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
191
- "model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
192
- "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
193
- "model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
194
- "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
195
- "model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
196
- "model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
197
- "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
198
- "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
199
- "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
200
- "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
201
- "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
202
- "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
203
- "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
204
- "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
205
- "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
206
- "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
207
- "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
208
- "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
209
- "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
210
- "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
211
- "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
212
- "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
213
- "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
214
- "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
215
- "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
216
- "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
217
- "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
218
- "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
219
- "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
220
- "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
221
- "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
222
- "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
223
- "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
224
- "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
225
- "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
226
- "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
227
- "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
228
- "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
229
- "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
230
- "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
231
- "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
232
- "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
233
- "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
234
- "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
235
- "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
236
- "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
237
- "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
238
- "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
239
- "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
240
- "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
241
- "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
242
- "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
243
- "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
244
- "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
245
- "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
246
- "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
247
- "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
248
- "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
249
- "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
250
- "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
251
- "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
252
- "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
253
- "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
254
- "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
255
- "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
256
- "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
257
- "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
258
- "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
259
- "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
260
- "model.norm.weight": "model-00002-of-00002.safetensors"
261
- }
262
- }
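The index above is the standard sharded-checkpoint index: metadata.total_size gives the full checkpoint size and weight_map routes each tensor name to one of the two shards. A sketch (loading code is illustrative, not from the repo) of resolving one tensor:

```python
# Sketch: look up which shard holds a tensor, then read it with safetensors.
import json

from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.20.mlp.down_proj.weight"
shard_file = index["weight_map"][name]      # "model-00002-of-00002.safetensors"
with safe_open(shard_file, framework="pt") as f:
    tensor = f.get_tensor(name)
print(tensor.shape)
```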
 
Muyan-TTS/sovits.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:a926f836bbee2eb064b5249313d0ee79c71ce053f2e32db409e343dcdf4df9d0
- size 85007266
 
Muyan-TTS/special_tokens_map.json DELETED
@@ -1,1050 +0,0 @@
1
- {
2
- "additional_special_tokens": [
3
- "<|audio_token_0|>",
4
- "<|audio_token_1|>",
5
- "<|audio_token_2|>",
6
- "<|audio_token_3|>",
7
- "<|audio_token_4|>",
8
- "<|audio_token_5|>",
9
- "<|audio_token_6|>",
10
- "<|audio_token_7|>",
11
- "<|audio_token_8|>",
12
- "<|audio_token_9|>",
13
- "<|audio_token_10|>",
14
- "<|audio_token_11|>",
15
- "<|audio_token_12|>",
16
- "<|audio_token_13|>",
17
- "<|audio_token_14|>",
18
- "<|audio_token_15|>",
19
- "<|audio_token_16|>",
20
- "<|audio_token_17|>",
21
- "<|audio_token_18|>",
22
- "<|audio_token_19|>",
23
- "<|audio_token_20|>",
24
- "<|audio_token_21|>",
25
- "<|audio_token_22|>",
26
- "<|audio_token_23|>",
27
- "<|audio_token_24|>",
28
- "<|audio_token_25|>",
29
- "<|audio_token_26|>",
30
- "<|audio_token_27|>",
31
- "<|audio_token_28|>",
32
- "<|audio_token_29|>",
33
- "<|audio_token_30|>",
34
- "<|audio_token_31|>",
35
- "<|audio_token_32|>",
36
- "<|audio_token_33|>",
37
- "<|audio_token_34|>",
38
- "<|audio_token_35|>",
39
- "<|audio_token_36|>",
40
- "<|audio_token_37|>",
41
- "<|audio_token_38|>",
42
- "<|audio_token_39|>",
43
- "<|audio_token_40|>",
44
- "<|audio_token_41|>",
45
- "<|audio_token_42|>",
46
- "<|audio_token_43|>",
47
- "<|audio_token_44|>",
48
- "<|audio_token_45|>",
49
- "<|audio_token_46|>",
50
- "<|audio_token_47|>",
51
- "<|audio_token_48|>",
52
- "<|audio_token_49|>",
53
- "<|audio_token_50|>",
54
- "<|audio_token_51|>",
55
- "<|audio_token_52|>",
56
- "<|audio_token_53|>",
57
- "<|audio_token_54|>",
58
- "<|audio_token_55|>",
59
- "<|audio_token_56|>",
60
- "<|audio_token_57|>",
61
- "<|audio_token_58|>",
62
- "<|audio_token_59|>",
63
- "<|audio_token_60|>",
64
- "<|audio_token_61|>",
65
- "<|audio_token_62|>",
66
- "<|audio_token_63|>",
67
- "<|audio_token_64|>",
68
- "<|audio_token_65|>",
69
- "<|audio_token_66|>",
70
- "<|audio_token_67|>",
71
- "<|audio_token_68|>",
72
- "<|audio_token_69|>",
73
- "<|audio_token_70|>",
74
- "<|audio_token_71|>",
75
- "<|audio_token_72|>",
76
- "<|audio_token_73|>",
77
- "<|audio_token_74|>",
78
- "<|audio_token_75|>",
79
- "<|audio_token_76|>",
80
- "<|audio_token_77|>",
81
- "<|audio_token_78|>",
82
- "<|audio_token_79|>",
83
- "<|audio_token_80|>",
84
- "<|audio_token_81|>",
85
- "<|audio_token_82|>",
86
- "<|audio_token_83|>",
87
- "<|audio_token_84|>",
88
- "<|audio_token_85|>",
89
- "<|audio_token_86|>",
90
- "<|audio_token_87|>",
91
- "<|audio_token_88|>",
92
- "<|audio_token_89|>",
93
- "<|audio_token_90|>",
94
- "<|audio_token_91|>",
95
- "<|audio_token_92|>",
96
- "<|audio_token_93|>",
97
- "<|audio_token_94|>",
98
- "<|audio_token_95|>",
99
- "<|audio_token_96|>",
100
- "<|audio_token_97|>",
101
- "<|audio_token_98|>",
102
- "<|audio_token_99|>",
103
- "<|audio_token_100|>",
104
- "<|audio_token_101|>",
105
- "<|audio_token_102|>",
106
- "<|audio_token_103|>",
107
- "<|audio_token_104|>",
108
- "<|audio_token_105|>",
109
- "<|audio_token_106|>",
110
- "<|audio_token_107|>",
111
- "<|audio_token_108|>",
112
- "<|audio_token_109|>",
113
- "<|audio_token_110|>",
114
- "<|audio_token_111|>",
115
- "<|audio_token_112|>",
116
- "<|audio_token_113|>",
117
- "<|audio_token_114|>",
118
- "<|audio_token_115|>",
119
- "<|audio_token_116|>",
120
- "<|audio_token_117|>",
121
- "<|audio_token_118|>",
122
- "<|audio_token_119|>",
123
- "<|audio_token_120|>",
124
- "<|audio_token_121|>",
125
- "<|audio_token_122|>",
126
- "<|audio_token_123|>",
127
- "<|audio_token_124|>",
128
- "<|audio_token_125|>",
129
- "<|audio_token_126|>",
130
- "<|audio_token_127|>",
131
- "<|audio_token_128|>",
132
- "<|audio_token_129|>",
133
- "<|audio_token_130|>",
134
- "<|audio_token_131|>",
135
- "<|audio_token_132|>",
136
- "<|audio_token_133|>",
137
- "<|audio_token_134|>",
138
- "<|audio_token_135|>",
139
- "<|audio_token_136|>",
140
- "<|audio_token_137|>",
141
- "<|audio_token_138|>",
142
- "<|audio_token_139|>",
143
- "<|audio_token_140|>",
144
- "<|audio_token_141|>",
145
- "<|audio_token_142|>",
146
- "<|audio_token_143|>",
147
- "<|audio_token_144|>",
148
- "<|audio_token_145|>",
149
- "<|audio_token_146|>",
150
- "<|audio_token_147|>",
151
- "<|audio_token_148|>",
152
- "<|audio_token_149|>",
153
- "<|audio_token_150|>",
154
- "<|audio_token_151|>",
155
- "<|audio_token_152|>",
156
- "<|audio_token_153|>",
157
- "<|audio_token_154|>",
158
- "<|audio_token_155|>",
159
- "<|audio_token_156|>",
160
- "<|audio_token_157|>",
161
- "<|audio_token_158|>",
162
- "<|audio_token_159|>",
163
- "<|audio_token_160|>",
164
- "<|audio_token_161|>",
165
- "<|audio_token_162|>",
166
- "<|audio_token_163|>",
167
- "<|audio_token_164|>",
168
- "<|audio_token_165|>",
169
- "<|audio_token_166|>",
170
- "<|audio_token_167|>",
171
- "<|audio_token_168|>",
172
- "<|audio_token_169|>",
173
- "<|audio_token_170|>",
174
- "<|audio_token_171|>",
175
- "<|audio_token_172|>",
176
- "<|audio_token_173|>",
177
- "<|audio_token_174|>",
178
- "<|audio_token_175|>",
179
- "<|audio_token_176|>",
180
- "<|audio_token_177|>",
181
- "<|audio_token_178|>",
182
- "<|audio_token_179|>",
183
- "<|audio_token_180|>",
184
- "<|audio_token_181|>",
185
- "<|audio_token_182|>",
186
- "<|audio_token_183|>",
187
- "<|audio_token_184|>",
188
- "<|audio_token_185|>",
189
- "<|audio_token_186|>",
190
- "<|audio_token_187|>",
191
- "<|audio_token_188|>",
192
- "<|audio_token_189|>",
193
- "<|audio_token_190|>",
194
- "<|audio_token_191|>",
195
- "<|audio_token_192|>",
196
- "<|audio_token_193|>",
197
- "<|audio_token_194|>",
198
- "<|audio_token_195|>",
199
- "<|audio_token_196|>",
200
- "<|audio_token_197|>",
201
- "<|audio_token_198|>",
202
- "<|audio_token_199|>",
203
- "<|audio_token_200|>",
204
- "<|audio_token_201|>",
205
- "<|audio_token_202|>",
206
- "<|audio_token_203|>",
207
- "<|audio_token_204|>",
208
- "<|audio_token_205|>",
209
- "<|audio_token_206|>",
210
- "<|audio_token_207|>",
211
- "<|audio_token_208|>",
212
- "<|audio_token_209|>",
213
- "<|audio_token_210|>",
214
- "<|audio_token_211|>",
215
- "<|audio_token_212|>",
216
- "<|audio_token_213|>",
217
- "<|audio_token_214|>",
218
- "<|audio_token_215|>",
219
- "<|audio_token_216|>",
220
- "<|audio_token_217|>",
221
- "<|audio_token_218|>",
222
- "<|audio_token_219|>",
223
- "<|audio_token_220|>",
224
- "<|audio_token_221|>",
225
- "<|audio_token_222|>",
226
- "<|audio_token_223|>",
227
- "<|audio_token_224|>",
228
- "<|audio_token_225|>",
229
- "<|audio_token_226|>",
230
- "<|audio_token_227|>",
231
- "<|audio_token_228|>",
232
- "<|audio_token_229|>",
233
- "<|audio_token_230|>",
234
- "<|audio_token_231|>",
235
- "<|audio_token_232|>",
236
- "<|audio_token_233|>",
237
- "<|audio_token_234|>",
238
- "<|audio_token_235|>",
239
- "<|audio_token_236|>",
240
- "<|audio_token_237|>",
241
- "<|audio_token_238|>",
242
- "<|audio_token_239|>",
243
- "<|audio_token_240|>",
244
- "<|audio_token_241|>",
245
- "<|audio_token_242|>",
246
- "<|audio_token_243|>",
247
- "<|audio_token_244|>",
248
- "<|audio_token_245|>",
249
- "<|audio_token_246|>",
250
- "<|audio_token_247|>",
251
- "<|audio_token_248|>",
252
- "<|audio_token_249|>",
253
- "<|audio_token_250|>",
254
- "<|audio_token_251|>",
255
- "<|audio_token_252|>",
256
- "<|audio_token_253|>",
257
- "<|audio_token_254|>",
258
- "<|audio_token_255|>",
259
- "<|audio_token_256|>",
260
- "<|audio_token_257|>",
261
- "<|audio_token_258|>",
262
- "<|audio_token_259|>",
263
- "<|audio_token_260|>",
264
- "<|audio_token_261|>",
265
- "<|audio_token_262|>",
266
- "<|audio_token_263|>",
267
- "<|audio_token_264|>",
268
- "<|audio_token_265|>",
269
- "<|audio_token_266|>",
270
- "<|audio_token_267|>",
271
- "<|audio_token_268|>",
272
- "<|audio_token_269|>",
273
- "<|audio_token_270|>",
274
- "<|audio_token_271|>",
275
- "<|audio_token_272|>",
276
- "<|audio_token_273|>",
277
- "<|audio_token_274|>",
278
- "<|audio_token_275|>",
279
- "<|audio_token_276|>",
280
- "<|audio_token_277|>",
281
- "<|audio_token_278|>",
282
- "<|audio_token_279|>",
283
- "<|audio_token_280|>",
284
- "<|audio_token_281|>",
285
- "<|audio_token_282|>",
286
- "<|audio_token_283|>",
287
- "<|audio_token_284|>",
288
- "<|audio_token_285|>",
289
- "<|audio_token_286|>",
290
- "<|audio_token_287|>",
291
- "<|audio_token_288|>",
292
- "<|audio_token_289|>",
293
- "<|audio_token_290|>",
294
- "<|audio_token_291|>",
295
- "<|audio_token_292|>",
296
- "<|audio_token_293|>",
297
- "<|audio_token_294|>",
298
- "<|audio_token_295|>",
299
- "<|audio_token_296|>",
300
- "<|audio_token_297|>",
301
- "<|audio_token_298|>",
302
- "<|audio_token_299|>",
303
- "<|audio_token_300|>",
304
- "<|audio_token_301|>",
305
- "<|audio_token_302|>",
306
- "<|audio_token_303|>",
307
- "<|audio_token_304|>",
308
- "<|audio_token_305|>",
309
- "<|audio_token_306|>",
310
- "<|audio_token_307|>",
311
- "<|audio_token_308|>",
312
- "<|audio_token_309|>",
313
- "<|audio_token_310|>",
314
- "<|audio_token_311|>",
315
- "<|audio_token_312|>",
316
- "<|audio_token_313|>",
317
- "<|audio_token_314|>",
318
- "<|audio_token_315|>",
319
- "<|audio_token_316|>",
320
- "<|audio_token_317|>",
321
- "<|audio_token_318|>",
322
- "<|audio_token_319|>",
323
- "<|audio_token_320|>",
324
- "<|audio_token_321|>",
325
- "<|audio_token_322|>",
326
- "<|audio_token_323|>",
327
- "<|audio_token_324|>",
328
- "<|audio_token_325|>",
329
- "<|audio_token_326|>",
330
- "<|audio_token_327|>",
331
- "<|audio_token_328|>",
332
- "<|audio_token_329|>",
333
- "<|audio_token_330|>",
334
- "<|audio_token_331|>",
335
- "<|audio_token_332|>",
336
- "<|audio_token_333|>",
337
- "<|audio_token_334|>",
338
- "<|audio_token_335|>",
339
- "<|audio_token_336|>",
340
- "<|audio_token_337|>",
341
- "<|audio_token_338|>",
342
- "<|audio_token_339|>",
343
- "<|audio_token_340|>",
344
- "<|audio_token_341|>",
345
- "<|audio_token_342|>",
346
- "<|audio_token_343|>",
347
- "<|audio_token_344|>",
348
- "<|audio_token_345|>",
349
- "<|audio_token_346|>",
350
- "<|audio_token_347|>",
351
- "<|audio_token_348|>",
352
- "<|audio_token_349|>",
353
- "<|audio_token_350|>",
354
- "<|audio_token_351|>",
355
- "<|audio_token_352|>",
356
- "<|audio_token_353|>",
357
- "<|audio_token_354|>",
358
- "<|audio_token_355|>",
359
- "<|audio_token_356|>",
360
- "<|audio_token_357|>",
361
- "<|audio_token_358|>",
362
- "<|audio_token_359|>",
363
- "<|audio_token_360|>",
364
- "<|audio_token_361|>",
365
- "<|audio_token_362|>",
366
- "<|audio_token_363|>",
367
- "<|audio_token_364|>",
368
- "<|audio_token_365|>",
369
- "<|audio_token_366|>",
370
- "<|audio_token_367|>",
371
- "<|audio_token_368|>",
372
- "<|audio_token_369|>",
373
- "<|audio_token_370|>",
374
- "<|audio_token_371|>",
375
- "<|audio_token_372|>",
376
- "<|audio_token_373|>",
377
- "<|audio_token_374|>",
378
- "<|audio_token_375|>",
379
- "<|audio_token_376|>",
380
- "<|audio_token_377|>",
381
- "<|audio_token_378|>",
382
- "<|audio_token_379|>",
383
- "<|audio_token_380|>",
384
- "<|audio_token_381|>",
385
- "<|audio_token_382|>",
386
- "<|audio_token_383|>",
387
- "<|audio_token_384|>",
388
- "<|audio_token_385|>",
389
- "<|audio_token_386|>",
390
- "<|audio_token_387|>",
391
- "<|audio_token_388|>",
392
- "<|audio_token_389|>",
393
- "<|audio_token_390|>",
394
- "<|audio_token_391|>",
395
- "<|audio_token_392|>",
396
- "<|audio_token_393|>",
397
- "<|audio_token_394|>",
398
- "<|audio_token_395|>",
399
- "<|audio_token_396|>",
400
- "<|audio_token_397|>",
401
- "<|audio_token_398|>",
402
- "<|audio_token_399|>",
403
- "<|audio_token_400|>",
404
- "<|audio_token_401|>",
405
- "<|audio_token_402|>",
406
- "<|audio_token_403|>",
407
- "<|audio_token_404|>",
408
- "<|audio_token_405|>",
409
- "<|audio_token_406|>",
410
- "<|audio_token_407|>",
411
- "<|audio_token_408|>",
412
- "<|audio_token_409|>",
413
- "<|audio_token_410|>",
414
- "<|audio_token_411|>",
415
- "<|audio_token_412|>",
416
- "<|audio_token_413|>",
417
- "<|audio_token_414|>",
418
- "<|audio_token_415|>",
419
- "<|audio_token_416|>",
420
- "<|audio_token_417|>",
421
- "<|audio_token_418|>",
422
- "<|audio_token_419|>",
423
- "<|audio_token_420|>",
424
- "<|audio_token_421|>",
425
- "<|audio_token_422|>",
426
- "<|audio_token_423|>",
427
- "<|audio_token_424|>",
428
- "<|audio_token_425|>",
429
- "<|audio_token_426|>",
430
- "<|audio_token_427|>",
431
- "<|audio_token_428|>",
432
- "<|audio_token_429|>",
433
- "<|audio_token_430|>",
434
- "<|audio_token_431|>",
435
- "<|audio_token_432|>",
436
- "<|audio_token_433|>",
437
- "<|audio_token_434|>",
438
- "<|audio_token_435|>",
439
- "<|audio_token_436|>",
440
- "<|audio_token_437|>",
441
- "<|audio_token_438|>",
442
- "<|audio_token_439|>",
443
- "<|audio_token_440|>",
444
- "<|audio_token_441|>",
445
- "<|audio_token_442|>",
446
- "<|audio_token_443|>",
447
- "<|audio_token_444|>",
448
- "<|audio_token_445|>",
449
- "<|audio_token_446|>",
450
- "<|audio_token_447|>",
451
- "<|audio_token_448|>",
452
- "<|audio_token_449|>",
453
- "<|audio_token_450|>",
454
- "<|audio_token_451|>",
455
- "<|audio_token_452|>",
456
- "<|audio_token_453|>",
457
- "<|audio_token_454|>",
458
- "<|audio_token_455|>",
459
- "<|audio_token_456|>",
460
- "<|audio_token_457|>",
461
- "<|audio_token_458|>",
462
- "<|audio_token_459|>",
463
- "<|audio_token_460|>",
464
- "<|audio_token_461|>",
465
- "<|audio_token_462|>",
466
- "<|audio_token_463|>",
467
- "<|audio_token_464|>",
468
- "<|audio_token_465|>",
469
- "<|audio_token_466|>",
470
- "<|audio_token_467|>",
471
- "<|audio_token_468|>",
472
- "<|audio_token_469|>",
473
- "<|audio_token_470|>",
474
- "<|audio_token_471|>",
475
- "<|audio_token_472|>",
476
- "<|audio_token_473|>",
477
- "<|audio_token_474|>",
478
- "<|audio_token_475|>",
479
- "<|audio_token_476|>",
480
- "<|audio_token_477|>",
481
- "<|audio_token_478|>",
482
- "<|audio_token_479|>",
483
- "<|audio_token_480|>",
484
- "<|audio_token_481|>",
485
- "<|audio_token_482|>",
486
- "<|audio_token_483|>",
487
- "<|audio_token_484|>",
488
- "<|audio_token_485|>",
489
- "<|audio_token_486|>",
490
- "<|audio_token_487|>",
491
- "<|audio_token_488|>",
492
- "<|audio_token_489|>",
493
- "<|audio_token_490|>",
494
- "<|audio_token_491|>",
495
- "<|audio_token_492|>",
496
- "<|audio_token_493|>",
497
- "<|audio_token_494|>",
498
- "<|audio_token_495|>",
499
- "<|audio_token_496|>",
500
- "<|audio_token_497|>",
501
- "<|audio_token_498|>",
502
- "<|audio_token_499|>",
503
- "<|audio_token_500|>",
504
- "<|audio_token_501|>",
505
- "<|audio_token_502|>",
506
- "<|audio_token_503|>",
507
- "<|audio_token_504|>",
508
- "<|audio_token_505|>",
509
- "<|audio_token_506|>",
510
- "<|audio_token_507|>",
511
- "<|audio_token_508|>",
512
- "<|audio_token_509|>",
513
- "<|audio_token_510|>",
514
- "<|audio_token_511|>",
515
- "<|audio_token_512|>",
516
- "<|audio_token_513|>",
517
- "<|audio_token_514|>",
518
- "<|audio_token_515|>",
519
- "<|audio_token_516|>",
520
- "<|audio_token_517|>",
521
- "<|audio_token_518|>",
522
- "<|audio_token_519|>",
523
- "<|audio_token_520|>",
524
- "<|audio_token_521|>",
525
- "<|audio_token_522|>",
526
- "<|audio_token_523|>",
527
- "<|audio_token_524|>",
528
- "<|audio_token_525|>",
529
- "<|audio_token_526|>",
530
- "<|audio_token_527|>",
531
- "<|audio_token_528|>",
532
- "<|audio_token_529|>",
533
- "<|audio_token_530|>",
534
- "<|audio_token_531|>",
535
- "<|audio_token_532|>",
536
- "<|audio_token_533|>",
537
- "<|audio_token_534|>",
538
- "<|audio_token_535|>",
539
- "<|audio_token_536|>",
540
- "<|audio_token_537|>",
541
- "<|audio_token_538|>",
542
- "<|audio_token_539|>",
543
- "<|audio_token_540|>",
544
- "<|audio_token_541|>",
545
- "<|audio_token_542|>",
546
- "<|audio_token_543|>",
547
- "<|audio_token_544|>",
548
- "<|audio_token_545|>",
549
- "<|audio_token_546|>",
550
- "<|audio_token_547|>",
551
- "<|audio_token_548|>",
552
- "<|audio_token_549|>",
553
- "<|audio_token_550|>",
554
- "<|audio_token_551|>",
555
- "<|audio_token_552|>",
556
- "<|audio_token_553|>",
557
- "<|audio_token_554|>",
558
- "<|audio_token_555|>",
559
- "<|audio_token_556|>",
560
- "<|audio_token_557|>",
561
- "<|audio_token_558|>",
562
- "<|audio_token_559|>",
563
- "<|audio_token_560|>",
564
- "<|audio_token_561|>",
565
- "<|audio_token_562|>",
566
- "<|audio_token_563|>",
567
- "<|audio_token_564|>",
568
- "<|audio_token_565|>",
569
- "<|audio_token_566|>",
570
- "<|audio_token_567|>",
571
- "<|audio_token_568|>",
572
- "<|audio_token_569|>",
573
- "<|audio_token_570|>",
574
- "<|audio_token_571|>",
575
- "<|audio_token_572|>",
576
- "<|audio_token_573|>",
577
- "<|audio_token_574|>",
578
- "<|audio_token_575|>",
579
- "<|audio_token_576|>",
580
- "<|audio_token_577|>",
581
- "<|audio_token_578|>",
582
- "<|audio_token_579|>",
583
- "<|audio_token_580|>",
584
- "<|audio_token_581|>",
585
- "<|audio_token_582|>",
586
- "<|audio_token_583|>",
587
- "<|audio_token_584|>",
588
- "<|audio_token_585|>",
589
- "<|audio_token_586|>",
590
- "<|audio_token_587|>",
591
- "<|audio_token_588|>",
592
- "<|audio_token_589|>",
593
- "<|audio_token_590|>",
594
- "<|audio_token_591|>",
595
- "<|audio_token_592|>",
596
- "<|audio_token_593|>",
597
- "<|audio_token_594|>",
598
- "<|audio_token_595|>",
599
- "<|audio_token_596|>",
600
- "<|audio_token_597|>",
601
- "<|audio_token_598|>",
602
- "<|audio_token_599|>",
603
- "<|audio_token_600|>",
604
- "<|audio_token_601|>",
605
- "<|audio_token_602|>",
606
- "<|audio_token_603|>",
607
- "<|audio_token_604|>",
608
- "<|audio_token_605|>",
609
- "<|audio_token_606|>",
610
- "<|audio_token_607|>",
611
- "<|audio_token_608|>",
612
- "<|audio_token_609|>",
613
- "<|audio_token_610|>",
614
- "<|audio_token_611|>",
615
- "<|audio_token_612|>",
616
- "<|audio_token_613|>",
617
- "<|audio_token_614|>",
618
- "<|audio_token_615|>",
619
- "<|audio_token_616|>",
620
- "<|audio_token_617|>",
621
- "<|audio_token_618|>",
622
- "<|audio_token_619|>",
623
- "<|audio_token_620|>",
624
- "<|audio_token_621|>",
625
- "<|audio_token_622|>",
626
- "<|audio_token_623|>",
627
- "<|audio_token_624|>",
628
- "<|audio_token_625|>",
629
- "<|audio_token_626|>",
630
- "<|audio_token_627|>",
631
- "<|audio_token_628|>",
632
- "<|audio_token_629|>",
633
- "<|audio_token_630|>",
634
- "<|audio_token_631|>",
635
- "<|audio_token_632|>",
636
- "<|audio_token_633|>",
637
- "<|audio_token_634|>",
638
- "<|audio_token_635|>",
639
- "<|audio_token_636|>",
640
- "<|audio_token_637|>",
641
- "<|audio_token_638|>",
642
- "<|audio_token_639|>",
643
- "<|audio_token_640|>",
644
- "<|audio_token_641|>",
645
- "<|audio_token_642|>",
646
- "<|audio_token_643|>",
647
- "<|audio_token_644|>",
648
- "<|audio_token_645|>",
649
- "<|audio_token_646|>",
650
- "<|audio_token_647|>",
651
- "<|audio_token_648|>",
652
- "<|audio_token_649|>",
653
- "<|audio_token_650|>",
654
- "<|audio_token_651|>",
655
- "<|audio_token_652|>",
656
- "<|audio_token_653|>",
657
- "<|audio_token_654|>",
658
- "<|audio_token_655|>",
659
- "<|audio_token_656|>",
660
- "<|audio_token_657|>",
661
- "<|audio_token_658|>",
662
- "<|audio_token_659|>",
663
- "<|audio_token_660|>",
664
- "<|audio_token_661|>",
665
- "<|audio_token_662|>",
666
- "<|audio_token_663|>",
667
- "<|audio_token_664|>",
668
- "<|audio_token_665|>",
669
- "<|audio_token_666|>",
670
- "<|audio_token_667|>",
671
- "<|audio_token_668|>",
672
- "<|audio_token_669|>",
673
- "<|audio_token_670|>",
674
- "<|audio_token_671|>",
675
- "<|audio_token_672|>",
676
- "<|audio_token_673|>",
677
- "<|audio_token_674|>",
678
- "<|audio_token_675|>",
679
- "<|audio_token_676|>",
680
- "<|audio_token_677|>",
681
- "<|audio_token_678|>",
682
- "<|audio_token_679|>",
683
- "<|audio_token_680|>",
684
- "<|audio_token_681|>",
685
- "<|audio_token_682|>",
686
- "<|audio_token_683|>",
687
- "<|audio_token_684|>",
688
- "<|audio_token_685|>",
689
- "<|audio_token_686|>",
690
- "<|audio_token_687|>",
691
- "<|audio_token_688|>",
692
- "<|audio_token_689|>",
693
- "<|audio_token_690|>",
694
- "<|audio_token_691|>",
695
- "<|audio_token_692|>",
696
- "<|audio_token_693|>",
697
- "<|audio_token_694|>",
698
- "<|audio_token_695|>",
699
- "<|audio_token_696|>",
700
- "<|audio_token_697|>",
701
- "<|audio_token_698|>",
702
- "<|audio_token_699|>",
703
- "<|audio_token_700|>",
704
- "<|audio_token_701|>",
705
- "<|audio_token_702|>",
706
- "<|audio_token_703|>",
707
- "<|audio_token_704|>",
708
- "<|audio_token_705|>",
709
- "<|audio_token_706|>",
710
- "<|audio_token_707|>",
711
- "<|audio_token_708|>",
712
- "<|audio_token_709|>",
713
- "<|audio_token_710|>",
714
- "<|audio_token_711|>",
715
- "<|audio_token_712|>",
716
- "<|audio_token_713|>",
717
- "<|audio_token_714|>",
718
- "<|audio_token_715|>",
719
- "<|audio_token_716|>",
720
- "<|audio_token_717|>",
721
- "<|audio_token_718|>",
722
- "<|audio_token_719|>",
723
- "<|audio_token_720|>",
724
- "<|audio_token_721|>",
725
- "<|audio_token_722|>",
726
- "<|audio_token_723|>",
727
- "<|audio_token_724|>",
728
- "<|audio_token_725|>",
729
- "<|audio_token_726|>",
730
- "<|audio_token_727|>",
731
- "<|audio_token_728|>",
732
- "<|audio_token_729|>",
733
- "<|audio_token_730|>",
734
- "<|audio_token_731|>",
735
- "<|audio_token_732|>",
736
- "<|audio_token_733|>",
737
- "<|audio_token_734|>",
738
- "<|audio_token_735|>",
739
- "<|audio_token_736|>",
740
- "<|audio_token_737|>",
741
- "<|audio_token_738|>",
742
- "<|audio_token_739|>",
743
- "<|audio_token_740|>",
744
- "<|audio_token_741|>",
745
- "<|audio_token_742|>",
746
- "<|audio_token_743|>",
747
- "<|audio_token_744|>",
748
- "<|audio_token_745|>",
749
- "<|audio_token_746|>",
750
- "<|audio_token_747|>",
751
- "<|audio_token_748|>",
752
- "<|audio_token_749|>",
753
- "<|audio_token_750|>",
754
- "<|audio_token_751|>",
755
- "<|audio_token_752|>",
756
- "<|audio_token_753|>",
757
- "<|audio_token_754|>",
758
- "<|audio_token_755|>",
759
- "<|audio_token_756|>",
760
- "<|audio_token_757|>",
761
- "<|audio_token_758|>",
762
- "<|audio_token_759|>",
763
- "<|audio_token_760|>",
764
- "<|audio_token_761|>",
765
- "<|audio_token_762|>",
766
- "<|audio_token_763|>",
767
- "<|audio_token_764|>",
768
- "<|audio_token_765|>",
769
- "<|audio_token_766|>",
770
- "<|audio_token_767|>",
771
- "<|audio_token_768|>",
772
- "<|audio_token_769|>",
773
- "<|audio_token_770|>",
774
- "<|audio_token_771|>",
775
- "<|audio_token_772|>",
776
- "<|audio_token_773|>",
777
- "<|audio_token_774|>",
778
- "<|audio_token_775|>",
779
- "<|audio_token_776|>",
780
- "<|audio_token_777|>",
781
- "<|audio_token_778|>",
782
- "<|audio_token_779|>",
783
- "<|audio_token_780|>",
784
- "<|audio_token_781|>",
785
- "<|audio_token_782|>",
786
- "<|audio_token_783|>",
787
- "<|audio_token_784|>",
788
- "<|audio_token_785|>",
789
- "<|audio_token_786|>",
790
- "<|audio_token_787|>",
791
- "<|audio_token_788|>",
792
- "<|audio_token_789|>",
793
- "<|audio_token_790|>",
794
- "<|audio_token_791|>",
795
- "<|audio_token_792|>",
796
- "<|audio_token_793|>",
797
- "<|audio_token_794|>",
798
- "<|audio_token_795|>",
799
- "<|audio_token_796|>",
800
- "<|audio_token_797|>",
801
- "<|audio_token_798|>",
802
- "<|audio_token_799|>",
803
- "<|audio_token_800|>",
804
- "<|audio_token_801|>",
805
- "<|audio_token_802|>",
806
- "<|audio_token_803|>",
807
- "<|audio_token_804|>",
808
- "<|audio_token_805|>",
809
- "<|audio_token_806|>",
810
- "<|audio_token_807|>",
811
- "<|audio_token_808|>",
812
- "<|audio_token_809|>",
813
- "<|audio_token_810|>",
814
- "<|audio_token_811|>",
815
- "<|audio_token_812|>",
816
- "<|audio_token_813|>",
817
- "<|audio_token_814|>",
818
- "<|audio_token_815|>",
819
- "<|audio_token_816|>",
820
- "<|audio_token_817|>",
821
- "<|audio_token_818|>",
822
- "<|audio_token_819|>",
823
- "<|audio_token_820|>",
824
- "<|audio_token_821|>",
825
- "<|audio_token_822|>",
826
- "<|audio_token_823|>",
827
- "<|audio_token_824|>",
828
- "<|audio_token_825|>",
829
- "<|audio_token_826|>",
830
- "<|audio_token_827|>",
831
- "<|audio_token_828|>",
832
- "<|audio_token_829|>",
833
- "<|audio_token_830|>",
834
- "<|audio_token_831|>",
835
- "<|audio_token_832|>",
836
- "<|audio_token_833|>",
837
- "<|audio_token_834|>",
838
- "<|audio_token_835|>",
839
- "<|audio_token_836|>",
840
- "<|audio_token_837|>",
841
- "<|audio_token_838|>",
842
- "<|audio_token_839|>",
843
- "<|audio_token_840|>",
844
- "<|audio_token_841|>",
845
- "<|audio_token_842|>",
846
- "<|audio_token_843|>",
847
- "<|audio_token_844|>",
848
- "<|audio_token_845|>",
849
- "<|audio_token_846|>",
850
- "<|audio_token_847|>",
851
- "<|audio_token_848|>",
852
- "<|audio_token_849|>",
853
- "<|audio_token_850|>",
854
- "<|audio_token_851|>",
855
- "<|audio_token_852|>",
856
- "<|audio_token_853|>",
857
- "<|audio_token_854|>",
858
- "<|audio_token_855|>",
859
- "<|audio_token_856|>",
860
- "<|audio_token_857|>",
861
- "<|audio_token_858|>",
862
- "<|audio_token_859|>",
863
- "<|audio_token_860|>",
864
- "<|audio_token_861|>",
865
- "<|audio_token_862|>",
866
- "<|audio_token_863|>",
867
- "<|audio_token_864|>",
868
- "<|audio_token_865|>",
869
- "<|audio_token_866|>",
870
- "<|audio_token_867|>",
871
- "<|audio_token_868|>",
872
- "<|audio_token_869|>",
873
- "<|audio_token_870|>",
874
- "<|audio_token_871|>",
875
- "<|audio_token_872|>",
876
- "<|audio_token_873|>",
877
- "<|audio_token_874|>",
878
- "<|audio_token_875|>",
879
- "<|audio_token_876|>",
880
- "<|audio_token_877|>",
881
- "<|audio_token_878|>",
882
- "<|audio_token_879|>",
883
- "<|audio_token_880|>",
884
- "<|audio_token_881|>",
885
- "<|audio_token_882|>",
886
- "<|audio_token_883|>",
887
- "<|audio_token_884|>",
888
- "<|audio_token_885|>",
889
- "<|audio_token_886|>",
890
- "<|audio_token_887|>",
891
- "<|audio_token_888|>",
892
- "<|audio_token_889|>",
893
- "<|audio_token_890|>",
894
- "<|audio_token_891|>",
895
- "<|audio_token_892|>",
896
- "<|audio_token_893|>",
897
- "<|audio_token_894|>",
898
- "<|audio_token_895|>",
899
- "<|audio_token_896|>",
900
- "<|audio_token_897|>",
901
- "<|audio_token_898|>",
902
- "<|audio_token_899|>",
903
- "<|audio_token_900|>",
904
- "<|audio_token_901|>",
905
- "<|audio_token_902|>",
906
- "<|audio_token_903|>",
907
- "<|audio_token_904|>",
908
- "<|audio_token_905|>",
909
- "<|audio_token_906|>",
910
- "<|audio_token_907|>",
911
- "<|audio_token_908|>",
912
- "<|audio_token_909|>",
913
- "<|audio_token_910|>",
914
- "<|audio_token_911|>",
915
- "<|audio_token_912|>",
916
- "<|audio_token_913|>",
917
- "<|audio_token_914|>",
918
- "<|audio_token_915|>",
919
- "<|audio_token_916|>",
920
- "<|audio_token_917|>",
921
- "<|audio_token_918|>",
922
- "<|audio_token_919|>",
923
- "<|audio_token_920|>",
924
- "<|audio_token_921|>",
925
- "<|audio_token_922|>",
926
- "<|audio_token_923|>",
927
- "<|audio_token_924|>",
928
- "<|audio_token_925|>",
929
- "<|audio_token_926|>",
930
- "<|audio_token_927|>",
931
- "<|audio_token_928|>",
932
- "<|audio_token_929|>",
933
- "<|audio_token_930|>",
934
- "<|audio_token_931|>",
935
- "<|audio_token_932|>",
936
- "<|audio_token_933|>",
937
- "<|audio_token_934|>",
938
- "<|audio_token_935|>",
939
- "<|audio_token_936|>",
940
- "<|audio_token_937|>",
941
- "<|audio_token_938|>",
942
- "<|audio_token_939|>",
943
- "<|audio_token_940|>",
944
- "<|audio_token_941|>",
945
- "<|audio_token_942|>",
946
- "<|audio_token_943|>",
947
- "<|audio_token_944|>",
948
- "<|audio_token_945|>",
949
- "<|audio_token_946|>",
950
- "<|audio_token_947|>",
951
- "<|audio_token_948|>",
952
- "<|audio_token_949|>",
953
- "<|audio_token_950|>",
954
- "<|audio_token_951|>",
955
- "<|audio_token_952|>",
956
- "<|audio_token_953|>",
957
- "<|audio_token_954|>",
958
- "<|audio_token_955|>",
959
- "<|audio_token_956|>",
960
- "<|audio_token_957|>",
961
- "<|audio_token_958|>",
962
- "<|audio_token_959|>",
963
- "<|audio_token_960|>",
964
- "<|audio_token_961|>",
965
- "<|audio_token_962|>",
966
- "<|audio_token_963|>",
967
- "<|audio_token_964|>",
968
- "<|audio_token_965|>",
969
- "<|audio_token_966|>",
970
- "<|audio_token_967|>",
971
- "<|audio_token_968|>",
972
- "<|audio_token_969|>",
973
- "<|audio_token_970|>",
974
- "<|audio_token_971|>",
975
- "<|audio_token_972|>",
976
- "<|audio_token_973|>",
977
- "<|audio_token_974|>",
978
- "<|audio_token_975|>",
979
- "<|audio_token_976|>",
980
- "<|audio_token_977|>",
981
- "<|audio_token_978|>",
982
- "<|audio_token_979|>",
983
- "<|audio_token_980|>",
984
- "<|audio_token_981|>",
985
- "<|audio_token_982|>",
986
- "<|audio_token_983|>",
987
- "<|audio_token_984|>",
988
- "<|audio_token_985|>",
989
- "<|audio_token_986|>",
990
- "<|audio_token_987|>",
991
- "<|audio_token_988|>",
992
- "<|audio_token_989|>",
993
- "<|audio_token_990|>",
994
- "<|audio_token_991|>",
995
- "<|audio_token_992|>",
996
- "<|audio_token_993|>",
997
- "<|audio_token_994|>",
998
- "<|audio_token_995|>",
999
- "<|audio_token_996|>",
1000
- "<|audio_token_997|>",
1001
- "<|audio_token_998|>",
1002
- "<|audio_token_999|>",
1003
- "<|audio_token_1000|>",
1004
- "<|audio_token_1001|>",
1005
- "<|audio_token_1002|>",
1006
- "<|audio_token_1003|>",
1007
- "<|audio_token_1004|>",
1008
- "<|audio_token_1005|>",
1009
- "<|audio_token_1006|>",
1010
- "<|audio_token_1007|>",
1011
- "<|audio_token_1008|>",
1012
- "<|audio_token_1009|>",
1013
- "<|audio_token_1010|>",
1014
- "<|audio_token_1011|>",
1015
- "<|audio_token_1012|>",
1016
- "<|audio_token_1013|>",
1017
- "<|audio_token_1014|>",
1018
- "<|audio_token_1015|>",
1019
- "<|audio_token_1016|>",
1020
- "<|audio_token_1017|>",
1021
- "<|audio_token_1018|>",
1022
- "<|audio_token_1019|>",
1023
- "<|audio_token_1020|>",
1024
- "<|audio_token_1021|>",
1025
- "<|audio_token_1022|>",
1026
- "<|audio_token_1023|>",
1027
- "<|audio_token_end|>"
1028
- ],
1029
- "bos_token": {
1030
- "content": "<|begin_of_text|>",
1031
- "lstrip": false,
1032
- "normalized": false,
1033
- "rstrip": false,
1034
- "single_word": false
1035
- },
1036
- "eos_token": {
1037
- "content": "<|eot_id|>",
1038
- "lstrip": false,
1039
- "normalized": false,
1040
- "rstrip": false,
1041
- "single_word": false
1042
- },
1043
- "pad_token": {
1044
- "content": "<|eot_id|>",
1045
- "lstrip": false,
1046
- "normalized": false,
1047
- "rstrip": false,
1048
- "single_word": false
1049
- }
1050
- }
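The deleted special_tokens_map.json registers 1,024 audio codebook tokens (<|audio_token_0|> through <|audio_token_1023|>) plus <|audio_token_end|> as additional special tokens, alongside the usual Llama bos/eos/pad tokens. A sketch of how such tokens are typically attached to a tokenizer (the base tokenizer path is a placeholder, not from this repo):

```python
# Sketch: register the same audio special tokens on a tokenizer so that they
# are treated as single, unsplittable tokens. Base tokenizer path is a placeholder.
from transformers import AutoTokenizer

audio_tokens = [f"<|audio_token_{i}|>" for i in range(1024)] + ["<|audio_token_end|>"]

tokenizer = AutoTokenizer.from_pretrained("path/to/base-llama-tokenizer")
tokenizer.add_special_tokens({"additional_special_tokens": audio_tokens})
# The matching model would then need model.resize_token_embeddings(len(tokenizer)).
```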
 
Muyan-TTS/tokenizer.json DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:63073cea824329b6a7c8735dd147d86e5655bcef2b1dda93c034d1709560f565
- size 17410734
 
Muyan-TTS/tokenizer_config.json DELETED
The diff for this file is too large to render. See raw diff