Easy-to-use Speech Toolkit including Self-Supervised Learning model, SOTA/Streaming ASR with punctuation, Streaming TTS with text frontend, Speaker Verification System, End-to-End Speech Translation and Keyword Spotting. Won NAACL2022 Best Demo Award.
Exception in main training loop: Optimizer set error, embedding_2.w_0_moment1_0 should in state dict
Traceback (most recent call last):
File "C:\jisufenxiang\PaddleSpeech\paddlespeech\t2s\training\trainer.py", line 149, in run
update()
File "C:\jisufenxiang\PaddleSpeech\paddlespeech\t2s\training\updaters\standard_updater.py", line 110, in update
self.update_core(batch)
File "C:\jisufenxiang\PaddleSpeech\paddlespeech\t2s\models\fastspeech2\fastspeech2_updater.py", line 118, in update_core
optimizer.step()
File "", line 2, in step
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\fluid\dygraph\base.py", line 319, in impl
return func(*args, **kwargs)
File "", line 2, in step
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\fluid\wrapped_decorator.py", line 26, in impl
return wrapped_func(*args, **kwargs)
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\fluid\framework.py", line 534, in impl
return func(*args, **kwargs)
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\adam.py", line 550, in step
param_group_idx=0,
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\optimizer.py", line 1167, in _apply_optimize
params_grads, param_group_idx=param_group_idx
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\optimizer.py", line 928, in _create_optimization_pass
for p in parameters_and_grads
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\adam.py", line 337, in _create_accumulators
self._add_moments_pows(p)
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\adam.py", line 293, in _add_moments_pows
self._add_accumulator(self._moment1_acc_str, p, dtype=acc_dtype)
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\optimizer.py", line 756, in _add_accumulator
var_name
Trainer extensions will try to handle the extension. Then all extensions will finalize.
[2023-07-14 20:35:10] [ERROR] [app.py:1742] Exception on /train_canton_clone [POST]
Traceback (most recent call last):
File "C:\Users\leib.l\Miniconda3\lib\site-packages\flask\app.py", line 2525, in wsgi_app
response = self.full_dispatch_request()
File "C:\Users\leib.l\Miniconda3\lib\site-packages\flask\app.py", line 1822, in full_dispatch_request
rv = self.handle_user_exception(e)
File "C:\Users\leib.l\Miniconda3\lib\site-packages\flask\app.py", line 1820, in full_dispatch_request
rv = self.dispatch_request()
File "C:\Users\leib.l\Miniconda3\lib\site-packages\flask\app.py", line 1796, in dispatch_request
return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args)
File "C:/jisufenxiang/PaddleSpeech/examples/other/tts_finetune/tts3/main.py", line 54, in train
local.finetune.finetune_train(pretrained_model_dir,dump_dir,output_dir)
File "C:\jisufenxiang\PaddleSpeech\examples\other\tts_finetune\tts3\local\finetune.py", line 276, in finetune_train
train_sp(train_args, config)
File "C:\jisufenxiang\PaddleSpeech\examples\other\tts_finetune\tts3\local\finetune.py", line 204, in train_sp
trainer.run()
File "C:\jisufenxiang\PaddleSpeech\paddlespeech\t2s\training\trainer.py", line 198, in run
six.reraise(*exc_info)
File "C:\Users\leib.l\AppData\Roaming\Python\Python37\site-packages\six.py", line 703, in reraise
raise value
File "C:\jisufenxiang\PaddleSpeech\paddlespeech\t2s\training\trainer.py", line 149, in run
update()
File "C:\jisufenxiang\PaddleSpeech\paddlespeech\t2s\training\updaters\standard_updater.py", line 110, in update
self.update_core(batch)
File "C:\jisufenxiang\PaddleSpeech\paddlespeech\t2s\models\fastspeech2\fastspeech2_updater.py", line 118, in update_core
optimizer.step()
File "", line 2, in step
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\fluid\dygraph\base.py", line 319, in impl
return func(*args, **kwargs)
File "", line 2, in step
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\fluid\wrapped_decorator.py", line 26, in impl
return wrapped_func(*args, **kwargs)
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\fluid\framework.py", line 534, in impl
return func(*args, **kwargs)
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\adam.py", line 550, in step
param_group_idx=0,
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\optimizer.py", line 1167, in _apply_optimize
params_grads, param_group_idx=param_group_idx
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\optimizer.py", line 928, in _create_optimization_pass
for p in parameters_and_grads
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\adam.py", line 337, in _create_accumulators
self._add_moments_pows(p)
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\adam.py", line 293, in _add_moments_pows
self._add_accumulator(self._moment1_acc_str, p, dtype=acc_dtype)
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\optimizer.py", line 756, in _add_accumulator
var_name
AssertionError: Optimizer set error, embedding_2.w_0_moment1_0 should in state dict
Exception in main training loop: Optimizer set error, embedding_2.w_0_moment1_0 should in state dict Traceback (most recent call last): File "C:\jisufenxiang\PaddleSpeech\paddlespeech\t2s\training\trainer.py", line 149, in run update() File "C:\jisufenxiang\PaddleSpeech\paddlespeech\t2s\training\updaters\standard_updater.py", line 110, in update self.update_core(batch) File "C:\jisufenxiang\PaddleSpeech\paddlespeech\t2s\models\fastspeech2\fastspeech2_updater.py", line 118, in update_core optimizer.step() File "", line 2, in step
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\fluid\dygraph\base.py", line 319, in impl
return func(*args, **kwargs)
File "", line 2, in step
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\fluid\wrapped_decorator.py", line 26, in impl
return wrapped_func(*args, **kwargs)
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\fluid\framework.py", line 534, in impl
return func(*args, **kwargs)
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\adam.py", line 550, in step
param_group_idx=0,
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\optimizer.py", line 1167, in _apply_optimize
params_grads, param_group_idx=param_group_idx
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\optimizer.py", line 928, in _create_optimization_pass
for p in parameters_and_grads
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\adam.py", line 337, in _create_accumulators
self._add_moments_pows(p)
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\adam.py", line 293, in _add_moments_pows
self._add_accumulator(self._moment1_acc_str, p, dtype=acc_dtype)
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\optimizer.py", line 756, in _add_accumulator
var_name
Trainer extensions will try to handle the extension. Then all extensions will finalize.
[2023-07-14 20:35:10] [ERROR] [app.py:1742] Exception on /train_canton_clone [POST]
Traceback (most recent call last):
File "C:\Users\leib.l\Miniconda3\lib\site-packages\flask\app.py", line 2525, in wsgi_app
response = self.full_dispatch_request()
File "C:\Users\leib.l\Miniconda3\lib\site-packages\flask\app.py", line 1822, in full_dispatch_request
rv = self.handle_user_exception(e)
File "C:\Users\leib.l\Miniconda3\lib\site-packages\flask\app.py", line 1820, in full_dispatch_request
rv = self.dispatch_request()
File "C:\Users\leib.l\Miniconda3\lib\site-packages\flask\app.py", line 1796, in dispatch_request
return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args)
File "C:/jisufenxiang/PaddleSpeech/examples/other/tts_finetune/tts3/main.py", line 54, in train
local.finetune.finetune_train(pretrained_model_dir,dump_dir,output_dir)
File "C:\jisufenxiang\PaddleSpeech\examples\other\tts_finetune\tts3\local\finetune.py", line 276, in finetune_train
train_sp(train_args, config)
File "C:\jisufenxiang\PaddleSpeech\examples\other\tts_finetune\tts3\local\finetune.py", line 204, in train_sp
trainer.run()
File "C:\jisufenxiang\PaddleSpeech\paddlespeech\t2s\training\trainer.py", line 198, in run
six.reraise(*exc_info)
File "C:\Users\leib.l\AppData\Roaming\Python\Python37\site-packages\six.py", line 703, in reraise
raise value
File "C:\jisufenxiang\PaddleSpeech\paddlespeech\t2s\training\trainer.py", line 149, in run
update()
File "C:\jisufenxiang\PaddleSpeech\paddlespeech\t2s\training\updaters\standard_updater.py", line 110, in update
self.update_core(batch)
File "C:\jisufenxiang\PaddleSpeech\paddlespeech\t2s\models\fastspeech2\fastspeech2_updater.py", line 118, in update_core
optimizer.step()
File "", line 2, in step
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\fluid\dygraph\base.py", line 319, in impl
return func(*args, **kwargs)
File "", line 2, in step
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\fluid\wrapped_decorator.py", line 26, in impl
return wrapped_func(*args, **kwargs)
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\fluid\framework.py", line 534, in impl
return func(*args, **kwargs)
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\adam.py", line 550, in step
param_group_idx=0,
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\optimizer.py", line 1167, in _apply_optimize
params_grads, param_group_idx=param_group_idx
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\optimizer.py", line 928, in _create_optimization_pass
for p in parameters_and_grads
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\adam.py", line 337, in _create_accumulators
self._add_moments_pows(p)
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\adam.py", line 293, in _add_moments_pows
self._add_accumulator(self._moment1_acc_str, p, dtype=acc_dtype)
File "C:\Users\leib.l\Miniconda3\lib\site-packages\paddle\optimizer\optimizer.py", line 756, in _add_accumulator
var_name
AssertionError: Optimizer set error, embedding_2.w_0_moment1_0 should in state dict