I am trying to generate alignments in synthesis mode. To get accurate alignments, I tried to run synthesis with the input text and the ground-truth mel. But when I run synthesis, a strange error occurs.
is_training and is_evaluating are both set to False, so the model runs into TacoTestHelper.
Here is my teacher-forcing helper:
```python
import numpy as np
import tensorflow as tf
from tensorflow.contrib.seq2seq import Helper

# _go_frames (defined elsewhere in helpers.py) returns all-zero <GO> frames
# of shape [batch_size, output_dim].

class TacoTestHelper(Helper):
    def __init__(self, batch_size, targets, target_lengths, hparams):
        with tf.name_scope('TacoTestHelper'):
            self._batch_size = batch_size
            self._output_dim = hparams.num_mels
            self._reduction_factor = hparams.outputs_per_step
            self.stop_at_any = hparams.stop_at_any

            # Keep only the last frame of every reduction group, so the time
            # axis of self._targets has length T // r.
            r = self._reduction_factor
            self._targets = targets[:, r - 1::r, :]
            self._lengths = target_lengths  # tf.tile([tf.shape(self._targets)[1]], [self._batch_size])

    @property
    def batch_size(self):
        return self._batch_size

    @property
    def token_output_size(self):
        return self._reduction_factor

    @property
    def sample_ids_shape(self):
        return tf.TensorShape([])

    @property
    def sample_ids_dtype(self):
        return np.int32

    def initialize(self, name=None):
        return (tf.tile([False], [self._batch_size]), _go_frames(self._batch_size, self._output_dim))

    def sample(self, time, outputs, state, name=None):
        return tf.tile([0], [self._batch_size])  # Return all 0; we ignore them

    def next_inputs(self, time, outputs, state, sample_ids, stop_token_prediction, name=None):
        '''Stop on EOS. Otherwise, pass the last output as the next input and pass through state.'''
        with tf.name_scope('TacoTestHelper'):
            # A sequence is finished once the decoder has produced target_lengths frames
            # (the original stop-token criterion is kept below for reference).
            # finished = tf.cast(tf.round(stop_token_prediction), tf.bool)
            finished = (time + 1 >= self._lengths)

            # Since we are predicting r frames at each step, two modes are possible:
            #   - Stop when the model outputs p > 0.5 for any of the r frames (Recommended)
            #   - Stop when the model outputs p > 0.5 for all r frames (Safer)
            # Note:
            #   With enough training steps, the model should learn to predict when to stop,
            #   and stop_at_any = True is recommended. If the model has not yet learned to
            #   stop correctly (stops too soon), the safer option can be used to get a
            #   correct synthesis.
            if self.stop_at_any:
                finished = tf.reduce_any(tf.reduce_all(finished, axis=0))  # Recommended
            else:
                finished = tf.reduce_all(tf.reduce_all(finished, axis=0))  # Safer option

            # Feed the ground-truth frame as the next input instead of the last
            # predicted frame. outputs is [N, output_dim * r].
            # next_inputs = outputs[:, -self._output_dim:]
            next_inputs = self._targets[:, time, :]
            next_state = state
            return (finished, next_inputs, next_state)
```
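Just to show what the helper does to the targets: with a reduction factor r, the slicing keeps every r-th frame, so the time axis of `self._targets` is only T // r long, and the decoder step `time` has to stay below that. A small NumPy sketch with made-up shapes:

```python
import numpy as np

r = 2                                 # hparams.outputs_per_step
targets = np.zeros((1, 100, 80))      # [batch, T, num_mels], e.g. T = 100 frames
sliced = targets[:, r - 1::r, :]      # keep every r-th frame
print(sliced.shape)                   # -> (1, 50, 80)

# next_inputs = self._targets[:, time, :] indexes that second dimension with the
# decoder step `time`, so it is only valid while time < T // r.
```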
inputs, input_lengths, mel_targets and target_lengths are all fed into the graph restored from the saved checkpoint.
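For reference, the feed in `synthesizer.eval` looks roughly like this (a simplified sketch; the placeholder names here are illustrative, only the `session.run` line matches my actual code):

```python
import numpy as np

# Hypothetical placeholder names for illustration only.
feed_dict = {
    self.inputs: [np.asarray(text_sequence, dtype=np.int32)],
    self.input_lengths: np.asarray([len(text_sequence)], dtype=np.int32),
    self.mel_targets: [mel_truth],                                    # ground-truth mel, [T, num_mels]
    self.target_lengths: np.asarray([mel_truth.shape[0]], dtype=np.int32),
}
mels, alignments, stop_tokens = self.session.run(
    [self.mel_outputs, self.alignments, self.stop_token_prediction],
    feed_dict=feed_dict)
```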
The error occurs on the line `next_inputs = self._targets[:, time, :]`, where I use mel_targets as the targets.
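For context, the same kind of error can be reproduced with a minimal graph when the time dimension of the fed mel is empty. This is only a sketch of the error type, not necessarily what happens in my graph:

```python
import numpy as np
import tensorflow as tf  # TF 1.x, as in the traceback

# Hypothetical minimal reproduction: slicing [:, 0, :] on a tensor whose
# time dimension turns out to be 0 at runtime.
targets = tf.placeholder(tf.float32, shape=[None, None, 80], name='mel_targets')
first_frame = targets[:, 0, :]  # same kind of strided_slice as in next_inputs

with tf.Session() as sess:
    # Feeding a mel with an empty time axis raises:
    # "Invalid argument: slice index 0 of dimension 1 out of bounds."
    sess.run(first_frame, feed_dict={targets: np.zeros((1, 0, 80), np.float32)})
```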
Error message:
```
2020-04-30 10:13:08.722813: I tensorflow/stream_executor/platform/default/dso_loader.cc:42] Successfully opened dynamic library libcublas.so.10.0
2020-04-30 10:13:09.003546: I tensorflow/stream_executor/platform/default/dso_loader.cc:42] Successfully opened dynamic library libcudnn.so.7
2020-04-30 10:13:10.096852: W tensorflow/core/framework/op_kernel.cc:1502] OP_REQUIRES failed at strided_slice_op.cc:107 : Invalid argument: slice index 0 of dimension 1 out of bounds.
Traceback (most recent call last):
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/python/client/session.py", line 1356, in _do_call
return fn(*args)
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/python/client/session.py", line 1341, in _run_fn
options, feed_dict, fetch_list, target_list, run_metadata)
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/python/client/session.py", line 1429, in _call_tf_sessionrun
run_metadata)
tensorflow.python.framework.errors_impl.InvalidArgumentError: 2 root error(s) found.
(0) Invalid argument: slice index 0 of dimension 1 out of bounds.
[[{{node Tacotron_model/inference/decoder/while/CustomDecoderStep/TacoTestHelper/strided_slice}}]]
[[Tacotron_model/inference/transpose/_277]]
(1) Invalid argument: slice index 0 of dimension 1 out of bounds.
[[{{node Tacotron_model/inference/decoder/while/CustomDecoderStep/TacoTestHelper/strided_slice}}]]
0 successful operations.
0 derived errors ignored.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "eval.py", line 94, in
main()
File "eval.py", line 91, in main
run_preprocess(args, modified_hp)
File "eval.py", line 49, in run_preprocess
alignments, mel_prediction = synthesizer.eval(txt, mel_truth, [stop_token_target])
File "/home/gang.he/tts_codes/Tacotron-2/tacotron/synthesizer.py", line 100, in eval
mels, alignments, stop_tokens = self.session.run([self.mel_outputs, self.alignments, self.stop_token_prediction], feed_dict=feed_dict)
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/python/client/session.py", line 950, in run
run_metadata_ptr)
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/python/client/session.py", line 1173, in _run
feed_dict_tensor, options, run_metadata)
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/python/client/session.py", line 1350, in _do_run
run_metadata)
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/python/client/session.py", line 1370, in _do_call
raise type(e)(node_def, op, message)
tensorflow.python.framework.errors_impl.InvalidArgumentError: 2 root error(s) found.
(0) Invalid argument: slice index 0 of dimension 1 out of bounds.
[[node Tacotron_model/inference/decoder/while/CustomDecoderStep/TacoTestHelper/strided_slice (defined at /home/gang.he/tts_codes/Tacotron-2/tacotron/models/helpers.py:63) ]]
[[Tacotron_model/inference/transpose/_277]]
(1) Invalid argument: slice index 0 of dimension 1 out of bounds.
[[node Tacotron_model/inference/decoder/while/CustomDecoderStep/TacoTestHelper/strided_slice (defined at /home/gang.he/tts_codes/Tacotron-2/tacotron/models/helpers.py:63) ]]
0 successful operations.
0 derived errors ignored.
Original stack trace for 'Tacotron_model/inference/decoder/while/CustomDecoderStep/TacoTestHelper/strided_slice':
File "eval.py", line 94, in
main()
File "eval.py", line 91, in main
run_preprocess(args, modified_hp)
File "eval.py", line 36, in run_preprocess
synthesizer = get_synthesizer(args.checkpoint, hparams=hp)
File "eval.py", line 18, in get_synthesizer
synthesizer.load(checkpoint_path, hparams)
File "/home/gang.he/tts_codes/Tacotron-2/tacotron/synthesizer.py", line 34, in load
is_evaluating=False, split_infos=split_infos)
File "/home/gang.he/tts_codes/Tacotron-2/tacotron/models/tacotron.py", line 172, in initialize
swap_memory=hp.tacotron_swap_with_cpu)
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/decoder.py", line 455, in dynamic_decode
swap_memory=swap_memory)
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/python/ops/control_flow_ops.py", line 3501, in while_loop
return_same_structure)
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/python/ops/control_flow_ops.py", line 3012, in BuildLoop
pred, body, original_loop_vars, loop_vars, shape_invariants)
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/python/ops/control_flow_ops.py", line 2937, in _BuildLoop
body_result = body(packed_vars_for_body)
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/python/ops/control_flow_ops.py", line 3456, in
body = lambda i, lv: (i + 1, orig_body(lv))
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/contrib/seq2seq/python/ops/decoder.py", line 398, in body
decoder_finished) = decoder.step(time, inputs, state)
File "/home/gang.he/tts_codes/Tacotron-2/tacotron/models/custom_decoder.py", line 131, in step
stop_token_prediction=stop_token)
File "/home/gang.he/tts_codes/Tacotron-2/tacotron/models/helpers.py", line 63, in next_inputs
next_inputs = self._targets[:, time, :] #outputs[:, -self._output_dim:]#self._targets[:, time, :]
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/python/ops/array_ops.py", line 680, in _slice_helper
name=name)
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/python/ops/array_ops.py", line 846, in strided_slice
shrink_axis_mask=shrink_axis_mask)
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/python/ops/gen_array_ops.py", line 9989, in strided_slice
shrink_axis_mask=shrink_axis_mask, name=name)
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/python/framework/op_def_library.py", line 788, in _apply_op_helper
op_def=op_def)
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/python/util/deprecation.py", line 507, in new_func
return func(*args, **kwargs)
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 3616, in create_op
op_def=op_def)
File "/home/gang.he/anaconda3/lib/python3.7/site-packages/tensorflow/python/framework/ops.py", line 2005, in init
self._traceback = tf_stack.extract_stack()
```