renmada / t5-pegasus-pytorch


Following that example, it errors out immediately #17

Closed: yang-collect closed this issue 3 years ago

yang-collect commented 3 years ago

ValueError: You have to specify either decoder_inputs or decoder_inputs_embeds

```
ValueError                                Traceback (most recent call last)

in
      4     decoder_start_token_id=tokenizer.cls_token_id,
      5     eos_token_id=tokenizer.sep_token_id,
----> 6     max_length=30).numpy()[0]
      7 print(''.join(tokenizer.decode(output[1:])).replace(' ', ''))

~\Miniconda3\lib\site-packages\torch\autograd\grad_mode.py in decorate_context(*args, **kwargs)
     25     def decorate_context(*args, **kwargs):
     26         with self.__class__():
---> 27             return func(*args, **kwargs)
     28         return cast(F, decorate_context)
     29

~\Miniconda3\lib\site-packages\transformers\generation_utils.py in generate(self, input_ids, max_length, min_length, do_sample, early_stopping, num_beams, temperature, top_k, top_p, repetition_penalty, bad_words_ids, bos_token_id, pad_token_id, eos_token_id, length_penalty, no_repeat_ngram_size, encoder_no_repeat_ngram_size, num_return_sequences, max_time, decoder_start_token_id, use_cache, num_beam_groups, diversity_penalty, prefix_allowed_tokens_fn, output_attentions, output_hidden_states, output_scores, return_dict_in_generate, forced_bos_token_id, forced_eos_token_id, remove_invalid_values, synced_gpus, **model_kwargs)
    977                 return_dict_in_generate=return_dict_in_generate,
    978                 synced_gpus=synced_gpus,
--> 979                 **model_kwargs,
    980             )
    981

~\Miniconda3\lib\site-packages\transformers\generation_utils.py in greedy_search(self, input_ids, logits_processor, stopping_criteria, max_length, pad_token_id, eos_token_id, output_attentions, output_hidden_states, output_scores, return_dict_in_generate, synced_gpus, **model_kwargs)
   1275                 return_dict=True,
   1276                 output_attentions=output_attentions,
-> 1277                 output_hidden_states=output_hidden_states,
   1278             )
   1279

~\Miniconda3\lib\site-packages\torch\nn\modules\module.py in _call_impl(self, *input, **kwargs)
    887             result = self._slow_forward(*input, **kwargs)
    888         else:
--> 889             result = self.forward(*input, **kwargs)
    890         for hook in itertools.chain(
    891                 _global_forward_hooks.values(),

~\Miniconda3\lib\site-packages\transformers\models\t5\modeling_t5.py in forward(self, input_ids, attention_mask, decoder_input_ids, decoder_attention_mask, head_mask, decoder_head_mask, cross_attn_head_mask, encoder_outputs, past_key_values, inputs_embeds, decoder_inputs_embeds, use_cache, output_attentions, output_hidden_states, return_dict)
   1389             output_attentions=output_attentions,
   1390             output_hidden_states=output_hidden_states,
-> 1391             return_dict=return_dict,
   1392         )
   1393

~\Miniconda3\lib\site-packages\torch\nn\modules\module.py in _call_impl(self, *input, **kwargs)
    887             result = self._slow_forward(*input, **kwargs)
    888         else:
--> 889             result = self.forward(*input, **kwargs)
    890         for hook in itertools.chain(
    891                 _global_forward_hooks.values(),

~\Miniconda3\lib\site-packages\transformers\models\t5\modeling_t5.py in forward(self, input_ids, attention_mask, encoder_hidden_states, encoder_attention_mask, inputs_embeds, head_mask, cross_attn_head_mask, past_key_values, use_cache, output_attentions, output_hidden_states, return_dict)
    885         else:
    886             err_msg_prefix = "decoder_" if self.is_decoder else ""
--> 887             raise ValueError(f"You have to specify either {err_msg_prefix}inputs or {err_msg_prefix}inputs_embeds")
    888
    889         if inputs_embeds is None:

ValueError: You have to specify either decoder_inputs or decoder_inputs_embeds
```
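For context, this particular ValueError usually means `generate()` was called on the bare encoder-decoder class (`MT5Model`/`T5Model`), which has no LM head, so the decoder stack never receives `decoder_input_ids`. Below is a minimal sketch of the generation call using the conditional-generation class instead; the checkpoint path, tokenizer class, and sample text are assumptions for illustration, not the repo's exact example.

```python
import torch
from transformers import MT5ForConditionalGeneration, BertTokenizer

# Assumption: './t5_pegasus_torch' holds the converted t5-pegasus weights.
# t5-pegasus uses a BERT-style vocabulary, so a plain BertTokenizer is
# assumed here for self-containment.
tokenizer = BertTokenizer.from_pretrained('./t5_pegasus_torch')

# Load the *ForConditionalGeneration class, not the bare MT5Model:
# generate() can only build decoder_input_ids for a model with an LM head.
model = MT5ForConditionalGeneration.from_pretrained('./t5_pegasus_torch')

text = '蓝蓝的天上有一朵白白的云'  # hypothetical sample input
input_ids = tokenizer.encode(text, return_tensors='pt')

with torch.no_grad():
    output = model.generate(input_ids,
                            decoder_start_token_id=tokenizer.cls_token_id,
                            eos_token_id=tokenizer.sep_token_id,
                            max_length=30).numpy()[0]
# Drop the leading decoder start token and strip the spaces that
# BERT-style decoding inserts between Chinese characters.
print(''.join(tokenizer.decode(output[1:])).replace(' ', ''))
```

If the error persists even with the LM-head class, the installed transformers version is the next thing to check, since `generate()`'s keyword handling changed across 4.x releases.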