kusumlata123 opened this issue 2 years ago (status: Open)
I am using this code to call a BERT model and I am getting this error:

  File "/home/dr/Desktop/dali-md-master/biaffine_md.py", line 26, in add_model_specific_valuables
    self.bert_tokenizer = self.load_bert_vocab()
  File "/home/dr/Desktop/dali-md-master/biaffine_md.py", line 53, in load_bert_vocab
    vocab_info = bert_model(signature="tokenization_info", as_dict=True)
  File "/home/dr/anaconda3/envs/hcoref/lib/python2.7/site-packages/tensorflow_core/python/saved_model/load.py", line 438, in _call_attribute
    return instance.__call__(*args, **kwargs)
  File "/home/dr/anaconda3/envs/hcoref/lib/python2.7/site-packages/tensorflow_core/python/eager/def_function.py", line 449, in __call__
    self._initialize(args, kwds, add_initializers_to=initializer_map)
  File "/home/dr/anaconda3/envs/hcoref/lib/python2.7/site-packages/tensorflow_core/python/eager/def_function.py", line 392, in _initialize
    *args, **kwds))
  File "/home/dr/anaconda3/envs/hcoref/lib/python2.7/site-packages/tensorflow_core/python/eager/function.py", line 1847, in _get_concrete_function_internal_garbage_collected
    graph_function, _, _ = self._maybe_define_function(args, kwargs)
  File "/home/dr/anaconda3/envs/hcoref/lib/python2.7/site-packages/tensorflow_core/python/eager/function.py", line 2147, in _maybe_define_function
    graph_function = self._create_graph_function(args, kwargs)
  File "/home/dr/anaconda3/envs/hcoref/lib/python2.7/site-packages/tensorflow_core/python/eager/function.py", line 2038, in _create_graph_function
    capture_by_value=self._capture_by_value),
  File "/home/dr/anaconda3/envs/hcoref/lib/python2.7/site-packages/tensorflow_core/python/framework/func_graph.py", line 915, in func_graph_from_py_func
    func_outputs = python_func(*func_args, **func_kwargs)
  File "/home/dr/anaconda3/envs/hcoref/lib/python2.7/site-packages/tensorflow_core/python/eager/def_function.py", line 335, in wrapped_fn
    return weak_wrapped_fn().__wrapped__(*args, **kwds)
  File "/home/dr/anaconda3/envs/hcoref/lib/python2.7/site-packages/tensorflow_core/python/saved_model/function_deserialization.py", line 262, in restored_function_body
    "\n\n".join(signature_descriptions)))
ValueError: Could not find matching function to call loaded from the SavedModel.

Got:
  Positional arguments (2 total):
    * False
    * None
  Keyword arguments: {'as_dict': True, 'signature': 'tokenization_info'}
Expected these arguments to match one of the following 4 option(s):
Option 1:
  Positional arguments (3 total):
    * {u'input_word_ids': TensorSpec(shape=(?, ?), dtype=tf.int32, name=u'input_word_ids'), u'input_mask': TensorSpec(shape=(?, ?), dtype=tf.int32, name=u'input_mask'), u'input_type_ids': TensorSpec(shape=(?, ?), dtype=tf.int32, name=u'input_type_ids')}
    * False
    * None
  Keyword arguments: {}

Option 2:
  Positional arguments (3 total):
    * {u'input_word_ids': TensorSpec(shape=(?, ?), dtype=tf.int32, name=u'inputs/input_word_ids'), u'input_mask': TensorSpec(shape=(?, ?), dtype=tf.int32, name=u'inputs/input_mask'), u'input_type_ids': TensorSpec(shape=(?, ?), dtype=tf.int32, name=u'inputs/input_type_ids')}
    * False
    * None
  Keyword arguments: {}

Option 3:
  Positional arguments (3 total):
    * {u'input_word_ids': TensorSpec(shape=(?, ?), dtype=tf.int32, name=u'inputs/input_word_ids'), u'input_mask': TensorSpec(shape=(?, ?), dtype=tf.int32, name=u'inputs/input_mask'), u'input_type_ids': TensorSpec(shape=(?, ?), dtype=tf.int32, name=u'inputs/input_type_ids')}
    * True
    * None
  Keyword arguments: {}

Option 4:
  Positional arguments (3 total):
    * {u'input_word_ids': TensorSpec(shape=(?, ?), dtype=tf.int32, name=u'input_word_ids'), u'input_mask': TensorSpec(shape=(?, ?), dtype=tf.int32, name=u'input_mask'), u'input_type_ids': TensorSpec(shape=(?, ?), dtype=tf.int32, name=u'input_type_ids')}
    * True
    * None
  Keyword arguments: {}

Below is the code:
def load_bert_vocab(self):
    with tf.Graph().as_default():
        # bert_model = hub.Module(self.bert_url)
        bert_model = hub.load(self.bert_url)
        vocab_info = bert_model(signature="tokenization_info", as_dict=True)
        with tf.Session() as sess:
            vocab_file, do_lower_case = sess.run([vocab_info["vocab_file"], vocab_info["do_lower_case"]])
            return bert_tokenization.FullTokenizer(vocab_file=vocab_file, do_lower_case=do_lower_case)
Has anyone found a solution? Why is this happening?
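For reference, the four options listed in the error describe the only __call__ signatures stored in the SavedModel: a dict of the three int32 BERT input tensors, followed by a training flag and None, with no support for the signature= and as_dict= keywords that the old hub.Module API accepted. Below is a minimal sketch of a call that would match Option 1. It is only an illustration under assumptions: a TF 2.x-compatible BERT SavedModel running in eager mode, dummy all-zero inputs, and an example tfhub.dev handle standing in for self.bert_url (the real URL is not shown in the issue).

import tensorflow as tf
import tensorflow_hub as hub

# Example handle only -- the real self.bert_url used in biaffine_md.py is not shown.
bert_model = hub.load("https://tfhub.dev/tensorflow/bert_en_uncased_L-12_H-768_A-12/3")

# Dummy inputs with the shapes/dtypes the error message asks for; real code would
# fill these with token ids produced by the BERT tokenizer.
batch_size, seq_len = 2, 8
inputs = {
    "input_word_ids": tf.zeros([batch_size, seq_len], dtype=tf.int32),
    "input_mask": tf.ones([batch_size, seq_len], dtype=tf.int32),
    "input_type_ids": tf.zeros([batch_size, seq_len], dtype=tf.int32),
}

# Matches Option 1 from the error: (dict of three int32 tensors, training flag, None).
# For the official TF2 BERT SavedModels this returns the pooled and sequence outputs.
outputs = bert_model(inputs, False, None)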
Why did you send a picture? I need a solution for this error.
I am using this code to call the BERT model and I get the same error (the traceback, the "Got" arguments, and the four expected options are identical to the ones above):
def load_bert_vocab(self):
    with tf.Graph().as_default():
        bert_model = hub.Module(self.bert_url)
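In case it helps with debugging: the TF2-format BERT SavedModels do not expose a callable "tokenization_info" signature at all; for the official tensorflow/bert_* models the vocab file and the lowercasing flag are attributes of the restored object. A hedged sketch of what load_bert_vocab might look like under those assumptions (TF 2.x eager execution, one of the official TF2 BERT handles in self.bert_url, and the same bert_tokenization module the original code imports) is:

import tensorflow_hub as hub

def load_bert_vocab(self):
    # hub.KerasLayer wraps the same SavedModel that hub.load() returns.
    bert_layer = hub.KerasLayer(self.bert_url, trainable=False)
    # The vocab file and lowercasing flag are exposed as attributes of the
    # restored object rather than through a "tokenization_info" signature.
    vocab_file = bert_layer.resolved_object.vocab_file.asset_path.numpy()
    do_lower_case = bert_layer.resolved_object.do_lower_case.numpy()
    return bert_tokenization.FullTokenizer(vocab_file=vocab_file,
                                           do_lower_case=do_lower_case)

If hub.load() is kept instead, the same vocab_file and do_lower_case attributes should be available directly on the loaded object for these models.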