Open · tommy19970714 opened 3 years ago
@tommy19970714 - thanks for the succinct example. I can reproduce this issue. Here is the stack trace:
Converting Frontend ==> MIL Ops:  52%|█████████████████████████████████████                                  | 12/23 [00:00<00:00, 6845.05 ops/s]
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-1-0dc5b90d7ed5> in <module>
19 coreml_model = ct.convert(
20 trace_model,
---> 21 inputs=[ct.TensorType(name="x", shape=input.shape, dtype=types.float)]
22 )
~/opt/miniconda3/envs/test/lib/python3.7/site-packages/coremltools/converters/_converters_entry.py in convert(model, source, inputs, outputs, classifier_config, minimum_deployment_target, convert_to, **kwargs)
180 outputs=outputs,
181 classifier_config=classifier_config,
--> 182 **kwargs
183 )
184
~/opt/miniconda3/envs/test/lib/python3.7/site-packages/coremltools/converters/mil/converter.py in mil_convert(model, convert_from, convert_to, **kwargs)
127 """
128 proto = mil_convert_to_proto(model, convert_from, convert_to,
--> 129 ConverterRegistry, **kwargs)
130 if convert_to == 'mil':
131 return proto
~/opt/miniconda3/envs/test/lib/python3.7/site-packages/coremltools/converters/mil/converter.py in mil_convert_to_proto(model, convert_from, convert_to, converter_registry, **kwargs)
169 frontend_converter = frontend_converter_type()
170
--> 171 prog = frontend_converter(model, **kwargs)
172 common_pass(prog)
173
~/opt/miniconda3/envs/test/lib/python3.7/site-packages/coremltools/converters/mil/converter.py in __call__(self, *args, **kwargs)
83 from .frontend.torch import load
84
---> 85 return load(*args, **kwargs)
86
87
~/opt/miniconda3/envs/test/lib/python3.7/site-packages/coremltools/converters/mil/frontend/torch/load.py in load(model_spec, debug, **kwargs)
81 raise e
82 except Exception as e:
---> 83 raise e
84
85 return prog
~/opt/miniconda3/envs/test/lib/python3.7/site-packages/coremltools/converters/mil/frontend/torch/load.py in load(model_spec, debug, **kwargs)
71
72 try:
---> 73 prog = converter.convert()
74 except RuntimeError as e:
75 if debug and "convert function" in str(e):
~/opt/miniconda3/envs/test/lib/python3.7/site-packages/coremltools/converters/mil/frontend/torch/converter.py in convert(self)
225
226 # Add the rest of the operations
--> 227 convert_nodes(self.context, self.graph)
228
229 graph_outputs = [self.context[name] for name in self.graph.outputs]
~/opt/miniconda3/envs/test/lib/python3.7/site-packages/coremltools/converters/mil/frontend/torch/ops.py in convert_nodes(context, graph)
56 )
57 else:
---> 58 _add_op(context, node)
59
60 # We've generated all the outputs the graph needs, terminate conversion.
~/opt/miniconda3/envs/test/lib/python3.7/site-packages/coremltools/converters/mil/frontend/torch/ops.py in view(context, node)
854 shape = mb.concat(values=shape, axis=0)
855
--> 856 view = mb.reshape(x=x, shape=shape, name=node.name)
857 context.add(view)
858
~/opt/miniconda3/envs/test/lib/python3.7/site-packages/coremltools/converters/mil/mil/ops/registry.py in add_op(cls, **kwargs)
60 @classmethod
61 def add_op(cls, **kwargs):
---> 62 return cls._add_op(op_cls, **kwargs)
63
64 setattr(Builder, op_type, add_op)
~/opt/miniconda3/envs/test/lib/python3.7/site-packages/coremltools/converters/mil/mil/builder.py in _add_op(cls, op_cls, **kwargs)
172 op_name=kwargs["name"], before_op=before_op,
173 candidate_kv=kwargs))
--> 174 new_op = op_cls(**kwargs)
175
176 # Initialize optional input Vars if it wasn't in kwargs
~/opt/miniconda3/envs/test/lib/python3.7/site-packages/coremltools/converters/mil/mil/ops/defs/tensor_transformation.py in __init__(self, **kwargs)
190
191 def __init__(self, **kwargs):
--> 192 super(reshape, self).__init__(**kwargs)
193
194 def type_inference(self):
~/opt/miniconda3/envs/test/lib/python3.7/site-packages/coremltools/converters/mil/mil/operation.py in __init__(self, **kwargs)
168 input_kv = {k: v for k, v in kwargs.items() \
169 if k in self._input_types and v is not None}
--> 170 self._validate_and_set_inputs(input_kv)
171 self._ensure_required_inputs()
172
~/opt/miniconda3/envs/test/lib/python3.7/site-packages/coremltools/converters/mil/mil/operation.py in _validate_and_set_inputs(self, input_kvs, no_check_var_types)
453 v_old.remove_child_op(op, no_check_var_types)
454
--> 455 self.input_spec.validate_inputs(self.name, self.op_type, input_kvs)
456
457 for name, var in input_kvs.items():
~/opt/miniconda3/envs/test/lib/python3.7/site-packages/coremltools/converters/mil/mil/input_type.py in validate_inputs(self, op_name, op_type, candidate_kvs)
119 "{} but got {}"
120 raise ValueError(msg.format(name, var.name, input_type.type_str,
--> 121 var.sym_type.__type_info__()))
122
123
ValueError: Op "13" (op_type: reshape) Input shape="12" expects integer tensor but got tensor[0,fp32]
This still does not work in coremltools 6.0, but there is a new error message and stack trace:
Cell In [1], line 19
17 import coremltools as ct
18 from coremltools.converters.mil.mil import types
---> 19 coreml_model = ct.convert(
20 trace_model,
21 inputs=[ct.TensorType(name="x", shape=input.shape, dtype=types.float)]
22 )
File ~/miniconda3/envs/prod/lib/python3.9/site-packages/coremltools/converters/_converters_entry.py:451, in convert(model, source, inputs, outputs, classifier_config, minimum_deployment_target, convert_to, compute_precision, skip_model_load, compute_units, package_dir, debug)
448 if specification_version is None:
449 specification_version = _set_default_specification_version(exact_target)
--> 451 mlmodel = mil_convert(
452 model,
453 convert_from=exact_source,
454 convert_to=exact_target,
455 inputs=inputs,
456 outputs=outputs_as_tensor_or_image_types, # None or list[ct.ImageType/ct.TensorType]
457 classifier_config=classifier_config,
458 transforms=tuple(transforms),
459 skip_model_load=skip_model_load,
460 compute_units=compute_units,
461 package_dir=package_dir,
462 debug=debug,
463 specification_version=specification_version,
464 )
466 if exact_target == 'milinternal':
467 return mlmodel # Returns the MIL program
File ~/miniconda3/envs/prod/lib/python3.9/site-packages/coremltools/converters/mil/converter.py:193, in mil_convert(model, convert_from, convert_to, compute_units, **kwargs)
154 @_profile
155 def mil_convert(
156 model,
(...)
160 **kwargs
161 ):
162 """
163 Convert model from a specified frontend `convert_from` to a specified
164 converter backend `convert_to`.
(...)
191 See `coremltools.converters.convert`
192 """
--> 193 return _mil_convert(model, convert_from, convert_to, ConverterRegistry, MLModel, compute_units, **kwargs)
File ~/miniconda3/envs/prod/lib/python3.9/site-packages/coremltools/converters/mil/converter.py:220, in _mil_convert(model, convert_from, convert_to, registry, modelClass, compute_units, **kwargs)
217 # To make sure everyone can read and write to this directory (on par with os.mkdir())
218 _os.chmod(weights_dir, _stat.S_IRWXU | _stat.S_IRWXG | _stat.S_IRWXO)
--> 220 proto, mil_program = mil_convert_to_proto(
221 model,
222 convert_from,
223 convert_to,
224 registry,
225 **kwargs
226 )
228 _reset_conversion_state()
230 if convert_to == 'milinternal':
File ~/miniconda3/envs/prod/lib/python3.9/site-packages/coremltools/converters/mil/converter.py:285, in mil_convert_to_proto(model, convert_from, convert_to, converter_registry, **kwargs)
282 kwargs.setdefault("convert_to", convert_to)
283 frontend_converter = frontend_converter_type()
--> 285 prog = frontend_converter(model, **kwargs)
287 if convert_to.lower() != "neuralnetwork":
288 passes = kwargs.get("transforms", list())
File ~/miniconda3/envs/prod/lib/python3.9/site-packages/coremltools/converters/mil/converter.py:115, in TorchFrontend.__call__(self, *args, **kwargs)
112 def __call__(self, *args, **kwargs):
113 from .frontend.torch import load
--> 115 return load(*args, **kwargs)
File ~/miniconda3/envs/prod/lib/python3.9/site-packages/coremltools/converters/mil/frontend/torch/load.py:53, in load(model_spec, debug, **kwargs)
51 opset_version = kwargs["specification_version"]
52 converter = TorchConverter(torchscript, inputs, outputs, cut_at_symbols, opset_version)
---> 53 return _perform_torch_convert(converter, debug)
File ~/miniconda3/envs/prod/lib/python3.9/site-packages/coremltools/converters/mil/frontend/torch/load.py:100, in _perform_torch_convert(converter, debug)
98 print("the following model ops are MISSING:")
99 print("\n".join([" " + str(x) for x in sorted(missing)]))
--> 100 raise e
102 return prog
File ~/miniconda3/envs/prod/lib/python3.9/site-packages/coremltools/converters/mil/frontend/torch/load.py:92, in _perform_torch_convert(converter, debug)
90 def _perform_torch_convert(converter, debug):
91 try:
---> 92 prog = converter.convert()
93 except RuntimeError as e:
94 if debug and "convert function" in str(e):
File ~/miniconda3/envs/prod/lib/python3.9/site-packages/coremltools/converters/mil/frontend/torch/converter.py:299, in TorchConverter.convert(self)
297 if self.outputs is not None:
298 prog.set_main_output_types(self.outputs)
--> 299 self.torch_passes(prog)
300 return prog
File ~/miniconda3/envs/prod/lib/python3.9/site-packages/coremltools/converters/mil/frontend/torch/ssa_passes/torch_passes.py:25, in torch_passes(prog)
23 for p in passes:
24 logging.info('Performing passes for torch frontend: "{}"'.format(p))
---> 25 PASS_REGISTRY[p](prog)
26 prog.validate()
28 logging.debug("Program after torch frontend passes:\n{}".format(prog))
File ~/miniconda3/envs/prod/lib/python3.9/site-packages/coremltools/converters/mil/mil/passes/graph_pass.py:13, in AbstractGraphPass.__call__(self, prog)
11 def __call__(self, prog):
12 if not prog.skip_all_passes:
---> 13 self.apply(prog)
File ~/miniconda3/envs/prod/lib/python3.9/site-packages/coremltools/converters/mil/frontend/torch/ssa_passes/torch_tensor_assign_to_core.py:38, in torch_tensor_assign_to_core.apply(self, prog)
36 def apply(self, prog):
37 for f in prog.functions.values():
---> 38 _torch_tensor_assign_to_core_block(f)
File ~/miniconda3/envs/prod/lib/python3.9/site-packages/coremltools/converters/mil/mil/passes/helper.py:41, in block_context_manager.<locals>.wrapper(*args)
39 raise ValueError("The function decorated with block_context_manager must have a Block type argument as the first input.")
40 with args[0]:
---> 41 return func(*args)
File ~/miniconda3/envs/prod/lib/python3.9/site-packages/coremltools/converters/mil/frontend/torch/ssa_passes/torch_tensor_assign_to_core.py:47, in _torch_tensor_assign_to_core_block(block)
44 _torch_tensor_assign_to_core_block(b)
46 if op.op_type in ["torch_tensor_assign"]:
---> 47 _transform_tensor_assign(op, block)
File ~/miniconda3/envs/prod/lib/python3.9/site-packages/coremltools/converters/mil/frontend/torch/ssa_passes/torch_tensor_assign_to_core.py:63, in _transform_tensor_assign(op, block)
60 raise NotImplementedError("Only tensor assignment with stride 1 is supported.")
62 if sum(squeeze_mask) != 1:
---> 63 raise NotImplementedError("Only tensor assignment with exactly 1 pure dimension selection is supported")
65 for i in range(len(squeeze_mask)):
66 if not squeeze_mask[i]:
NotImplementedError: Only tensor assignment with exactly 1 pure dimension selection is supported
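For context on what "pure dimension selection" means here: in the torch_tensor_assign_to_core pass, squeeze_mask marks the dimensions selected with a plain integer index (which removes the dimension) rather than with a slice, and the pass only handles assignments where exactly one dimension is selected that way. A rough illustration, based on my reading of the check at line 62 above, not an authoritative statement of the pass's semantics:

x[:, :] = 0   # no dimension squeezed, sum(squeeze_mask) == 0 -> NotImplementedError
x[0, :] = 0   # exactly one dimension squeezed, sum(squeeze_mask) == 1 -> passes this check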
🐞 Describe the bug
I got the following error when I tried to convert a PyTorch model to a Core ML model. If you run

x[:, :] = 0

after torch.cat, the Core ML conversion will fail.
To Reproduce
Here is a Colab notebook that reproduces the issue: https://colab.research.google.com/drive/1eR3HLQh5zdPQzg9rMFt641NH7h7T85bE?usp=sharing
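For reference, a minimal repro sketch reconstructed from the description and the tracebacks above; the module body and input shape are illustrative assumptions, not copied from the notebook:

import torch
import coremltools as ct
from coremltools.converters.mil.mil import types

class Net(torch.nn.Module):
    def forward(self, x):
        x = torch.cat([x, x], dim=1)  # concatenate first
        x[:, :] = 0                   # full-slice assignment: the failing pattern
        return x

input = torch.rand(1, 3, 8)
trace_model = torch.jit.trace(Net().eval(), input)

# Fails with the errors shown above
coreml_model = ct.convert(
    trace_model,
    inputs=[ct.TensorType(name="x", shape=input.shape, dtype=types.float)],
)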
System environment (please complete the following information):
Additional context
I wrote the following, and it worked fine.
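One workaround consistent with the error above is to avoid the in-place full-slice assignment altogether; an illustrative sketch, assuming the intent of x[:, :] = 0 is simply to zero the tensor:

x = torch.cat([a, b], dim=1)
x = torch.zeros_like(x)  # out-of-place, so no torch_tensor_assign op is emitted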