Got an error when calling the convert_sparkml() function on a MultilayerPerceptronClassificationModel.
This appears to be a minor bug in convert/sparkml/ops_input_output.py: the key for this model class is missing from the io_name_map dictionary. I've submitted a pull request with the fix.
Error logs:
/usr/local/lib/python3.7/dist-packages/aimodelshare/aimsonnx.py in _pyspark_to_onnx(model, initial_types, spark_session, transfer_learning, deep_learning, task_type)
360 # convert to onnx
361 onx = convert_sparkml(model, 'Pyspark model', initial_types,
--> 362 spark_session=spark_session)
363
364 # generate metadata dict
/usr/local/lib/python3.7/dist-packages/onnxmltools/convert/main.py in convert_sparkml(model, name, initial_types, doc_string, target_opset, targeted_onnx, custom_conversion_functions, custom_shape_calculators, spark_session)
164 from .sparkml.convert import convert
165 return convert(model, name, initial_types, doc_string, target_opset, targeted_onnx,
--> 166 custom_conversion_functions, custom_shape_calculators, spark_session)
167
168
/usr/local/lib/python3.7/dist-packages/onnxmltools/convert/sparkml/convert.py in convert(model, name, initial_types, doc_string, target_opset, targeted_onnx, custom_conversion_functions, custom_shape_calculators, spark_session)
63 target_opset = target_opset if target_opset else get_maximum_opset_supported()
64 # Parse spark-ml model as our internal data structure (i.e., Topology)
---> 65 topology = parse_sparkml(spark_session, model, initial_types, target_opset, custom_conversion_functions, custom_shape_calculators)
66
67 # Infer variable shapes
/usr/local/lib/python3.7/dist-packages/onnxmltools/convert/sparkml/_parse.py in parse_sparkml(spark, model, initial_types, target_opset, custom_conversion_functions, custom_shape_calculators)
117 # Parse the input spark-ml model as a Topology object.
118 output_dict = {}
--> 119 _parse_sparkml(spark, scope, model, inputs, output_dict)
120 outputs = []
121 for k, v in output_dict.items():
/usr/local/lib/python3.7/dist-packages/onnxmltools/convert/sparkml/_parse.py in _parse_sparkml(spark, scope, model, global_inputs, output_dict)
85 return _parse_sparkml_pipeline(spark, scope, model, global_inputs, output_dict)
86 else:
---> 87 return _parse_sparkml_simple_model(spark, scope, model, global_inputs, output_dict)
88
89
/usr/local/lib/python3.7/dist-packages/onnxmltools/convert/sparkml/_parse.py in _parse_sparkml_simple_model(spark, scope, model, global_inputs, output_dict)
48 this_operator = scope.declare_local_operator(get_sparkml_operator_name(type(model)), model)
49 this_operator.raw_params = {'SparkSession': spark}
---> 50 raw_input_names = get_input_names(model)
51 this_operator.inputs = [_get_variable_for_input(scope, x, global_inputs, output_dict) for x in raw_input_names]
52 raw_output_names = get_output_names(model)
/usr/local/lib/python3.7/dist-packages/onnxmltools/convert/sparkml/ops_input_output.py in get_input_names(model)
175 :return: list of input names
176 '''
--> 177 return io_name_map[get_sparkml_operator_name(type(model))][0](model)
178
179
KeyError: 'pyspark.ml.classification.MultilayerPerceptronClassificationModel'
Got an error when calling the convert_sparkml() function on a MultilayerPerceptronClassificationModel. This appears to be a minor bug in
convert/sparkml/ops_input_output.py:
the key for this model class is missing from the io_name_map dictionary. I've submitted a pull request with the fix. Error logs: