Binary_229_Unary_104_Binary_228_Unary_103_Conv2D_71_Conv2D_69_Conv2D_70_Binary_231_Binary_230_Conv2D
Unhandled exception. System.AggregateException: One or more errors occurred. (Value cannot be null. (Parameter 'key'))
---> System.ArgumentNullException: Value cannot be null. (Parameter 'key')
at System.Collections.Generic.Dictionary`2.TryInsert(TKey key, TValue value, InsertionBehavior behavior)
at System.Linq.Enumerable.ToDictionary[TSource,TKey,TElement](IEnumerable`1 source, Func`2 keySelector, Func`2 elementSelector, IEqualityComparer`1 comparer)
at Nncase.Passes.Rules.ShapeBucket.ShapeBucketHelper.MakeVarValuesForAllSegment(ShapeBucketOptions options, Int32 segmentCount, Boolean staticShape)
at Nncase.Passes.Rules.ShapeBucket.RecordFusionShape.RunCoreAsync(BaseFunction main, RunPassContext context)
at Nncase.Passes.Pass`2.RunAsync(TInput input, RunPassContext context)
at Nncase.Passes.PassManager.FunctionPassGroup.Runner.RunAsync()
at Nncase.Passes.PassManager.FunctionPassGroup.RunAsync(IRModule module)
at Nncase.Passes.PassManager.RunAsync(IRModule module)
at Nncase.Compiler.Compiler.RunPassAsync(Action`1 register, String name, IProgress`1 progress, CancellationToken token)
at Nncase.Compiler.Compiler.CompileAsync(IProgress`1 progress, CancellationToken token)
--- End of inner exception stack trace ---
at System.Threading.Tasks.Task.Wait(Int32 millisecondsTimeout, CancellationToken cancellationToken)
at Nncase.Compiler.Interop.CApi.CompilerCompile(IntPtr compilerHandle)
转换代码:
import os
import shutil
import nncase
import numpy as np
import onnx
import onnxsim
def generate_data_encoder(data_dir, input_shapes, data_count):
    """Load PTQ calibration samples for the encoder's single input.

    Reads ``data_count`` files named ``X_{i}.bin`` from ``data_dir``,
    interpreting each as int64 and reshaping it to ``input_shapes[0]``.
    Returns a list with one entry (one list of arrays per model input).
    """
    samples = []
    for idx in range(data_count):
        sample_path = os.path.join(data_dir, 'X_{}.bin'.format(idx))
        samples.append(np.fromfile(sample_path, dtype='int64').reshape(input_shapes[0]))
    return [samples]
def parse_model_input_output(model_file, input_shapes_):
    """Load an ONNX model and describe its real (non-initializer) inputs.

    Graph inputs that merely name initializers (weights) are filtered out.
    The i-th remaining input is paired with ``input_shapes_[i]``; its dtype
    comes from the ONNX tensor type. Returns ``(onnx_model, inputs)`` where
    ``inputs`` is a list of ``{'name', 'dtype', 'shape'}`` dicts.
    """
    model = onnx.load(model_file)
    initializer_names = {node.name for node in model.graph.initializer}
    # Keep graph.input order; drop entries that are actually weights.
    real_inputs = [node for node in model.graph.input
                   if node.name not in initializer_names]
    inputs = [
        {
            'name': tensor.name,
            'dtype': onnx.helper.tensor_dtype_to_np_dtype(
                tensor.type.tensor_type.elem_type),
            'shape': input_shapes_[idx],
        }
        for idx, tensor in enumerate(real_inputs)
    ]
    return model, inputs
def onnx_simplify(model_file, dump_dir, input_shapes_):
    """Shape-infer and simplify an ONNX model, saving the result.

    Input shapes are pinned to ``input_shapes_`` via onnxsim's
    ``overwrite_input_shapes``. The simplified graph is printed for
    inspection and written to ``dump_dir/simplified.onnx``; that path
    is returned.
    """
    onnx_model, inputs = parse_model_input_output(model_file, input_shapes_)
    onnx_model = onnx.shape_inference.infer_shapes(onnx_model)
    shape_overrides = {spec['name']: spec['shape'] for spec in inputs}
    onnx_model, check = onnxsim.simplify(
        onnx_model, overwrite_input_shapes=shape_overrides)
    print(onnx.helper.printable_graph(onnx_model.graph))
    assert check, "Simplified ONNX model could not be validated"
    simplified_path = os.path.join(dump_dir, 'simplified.onnx')
    onnx.save_model(onnx_model, simplified_path)
    return simplified_path
def read_model_file(model_file):
    """Return the raw bytes of ``model_file``."""
    with open(model_file, 'rb') as handle:
        return handle.read()
def encoder_tokmodel(onnx_model_path, kmodel_path, data_dir, ptq_option, input_shapes, data_count, tmp_path,
                     target='k230'):
    """Compile an ONNX encoder model to an nncase .kmodel.

    Pipeline: simplify the ONNX model, import it into the nncase compiler
    with ShapeBucket enabled, calibrate PTQ with samples from ``data_dir``,
    compile, and write the generated kmodel bytes to ``kmodel_path``.

    ``ptq_option`` selects the quantization scheme:
      0 -> defaults; 1 -> NoClip + int16 weights; 2 -> NoClip + int16
      activations; 3 -> int16 weights; 4 -> int16 activations.
    ``tmp_path`` holds intermediate dumps and is deleted at the end.
    """
    if not os.path.exists(tmp_path):
        os.makedirs(tmp_path)
    # Simplify the ONNX graph first (onnxsim pins input shapes).
    model_file = onnx_simplify(onnx_model_path, tmp_path, input_shapes)
    # Compiler options: dump IR/ASM into tmp_path for debugging.
    compile_options = nncase.CompileOptions()
    compile_options.target = target
    compile_options.preprocess = False
    compile_options.dump_ir = True
    compile_options.dump_asm = True
    compile_options.dump_dir = tmp_path
    # ShapeBucket: compile for a dynamic 'seq_len' in [1, 64] split into 64
    # segments, with 'batch_size' fixed at 1.
    # NOTE(review): the log above shows the compiler crashing inside
    # MakeVarValuesForAllSegment (ArgumentNullException: 'key') when
    # ShapeBucket is enabled — presumably the names used here
    # ('seq_len'/'batch_size') must match symbolic dimension names actually
    # present in the imported graph, and onnx_simplify's fixed
    # overwrite_input_shapes may have erased them. TODO: confirm against
    # nncase's ShapeBucket documentation.
    compile_options.shape_bucket_enable = True
    compile_options.shape_bucket_range_info = {"seq_len": [1, 64]}
    compile_options.shape_bucket_segments_count = 64
    compile_options.shape_bucket_fix_var_map = {"batch_size": 1}
    # Create the compiler and import the simplified ONNX model.
    compiler = nncase.Compiler(compile_options)
    model_content = read_model_file(model_file)
    import_options = nncase.ImportOptions()
    compiler.import_onnx(model_content, import_options)
    # PTQ calibration options; see the docstring for the ptq_option mapping.
    ptq_options = nncase.PTQTensorOptions()
    ptq_options.samples_count = data_count
    if ptq_option == 0:
        pass
    elif ptq_option == 1:
        ptq_options.calibrate_method = 'NoClip'
        ptq_options.w_quant_type = 'int16'
    elif ptq_option == 2:
        ptq_options.calibrate_method = 'NoClip'
        ptq_options.quant_type = 'int16'
    elif ptq_option == 3:
        ptq_options.w_quant_type = 'int16'
    elif ptq_option == 4:
        ptq_options.quant_type = 'int16'
    ptq_options.set_tensor_data(generate_data_encoder(data_dir, input_shapes, data_count))
    compiler.use_ptq(ptq_options)
    # Compile and serialize the kmodel.
    compiler.compile()
    kmodel = compiler.gencode_tobytes()
    with open(kmodel_path, 'wb') as f:
        f.write(kmodel)
    # Remove the intermediate dump directory.
    if os.path.exists(tmp_path):
        shutil.rmtree(tmp_path)
if __name__ == "__main__":
    # Example invocation: convert onnx/example.onnx to onnx/example.kmodel
    # using 30 int64 calibration samples from ./generate_data with the
    # default PTQ scheme (ptq_option=0) and input shape [1, 64].
    encoder_tokmodel(onnx_model_path="onnx/example.onnx",
                     kmodel_path="onnx/example.kmodel",
                     data_dir="generate_data",
                     ptq_option=0,
                     input_shapes=[[1, 64]],
                     data_count=30,
                     tmp_path='./tmp')
转换一个 Transformer 模型时,启用 ShapeBucket 会转换失败,不启用 ShapeBucket 则可以正常转换。nncase 2.4 与 2.9 版本的结果均一致。转换代码见上文,错误信息见上方运行日志。