Closed Rayman96 closed 1 year ago
paddle用的是什么版本?
paddle用的是什么版本?
用的2.4的paddle
paddle用的是什么版本?
尝试了2.3和2.4的paddle都会报同样的错,再老版本的paddle需要适配老版本的paddleslim,里边就没有AutoCompression的功能了
应该是产出模型时使用的 paddle 版本比较旧,op 里还没有 use_mkldnn 这个属性。可以在 https://github.com/PaddlePaddle/PaddleSlim/blob/develop/paddleslim/common/recover_program.py#L78
加上一行:
op._set_attr("use_mkldnn", False)
应该是产出模型时使用的 paddle 版本比较旧,op 里还没有 use_mkldnn 这个属性。可以在 https://github.com/PaddlePaddle/PaddleSlim/blob/develop/paddleslim/common/recover_program.py#L78 加上一行:
op._set_attr("use_mkldnn", False)
添加这个后不再报没有属性的错误了,但是AutoCompression的过程中会出现这样的问题,辛苦看下这里应该怎么解决呢?原本的模型进行inference是没问题的。
`RuntimeError: In user code:
File "/home/public/liuyiding01/python_env/Python-2.7.8/lib/python2.7/site-packages/paddle/fluid/framework.py", line 2610, in append_op
attrs=kwargs.get("attrs", None))
File "/home/public/liuyiding01/python_env/Python-2.7.8/lib/python2.7/site-packages/paddle/fluid/layer_helper.py", line 43, in append_op
return self.main_program.current_block().append_op(*args, **kwargs)
File "/home/public/liuyiding01/python_env/Python-2.7.8/lib/python2.7/site-packages/paddle/fluid/layers/nn.py", line 6449, in matmul
attrs=attrs)
File "/home/yangxiaodi/remove_site_word/baidu/personal-code/sat-ernie-rank/models/model.py", line 106, in _build_model
transpose_y=True)
File "/home/yangxiaodi/remove_site_word/baidu/personal-code/sat-ernie-rank/models/model.py", line 72, in __init__
self._build_model(src_ids, position_ids, sentence_ids, input_mask)
File "/home/yangxiaodi/remove_site_word/baidu/personal-code/sat-ernie-rank/models/multi_field_model_two_flows.py", line 97, in create_multi_field_two_flows
config=bert_config)
File "/home/yangxiaodi/remove_site_word/baidu/personal-code/sat-ernie-rank/trainer/eval.py", line 46, in train
mode=mode)
File "/home/yangxiaodi/remove_site_word/baidu/personal-code/sat-ernie-rank/trainer/base.py", line 92, in __init__
self.train()
File "finetune.py", line 55, in <module>
tn(rd, md, conf)
NotFoundError: Operator (matmul) does not have kernel for {data_type[int64_t]; data_layout[Undefined(AnyLayout)]; place[Place(gpu:0)]; library_type[PLAIN]}.
[Hint: Expected kernel_iter != kernels.end(), but received kernel_iter == kernels.end().] (at /paddle/paddle/fluid/framework/operator.cc:2019)
[operator < matmul > error]`
已解决
`RuntimeError Traceback (most recent call last) /tmp/ipykernel_9478/3276394805.py in
9 train_dataloader=val_data_loader,
10 eval_dataloader=val_data_loader)
---> 11 ac.compress()
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddleslim-0.0.0.dev0-py3.7.egg/paddleslim/auto_compression/compressor.py in compress(self) 594 ) in enumerate(zip(self._strategy, self._config, self.train_config)): 595 self.single_strategy_compress(strategy, config, strategy_idx, --> 596 train_config) 597 598 if strategy == 'ptq_hpo' and config.max_quant_count == 1 and platform.system(
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddleslim-0.0.0.dev0-py3.7.egg/paddleslim/auto_compression/compressor.py in single_strategy_compress(self, strategy, config, strategy_idx, train_config) 771 train_program_info, test_program_info = self._prepare_program( 772 inference_program, feed_target_names, fetch_targets, patterns, --> 773 strategy, config, train_config) 774 if 'unstructure' in strategy: 775 test_program_info.program._program = remove_unused_var_nodes(
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddleslim-0.0.0.dev0-py3.7.egg/paddleslim/auto_compression/compressor.py in _prepare_program(self, program, feed_target_names, fetch_targets, patterns, strategy, config, train_config) 500 self._pruner, train_program_info = build_prune_program( 501 self._exe, self._places, config_dict, train_program_info, --> 502 strategy, patterns, self.eval_dataloader) 503 504 if train_config.use_fleet:
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddleslim-0.0.0.dev0-py3.7.egg/paddleslim/auto_compression/create_compressed_program.py in build_prune_program(executor, place, config, train_program_info, strategy, patterns, eval_dataloader) 521 width_mult=(1.0 - config['pruned_ratio']), 522 dataloader=eval_dataloader, --> 523 fetch_targets=train_program_info.fetch_targets) 524 pruned_program = pruner.prune() 525 train_program_info.program = pruned_program
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddleslim-0.0.0.dev0-py3.7.egg/paddleslim/auto_compression/transformer_pruner.py in init(self, exe, places, inference_program, patterns, label_info, width_mult, fetch_targets, dataloader) 284 _logger.info("start to reorder weight in program") 285 self.scope = self.reorder(inference_program, self.scope, patterns, --> 286 layer_num, head_num, mha_weight, ffn_weight) 287 288 def _preprocess_patterns(self, patterns, graph):
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddleslim-0.0.0.dev0-py3.7.egg/paddleslim/auto_compression/transformer_pruner.py in reorder(self, inference_program, scope, patterns, layer_num, head_num, mha_weight, ffn_weight) 491 compute_program, head_importance, neuron_importance = self.compute_importance( 492 self.exe, compute_program, patterns, ffn_weight, layer_num, --> 493 head_num, self.label_info, self.fetch_targets, self.dataloader) 494 495 ############################### REORDER ##################################
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddleslim-0.0.0.dev0-py3.7.egg/paddleslim/auto_compression/transformer_pruner.py in compute_importance(self, exe, program, patterns, ffn_weight, layer_num, head_num, label_info, fetch_targets, dataloader) 366 Compute head importance according gradients of head_mask""" 367 program = self._program_add_mask(program, patterns, layer_num, head_num, --> 368 label_info, fetch_targets) 369 370 ### define importance matrix
/opt/conda/envs/python35-paddle120-env/lib/python3.7/site-packages/paddleslim-0.0.0.dev0-py3.7.egg/paddleslim/auto_compression/transformer_pruner.py in _program_add_mask(self, program, patterns, layer_num, head_num, label_info, fetch_targets) 357 358 program._sync_with_cpp() --> 359 paddle.static.append_backward(loss) 360 program._sync_with_cpp() 361 return program