Open junaid3d opened 8 months ago
It was working properly but now after a few updates, I'm getting this error:
`Error occurred when executing InsightFaceLoader_Zho:
[ONNXRuntimeError] : 7 : INVALID_PROTOBUF : Load model from D:\AI\ComfyUI\comfyui\git\custom_nodes\ComfyUI-InstantID\models\antelopev2\1k3d68.onnx failed:Protobuf parsing failed.
File "D:\AI\ComfyUI\comfyui\git\execution.py", line 152, in recursive_execute output_data, output_ui = get_output_data(obj, input_data_all) File "D:\AI\ComfyUI\comfyui\git\execution.py", line 82, in get_output_data return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True) File "D:\AI\ComfyUI\comfyui\git\execution.py", line 75, in map_node_over_list results.append(getattr(obj, func)(slice_dict(input_data_all, i))) File "D:\AI\ComfyUI\comfyui\git\custom_nodes\ComfyUI-InstantID\InstantIDNode.py", line 71, in load_insight_face_antelopev2 model = FaceAnalysis(name="antelopev2", root=current_directory, providers=[provider + 'ExecutionProvider',]) File "D:\AI\ComfyUI\comfyui\lib\site-packages\insightface\app\face_analysis.py", line 31, in init model = model_zoo.get_model(onnx_file, kwargs) File "D:\AI\ComfyUI\comfyui\lib\site-packages\insightface\model_zoo\model_zoo.py", line 96, in get_model model = router.get_model(providers=providers, provider_options=provider_options) File "D:\AI\ComfyUI\comfyui\lib\site-packages\insightface\model_zoo\model_zoo.py", line 40, in get_model session = PickableInferenceSession(self.onnx_file, kwargs) File "D:\AI\ComfyUI\comfyui\lib\site-packages\insightface\model_zoo\model_zoo.py", line 25, in init super().init(model_path, kwargs) File "D:\AI\ComfyUI\comfyui\lib\site-packages\onnxruntime\capi\onnxruntime_inference_collection.py", line 419, in init self._create_inference_session(providers, provider_options, disabled_optimizers) File "D:\AI\ComfyUI\comfyui\lib\site-packages\onnxruntime\capi\onnxruntime_inference_collection.py", line 452, in _create_inference_session sess = C.InferenceSession(session_options, self._model_path, True, self._read_config_from_model)`
You need to download the files from https://huggingface.co/DIAMONIK7777/antelopev2/tree/main and use them to replace the ones in ComfyUI\custom_nodes\ComfyUI-InstantID\models\antelopev2
It was working properly but now after a few updates, I'm getting this error:
`Error occurred when executing InsightFaceLoader_Zho:
[ONNXRuntimeError] : 7 : INVALID_PROTOBUF : Load model from D:\AI\ComfyUI\comfyui\git\custom_nodes\ComfyUI-InstantID\models\antelopev2\1k3d68.onnx failed:Protobuf parsing failed.
File "D:\AI\ComfyUI\comfyui\git\execution.py", line 152, in recursive_execute output_data, output_ui = get_output_data(obj, input_data_all) File "D:\AI\ComfyUI\comfyui\git\execution.py", line 82, in get_output_data return_values = map_node_over_list(obj, input_data_all, obj.FUNCTION, allow_interrupt=True) File "D:\AI\ComfyUI\comfyui\git\execution.py", line 75, in map_node_over_list results.append(getattr(obj, func)(slice_dict(input_data_all, i))) File "D:\AI\ComfyUI\comfyui\git\custom_nodes\ComfyUI-InstantID\InstantIDNode.py", line 71, in load_insight_face_antelopev2 model = FaceAnalysis(name="antelopev2", root=current_directory, providers=[provider + 'ExecutionProvider',]) File "D:\AI\ComfyUI\comfyui\lib\site-packages\insightface\app\face_analysis.py", line 31, in init model = model_zoo.get_model(onnx_file, kwargs) File "D:\AI\ComfyUI\comfyui\lib\site-packages\insightface\model_zoo\model_zoo.py", line 96, in get_model model = router.get_model(providers=providers, provider_options=provider_options) File "D:\AI\ComfyUI\comfyui\lib\site-packages\insightface\model_zoo\model_zoo.py", line 40, in get_model session = PickableInferenceSession(self.onnx_file, kwargs) File "D:\AI\ComfyUI\comfyui\lib\site-packages\insightface\model_zoo\model_zoo.py", line 25, in init super().init(model_path, kwargs) File "D:\AI\ComfyUI\comfyui\lib\site-packages\onnxruntime\capi\onnxruntime_inference_collection.py", line 419, in init self._create_inference_session(providers, provider_options, disabled_optimizers) File "D:\AI\ComfyUI\comfyui\lib\site-packages\onnxruntime\capi\onnxruntime_inference_collection.py", line 452, in _create_inference_session sess = C.InferenceSession(session_options, self._model_path, True, self._read_config_from_model)`