Closed: ben-da6 closed this issue 7 months ago.
Looks like the generated model has an unsupported type. Can you attach the ONNX model here? Meanwhile, let me see if I can repro. I'm getting a link error when running the Python script. Can you post repro instructions? How was the .so created? Which libs were linked, etc.?

OSError: /home/pranav/libonnx-op-shared.so: undefined symbol: _ZTVN5torch8autograd12AutogradMetaE
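In case it helps with the repro, a quick way to inspect what actually ended up in the exported model (the file name model.onnx is the one used in the repro script below) is to dump the node domains/op types and the declared tensor element types:

import onnx

# Load the exported model and print the custom-domain nodes and the
# declared element types of the graph inputs/outputs.
model = onnx.load("model.onnx")
print([(node.domain, node.op_type) for node in model.graph.node])
for value in list(model.graph.input) + list(model.graph.output):
    print(value.name, value.type.tensor_type.elem_type)
print(model.opset_import)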
This is my cmake run, with torch_path obtained via python3 -c 'import torch;print(torch.utils.cmake_prefix_path)':

cmake -DCMAKE_PREFIX_PATH=torch_path .
CMakeLists.txt:

cmake_minimum_required(VERSION 3.18 FATAL_ERROR)
project(onnx-op)

find_package(Torch REQUIRED)
include_directories("/persist/onnxruntime/include/")
find_library(ONNXRUNTIME_LIBRARY onnxruntime HINTS "/persist/onnxruntime/lib/")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${TORCH_CXX_FLAGS}")

add_library(onnx-op-shared SHARED ort_op.cpp torch_op.cpp)
target_link_libraries(onnx-op-shared "${TORCH_LIBRARIES}" "${ONNXRUNTIME_LIBRARY}")
set_property(TARGET onnx-op-shared PROPERTY CXX_STANDARD 17)
Python venv:

coloredlogs==15.0.1
custom-group-norm==0.0.0
filelock==3.13.1
flatbuffers==23.5.26
fsspec==2024.2.0
humanfriendly==10.0
Jinja2==3.1.3
MarkupSafe==2.1.5
mpmath==1.3.0
netron==7.5.0
networkx==3.2.1
numpy==1.26.4
onnx==1.15.0
onnxruntime==1.17.1
packaging==23.2
protobuf==4.25.3
sympy==1.12
torch==2.1.2+cu118
triton==2.1.0
typing_extensions==4.10.0
@ben-da6 the custom_op_one should be defined as static, so that it outlives the RegisterOps call:

void RegisterOps(Ort::CustomOpDomain& domain) {
  static std::unique_ptr<...> custom_op_one = ...;
  // ...
}
thanks!
Describe the issue
I am trying to follow these tutorials to create a library of custom operators which I can use from Python as torch ops, and then load the custom ops in the runtime.
However, I get:

RuntimeError: tensor type -689693312 is not supported

(the number changes each time). It's not clear to me what I am doing wrong, as I think I am following the tutorial correctly and the types should be the same.

To reproduce
import torch
from torch.onnx import register_custom_op_symbolic


class MyOp(torch.nn.Module):
    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return torch.ops.my_namespace.pie_maker(x)


def register():
    def custom_normalization(g, x):
        output = g.op("my_domain::Pie", x)
        output_type = x.type()
        output.setType(output_type)
        return output

    opset_version = 16
    register_custom_op_symbolic("my_namespace::pie_maker", custom_normalization, opset_version)


def test_normalisation() -> None:
    model = MyOp()
    BATCH_SIZE = 1
    NUM_FEATURES = 10
    batch = torch.rand(BATCH_SIZE, NUM_FEATURES)
    torch.onnx.export(
        model,
        batch,
        "model.onnx",
        export_params=True,
        do_constant_folding=True,
        input_names=["input"],
        output_names=["output"],
        dynamic_axes={"input": {0: "batch_size"}, "output": {0: "batch_size"}},
        opset_version=16,
        custom_opsets={"my_domain": 16},
    )


if __name__ == "__main__":
    torch.ops.load_library("libonnx-op-shared.so")
    register()
    test_normalisation()
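The script above stops at the ONNX export; the failure in the traceback below comes from creating the InferenceSession (my_op.py line 50), which is not included above. A minimal sketch of that step, assuming the custom-op library is registered with ONNX Runtime through SessionOptions.register_custom_ops_library:

import numpy as np
import onnxruntime as ort

# Register the shared library that provides the ORT kernel for my_domain::Pie,
# then create the session and run the exported model.
sess_options = ort.SessionOptions()
sess_options.register_custom_ops_library("libonnx-op-shared.so")
ort_sess = ort.InferenceSession(
    "model.onnx",
    sess_options,
    providers=["CPUExecutionProvider"],
)
outputs = ort_sess.run(None, {"input": np.random.rand(1, 10).astype(np.float32)})

Running the script then produces the output below.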
REGISTERED!!!!
EP Error tensor type 1489264000 is not supported when using ['CPUExecutionProvider']
Falling back to ['CPUExecutionProvider'] and retrying.
Traceback (most recent call last):
  File "/persist/code/RQFluyt/RQ.Fluyt/.venv/lib/python3.10/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 419, in __init__
    self._create_inference_session(providers, provider_options, disabled_optimizers)
  File "/persist/code/RQFluyt/RQ.Fluyt/.venv/lib/python3.10/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 460, in _create_inference_session
    sess = C.InferenceSession(session_options, self._model_path, True, self._read_config_from_model)
RuntimeError: tensor type 1489264000 is not supported
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
  File "/persist/code/RQFluyt/RQ.Fluyt/Fluyt/onnx_ops/build/../my_op.py", line 66, in <module>
    test_normalisation()
  File "/persist/code/RQFluyt/RQ.Fluyt/Fluyt/onnx_ops/build/../my_op.py", line 50, in test_normalisation
    ort_sess = ort.InferenceSession(
  File "/persist/code/RQFluyt/RQ.Fluyt/.venv/lib/python3.10/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 430, in __init__
    raise fallback_error from e
  File "/persist/code/RQFluyt/RQ.Fluyt/.venv/lib/python3.10/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 425, in __init__
    self._create_inference_session(self._fallback_providers, None)
  File "/persist/code/RQFluyt/RQ.Fluyt/.venv/lib/python3.10/site-packages/onnxruntime/capi/onnxruntime_inference_collection.py", line 460, in _create_inference_session
    sess = C.InferenceSession(session_options, self._model_path, True, self._read_config_from_model)
RuntimeError: tensor type 1489264000 is not supported