  File "...\Packages\PythonSoftwareFoundation.Python.3.10_qbz5n2kfra8p0\LocalCache\local-packages\Python310\site-packages\torch\jit\mobile\__init__.py", line 137, in _backport_for_mobile
    return torch._C._backport_for_mobile(str(f_input), str(f_output), to_version)
RuntimeError: PytorchStreamReader failed reading zip archive: failed finding central directory
import torch
from torch.utils.mobile_optimizer import optimize_for_mobile
from torch.jit.mobile import (
_backport_for_mobile,
_get_model_bytecode_version,
)
# Fetch the pretrained DeepLabV3 segmentation model (MobileNetV3-Large
# backbone) from the torchvision hub and switch it to inference mode
# before scripting -- eval() mutates in place and returns the same module.
model = torch.hub.load('pytorch/vision:v0.10.0', 'deeplabv3_mobilenet_v3_large', pretrained=True)
scripted_module = torch.jit.script(model.eval())

# Export full jit version model (not compatible mobile interpreter), leave it here for comparison
# scripted_module._save_for_lite_interpreter("deeplabv3_scripted.pt")

# Export the mobile-interpreter-compatible artifact: run the mobile
# optimization passes first, then save in lite-interpreter format.
optimized_scripted_module = optimize_for_mobile(scripted_module)
optimized_scripted_module._save_for_lite_interpreter("deeplabv3_scripted.ptl")

MODEL_INPUT_FILE = "deeplabv3_scripted.ptl"
MODEL_OUTPUT_FILE = "deeplabv5_scripted.ptl"

# Show the bytecode version the export produced, backport that file to
# bytecode version 5, then read the version back from the new artifact.
print("model version", _get_model_bytecode_version(f_input=MODEL_INPUT_FILE))
_backport_for_mobile(f_input=MODEL_INPUT_FILE, f_output=MODEL_OUTPUT_FILE, to_version=5)
print("new model version", _get_model_bytecode_version(MODEL_OUTPUT_FILE))
  File "...\Packages\PythonSoftwareFoundation.Python.3.10_qbz5n2kfra8p0\LocalCache\local-packages\Python310\site-packages\torch\jit\mobile\__init__.py", line 137, in _backport_for_mobile
    return torch._C._backport_for_mobile(str(f_input), str(f_output), to_version)
RuntimeError: PytorchStreamReader failed reading zip archive: failed finding central directory