kuijiang94 / MSPFN

Multi-Scale Progressive Fusion Network for Single Image Deraining

Several issues related to protobuf and TensorFlow incompatibility #29

Open Vijay-Krishna-Engineer opened 8 months ago

Vijay-Krishna-Engineer commented 8 months ago

Traceback (most recent call last):
  File "C:\Users\batci\Downloads\2024\MSPFN-master\MSPFN-master\model\test\test_MSPFN.py", line 7, in <module>
    import tensorflow as tf
  File "C:\Users\batci\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\__init__.py", line 37, in <module>
    from tensorflow.python.tools import module_util as _module_util
  File "C:\Users\batci\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\python\__init__.py", line 42, in <module>
    from tensorflow.python import data
  File "C:\Users\batci\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\python\data\__init__.py", line 21, in <module>
    from tensorflow.python.data import experimental
  File "C:\Users\batci\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\python\data\experimental\__init__.py", line 96, in <module>
    from tensorflow.python.data.experimental import service
  File "C:\Users\batci\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\python\data\experimental\service\__init__.py", line 419, in <module>
    from tensorflow.python.data.experimental.ops.data_service_ops import distribute
  File "C:\Users\batci\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\python\data\experimental\ops\data_service_ops.py", line 24, in <module>
    from tensorflow.python.data.experimental.ops import compression_ops
  File "C:\Users\batci\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\python\data\experimental\ops\compression_ops.py", line 16, in <module>
    from tensorflow.python.data.util import structure
  File "C:\Users\batci\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\python\data\util\structure.py", line 23, in <module>
    from tensorflow.python.data.util import nest
  File "C:\Users\batci\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\python\data\util\nest.py", line 36, in <module>
    from tensorflow.python.framework import sparse_tensor as _sparse_tensor
  File "C:\Users\batci\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\python\framework\sparse_tensor.py", line 24, in <module>
    from tensorflow.python.framework import constant_op
  File "C:\Users\batci\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\python\framework\constant_op.py", line 25, in <module>
    from tensorflow.python.eager import execute
  File "C:\Users\batci\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\python\eager\execute.py", line 23, in <module>
    from tensorflow.python.framework import dtypes
  File "C:\Users\batci\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\python\framework\dtypes.py", line 42, in <module>
    class DType(
  File "C:\Users\batci\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\python\framework\dtypes.py", line 202, in DType
    def experimental_type_proto(cls) -> Type[types_pb2.SerializedDType]:
AttributeError: module 'tensorflow.core.framework.types_pb2' has no attribute 'SerializedDType'

!pip install --upgrade protobuf
Requirement already satisfied: protobuf in c:\users\batci\anaconda3\envs\tfgpu\lib\site-packages (3.18.0)
Collecting protobuf
  Using cached protobuf-4.25.2-cp39-cp39-win_amd64.whl.metadata (541 bytes)
Using cached protobuf-4.25.2-cp39-cp39-win_amd64.whl (413 kB)
Installing collected packages: protobuf
  Attempting uninstall: protobuf
    Found existing installation: protobuf 3.18.0
    Uninstalling protobuf-3.18.0:
      Successfully uninstalled protobuf-3.18.0
Successfully installed protobuf-4.25.2
ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.
tensorflow 2.10.1 requires keras<2.11,>=2.10.0, but you have keras 2.8.0 which is incompatible.
tensorflow 2.10.1 requires protobuf<3.20,>=3.9.2, but you have protobuf 4.25.2 which is incompatible.
tensorflow 2.10.1 requires tensorboard<2.11,>=2.10, but you have tensorboard 2.8.0 which is incompatible.
tensorflow 2.10.1 requires tensorflow-estimator<2.11,>=2.10.0, but you have tensorflow-estimator 2.8.0 which is incompatible.

!pip install protobuf==3.19.0
Collecting protobuf==3.19.0
  Downloading protobuf-3.19.0-cp39-cp39-win_amd64.whl (895 kB)
     ---------------------------------------- 0.0/895.7 kB ? eta -:--:--
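As a side note: the conflicts pip lists above are simply TensorFlow 2.10.1's declared version ranges, so one untested way to keep that environment self-consistent would be to pin the four conflicting packages to those ranges instead of upgrading protobuf to the 4.x line, for example:

    pip install "protobuf>=3.9.2,<3.20" "keras>=2.10.0,<2.11" "tensorboard>=2.10,<2.11" "tensorflow-estimator>=2.10.0,<2.11"

The ranges are taken directly from the resolver output above; this only clears the dependency warnings and is not a verified fix for the import error.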

After that reinstall, importing TensorFlow fails with:

File ~\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\__init__.py:37
     34 import sys as _sys
     35 import typing as _typing
---> 37 from tensorflow.python.tools import module_util as _module_util
     38 from tensorflow.python.util.lazy_loader import LazyLoader as _LazyLoader
     40 # Make sure code inside the TensorFlow codebase can use tf2.enabled() at import.

File ~\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\python\__init__.py:37
     29 # We aim to keep this file minimal and ideally remove completely.
     30 # If you are adding a new file with @tf_export decorators,
     31 # import it in modules_with_exports.py instead.
     32
     33 # go/tf-wildcard-import
     34 # pylint: disable=wildcard-import,g-bad-import-order,g-import-not-at-top
     36 from tensorflow.python import pywrap_tensorflow as _pywrap_tensorflow
---> 37 from tensorflow.python.eager import context
     39 # pylint: enable=wildcard-import
     40
     41 # Bring in subpackages.
     42 from tensorflow.python import data

File ~\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\python\eager\context.py:29
     26 import numpy as np
     27 import six
---> 29 from tensorflow.core.framework import function_pb2
     30 from tensorflow.core.protobuf import config_pb2
     31 from tensorflow.core.protobuf import coordination_config_pb2

File ~\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\core\framework\function_pb2.py:16
     11 # @@protoc_insertion_point(imports)
     13 _sym_db = _symbol_database.Default()
---> 16 from tensorflow.core.framework import attr_value_pb2 as tensorflow_dot_core_dot_framework_dot_attr__value__pb2
     17 from tensorflow.core.framework import node_def_pb2 as tensorflow_dot_core_dot_framework_dot_node__def__pb2
     18 from tensorflow.core.framework import op_def_pb2 as tensorflow_dot_core_dot_framework_dot_op__def__pb2

File ~\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\core\framework\attr_value_pb2.py:16
     11 # @@protoc_insertion_point(imports)
     13 _sym_db = _symbol_database.Default()
---> 16 from tensorflow.core.framework import tensor_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__pb2
     17 from tensorflow.core.framework import tensor_shape_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2
     18 from tensorflow.core.framework import types_pb2 as tensorflow_dot_core_dot_framework_dot_types__pb2

File ~\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\core\framework\tensor_pb2.py:16
     11 # @@protoc_insertion_point(imports)
     13 _sym_db = _symbol_database.Default()
---> 16 from tensorflow.core.framework import resource_handle_pb2 as tensorflow_dot_core_dot_framework_dot_resource__handle__pb2
     17 from tensorflow.core.framework import tensor_shape_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2
     18 from tensorflow.core.framework import types_pb2 as tensorflow_dot_core_dot_framework_dot_types__pb2

File ~\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\core\framework\resource_handle_pb2.py:16
     11 # @@protoc_insertion_point(imports)
     13 _sym_db = _symbol_database.Default()
---> 16 from tensorflow.core.framework import tensor_shape_pb2 as tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2
     17 from tensorflow.core.framework import types_pb2 as tensorflow_dot_core_dot_framework_dot_types__pb2
     20 DESCRIPTOR = _descriptor.FileDescriptor(
     21   name='tensorflow/core/framework/resource_handle.proto',
     22   package='tensorflow',
   (...)
     26   ,
     27   dependencies=[tensorflow_dot_core_dot_framework_dot_tensor__shape__pb2.DESCRIPTOR,tensorflow_dot_core_dot_framework_dot_types__pb2.DESCRIPTOR,])

File ~\anaconda3\envs\tfgpu\lib\site-packages\tensorflow\core\framework\tensor_shape_pb2.py:36
     13 _sym_db = _symbol_database.Default()
     18 DESCRIPTOR = _descriptor.FileDescriptor(
     19   name='tensorflow/core/framework/tensor_shape.proto',
     20   package='tensorflow',
   (...)
     23   serialized_pb=_b('\n,tensorflow/core/framework/tensor_shape.proto\x12\ntensorflow\"z\n\x10TensorShapeProto\x12-\n\x03\x64im\x18\x02 \x03(\x0b\x32 .tensorflow.TensorShapeProto.Dim\x12\x14\n\x0cunknown_rank\x18\x03 \x01(\x08\x1a!\n\x03\x44im\x12\x0c\n\x04size\x18\x01 \x01(\x03\x12\x0c\n\x04name\x18\x02 \x01(\tB\x87\x01\n\x18org.tensorflow.frameworkB\x11TensorShapeProtosP\x01ZSgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/tensor_shape_go_proto\xf8\x01\x01\x62\x06proto3')
     24 )
     29 _TENSORSHAPEPROTO_DIM = _descriptor.Descriptor(
     30   name='Dim',
     31   full_name='tensorflow.TensorShapeProto.Dim',
     32   filename=None,
     33   file=DESCRIPTOR,
     34   containing_type=None,
     35   fields=[
---> 36     _descriptor.FieldDescriptor(
     37       name='size', full_name='tensorflow.TensorShapeProto.Dim.size', index=0,
     38       number=1, type=3, cpp_type=2, label=1,
     39       has_default_value=False, default_value=0,
     40       message_type=None, enum_type=None, containing_type=None,
     41       is_extension=False, extension_scope=None,
     42       serialized_options=None, file=DESCRIPTOR),
     43     _descriptor.FieldDescriptor(
     44       name='name', full_name='tensorflow.TensorShapeProto.Dim.name', index=1,
     45       number=2, type=9, cpp_type=9, label=1,
     46       has_default_value=False, default_value=_b("").decode('utf-8'),
     47       message_type=None, enum_type=None, containing_type=None,
     48       is_extension=False, extension_scope=None,
     49       serialized_options=None, file=DESCRIPTOR),
     50   ],
     51   extensions=[
     52   ],
     53   nested_types=[],
     54   enum_types=[
     55   ],
     56   serialized_options=None,
     57   is_extendable=False,
     58   syntax='proto3',
     59   extension_ranges=[],
     60   oneofs=[
     61   ],
     62   serialized_start=149,
     63   serialized_end=182,
     64 )
     66 _TENSORSHAPEPROTO = _descriptor.Descriptor(
     67   name='TensorShapeProto',
     68   full_name='tensorflow.TensorShapeProto',
   (...)
    100   serialized_end=182,
    101 )
    103 _TENSORSHAPEPROTO_DIM.containing_type = _TENSORSHAPEPROTO

File ~\anaconda3\envs\tfgpu\lib\site-packages\google\protobuf\descriptor.py:553, in FieldDescriptor.__new__(cls, name, full_name, index, number, type, cpp_type, label, default_value, message_type, enum_type, containing_type, is_extension, extension_scope, options, serialized_options, has_default_value, containing_oneof, json_name, file, create_key)
    547 def __new__(cls, name, full_name, index, number, type, cpp_type, label,
    548             default_value, message_type, enum_type, containing_type,
    549             is_extension, extension_scope, options=None,
    550             serialized_options=None,
    551             has_default_value=True, containing_oneof=None, json_name=None,
    552             file=None, create_key=None):  # pylint: disable=redefined-builtin
--> 553     _message.Message._CheckCalledFromGeneratedFile()
    554     if is_extension:
    555         return _message.default_pool.FindExtensionByName(full_name)

TypeError: Descriptors cannot be created directly. If this call came from a _pb2.py file, your generated code is out of date and must be regenerated with protoc >= 3.19.0. If you cannot immediately regenerate your protos, some other possible workarounds are:

  1. Downgrade the protobuf package to 3.20.x or lower.
  2. Set PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python (but this will use pure-Python parsing and will be much slower).

More information: https://developers.google.com/protocol-buffers/docs/news/2022-05-06#python-updates
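For reference, the two workarounds named in that message can be applied roughly as follows (untested in this environment). Option 1 is a pip pin, e.g. pip install "protobuf<3.21". Option 2 only takes effect if the variable is set before TensorFlow is imported, either in the shell (set PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python) or at the top of the local test_MSPFN.py, for example:

    import os

    # Workaround 2 from the protobuf error above: force the pure-Python protobuf
    # parser (slower) before tensorflow is imported. Untested suggestion for the
    # local copy of test_MSPFN.py; it must run before the tensorflow import below.
    os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python"

    import tensorflow as tf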