Closed: BananaJuices1 closed this issue 1 year ago
I have data in this format: image_pairs = [(image1, image2), (image1, image3), ...] and labels = [True, True, ...].
Right now I am trying to create a binary file with:
import struct
from PIL import Image

output_file = "output.bin"

with open(output_file, "wb") as bin_file:
    for i in range(len(image_pairs)):
        image_pair = image_pairs[i]
        label = labels[i]

        # Open and load the images
        image_1 = Image.open(image_pair[0])
        image_2 = Image.open(image_pair[1])

        # Convert images to bytes
        image_1_bytes = image_1.tobytes()
        image_2_bytes = image_2.tobytes()

        # Write the length of image 1 and image 2 as 4-byte integers
        bin_file.write(struct.pack("i", len(image_1_bytes)))
        bin_file.write(struct.pack("i", len(image_2_bytes)))

        # Write the image bytes
        bin_file.write(image_1_bytes)
        bin_file.write(image_2_bytes)

        # Write the label as a 1-byte boolean
        bin_file.write(struct.pack("?", label))
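(For reference, reading this layout back needs the matching unpack calls, and since tobytes() drops the image size and mode, those would have to be known separately. A sketch of the reader I had in mind for the format above:)

import struct

def read_pairs(path):
    """Read back the (len1, len2, img1_bytes, img2_bytes, label) records written above."""
    pairs, labels = [], []
    with open(path, "rb") as f:
        while True:
            header = f.read(8)                      # two 4-byte ints
            if len(header) < 8:
                break
            len_1, len_2 = struct.unpack("ii", header)
            img_1 = f.read(len_1)                   # raw pixel bytes (no size/mode stored)
            img_2 = f.read(len_2)
            label = struct.unpack("?", f.read(1))[0]
            pairs.append((img_1, img_2))
            labels.append(label)
    return pairs, labels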
But when I train, it throws an error:
data_path = '/content/drive/MyDrive/data'

basic_model = GhostFaceNets.buildin_models("ghostnetv1", dropout=0, emb_shape=512, output_layer='GDC', bn_momentum=0.9, bn_epsilon=1e-5)
basic_model = GhostFaceNets.add_l2_regularizer_2_model(basic_model, weight_decay=5e-4, apply_to_batch_normal=False)
basic_model = GhostFaceNets.replace_ReLU_with_PReLU(basic_model)

optimizer = tfa.optimizers.AdamW(learning_rate=0.001, weight_decay=5e-5)
eval_paths = ["/content/drive/MyDrive/eval_data.bin"]

# Strides of 2
tt = train.Train(data_path, eval_paths=eval_paths, eval_freq=1,
                 save_path='ghostnetv1_w1.3_s2_epoch51.h5',
                 basic_model=basic_model, model=None, lr_base=0.1, lr_decay=0.5,
                 lr_decay_steps=45, lr_min=1e-5, batch_size=128, random_status=1,
                 output_weight_decay=1)

sch = [
    {"loss": losses.ArcfaceLoss(scale=32), "epoch": 1, "optimizer": optimizer},
    {"loss": losses.ArcfaceLoss(scale=64), "epoch": 350},
]
tt.train(sch, 0)
UnpicklingError                           Traceback (most recent call last)
/usr/local/lib/python3.10/dist-packages/numpy/lib/npyio.py in load(file, mmap_mode, allow_pickle, fix_imports, encoding)
    437         try:
--> 438             return pickle.load(fid, **pickle_kwargs)
    439         except Exception as e:

UnpicklingError: invalid load key, '\x04'.

The above exception was the direct cause of the following exception:

UnpicklingError                           Traceback (most recent call last)
4 frames
/usr/local/lib/python3.10/dist-packages/numpy/lib/npyio.py in load(file, mmap_mode, allow_pickle, fix_imports, encoding)
    438             return pickle.load(fid, **pickle_kwargs)
    439         except Exception as e:
--> 440             raise pickle.UnpicklingError(
    441                 f"Failed to interpret file {file!r} as a pickle") from e
    442

UnpicklingError: Failed to interpret file '/content/drive/MyDrive/IMAGINE_LOSING_TIL23/eval_data.bin' as a pickle
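As far as I can tell from the traceback, np.load only parses .npy/.npz files itself and hands anything else to pickle.load, so my struct-packed file was never going to load this way. A minimal reproduction outside the repo (file name made up):

import struct
import numpy as np

# Any non-pickle binary file hits the same fallback path in numpy.
with open("demo.bin", "wb") as f:
    f.write(struct.pack("i", 1234))

try:
    np.load("demo.bin", allow_pickle=True)   # not .npy/.npz, so numpy falls back to pickle.load
except Exception as e:
    print(type(e).__name__, e)               # UnpicklingError: Failed to interpret file ... as a pickle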
When I try to convert to a .pkl file:

import pickle

output_file = "eval_data.pkl"

data = {
    'image_pairs': image_pairs,
    'labels': labels
}

with open(output_file, 'wb') as file:
    pickle.dump(data, file)
and use that instead, I run into this error:

ValueError                                Traceback (most recent call last)
in <cell line: 11>()
      9 eval_paths = ["/content/drive/MyDrive/IMAGINE_LOSING_TIL23/eval_data.pkl"]
     10 # Strides of 2
---> 11 tt = train.Train(data_path, eval_paths=eval_paths, eval_freq = 1,
     12     save_path='/content/drive/MyDrive/GhostFaceNets/try1/ghostnetv1_w1.3_s2_epoch51.h5',
     13     basic_model="/content/drive/MyDrive/GhostFaceNets/model/GN_W0.5_S2_ArcFace_epoch16.h5", model=None, lr_base=0.1, lr_decay=0.5, lr_decay_steps=45, lr_min=1e-5,

9 frames
/usr/local/lib/python3.10/dist-packages/tensorflow/python/framework/tensor_spec.py in _unbatch(self)
    364   def _unbatch(self):
    365     if self._shape.ndims == 0:
--> 366       raise ValueError("Unbatching a tensor is only supported for rank >= 1")
    367     return TensorSpec(self._shape[1:], self._dtype)
    368

ValueError: Unbatching a tensor is only supported for rank >= 1
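My current guess is that the evaluator wants actual image data in an insightface-style pickled (bins, issame_list) pair rather than a dict of file paths, so something like the sketch below is what I would try next. It reuses image_pairs and labels from above; the exact layout is my assumption and I have not tested it against this repo.

import pickle

# Assumed layout: bins is a flat list of encoded image bytes (pair i -> indices 2*i, 2*i + 1),
# issame_list is one bool per pair, mirroring the lfw.bin-style eval sets as I understand them.
bins, issame_list = [], []
for (img_a_path, img_b_path), label in zip(image_pairs, labels):
    for path in (img_a_path, img_b_path):
        with open(path, "rb") as f:
            bins.append(f.read())        # keep the encoded JPEG/PNG bytes as-is
    issame_list.append(bool(label))

with open("eval_data.bin", "wb") as f:
    pickle.dump((bins, issame_list), f)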
Any help is appreciated, thank you!
Hi, sorry for the late response. I will check this soon.
Hi, did you try the discussion below?
https://github.com/leondgarse/Keras_insightface/discussions/71
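If it helps, the stock eval sets are, as far as I recall, just a pickled (bins, issame_list) pair that numpy can load back with allow_pickle, so a quick sanity check on whatever .bin you generate would be:

import numpy as np

# Expect a list of encoded image bytes plus one bool per pair
# (len(bins) == 2 * len(issame_list)) if the file matches the stock format.
bins, issame_list = np.load("eval_data.bin", encoding="bytes", allow_pickle=True)
print(len(bins), len(issame_list))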
Check this too: https://github.com/leondgarse/Keras_insightface/issues/39