使用的是 phoenix-T 数据集,为什么会找不到数据?是哪里的配置需要更改吗?
完整报错:
Current dir exists, do you want to remove and refresh it?
y
Dir removed !
Loading model
[0, 1]
Loading model finished.
Loading data
train 7096
Apply training transform.
0%| | 0/3548 [00:00<?, ?it/s]
Traceback (most recent call last):
File "/mnt/d/论文demo/CorrNet-main/CorrNet-main/main.py", line 251, in <module>
processor.start()
File "/mnt/d/论文demo/CorrNet-main/CorrNet-main/main.py", line 62, in start
seq_train(self.data_loader['train'], self.model, self.optimizer,
File "/mnt/d/论文demo/CorrNet-main/CorrNet-main/seq_scripts.py", line 20, in seq_train
for batch_idx, data in enumerate(tqdm(loader)):
File "/home/congwenhui/anaconda3/lib/python3.11/site-packages/tqdm/std.py", line 1178, in __iter__
for obj in iterable:
File "/home/congwenhui/anaconda3/lib/python3.11/site-packages/torch/utils/data/dataloader.py", line 633, in __next__
data = self._next_data()
^^^^^^^^^^^^^^^^^
File "/home/congwenhui/anaconda3/lib/python3.11/site-packages/torch/utils/data/dataloader.py", line 1345, in _next_data
return self._process_data(data)
^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/congwenhui/anaconda3/lib/python3.11/site-packages/torch/utils/data/dataloader.py", line 1371, in _process_data
data.reraise()
File "/home/congwenhui/anaconda3/lib/python3.11/site-packages/torch/_utils.py", line 644, in reraise
raise exception
IndexError: Caught IndexError in DataLoader worker process 0.
Original Traceback (most recent call last):
File "/home/congwenhui/anaconda3/lib/python3.11/site-packages/torch/utils/data/_utils/worker.py", line 308, in _worker_loop
data = fetcher.fetch(index)
^^^^^^^^^^^^^^^^^^^^
File "/home/congwenhui/anaconda3/lib/python3.11/site-packages/torch/utils/data/_utils/fetch.py", line 51, in fetch
data = [self.dataset[idx] for idx in possibly_batched_index]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/congwenhui/anaconda3/lib/python3.11/site-packages/torch/utils/data/_utils/fetch.py", line 51, in <listcomp>
data = [self.dataset[idx] for idx in possibly_batched_index]
File "/mnt/d/论文demo/CorrNet-main/CorrNet-main/dataset/dataloader_video.py", line 50, in __getitem__
input_data, label = self.normalize(input_data, label)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/d/论文demo/CorrNet-main/CorrNet-main/dataset/dataloader_video.py", line 87, in normalize
video, label = self.data_aug(video, label, file_id)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/mnt/d/论文demo/CorrNet-main/CorrNet-main/utils/video_augmentation.py", line 24, in __call__
image = t(image)
^^^^^^^^
File "/mnt/d/论文demo/CorrNet-main/CorrNet-main/utils/video_augmentation.py", line 119, in __call__
if isinstance(clip[0], np.ndarray):
~~~~^^^
IndexError: list index out of range
使用的是 phoenix-T 数据集,为什么会找不到数据?是哪里的配置需要更改吗? 完整报错: Current dir exists, do you want to remove and refresh it? y Dir removed ! Loading model [0, 1] Loading model finished. Loading data train 7096 Apply training transform.
train 7096 Apply testing transform.
dev 519 Apply testing transform.
test 642 Apply testing transform.
Loading data finished. .git does not exist in current dir [ Fri May 17 09:25:11 2024 ] Parameters: {'work_dir': './work_dir/baseline_res18/', 'config': './configs/baseline.yaml', 'random_fix': True, 'device': '0,1', 'phase': 'train', 'save_interval': 5, 'random_seed': 0, 'eval_interval': 1, 'print_log': True, 'log_interval': 10000, 'evaluate_tool': 'python', 'feeder': 'dataset.dataloader_video.BaseFeeder', 'dataset': 'phoenix2014-T', 'dataset_info': {'dataset_root': './dataset/phoenix2014-T', 'dict_path': './preprocess/phoenix2014-T/gloss_dict.npy', 'evaluation_dir': './evaluation/slr_eval', 'evaluation_prefix': 'phoenix2014-T-groundtruth'}, 'num_worker': 10, 'feeder_args': {'mode': 'test', 'datatype': 'video', 'num_gloss': -1, 'drop_ratio': 1.0, 'frame_interval': 1, 'image_scale': 1.0, 'input_size': 224, 'prefix': './dataset/phoenix2014-T', 'transform_mode': False}, 'model': 'slr_network.SLRModel', 'model_args': {'num_classes': 1116, 'c2d_type': 'resnet18', 'conv_type': 2, 'use_bn': 1, 'share_classifier': True, 'weight_norm': True}, 'load_weights': None, 'load_checkpoints': None, 'decode_mode': 'beam', 'ignore_weights': [], 'batch_size': 2, 'test_batch_size': 2, 'loss_weights': {'SeqCTC': 1.0, 'ConvCTC': 1.0, 'Dist': 25.0}, 'optimizer_args': {'optimizer': 'Adam', 'base_lr': 0.0001, 'step': [20, 35], 'learning_ratio': 1, 'weight_decay': 0.0001, 'start_epoch': 0, 'nesterov': False}, 'num_epoch': 40}
0%| | 0/3548 [00:00<?, ?it/s] Traceback (most recent call last): File "/mnt/d/论文demo/CorrNet-main/CorrNet-main/main.py", line 251, in <module>
processor.start()
File "/mnt/d/论文demo/CorrNet-main/CorrNet-main/main.py", line 62, in start
seq_train(self.data_loader['train'], self.model, self.optimizer,
File "/mnt/d/论文demo/CorrNet-main/CorrNet-main/seq_scripts.py", line 20, in seq_train
for batch_idx, data in enumerate(tqdm(loader)):
File "/home/congwenhui/anaconda3/lib/python3.11/site-packages/tqdm/std.py", line 1178, in __iter__
for obj in iterable:
File "/home/congwenhui/anaconda3/lib/python3.11/site-packages/torch/utils/data/dataloader.py", line 633, in __next__
data = self._next_data()
^^^^^^^^^^^^^^^^^
File "/home/congwenhui/anaconda3/lib/python3.11/site-packages/torch/utils/data/dataloader.py", line 1345, in _next_data
return self._process_data(data)
^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/congwenhui/anaconda3/lib/python3.11/site-packages/torch/utils/data/dataloader.py", line 1371, in _process_data
data.reraise()
File "/home/congwenhui/anaconda3/lib/python3.11/site-packages/torch/_utils.py", line 644, in reraise
raise exception
IndexError: Caught IndexError in DataLoader worker process 0.
Original Traceback (most recent call last):
File "/home/congwenhui/anaconda3/lib/python3.11/site-packages/torch/utils/data/_utils/worker.py", line 308, in _worker_loop
data = fetcher.fetch(index)
^^^^^^^^^^^^^^^^^^^^
File "/home/congwenhui/anaconda3/lib/python3.11/site-packages/torch/utils/data/_utils/fetch.py", line 51, in fetch
data = [self.dataset[idx] for idx in possibly_batched_index]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/home/congwenhui/anaconda3/lib/python3.11/site-packages/torch/utils/data/_utils/fetch.py", line 51, in <listcomp>
data = [self.dataset[idx] for idx in possibly_batched_index]