0%| | 0/3548 [00:00<?, ?it/s]
Traceback (most recent call last):
File "/media/mohan/mohan/CorrNet_Plus/CorrNet_Plus_CSLR/main.py", line 301, in
processor.start()
File "/media/mohan/mohan/CorrNet_Plus/CorrNet_Plus_CSLR/main.py", line 67, in start
seq_train(self.data_loader['train'], self.model, self.optimizer,
File "/media/mohan/mohan/CorrNet_Plus/CorrNet_Plus_CSLR/seq_scripts.py", line 21, in seq_train
for batch_idx, data in enumerate(tqdm(loader)):
File "/media/mohan/mohan/anaconda3/envs/CorrNet_Plus/lib/python3.12/site-packages/tqdm/std.py", line 1181, in iter
for obj in iterable:
File "/media/mohan/mohan/anaconda3/envs/CorrNet_Plus/lib/python3.12/site-packages/torch/utils/data/dataloader.py", line 708, in next
data = self._next_data()
^^^^^^^^^^^^^^^^^
File "/media/mohan/mohan/anaconda3/envs/CorrNet_Plus/lib/python3.12/site-packages/torch/utils/data/dataloader.py", line 1480, in _next_data
return self._process_data(data)
^^^^^^^^^^^^^^^^^^^^^^^^
File "/media/mohan/mohan/anaconda3/envs/CorrNet_Plus/lib/python3.12/site-packages/torch/utils/data/dataloader.py", line 1505, in _process_data
data.reraise()
File "/media/mohan/mohan/anaconda3/envs/CorrNet_Plus/lib/python3.12/site-packages/torch/_utils.py", line 733, in reraise
raise exception
IndexError: Caught IndexError in DataLoader worker process 0.
Original Traceback (most recent call last):
File "/media/mohan/mohan/anaconda3/envs/CorrNet_Plus/lib/python3.12/site-packages/torch/utils/data/_utils/worker.py", line 349, in _worker_loop
data = fetcher.fetch(index) # type: ignore[possibly-undefined]
^^^^^^^^^^^^^^^^^^^^
File "/media/mohan/mohan/anaconda3/envs/CorrNet_Plus/lib/python3.12/site-packages/torch/utils/data/_utils/fetch.py", line 52, in fetch
data = [self.dataset[idx] for idx in possibly_batched_index]
File "/media/mohan/mohan/CorrNet_Plus/CorrNet_Plus_CSLR/dataset/dataloader_video.py", line 53, in getitem
input_data, label = self.normalize(input_data, label)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/media/mohan/mohan/CorrNet_Plus/CorrNet_Plus_CSLR/dataset/dataloader_video.py", line 90, in normalize
video, label = self.data_aug(video, label, file_id)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/media/mohan/mohan/CorrNet_Plus/CorrNet_Plus_CSLR/utils/video_augmentation.py", line 24, in call
image = t(image)
^^^^^^^^
File "/media/mohan/mohan/CorrNet_Plus/CorrNet_Plus_CSLR/utils/video_augmentation.py", line 120, in call
if isinstance(clip[0], np.ndarray):
~~~~^^^
IndexError: list index out of range
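The IndexError at `clip[0]` means the list of frames handed to the augmentation pipeline is empty, i.e. the feeder's glob for that sample's frame directory matched nothing. A minimal defensive-loading sketch (a hypothetical helper, not part of the repo; it assumes frames are stored as image files in a per-video folder) that fails with a clearer message instead:

```python
import glob

import numpy as np
from PIL import Image


def load_clip(frame_glob):
    """Load all frames matching `frame_glob`; fail loudly if none are found."""
    paths = sorted(glob.glob(frame_glob))
    if not paths:
        # An empty list here is what later surfaces as
        # "IndexError: list index out of range" when clip[0] is accessed
        # in utils/video_augmentation.py.
        raise FileNotFoundError(
            f"No frames matched {frame_glob!r}; check the dataset path/symlink.")
    return [np.array(Image.open(p)) for p in paths]
```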
If you get an error like IndexError: list index out of range on the PHOENIX2014-T dataset, first check that the dataset path is correctly linked (symlinked) to the real dataset location, or refer to this issue to tackle the problem.
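As a quick sanity check before training, you can verify that the prefix from `feeder_args` actually resolves to frame files. A minimal sketch, assuming the usual PHOENIX-2014-T layout (`features/fullFrame-210x260px/<split>/<video>/*.png`); adjust the pattern if your preprocessing stores frames elsewhere:

```python
import glob
import os

prefix = "./dataset/phoenix2014-T"  # same value as feeder_args['prefix'] in the config

# Assumed PHOENIX-2014-T frame layout; change the pattern if your copy differs.
pattern = os.path.join(prefix, "features", "fullFrame-210x260px", "train", "*", "*.png")
frames = glob.glob(pattern)

if not frames:
    print(f"No frames found under {pattern}; the dataset path/symlink "
          "probably does not point at the real PHOENIX-2014-T release.")
else:
    print(f"Found {len(frames)} frames, e.g. {frames[0]}")
```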
Loading model
Loading model finished.
Loading data
train 7096
Apply training transform.
dev 519
Apply testing transform.
test 642
Apply testing transform.
Loading data finished.
.git does not exist in current dir
[ Sat Feb 22 13:32:49 2025 ] Parameters:
{'work_dir': './work_dir/baseline/', 'config': './configs/baseline.yaml', 'random_fix': True, 'device': '0', 'phase': 'train', 'save_interval': 5, 'random_seed': 0, 'eval_interval': 1, 'print_log': True, 'log_interval': 10000, 'evaluate_tool': 'sclite', 'feeder': 'dataset.dataloader_video.BaseFeeder', 'dataset': 'phoenix2014-T', 'dataset_info': {'dataset_root': './dataset/phoenix2014-T', 'dict_path': './preprocess/phoenix2014-T/gloss_dict.npy', 'evaluation_dir': './evaluation/slr_eval', 'evaluation_prefix': 'phoenix2014-T-groundtruth'}, 'num_worker': 10, 'feeder_args': {'mode': 'test', 'datatype': 'video', 'num_gloss': -1, 'drop_ratio': 1.0, 'frame_interval': 1, 'image_scale': 1.0, 'input_size': 224, 'prefix': './dataset/phoenix2014-T', 'transform_mode': False}, 'model': 'slr_network.SLRModel', 'model_args': {'num_classes': 1116, 'c2d_type': 'resnet18', 'conv_type': 2, 'use_bn': 1, 'share_classifier': True, 'weight_norm': True}, 'load_weights': None, 'load_checkpoints': None, 'decode_mode': 'beam', 'ignore_weights': [], 'batch_size': 2, 'test_batch_size': 2, 'loss_weights': {'SeqCTC': 1.0, 'ConvCTC': 1.0, 'Dist': 25.0, 'Cu': 0.0005, 'Cp': 0.0005}, 'optimizer_args': {'optimizer': 'Adam', 'base_lr': 0.0001, 'step': [40, 60], 'learning_ratio': 1, 'weight_decay': 0.0001, 'start_epoch': 0, 'nesterov': False}, 'num_epoch': 80}
Training then fails again with the same IndexError traceback as above, raised in DataLoader worker process 0.
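For errors that are re-raised from a DataLoader worker, it can help to fetch samples in the main process so the original traceback (and the offending file path) is visible. A hypothetical sketch, not part of the repo, that wraps any dataset in a single-process loader:

```python
from torch.utils.data import DataLoader


def debug_loader(dataset, collate_fn=None):
    # num_workers=0 runs __getitem__ in the main process, so the IndexError
    # is raised directly at the failing sample instead of being re-raised
    # from a worker.
    return DataLoader(dataset, batch_size=1, shuffle=False,
                      num_workers=0, collate_fn=collate_fn)


# Example: next(iter(debug_loader(train_dataset))) stops at the exact sample
# whose frame list is empty.
```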