Open IceHowe opened 2 years ago
(DAnA) zhanghao@dl-pc:~/PycharmProjects/Dual-awareness-Attention-for-Few-shot-Object-Detection$ python train.py --dataset coco_base --flip --net DAnA --lr 0.001 --lr_decay_step 12 --bs 4 --epochs 16 --disp_interval 20 --save_dir models/DAnA --way 2 --shot 3 Namespace(ascale=4, batch_size=4, cfg_file='cfgs/res50.yml', checkepoch=1, checkpoint=0, dataset='coco_base', disp_interval=20, dlog=False, eval=False, eval_dir=None, fewshot=False, imdb_name='coco_60_set1', imlog=False, large_scale=False, load_dir='models', lr=0.001, lr_decay_gamma=0.1, lr_decay_step=12, mGPUs=False, max_epochs=16, net='DAnA', num_workers=8, old_n_classes=81, optimizer='sgd', resume=False, save_dir='models/DAnA', set_cfgs=['ANCHOR_SCALES', '[4, 8, 16, 32]', 'ANCHOR_RATIOS', '[0.5,1,2]', 'MAX_NUM_GT_BOXES', '50'], shot=3, start_epoch=1, sup_dir='all', use_flip=True, way=2) coco_60_set1 loading annotations into memory... Traceback (most recent call last): File "train.py", line 50, in imdb, roidb, ratio_list, ratio_index = combined_roidb(args.imdb_name) File "/home/zhanghao/PycharmProjects/Dual-awareness-Attention-for-Few-shot-Object-Detection/lib/roi_data_layer/roidb.py", line 121, in combined_roidb roidbs = [get_roidb(s) for s in imdb_names.split('+')] File "/home/zhanghao/PycharmProjects/Dual-awareness-Attention-for-Few-shot-Object-Detection/lib/roi_data_layer/roidb.py", line 121, in roidbs = [get_roidb(s) for s in imdb_names.split('+')] File "/home/zhanghao/PycharmProjects/Dual-awareness-Attention-for-Few-shot-Object-Detection/lib/roi_data_layer/roidb.py", line 114, in get_roidb imdb = get_imdb(imdb_name) File "/home/zhanghao/PycharmProjects/Dual-awareness-Attention-for-Few-shot-Object-Detection/lib/datasets/factory.py", line 97, in get_imdb return sets[name]() File "/home/zhanghao/PycharmProjects/Dual-awareness-Attention-for-Few-shot-Object-Detection/lib/datasets/factory.py", line 70, in sets[name] = (lambda split=split, year=year: coco_split(split, year)) File 
"/home/zhanghao/PycharmProjects/Dual-awareness-Attention-for-Few-shot-Object-Detection/lib/datasets/coco_split.py", line 38, in init self._COCO = COCO(self._get_ann_file()) File "/home/zhanghao/anaconda3/envs/DAnA/lib/python3.6/site-packages/pycocotools-2.0-py3.6-linux-x86_64.egg/pycocotools/coco.py", line 84, in init dataset = json.load(open(annotation_file, 'r')) File "/home/zhanghao/anaconda3/envs/DAnA/lib/python3.6/json/init.py", line 299, in load parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, **kw) File "/home/zhanghao/anaconda3/envs/DAnA/lib/python3.6/json/init.py", line 354, in loads return _default_decoder.decode(s) File "/home/zhanghao/anaconda3/envs/DAnA/lib/python3.6/json/decoder.py", line 339, in decode obj, end = self.raw_decode(s, idx=_w(s, 0).end()) File "/home/zhanghao/anaconda3/envs/DAnA/lib/python3.6/json/decoder.py", line 355, in raw_decode obj, end = self.scan_once(s, idx) json.decoder.JSONDecodeError: Expecting ',' delimiter: line 1 column 56360961 (char 56360960)
I followed the instructions on GitHub exactly, but I got this error. Could the author or anyone else help? Thanks!
(DAnA) zhanghao@dl-pc:~/PycharmProjects/Dual-awareness-Attention-for-Few-shot-Object-Detection$ python train.py --dataset coco_base --flip --net DAnA --lr 0.001 --lr_decay_step 12 --bs 4 --epochs 16 --disp_interval 20 --save_dir models/DAnA --way 2 --shot 3 Namespace(ascale=4, batch_size=4, cfg_file='cfgs/res50.yml', checkepoch=1, checkpoint=0, dataset='coco_base', disp_interval=20, dlog=False, eval=False, eval_dir=None, fewshot=False, imdb_name='coco_60_set1', imlog=False, large_scale=False, load_dir='models', lr=0.001, lr_decay_gamma=0.1, lr_decay_step=12, mGPUs=False, max_epochs=16, net='DAnA', num_workers=8, old_n_classes=81, optimizer='sgd', resume=False, save_dir='models/DAnA', set_cfgs=['ANCHOR_SCALES', '[4, 8, 16, 32]', 'ANCHOR_RATIOS', '[0.5,1,2]', 'MAX_NUM_GT_BOXES', '50'], shot=3, start_epoch=1, sup_dir='all', use_flip=True, way=2) coco_60_set1 loading annotations into memory... Traceback (most recent call last): File "train.py", line 50, in
imdb, roidb, ratio_list, ratio_index = combined_roidb(args.imdb_name)
File "/home/zhanghao/PycharmProjects/Dual-awareness-Attention-for-Few-shot-Object-Detection/lib/roi_data_layer/roidb.py", line 121, in combined_roidb
roidbs = [get_roidb(s) for s in imdb_names.split('+')]
File "/home/zhanghao/PycharmProjects/Dual-awareness-Attention-for-Few-shot-Object-Detection/lib/roi_data_layer/roidb.py", line 121, in
roidbs = [get_roidb(s) for s in imdb_names.split('+')]
File "/home/zhanghao/PycharmProjects/Dual-awareness-Attention-for-Few-shot-Object-Detection/lib/roi_data_layer/roidb.py", line 114, in get_roidb
imdb = get_imdb(imdb_name)
File "/home/zhanghao/PycharmProjects/Dual-awareness-Attention-for-Few-shot-Object-Detection/lib/datasets/factory.py", line 97, in get_imdb
return sets[name]()
File "/home/zhanghao/PycharmProjects/Dual-awareness-Attention-for-Few-shot-Object-Detection/lib/datasets/factory.py", line 70, in
sets[name] = (lambda split=split, year=year: coco_split(split, year))
File "/home/zhanghao/PycharmProjects/Dual-awareness-Attention-for-Few-shot-Object-Detection/lib/datasets/coco_split.py", line 38, in init
self._COCO = COCO(self._get_ann_file())
File "/home/zhanghao/anaconda3/envs/DAnA/lib/python3.6/site-packages/pycocotools-2.0-py3.6-linux-x86_64.egg/pycocotools/coco.py", line 84, in init
dataset = json.load(open(annotation_file, 'r'))
File "/home/zhanghao/anaconda3/envs/DAnA/lib/python3.6/json/init.py", line 299, in load
parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, **kw)
File "/home/zhanghao/anaconda3/envs/DAnA/lib/python3.6/json/init.py", line 354, in loads
return _default_decoder.decode(s)
File "/home/zhanghao/anaconda3/envs/DAnA/lib/python3.6/json/decoder.py", line 339, in decode
obj, end = self.raw_decode(s, idx=_w(s, 0).end())
File "/home/zhanghao/anaconda3/envs/DAnA/lib/python3.6/json/decoder.py", line 355, in raw_decode
obj, end = self.scan_once(s, idx)
json.decoder.JSONDecodeError: Expecting ',' delimiter: line 1 column 56360961 (char 56360960)
I followed the instructions on GitHub exactly, but I got this error. Could the author or anyone else help? Thanks!