Open Bentonmaster opened 1 year ago
---------------------------------------------------------------------------
IndexError Traceback (most recent call last)
Cell In[3], line 20
18 ret, frame = cap.read()
19 frame = cv2.cvtColor(frame,cv2.COLOR_BGR2RGB)
---> 20 pred_mask = segtracker.seg(frame)
21 torch.cuda.empty_cache()
22 obj_ids = np.unique(pred_mask)
File ~/workspace/Segment-and-Track-Anything-main/SegTracker.py:45, in SegTracker.seg(self, frame)
38 '''
39 Arguments:
40 frame: numpy array (h,w,3)
41 Return:
42 origin_merged_mask: numpy array (h,w)
43 '''
44 frame = frame[:, :, ::-1]
---> 45 anns = self.sam.everything_generator.generate(frame)
47 # anns is a list recording all predictions in an image
48 if len(anns) == 0:
File ~/workspace/anaconda3/envs/track/lib/python3.9/site-packages/torch/utils/_contextlib.py:115, in context_decorator.<locals>.decorate_context(*args, **kwargs)
112 @functools.wraps(func)
113 def decorate_context(*args, **kwargs):
114 with ctx_factory():
--> 115 return func(*args, **kwargs)
File ~/workspace/Segment-and-Track-Anything-main/sam/segment_anything/automatic_mask_generator.py:163, in SamAutomaticMaskGenerator.generate(self, image)
138 """
139 Generates masks for the given image.
140
(...)
159 the mask, given in XYWH format.
160 """
162 # Generate masks
--> 163 mask_data = self._generate_masks(image)
165 # Filter small disconnected regions and holes in masks
166 if self.min_mask_region_area > 0:
File ~/workspace/Segment-and-Track-Anything-main/sam/segment_anything/automatic_mask_generator.py:212, in SamAutomaticMaskGenerator._generate_masks(self, image)
209 # Remove duplicate masks between crops
210 if len(crop_boxes) > 1:
211 # Prefer masks from smaller crops
--> 212 scores = 1 / box_area(data["crop_boxes"])
213 scores = scores.to(data["boxes"].device)
214 keep_by_nms = batched_nms(
215 data["boxes"].float(),
216 scores,
217 torch.zeros(len(data["boxes"])), # categories
218 iou_threshold=self.crop_nms_thresh,
219 )
File ~/workspace/anaconda3/envs/track/lib/python3.9/site-packages/torchvision/ops/boxes.py:235, in box_area(boxes)
233 _log_api_usage_once(box_area)
234 boxes = _upcast(boxes)
--> 235 return (boxes[:, 2] - boxes[:, 0]) * (boxes[:, 3] - boxes[:, 1])
IndexError: too many indices for tensor of dimension 1
When I run demo.ipynb, the same error occurs.
Is this a version issue? I haven't been able to find a solution.