thecondofitz opened this issue 6 years ago
Did you solve this problem?
Did you get any answer to this problem? I am also facing the same issue.
I managed it using cv2.findContours to calculate the pixel area of each individual mask; here is my code:
# iterate over all detected instances
# (this loop lives inside display_instances, see the call below)
for i in range(n_instances):
    if not np.any(boxes[i]):
        continue
    y1, x1, y2, x2 = boxes[i]
    label = names[ids[i]]
    color = class_dict[label]
    score = scores[i] if scores is not None else None
    caption = '{} {:.2f}'.format(label, score) if score else label

    mask = masks[:, :, i]
    mask = mask.astype(np.uint8) * 255  # convert mask into 0/255 format
    # [-2:] keeps this working on both OpenCV 3 (three return values) and OpenCV 4 (two)
    contours, hierarchy = cv2.findContours(mask, cv2.RETR_LIST, cv2.CHAIN_APPROX_SIMPLE)[-2:]
    # assumes one contour per mask; only the first contour found is measured
    contour_area = cv2.contourArea(contours[0])  # ergot area in pixels
    print("Ergot area {} :".format(i), contour_area)

    image = apply_mask(image, mask, color)
    image = cv2.rectangle(image, (x1, y1), (x2, y2), color, 2)
    image = cv2.putText(image, caption, (x1, y1), cv2.FONT_HERSHEY_COMPLEX, 0.7, color, 2)
    # cv2_imshow(image)
return image
masked_image = display_instances(img, r['rois'], r['masks'], r['class_ids'], dataset_train.class_names, r['scores'])
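One caveat: cv2.contourArea measures the polygon enclosed by a single contour, so it can differ from the raw pixel count when a mask is ragged or breaks into several contours (and only the first contour is measured above). A minimal alternative sketch, assuming masks is the usual H x W x N boolean array returned by Mask R-CNN, is to count the mask pixels directly:

# pixel area of instance i: counts every True pixel in its mask,
# so it also covers masks made of multiple contours
pixel_area = int(masks[:, :, i].sum())
print("Pixel area of instance {}:".format(i), pixel_area)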
I'm finally done training on my own dataset, and I'd like to extract the features of each mask for thousands of images. I thought of re-purposing the code from the CrowdAI mapping challenge notebook and adding a line to it, but that line calculates the combined area of all the masks in an image, so every instance (class_id) in an image ends up reporting the same area.
Here's the line I'm adding:
_mask["area"] = int(np.reshape(r['masks'], (-1, r['masks'].shape[-1])).astype(np.float32).sum())
to this code:
# imports assumed from the CrowdAI notebook this is adapted from
import os
import numpy as np
import skimage.io
import tqdm
from pycocotools import mask as maskUtils

_final_object = []  # collected results, written out below

for files in tqdm.tqdm(ALL_FILES):
    images = [skimage.io.imread(x) for x in files]
    predictions = model.detect(images, verbose=0)
    for _idx, r in enumerate(predictions):
        _file = files[_idx]
        image_id = os.path.basename(_file)
        for _idx, class_id in enumerate(r["class_ids"]):
            # classes 1, 2 and 3 get identical treatment; category_id simply mirrors class_id
            if class_id in (1, 2, 3):
                mask = r["masks"].astype(np.uint8)[:, :, _idx]
                bbox = np.around(r["rois"][_idx], 1)
                bbox = [float(x) for x in bbox]
                _result = {}
                _result["image_id"] = image_id
                _result["category_id"] = int(class_id)
                _result["score"] = float(r["scores"][_idx])
                _mask = maskUtils.encode(np.asfortranarray(mask))
                # the added line: note this sums every mask in the stack, not just instance _idx
                _mask["area"] = int(np.reshape(r['masks'], (-1, r['masks'].shape[-1])).astype(np.float32).sum())
                _mask["counts"] = _mask["counts"].decode("UTF-8")
                _result["segmentation"] = _mask
                _result["bbox"] = [bbox[1], bbox[0], bbox[3] - bbox[1], bbox[2] - bbox[0]]
                _final_object.append(_result)
import json

print("Writing JSON...")
with open("predictions.json", "w") as fp:
    fp.write(json.dumps(_final_object))
After this, I save it as JSON, and while the result looks similar to an annotation.json dataset file, it lacks segmentation_ids. I'm sorry if this is an amateur question; I've been looking for a solution for days and have stopped making progress.
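For reference, this is a sketch of the per-instance version I'm aiming for, replacing the area line inside the loop above. It assumes r['masks'] is the usual H x W x N array and _idx is the inner-loop instance index; the maskUtils.area variant and the running id are untested guesses on my part:

# area of this instance only: index the single mask before summing,
# instead of summing over the whole H x W x N stack
_mask["area"] = int(r['masks'][:, :, _idx].astype(np.float32).sum())
# pycocotools can also compute the area from the encoded RLE:
# _mask["area"] = int(maskUtils.area(_mask))
# a running counter could stand in for the missing per-segmentation id
_result["id"] = len(_final_object) + 1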