IRVLUTD / isaac_sim_grasping

Code repo for MultiGripperGrasp Dataset
GNU General Public License v3.0
50 stars 8 forks source link

Issues with visualizing the grasps from the generated json file #5

Closed SagarJoshi73249 closed 1 month ago

SagarJoshi73249 commented 1 month ago

Firstly, thanks for sharing your work; it has been very insightful. I am using a Robotiq 2F-85 gripper, converted its URDF to XML, and generated my own grasps for my object. But when trying to visualize these grasps, I discovered that the grasps were inverted. Then, to confirm whether it was a gripper issue, I tried to visualize the grasps for the Fetch gripper, but they were inverted too. The gripper alignment with the object looked fine while running in the GraspIt simulator. When I use the same code to visualize data from the MultiGripperGrasp dataset, the visualization is fine (to visualize this I use the wxyz quaternion format, but for the GraspIt data it should be the xyzw format). For example, when generating my own grasps using the franka_panda gripper, the grasps are not correct, but when using the data shared in your dataset, I get the correct visualizations. I am attaching the images for the same, as well as the code for visualizing the grasps. Any insight on this would be greatly appreciated — thanks.

Using the fetch gripper: image Using the robotiq gripper: image

Code:

import trimesh
import numpy as np
import logging
import json
from trimesh.transformations import quaternion_matrix, rotation_matrix

class Gripper:
    """Lightweight wrapper bundling a gripper mesh with a few geometric parameters."""

    def __init__(self, mesh_path):
        """Load the gripper mesh from *mesh_path* and set default geometry attributes."""
        self.mesh = self.load_mesh(mesh_path)
        self.standoff_range = (0.0, 0.1)
        self.maximum_aperture = 0.1
        # Example attribute, adjust as needed for the actual gripper.
        self.standoff_fingertips = 0.05
        self.offset_inv = np.eye(4)
        # Placeholder geometry for the region swept between the fingers.
        self.closing_region = self.create_closing_region()

    def load_mesh(self, mesh_path):
        """Load a mesh file, flattening a multi-geometry Scene into a single Trimesh."""
        loaded = trimesh.load(mesh_path)
        if isinstance(loaded, trimesh.Scene):
            # A Scene may contain several geometries; merge them into one mesh.
            loaded = trimesh.util.concatenate(list(loaded.geometry.values()))
        return loaded

    def create_closing_region(self):
        """Return a placeholder box mesh standing in for the gripper's closing region."""
        # Replace with the real closing-region geometry for the target gripper.
        return trimesh.primitives.Box(extents=[0.05, 0.05, 0.05])

def load_gripper(mesh_path):
    """Construct and return a :class:`Gripper` from the mesh file at *mesh_path*."""
    return Gripper(mesh_path)

def load_stl_mesh(file_path):
    """Load a mesh file (STL/OBJ/...) and flatten any Scene into a single Trimesh."""
    loaded = trimesh.load(file_path)
    if not isinstance(loaded, trimesh.Scene):
        return loaded
    # Merge every geometry contained in the scene into one mesh.
    return trimesh.util.concatenate(list(loaded.geometry.values()))

def create_gripper_marker(color=[0, 0, 255], tube_radius=0.002, sections=6):
    """Create a 3D mesh visualizing a parallel yaw gripper. It consists of four cylinders.

    The marker's fingers extend along +Z (base at z=0, fingertips near
    z=0.112); if visualized grasps appear flipped 180 degrees, this axis
    convention likely differs from the gripper that produced the grasps.

    Args:
        color (list, optional): RGB values of marker. Defaults to [0, 0, 255].
        tube_radius (float, optional): Radius of cylinders. Defaults to 0.002.
            NOTE: this argument used to be accepted but ignored (the radius
            was hard-coded to 0.002); it is now honored, and its default
            matches the old hard-coded value so default renders are unchanged.
        sections (int, optional): Number of sections of each cylinder. Defaults to 6.

    Returns:
        trimesh.Trimesh: A mesh that represents a simple parallel yaw gripper.
    """
    # Left finger.
    cfl = trimesh.creation.cylinder(
        radius=tube_radius,
        sections=sections,
        segment=[
            [4.10000000e-02, -7.27595772e-12, 6.59999996e-02],
            [4.10000000e-02, -7.27595772e-12, 1.12169998e-01],
        ],
    )
    # Right finger.
    cfr = trimesh.creation.cylinder(
        radius=tube_radius,
        sections=sections,
        segment=[
            [-4.100000e-02, -7.27595772e-12, 6.59999996e-02],
            [-4.100000e-02, -7.27595772e-12, 1.12169998e-01],
        ],
    )
    # Wrist/base stem.
    cb1 = trimesh.creation.cylinder(
        radius=tube_radius, sections=sections, segment=[[0, 0, 0], [0, 0, 6.59999996e-02]]
    )
    # Palm cross-bar connecting the two fingers.
    cb2 = trimesh.creation.cylinder(
        radius=tube_radius,
        sections=sections,
        segment=[[-4.100000e-02, 0, 6.59999996e-02], [4.100000e-02, 0, 6.59999996e-02]],
    )

    tmp = trimesh.util.concatenate([cb1, cb2, cfr, cfl])
    tmp.visual.face_colors = color

    return tmp

def visualize(object_mesh, sampled_poses):
    """Render *object_mesh* together with a gripper marker at each sampled pose.

    Args:
        object_mesh (trimesh.Trimesh): The object being grasped.
        sampled_poses (list): Poses as 7-element sequences
            ``[px, py, pz, qx, qy, qz, qw]`` — translation plus a quaternion
            in xyzw order (the order GraspIt exports, per the issue above).
    """
    scene = trimesh.Scene()

    # Add the object mesh to the scene
    scene.add_geometry(object_mesh)

    # Function to convert pose to transformation matrix
    def pose_to_matrix(pose):
        """Convert a [pos, quat-xyzw] pose into a 4x4 homogeneous matrix."""
        px, py, pz, x, y, z, w = pose
        translation = np.array([px, py, pz])
        # trimesh's quaternion_matrix expects wxyz order, so reorder here.
        quaternion = [w, x, y, z]
        rotation = quaternion_matrix(quaternion)
        matrix = np.eye(4)
        matrix[:3, :3] = rotation[:3, :3]
        matrix[:3, 3] = translation
        # If markers come out rotated/flipped relative to the real gripper,
        # apply a corrective rotation here, e.g.:
        #   matrix[:3, :3] = matrix[:3, :3] @ rotation_matrix(np.pi, [0, 0, 1])[:3, :3]
        return matrix

    # One marker instance per pose, each independently transformed.
    # (A previously-created shared marker instance was dead code and removed.)
    for pose in sampled_poses:
        try:
            gripper_instance = create_gripper_marker()
            transformation_matrix = pose_to_matrix(pose)
            gripper_instance.apply_transform(transformation_matrix)
            gripper_instance.visual.face_colors = [255, 0, 0, 100]  # Red and semi-transparent
            scene.add_geometry(gripper_instance)
        except Exception as e:
            # Skip malformed poses (wrong length, non-numeric) but keep going.
            logging.warning(f"Skipping invalid pose: {pose} due to error: {e}")

    # Show the scene
    scene.show()

def run_visualization():
    """Script entry point: load the object mesh and pose JSON, then visualize."""
    logging.basicConfig(level=logging.DEBUG)

    # Object mesh to grasp.  Replace with the actual path.
    object_mesh = load_stl_mesh('/home/sam/Downloads/006_mustard_bottle.obj')

    # Grasp poses exported as JSON ([px, py, pz, qx, qy, qz, qw] per pose).
    with open('/home/sam/Downloads/Extracted_pose_mustard_new.json', 'r') as file:
        pose_data = json.load(file)

    sampled_poses = pose_data['pose']
    print(f"Sampled Poses: {sampled_poses}")

    visualize(object_mesh, sampled_poses)

if __name__ == "__main__":
    run_visualization()
kninad commented 1 month ago

If I understand correctly, when you visualize the grasps generated from GraspIt you get an inverted visualization (but in GraspIt it looks okay)? That seems like a strange issue if you have already accounted for the quaternion representation format. One suggestion for checking GraspIt-generated grasps is to try visualizing them in PyBullet.

kninad commented 1 month ago

Another thing to check is when creating the parallel jaw gripper marker, does the palm normal for the marker align with the palm normal for the gripper you are trying to visualize? If all the markers are pointing exactly 180 degrees away, then this could also be a potential fix in the visualization script.

SagarJoshi73249 commented 1 month ago

I do not think this is only a problem with the visualizer code, because when we spawn the grippers with standalone.py, we also get the objects behind the grippers, as we have seen with this code. I presume it could also be that the coordinate systems do not match, but I do not see how we can set the reference coordinate system for GraspIt grasp generation using the NeuralGrasp generation code that you provided.

image

kninad commented 1 month ago

To clarify things, can you share the steps you are following to visualize the above figure? I am assuming you generate some grasps using GraspIt and then spawn them in Isaac Sim (taking into account the quaternion representation change)? Did you also add some additional code for the new gripper (Robotiq 2F) that you added?

kninad commented 1 month ago

Firstly, thanks for sharing your work; it has been very insightful. I am using a Robotiq 2F-85 gripper, converted its URDF to XML, and generated my own grasps for my object. But when trying to visualize these grasps, I discovered that the grasps were inverted. Then, to confirm whether it was a gripper issue, I tried to visualize the grasps for the Fetch gripper, but they were inverted too. The gripper alignment with the object looked fine while running in the GraspIt simulator. When I use the same code to visualize data from the MultiGripperGrasp dataset, the visualization is fine (to visualize this I use the wxyz quaternion format, but for the GraspIt data it should be the xyzw format). For example, when generating my own grasps using the franka_panda gripper, the grasps are not correct, but when using the data shared in your dataset, I get the correct visualizations. I am attaching the images for the same, as well as the code for visualizing the grasps. Any insight on this would be greatly appreciated — thanks.

Using the fetch gripper: image Using the robotiq gripper: image

Code:

import trimesh
import numpy as np
import logging
import json
from trimesh.transformations import quaternion_matrix, rotation_matrix

class Gripper:
    """Lightweight wrapper bundling a gripper mesh with a few geometric parameters."""

    def __init__(self, mesh_path):
        """Load the gripper mesh from *mesh_path* and set default geometry attributes."""
        self.mesh = self.load_mesh(mesh_path)
        self.standoff_range = (0.0, 0.1)
        self.maximum_aperture = 0.1
        # Example attribute, adjust as needed for the actual gripper.
        self.standoff_fingertips = 0.05
        self.offset_inv = np.eye(4)
        # Placeholder geometry for the region swept between the fingers.
        self.closing_region = self.create_closing_region()

    def load_mesh(self, mesh_path):
        """Load a mesh file, flattening a multi-geometry Scene into a single Trimesh."""
        loaded = trimesh.load(mesh_path)
        if isinstance(loaded, trimesh.Scene):
            # A Scene may contain several geometries; merge them into one mesh.
            loaded = trimesh.util.concatenate(list(loaded.geometry.values()))
        return loaded

    def create_closing_region(self):
        """Return a placeholder box mesh standing in for the gripper's closing region."""
        # Replace with the real closing-region geometry for the target gripper.
        return trimesh.primitives.Box(extents=[0.05, 0.05, 0.05])

def load_gripper(mesh_path):
    """Construct and return a :class:`Gripper` from the mesh file at *mesh_path*."""
    return Gripper(mesh_path)

def load_stl_mesh(file_path):
    """Load a mesh file (STL/OBJ/...) and flatten any Scene into a single Trimesh."""
    loaded = trimesh.load(file_path)
    if not isinstance(loaded, trimesh.Scene):
        return loaded
    # Merge every geometry contained in the scene into one mesh.
    return trimesh.util.concatenate(list(loaded.geometry.values()))

def create_gripper_marker(color=[0, 0, 255], tube_radius=0.002, sections=6):
    """Create a 3D mesh visualizing a parallel yaw gripper. It consists of four cylinders.

    The marker's fingers extend along +Z (base at z=0, fingertips near
    z=0.112); if visualized grasps appear flipped 180 degrees, this axis
    convention likely differs from the gripper that produced the grasps.

    Args:
        color (list, optional): RGB values of marker. Defaults to [0, 0, 255].
        tube_radius (float, optional): Radius of cylinders. Defaults to 0.002.
            NOTE: this argument used to be accepted but ignored (the radius
            was hard-coded to 0.002); it is now honored, and its default
            matches the old hard-coded value so default renders are unchanged.
        sections (int, optional): Number of sections of each cylinder. Defaults to 6.

    Returns:
        trimesh.Trimesh: A mesh that represents a simple parallel yaw gripper.
    """
    # Left finger.
    cfl = trimesh.creation.cylinder(
        radius=tube_radius,
        sections=sections,
        segment=[
            [4.10000000e-02, -7.27595772e-12, 6.59999996e-02],
            [4.10000000e-02, -7.27595772e-12, 1.12169998e-01],
        ],
    )
    # Right finger.
    cfr = trimesh.creation.cylinder(
        radius=tube_radius,
        sections=sections,
        segment=[
            [-4.100000e-02, -7.27595772e-12, 6.59999996e-02],
            [-4.100000e-02, -7.27595772e-12, 1.12169998e-01],
        ],
    )
    # Wrist/base stem.
    cb1 = trimesh.creation.cylinder(
        radius=tube_radius, sections=sections, segment=[[0, 0, 0], [0, 0, 6.59999996e-02]]
    )
    # Palm cross-bar connecting the two fingers.
    cb2 = trimesh.creation.cylinder(
        radius=tube_radius,
        sections=sections,
        segment=[[-4.100000e-02, 0, 6.59999996e-02], [4.100000e-02, 0, 6.59999996e-02]],
    )

    tmp = trimesh.util.concatenate([cb1, cb2, cfr, cfl])
    tmp.visual.face_colors = color

    return tmp

def visualize(object_mesh, sampled_poses):
    """Render *object_mesh* together with a gripper marker at each sampled pose.

    Args:
        object_mesh (trimesh.Trimesh): The object being grasped.
        sampled_poses (list): Poses as 7-element sequences
            ``[px, py, pz, qx, qy, qz, qw]`` — translation plus a quaternion
            in xyzw order (the order GraspIt exports, per the issue above).
    """
    scene = trimesh.Scene()

    # Add the object mesh to the scene
    scene.add_geometry(object_mesh)

    # Function to convert pose to transformation matrix
    def pose_to_matrix(pose):
        """Convert a [pos, quat-xyzw] pose into a 4x4 homogeneous matrix."""
        px, py, pz, x, y, z, w = pose
        translation = np.array([px, py, pz])
        # trimesh's quaternion_matrix expects wxyz order, so reorder here.
        quaternion = [w, x, y, z]
        rotation = quaternion_matrix(quaternion)
        matrix = np.eye(4)
        matrix[:3, :3] = rotation[:3, :3]
        matrix[:3, 3] = translation
        # If markers come out rotated/flipped relative to the real gripper,
        # apply a corrective rotation here, e.g.:
        #   matrix[:3, :3] = matrix[:3, :3] @ rotation_matrix(np.pi, [0, 0, 1])[:3, :3]
        return matrix

    # One marker instance per pose, each independently transformed.
    # (A previously-created shared marker instance was dead code and removed.)
    for pose in sampled_poses:
        try:
            gripper_instance = create_gripper_marker()
            transformation_matrix = pose_to_matrix(pose)
            gripper_instance.apply_transform(transformation_matrix)
            gripper_instance.visual.face_colors = [255, 0, 0, 100]  # Red and semi-transparent
            scene.add_geometry(gripper_instance)
        except Exception as e:
            # Skip malformed poses (wrong length, non-numeric) but keep going.
            logging.warning(f"Skipping invalid pose: {pose} due to error: {e}")

    # Show the scene
    scene.show()

def run_visualization():
    """Script entry point: load the object mesh and pose JSON, then visualize."""
    logging.basicConfig(level=logging.DEBUG)

    # Object mesh to grasp.  Replace with the actual path.
    object_mesh = load_stl_mesh('/home/sam/Downloads/006_mustard_bottle.obj')

    # Grasp poses exported as JSON ([px, py, pz, qx, qy, qz, qw] per pose).
    with open('/home/sam/Downloads/Extracted_pose_mustard_new.json', 'r') as file:
        pose_data = json.load(file)

    sampled_poses = pose_data['pose']
    print(f"Sampled Poses: {sampled_poses}")

    visualize(object_mesh, sampled_poses)

if __name__ == "__main__":
    run_visualization()

To debug this figure, as suggested earlier, you might need to check the palm normal direction of the gripper and also the direction of the marker you are using. Check this line if you are adding a new gripper and generating new data.