nutonomy / nuscenes-devkit

The devkit of the nuScenes dataset.
https://www.nuScenes.org

[Solved] Wrong transform: ego_pose appears inaccurate over the long term #1037

Closed Kin-Zhang closed 4 months ago

Kin-Zhang commented 4 months ago

Thanks for your work in devkit!

I found that the ego_pose does not seem to be accurate; here is an example image:

[Image: the two lidar sweeps (green and red) are visibly misaligned after applying the computed ego motion]

Here is the code to reproduce the result above (I already picked a scene from v1.0-mini):

"""
# Created: 2024-02-24 10:48
# Author: Qingwen ZHANG  (https://kin-zhang.github.io/), Peizheng Li
"""

from pathlib import Path
import numpy as np
import fire, time, os
import open3d as o3d

from nuscenes.nuscenes import NuScenes
from nuscenes.utils.geometry_utils import transform_matrix
from pyquaternion import Quaternion

def _load_points_from_file(filename: str) -> np.ndarray:
    """
    Private function to load point cloud from file.
    :param filename: Path of the point cloud file.
    :return: Point cloud as numpy array.
    """
    # Each nuScenes lidar .bin stores 5 floats per point: x, y, z, intensity, ring index.
    pc = np.fromfile(filename, dtype=np.float32)
    pc = pc.reshape((-1, 5))[:, :4]  # keep x, y, z, intensity; drop the ring index
    return pc

def get_data(sample_data, nusc):
    # lidar point cloud
    pc = _load_points_from_file(os.path.join(nusc.dataroot, sample_data['filename']))

    # pose from lidar to world
    lidar2ego = nusc.get('calibrated_sensor', sample_data['calibrated_sensor_token'])
    ego2world = nusc.get('ego_pose', sample_data['ego_pose_token'])
    lidar2ego_np = transform_matrix(lidar2ego['translation'], Quaternion(lidar2ego['rotation']), inverse=True)
    ego2world_np = transform_matrix(ego2world['translation'], Quaternion(ego2world['rotation']), inverse=True)
    pose = np.dot(np.linalg.inv(ego2world_np),lidar2ego_np)
    return pc, pose

def main(
    data_dir: str = "/home/kin/data/nuScenes",
    mode: str = "v1.0-mini"
):
    data_dir = Path(data_dir) / mode
    scene_num_id = 1
    nusc = NuScenes(dataroot=data_dir, version=mode, verbose=False)
    scene = nusc.scene[scene_num_id]
    log_id = scene['name']

    sample = nusc.get('sample', scene['first_sample_token'])
    sample_data = nusc.get('sample_data', sample['data']['LIDAR_TOP'])
    sample_data_lst = []
    while True:  # collect every LIDAR_TOP sample_data in the scene, including the last one
        sample_data_lst.append(sample_data)
        if sample_data['next'] == '':
            break
        sample_data = nusc.get('sample_data', sample_data['next'])

    pc0, pose0 = get_data(sample_data_lst[0], nusc)
    pc1, pose1 = get_data(sample_data_lst[10], nusc)

    pcd0 = o3d.geometry.PointCloud()
    pcd0.points = o3d.utility.Vector3dVector(pc0[:, :3])
    pcd0.paint_uniform_color([0.0, 1.0, 0.0])
    pcd1 = o3d.geometry.PointCloud()
    pcd1.points = o3d.utility.Vector3dVector(pc1[:, :3])
    pcd1.paint_uniform_color([1.0, 0.0, 0.0])
    ego_motion = np.dot(np.linalg.inv(pose1), pose0)
    pcd1.transform(ego_motion)
    o3d.visualization.draw_geometries([pcd0, pcd1])

if __name__ == '__main__':
    start_time = time.time()
    fire.Fire(main)
    print(f"\nTime used: {(time.time() - start_time)/60:.2f} mins")

Is there anything I misunderstood that leads to this kind of error?

related issue:

Note: I am trying to integrate all point clouds in the scene to get a raw point-cloud map (see the aggregation sketch at the end of this thread).

whyekit-motional commented 4 months ago

@Kin-Zhang pls use from_file_multisweep as a reference to aggregate multiple sweeps
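For anyone landing here, a minimal sketch of that approach (the dataroot, scene index, and nsweeps below are illustrative choices, not from the comment above):

from nuscenes.nuscenes import NuScenes
from nuscenes.utils.data_classes import LidarPointCloud

nusc = NuScenes(dataroot='/home/kin/data/nuScenes/v1.0-mini', version='v1.0-mini', verbose=False)
sample = nusc.get('sample', nusc.scene[1]['first_sample_token'])

# Aggregate up to 10 sweeps into the LIDAR_TOP frame of this sample;
# from_file_multisweep chains calibrated_sensor and ego_pose internally.
pc, times = LidarPointCloud.from_file_multisweep(
    nusc, sample, chan='LIDAR_TOP', ref_chan='LIDAR_TOP', nsweeps=10)
print(pc.points.shape)  # 4 x N: x, y, z, intensity, in the reference sensor frame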

Kin-Zhang commented 4 months ago

Thanks a lot!

I see, the problem was the direction of the transforms: calibrated_sensor already stores the lidar-to-ego transform and ego_pose the ego-to-world transform, so passing inverse=True flipped both of them. Here is the fix in case someone is interested:

- lidar2ego_np = transform_matrix(lidar2ego['translation'], Quaternion(lidar2ego['rotation']), inverse=True)
- ego2world_np = transform_matrix(ego2world['translation'], Quaternion(ego2world['rotation']), inverse=True)
- pose = np.dot(np.linalg.inv(ego2world_np),lidar2ego_np)
+ lidar2ego_np = transform_matrix(lidar2ego['translation'], Quaternion(lidar2ego['rotation']))
+ ego2world_np = transform_matrix(ego2world['translation'], Quaternion(ego2world['rotation']))
+ pose = np.dot(ego2world_np, lidar2ego_np)
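As a quick sanity check on the convention (the translation and rotation values below are made up for illustration): transform_matrix builds a 4x4 pose from a translation and a pyquaternion Quaternion, and inverse=True returns the inverse of that matrix, so composing the two gives the identity.

import numpy as np
from pyquaternion import Quaternion
from nuscenes.utils.geometry_utils import transform_matrix

t = [1.0, 0.5, 1.8]                              # made-up lidar -> ego translation
q = Quaternion(axis=[0.0, 0.0, 1.0], angle=0.3)  # made-up lidar -> ego rotation
T = transform_matrix(t, q)                       # lidar -> ego
T_inv = transform_matrix(t, q, inverse=True)     # ego -> lidar
assert np.allclose(T @ T_inv, np.eye(4), atol=1e-9)

The full updated script: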
"""
# Created: 2024-02-24 10:48
# Author: Qingwen ZHANG  (https://kin-zhang.github.io/), Peizheng Li
"""

from pathlib import Path
import numpy as np
import fire, time, os
import open3d as o3d

from nuscenes.nuscenes import NuScenes
from nuscenes.utils.geometry_utils import transform_matrix
from pyquaternion import Quaternion

def _load_points_from_file(filename: str) -> np.ndarray:
    """
    Private function to load point cloud from file.
    :param filename: Path of the point cloud file.
    :return: Point cloud as numpy array.
    """
    # Each nuScenes lidar .bin stores 5 floats per point: x, y, z, intensity, ring index.
    pc = np.fromfile(filename, dtype=np.float32)
    pc = pc.reshape((-1, 5))[:, :4]  # keep x, y, z, intensity; drop the ring index
    return pc

def get_data(sample_data, nusc):
    # lidar point cloud
    pc = _load_points_from_file(os.path.join(nusc.dataroot, sample_data['filename']))

    # pose from lidar to world
    lidar2ego = nusc.get('calibrated_sensor', sample_data['calibrated_sensor_token'])
    ego2world = nusc.get('ego_pose', sample_data['ego_pose_token'])
    lidar2ego_np = transform_matrix(lidar2ego['translation'], Quaternion(lidar2ego['rotation']))  # lidar -> ego
    ego2world_np = transform_matrix(ego2world['translation'], Quaternion(ego2world['rotation']))  # ego -> world
    pose = np.dot(ego2world_np, lidar2ego_np)  # lidar -> world
    return pc, pose

def main(
    data_dir: str = "/home/kin/data/nuScenes",
    mode: str = "v1.0-mini"
):
    data_dir = Path(data_dir) / mode
    scene_num_id = 1
    nusc = NuScenes(dataroot=data_dir, version=mode, verbose=False)
    scene = nusc.scene[scene_num_id]
    log_id = scene['name']

    sample = nusc.get('sample', scene['first_sample_token'])
    sample_data = nusc.get('sample_data', sample['data']['LIDAR_TOP'])
    sample_data_lst = []
    while True:  # collect every LIDAR_TOP sample_data in the scene, including the last one
        sample_data_lst.append(sample_data)
        if sample_data['next'] == '':
            break
        sample_data = nusc.get('sample_data', sample_data['next'])

    pc0, pose0 = get_data(sample_data_lst[0], nusc)
    pc1, pose1 = get_data(sample_data_lst[10], nusc)

    pcd0 = o3d.geometry.PointCloud()
    pcd0.points = o3d.utility.Vector3dVector(pc0[:, :3])
    pcd0.paint_uniform_color([0.0, 1.0, 0.0])

    pcd1 = o3d.geometry.PointCloud()
    pcd1.points = o3d.utility.Vector3dVector(pc1[:, :3])
    pcd1.paint_uniform_color([1.0, 0.0, 0.0])

    ego_motion = np.linalg.inv(pose0) @ pose1  # maps frame 1 -> frame 0
    pcd1.transform(ego_motion)
    o3d.visualization.draw_geometries([pcd0, pcd1])

if __name__ == '__main__':
    start_time = time.time()
    fire.Fire(main)
    print(f"\nTime used: {(time.time() - start_time)/60:.2f} mins")
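Following up on the raw point-cloud-map goal from the original question, a minimal sketch that fuses every frame of the scene in the world frame (this reuses get_data and sample_data_lst from the script above; the voxel size is an illustrative choice):

# Fuse every LIDAR_TOP frame of the scene into one world-frame map.
map_pcd = o3d.geometry.PointCloud()
for sd in sample_data_lst:
    pc, pose = get_data(sd, nusc)                    # pose: lidar -> world
    pts = pc[:, :3] @ pose[:3, :3].T + pose[:3, 3]   # rotate then translate into the world frame
    frame = o3d.geometry.PointCloud()
    frame.points = o3d.utility.Vector3dVector(pts)
    map_pcd += frame
map_pcd = map_pcd.voxel_down_sample(voxel_size=0.2)  # thin out overlapping returns
o3d.visualization.draw_geometries([map_pcd])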