ethz-asl / maplab

A Modular and Multi-Modal Mapping Framework
https://maplab.asl.ethz.ch
Apache License 2.0

vo-feature-tracking-pipeline.cc failed: num_cameras == trackers_.size() (2 vs. 4) #256

Closed. NicoNorena closed this issue 3 years ago.

NicoNorena commented 4 years ago

Label: question

I am using the 'develop' branch rather than master. Since there are significant changes between the two branches, I can't figure out why the tracker size is 4 ...

I updated a map builder example, originally written against maplab's master branch, to the develop branch: https://github.com/rpng/ov_maplab/tree/master/src/mapper

When saving the map to disk, it fails while extracting features from the raw images:
Progress:   0.4% [--------------------------------------------------]WARNING: Logging before InitGoogleLogging() is written to STDERR
F0509 04:14:34.587486  6840 vo-feature-tracking-pipeline.cc:80] Check failed: num_cameras == trackers_.size() (2 vs. 4) 
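From the numbers in the log, the check seems to compare the camera count of the nframe being processed (2) against the number of per-camera trackers the pipeline allocated when it was constructed (4), i.e. something along these lines (a paraphrase of the invariant, not maplab's exact source):

    // vo-feature-tracking-pipeline.cc builds one tracker per camera of the
    // rig it is constructed with; every incoming nframe must match that rig.
    CHECK_EQ(nframe->getNumFrames(), trackers_.size());

So the nframes stored in my map appear to come from a 2-camera rig, while the pipeline was built from a 4-camera one.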

Do you have any idea why? Below are the sections I updated.

aslam::NCamera::Ptr camera_rig_Ptr = vi_map::getSelectedNCamera(*_sensor_manager);
feature_tracking::FeatureTrackingExtractorSettings extractor_settings;
extractor_settings.rotation_invariant = true;
feature_tracking::FeatureTrackingDetectorSettings detector_settings;
// Build the tracking pipeline from the selected rig and run tracking and
// triangulation over all missions in the map.
feature_tracking::VOFeatureTrackingPipeline trackpipe(camera_rig_Ptr, extractor_settings, detector_settings);
trackpipe.runTrackingAndTriangulationForAllMissions(map);
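A guard before running the pipeline would fail earlier and with a clearer message (a sketch; getNumCameras() is the aslam::NCamera camera-count accessor, and the expected count of 2 matches my stereo setup):

    // Guard: the rig selected from the sensor manager should have one camera
    // per camera the nframes in the map were created with (2 in my case).
    CHECK_EQ(camera_rig_Ptr->getNumCameras(), 2u)
        << "Selected NCamera has an unexpected number of cameras.";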

The updated function:


void MapBuilder::feed_measurement_camera(double timestamp, std::vector<cv::Mat> &images, std::vector<size_t> &cam_ids) {
    // Convert the camera time into the IMU clock (t_imu = t_cam + calib_dt, see below)
    double timestamp_inI = timestamp + _app->get_state()->_calib_dt_CAMtoIMU->value()(0);

    // Create our VIO state object
    vio::MapUpdate::Ptr vio_update = aligned_shared<vio::MapUpdate>();
    vio_update->timestamp_ns = (int64_t)(1e9 * timestamp_inI);
    vio_update->vinode = vio::ViNodeState(
        aslam::Transformation::constructAndRenormalizeRotation(T_ItoG),
        _app->get_state()->_imu->vel(), _app->get_state()->_imu->bias_a(),
        _app->get_state()->_imu->bias_g());

    // Create the camera frame system
    const vi_map::SensorManager& sensor_manager = map->getSensorManager();
    aslam::NCamera::Ptr ncamera_rig_Ptr = vi_map::getSelectedNCamera(sensor_manager);
    aslam::VisualNFrame::Ptr nframe(new aslam::VisualNFrame(ncamera_rig_Ptr));
    nframe->setId(aslam::createRandomId<aslam::NFramesId>());

    for(size_t i=0; i<images.size(); i++) {
        std::shared_ptr<aslam::VisualFrame> frame = std::make_shared<aslam::VisualFrame>();
        frame->setCameraGeometry(ncamera_rig_Ptr->getCameraShared(i));
        frame->setId(aslam::createRandomId<aslam::FrameId>());
        frame->setTimestampNanoseconds((int64_t)(1e9*timestamp_inI));
        frame->clearKeypointChannels();
        frame->setRawImage(images.at(i).clone());
        nframe->setFrame(cam_ids.at(i),frame);
    }
    assert(nframe->areAllFramesSet());
    assert(nframe->hasRawImagesInAllFrames());

    // Save the current frame to our vio update object
    vio::SynchronizedNFrameImu sync_nframe;
    sync_nframe.nframe = nframe;
    sync_nframe.motion_wrt_last_nframe = vio::MotionType::kGeneralMotion;

    // If this is the first ever pose, apply the update and return
    // (there is no previous node to link IMU measurements to)
    if(last_timestamp == -1) {
        // Send it to our map builder and store the raw images for later use
        vio_update->keyframe = std::make_shared<vio::SynchronizedNFrame>(sync_nframe.nframe, sync_nframe.motion_wrt_last_nframe);
        //vio_update->vio_state = vio::EstimatorState::kRunning;
        //vio_update->map_update_type = vio::UpdateType::kNormalUpdate;
        builder->apply(*vio_update);
        for (size_t frame_idx = 0u; frame_idx < nframe->getNumFrames(); frame_idx++) {
            map->storeFrameResource(
                nframe->getFrame(frame_idx).getRawImage(), frame_idx, backend::ResourceType::kRawImage,
                map->getVertexPtr(builder->getLastVertexId()));
        }
        // Save this timestamp
        last_timestamp = timestamp;
        last_prop_time_offset = _app->get_state()->_calib_dt_CAMtoIMU->value()(0);
        return;
    }
    else {

        vio_update->keyframe = std::make_shared<vio::SynchronizedNFrame>(sync_nframe.nframe, sync_nframe.motion_wrt_last_nframe);
        //vio_update->vio_state = vio::EstimatorState::kRunning;
        //vio_update->map_update_type = vio::UpdateType::kNormalUpdate;
    }

    //==========================================================================
    // GET IMU BETWEEN LAST AND CURRENT AND LINK THE NODES
    //==========================================================================

    // Get what our IMU-camera offset should be (t_imu = t_cam + calib_dt)
    double t_off_new = _app->get_state()->_calib_dt_CAMtoIMU->value()(0);

    // First lets construct an IMU vector of measurements we need
    double time0 = last_timestamp+last_prop_time_offset;
    double time1 = timestamp+t_off_new;
    std::vector<Propagator::IMUDATA> prop_data = Propagator::select_imu_readings(imu_data, time0, time1);

    // If empty, then we should skip this camera measurement
    if(prop_data.size()<2)
        return;

    // Convert into the maplab format (accelerometer in rows 0-2, gyro in rows 3-5)
    sync_nframe.imu_timestamps.resize(1,prop_data.size());
    sync_nframe.imu_measurements.resize(6,prop_data.size());
    for (size_t i=0; i < prop_data.size(); i++){
        sync_nframe.imu_timestamps(i) = (int64_t)(1e9*prop_data.at(i).timestamp);
        sync_nframe.imu_measurements.block(0,i,3,1) = prop_data.at(i).am;
        sync_nframe.imu_measurements.block(3,i,3,1) = prop_data.at(i).wm;
    }

    vio_update->imu_timestamps = sync_nframe.imu_timestamps;
    vio_update->imu_measurements = sync_nframe.imu_measurements;
    // Send it to our map builder and store the raw images for later use
    builder->apply(*vio_update);
    for (size_t frame_idx = 0u; frame_idx < nframe->getNumFrames(); frame_idx++) {
        map->storeFrameResource(nframe->getFrame(frame_idx).getRawImage(), frame_idx, backend::ResourceType::kRawImage,
                                map->getVertexPtr(builder->getLastVertexId()));
    }

    // Record the time we last propagated to
    last_timestamp = timestamp;
    last_prop_time_offset = t_off_new;

}
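One caveat in the function above: the two assert calls compile out in release builds (where NDEBUG is defined), so an nframe with missing frames or images, e.g. if cam_ids does not cover every camera of the rig, would pass through silently and only trip checks later in the tracking pipeline. glog checks (a sketch) would stay active:

    // Hard checks instead of assert; these are not compiled out in release.
    CHECK(nframe->areAllFramesSet()) << "Not every camera in the rig received a frame.";
    CHECK(nframe->hasRawImagesInAllFrames()) << "A frame is missing its raw image.";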

The function creating the map builder:


_sensor_manager = std::unique_ptr<vi_map::SensorManager>(new vi_map::SensorManager());

    // Create our IMU object for map
    constexpr char kImuHardwareId[] = "imu0";
    aslam::SensorId imu_sensor_id = aslam::createRandomId<aslam::SensorId>();
    vi_map::Imu::UniquePtr imu_sensor = aligned_unique<vi_map::Imu>(imu_sensor_id, static_cast<std::string>(kImuHardwareId));
    imu_sensor->setImuSigmas(imu_sigmas);

    _sensor_manager->addSensorAsBase<vi_map::Imu>(std::move(imu_sensor));

    //==========================================================================
    // CAMERA PROPERTIES 
    //==========================================================================

    // Camera calibration values
    aslam::TransformationVector T_ItoCi, T_CitoI;
    std::vector<std::shared_ptr<aslam::Camera>> camera_vec;

    // Loop through and load each of the cameras
    for(int i=0; i<_params.state_options.num_cameras; i++) {

        // Camera intrinsic properties
        Eigen::Matrix<double,4,1> cam_proj = _params.camera_intrinsics.at(i).block(0,0,4,1);
        Eigen::Matrix<double,4,1> cam_dist = _params.camera_intrinsics.at(i).block(4,0,4,1);

        // Select the distortion model: equidistant (fisheye) or radial-tangential
        aslam::Distortion* distptr;
        if(_params.camera_fisheye.at(i)) {
            distptr = new aslam::EquidistantDistortion(cam_dist);
        } else{
            distptr = new aslam::RadTanDistortion(cam_dist);
        }

        // Finally create the camera object!
        aslam::Distortion::UniquePtr uniqdistptr(distptr);
        std::shared_ptr<aslam::Camera> camera = std::make_shared<aslam::PinholeCamera>(
            cam_proj, _params.camera_wh.at(i).first, _params.camera_wh.at(i).second, uniqdistptr);
        aslam::SensorId camera_id = aslam::createRandomId<aslam::SensorId>();
        camera->setId(camera_id);
        camera_vec.push_back(camera);

        // Our camera extrinsics transform
        Eigen::Matrix4d T_CntoI = Eigen::Matrix4d::Identity();
        T_CntoI.block(0,0,3,3) = quat_2_Rot(_params.camera_extrinsics.at(i).block(0,0,4,1)).transpose();
        T_CntoI.block(0,3,3,1) = -T_CntoI.block(0,0,3,3)*_params.camera_extrinsics.at(i).block(4,0,3,1);
        Eigen::Matrix4d T_ItoCn = T_CntoI.inverse();
        T_ItoCi.push_back(aslam::Transformation::constructAndRenormalizeRotation(T_ItoCn));
        T_CitoI.push_back(aslam::Transformation::constructAndRenormalizeRotation(T_CntoI));

        // Debug print
        camera->printParameters(cout,"[MAPBUILDER::MapBuilder] CAMERA "+std::to_string(i));
        cout << "[MAPBUILDER::MapBuilder] T_ItoC" << std::endl << T_ItoCi.at(i) << std::endl;

    }

    // Create our camera object for map
    std::string cam_label = "camera_rig";
    aslam::NCameraId ncamera_id = aslam::createRandomId<aslam::NCameraId>();
    aslam::NCamera::UniquePtr ncamera = aligned_unique<aslam::NCamera>(ncamera_id, T_ItoCi, camera_vec, cam_label);

    _sensor_manager->addSensor<aslam::NCamera>(std::move(ncamera),imu_sensor_id, T_CitoI.at(0));

    // Create resources backend (currently disabled)
    /*
    cv::Mat color_image_dummy = cv::Mat(1280, 720, CV_8UC3, cv::Scalar(20u, 30u, 40u));

    backend::ResourceId camera_resource_id = aslam::createRandomId<backend::ResourceId>();
    std::unique_ptr<cv::Mat> camera_resource;
    backend::ResourceTemplate<cv::Mat>::createUniqueResource(color_image_dummy, camera_resource_id, &camera_resource);
    map->addSensorResource<cv::Mat>(backend::ResourceType::kRawColorImage, ncamera_id, 10, *camera_resource, &mission);
    */

    // Create our map builder
    builder = new online_map_builders::StreamMapBuilder(*_sensor_manager, map);
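A post-condition after building the sensor manager would catch a rig mismatch right at construction time (a sketch reusing names from the snippet; _params.state_options.num_cameras is the OpenVINS camera count):

    // Verify the rig that later calls to getSelectedNCamera() will return:
    // it should be the one just added, with one entry per configured camera.
    aslam::NCamera::Ptr selected = vi_map::getSelectedNCamera(*_sensor_manager);
    CHECK_EQ(selected->getNumCameras(), static_cast<size_t>(_params.state_options.num_cameras));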
goldbattle commented 2 years ago

I have updated the main repo to work on the develop branch. https://github.com/rpng/ov_maplab

It also required reworking the feature tracking, since the runTrackingAndTriangulationForAllMissions helper function was removed in this commit: https://github.com/ethz-asl/maplab/commit/659e35c8fc3e23b8da6154d2ccfa43b61372d160