rayvburn / humap_local_planner

Human-aware robot trajectory planner using a hybrid trajectory candidates generation and spatiotemporal cost functions
BSD 3-Clause "New" or "Revised" License

consider extending the `PathCrossingDetector` with the analysis of the global path plan #124

Open · rayvburn opened 7 months ago

rayvburn commented 7 months ago

Currently, only the robot's planned trajectory and the predicted trajectories of people are checked against each other at subsequent time steps; the global path plan is not taken into account.

To resolve the issue, the argument list of the `detect` method must be extended to:

bool PathCrossingDetector::detect(
    const Trajectory& traj_robot,
    const std::vector<geometry_msgs::PoseStamped>& global_plan,
    const std::vector<Trajectory>& traj_people
) {
    // ...
}
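
For reference, a minimal sketch of how the extended method could be called from the planner, assuming the plugin caches the plan received via nav_core's `setPlan()` (the `global_plan_` and `path_crossing_detector_` names below are hypothetical):

// hypothetical call site in the planner's velocity-computation step;
// global_plan_ is assumed to hold the plan cached when setPlan() was called
bool crossing = path_crossing_detector_.detect(
    traj_robot,    // the robot's trajectory candidate under evaluation
    global_plan_,  // std::vector<geometry_msgs::PoseStamped> from setPlan()
    traj_people    // predicted trajectories of the detected people
);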

A preliminary procedure (tested):

std::vector<geometry::Pose> global_plan_poses;
global_plan_poses.reserve(global_plan.size());
for (const auto& pose_stamped: global_plan) {
    global_plan_poses.push_back(geometry::Pose(pose_stamped));
}

// TODO: separate detection for global path and trajectory
if (global_plan_poses.empty()) {
    return false;
}
// The loop is constructed as follows:
// - for each trajectory of the detected people
//   - iterate over the person's trajectory poses
//     - and check each of them against each pose of the global path
for (const auto& traj_person: traj_people) {
    auto vel_person = traj_person.getVelocities().front();
    double speed_person = std::hypot(vel_person.getX(), vel_person.getY());

    printf("::_::   human trajectory with %3lu poses, speed %6.3f (threshold %6.3f) :::::\r\n", traj_person.getPoses().size(), speed_person, speed_negligible_threshold_);
    if (speed_person < speed_negligible_threshold_) {
        printf("::_::   human speed too low %6.3f and the threshold is %6.3f :::::\r\n", speed_person, speed_negligible_threshold_);
        continue;
    }

    // iterators must reference a container that stays allocated, hence the local copy
    auto traj_person_poses = traj_person.getPoses();
    if (traj_person_poses.empty()) {
        // an empty trajectory must not abort the checks for the remaining people
        continue;
    }

    // stores the filtered motion direction of the person
    double direction_person = std::atan2(vel_person.getY(), vel_person.getX());

    double timestamp = 0.0;
    bool first_iteration_person = true;
    bool collision_w_person_detected = false;
    // iterate over human poses
    for (
        auto person_pose_it = traj_person_poses.cbegin();
        person_pose_it != traj_person_poses.cend();
        person_pose_it++
    ) {
        printf("::_::     next pose %3lu/%3lu of human trajectory :::::\r\n", std::distance(traj_person_poses.cbegin(), person_pose_it), traj_person_poses.size() - 1);

        if (!first_iteration_person) {
            auto person_pos_diff = person_pose_it->getPosition() - std::prev(person_pose_it)->getPosition();
            auto person_new_dir = person_pos_diff.calculateDirection().getRadian();

            // complementary filter; note that blending raw radians may wrap incorrectly
            // near +-pi (see the wrap-safe sketch below the snippet)
            const double PERSON_INNOV_FACTOR = 0.6;
            // person predictions carry low confidence
            direction_person = geometry::Angle(
                (1.0 - PERSON_INNOV_FACTOR) * direction_person + PERSON_INNOV_FACTOR * person_new_dir
            ).getRadian();
        }

        printf("::_::     before obtaining pose of robot - robot poses num %lu :::::\r\n", traj_robot.getPoses().size());
        // stores the filtered motion direction of the robot
        double direction_robot = NAN;
        // a non-empty container was ensured by the early return above
        if (global_plan_poses.size() >= 2) {
            direction_robot = (
                global_plan_poses.at(1).getPosition() - global_plan_poses.at(0).getPosition()
            ).calculateDirection().getRadian();
        } else if (!global_plan_poses.empty()) {
            // global path might not have the orientation updated
            direction_robot = global_plan_poses.at(0).getYaw();
        }
        printf("::_::     after  obtaining pose of robot - robot poses num %lu :::::\r\n", traj_robot.getPoses().size());

        bool first_iteration_path = true;
        // iterate over robot poses
        for (
            auto robot_pose_it = global_plan_poses.cbegin();
            robot_pose_it != global_plan_poses.cend();
            robot_pose_it++
        ) {
            printf("::_::     next pose %3lu/%3lu of global path -> %3lu of human trajectory :::::\r\n", std::distance(global_plan_poses.cbegin(), robot_pose_it), global_plan_poses.size() - 1, std::distance(traj_person_poses.cbegin(), person_pose_it));
            if (!first_iteration_path) {
                auto robot_pos_diff = robot_pose_it->getPosition() - std::prev(robot_pose_it)->getPosition();
                auto robot_new_dir = robot_pos_diff.calculateDirection().getRadian();

                // complementary filter (same scheme as for the person)
                const double ROBOT_INNOV_FACTOR = 0.8;
                direction_robot = geometry::Angle(
                    (1.0 - ROBOT_INNOV_FACTOR) * direction_robot + ROBOT_INNOV_FACTOR * robot_new_dir
                ).getRadian();
            }

            // estimated distance between the centers of the robot and human
            auto dist_center = (robot_pose_it->getPosition() - person_pose_it->getPosition()).calculateLength();
            // spatial gap between the models of the robot and human
            auto dist_gap = dist_center - person_model_radius_ - robot_model_radius_;
            // clamp the gap to zero when the circular models already overlap (collision)
            dist_gap = std::max(0.0, dist_gap);

            // store only the actual (not predicted) closest gap - update this only during the first iteration
            if (first_iteration_person) {
                gap_closest_person_ = std::min(gap_closest_person_, dist_gap);
            }

            printf(
                "::_::       pose %3lu, dist centers %7.3f, dist gap %7.3f | robot {%6.3f, %6.3f}, person {%6.3f, %6.3f} :::::\r\n",
                std::distance(traj_person_poses.cbegin(), person_pose_it),
                dist_center,
                dist_gap,
                robot_pose_it->getX(), robot_pose_it->getY(),
                person_pose_it->getX(), person_pose_it->getY()
            );

            if (dist_gap > separation_threshold_) {
                first_iteration_path = false;
                continue;
            }

            // confidence of the crossing based on the prediction time ("how far from now"); approx. 50% at a 2-second horizon
            const double TIMING_EXP_FACTOR = -0.34;
            double cross_confidence = std::exp(TIMING_EXP_FACTOR * timestamp);

            // evaluate the angle between the crossing motion directions
            geometry::Angle cross_angle(direction_robot - direction_person);

            // knowing whether the human approaches from the left or right is also necessary
            social_nav_utils::RelativeLocation rel_loc(
                robot_pose_it->getX(),
                robot_pose_it->getY(),
                robot_pose_it->getYaw(),
                person_pose_it->getX(),
                person_pose_it->getY()
            );

            /*
             * Detection of crossings between the paths of the robot and the human. The cases of interest are
             * crossings with the human approaching directly from the side (right or left).
             * This is modelled by a 1D Gaussian (in the angle domain) with a mean of +-M_PI_2
             * (sign depending on the approach side) and a standard deviation of (M_PI_2 / 2.0) (2-sigma rule)
             */
            // side-angle confidence; whether the crossing person approaches directly from the side
            double angle_confidence = social_nav_utils::calculateGaussianAngle(
                cross_angle.getRadian(),
                rel_loc.isLeftSide() ? M_PI_2 : -M_PI_2,
                std::pow(M_PI_2 / 2.0, 2.0),
                true // normalize to 1.0 at mean
            );

            double confidence = cross_confidence * angle_confidence;
            bool person_crossing = confidence >= confidence_threshold_;

            // do not clear the flag if previously set
            crossing_detected_ = crossing_detected_ || person_crossing;

            if (person_crossing) {
                crossing_people_data_.push_back({traj_person_poses.front(), direction_person});
            }

            printf(
                "%s::_::       DETECTED COLLISION at t=%6.3f | on the %6s -> dir: robot %6.3f, person %6.3f -> crangle %6.3f | confidences: cross %7.4f, angle %7.4f, total %7.4f :::::\x1B[0m\r\n",
                person_crossing ? "\x1B[31m" : "\x1B[35m",
                timestamp,
                rel_loc.isLeftSide() ? "left" : "right",
                direction_robot,
                direction_person,
                cross_angle.getRadian(),
                cross_confidence,
                angle_confidence,
                confidence
            );

            first_iteration_path = false;
            collision_w_person_detected = true;
            break;
        }

        // exit to outer loop
        if (collision_w_person_detected) {
            break;
        }

        timestamp += traj_person.getTimeDelta();
        first_iteration_person = false;
    }
}
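
A side note on the complementary filters above: blending raw radians can go wrong when the two angles straddle +-pi (e.g., averaging 3.1 and -3.1 gives roughly 0 instead of roughly +-pi). A wrap-safe variant applies the innovation along the shortest signed angular difference; the helper below is an illustrative sketch only, not part of the planner's API:

#include <cmath>

// wrap-safe complementary filter for angles (illustrative helper): blends
// `prev` towards `meas` by `innov_factor`, handling the +-pi discontinuity
// by applying the innovation along the shortest signed angular difference
double filterAngle(double prev, double meas, double innov_factor) {
    // shortest signed difference, wrapped to (-pi, pi]
    double diff = std::atan2(std::sin(meas - prev), std::cos(meas - prev));
    // move towards the measurement along the shortest arc and re-wrap
    double filtered = prev + innov_factor * diff;
    return std::atan2(std::sin(filtered), std::cos(filtered));
}

With such a helper, the filter update would become, e.g., direction_person = filterAngle(direction_person, person_new_dir, PERSON_INNOV_FACTOR); and analogously for the robot's direction.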