AlexeyAB / darknet

YOLOv4 / Scaled-YOLOv4 / YOLO - Neural Networks for Object Detection (Windows and Linux version of Darknet)
http://pjreddie.com/darknet/

When I used the yolo-cpp-dll.dll, the result is not right. #761

Open prety16 opened 6 years ago

prety16 commented 6 years ago

I trained a license plate detection network with YOLO; it works well when I use darknet.exe. But when I use the same configuration file and weights to detect the same picture through yolo-cpp-dll, the result is incorrect.
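For reference, the darknet.exe side of this comparison is typically run with a command along these lines (the .data file name here is a placeholder; the .cfg, .weights and image names follow the code posted later in this thread, and `-thresh 0.2` matches the threshold used in the DLL call):

```
darknet.exe detector test obj.data yolo-park.cfg yolo-park_7000.weights 010000011.jpg -thresh 0.2
```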

prety16 commented 6 years ago

My email address is 549480997@qq.com. If you have time and are willing to help me solve this problem, I hope you can contact me. Thank you very much.

AlexeyAB commented 6 years ago

Try commenting out `result_vec = detector.tracking_id(result_vec);` and set the same nms and thresh in yolo-cpp-dll that you use with darknet.exe. Also test on this image: https://github.com/AlexeyAB/darknet/blob/master/build/darknet/x64/dogr.jpg
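A minimal sketch of what that advice looks like on the DLL side (the `cfg` / `weight` / `img` names follow the code posted later in this thread, and 0.45 / 0.25 are only example values; use the same nms and thresh that you use with darknet.exe):

```cpp
Detector detector(cfg, weight, 0);
detector.nms = 0.45f;                               // set the same NMS as on the darknet.exe side
std::vector<bbox_t> result_vec = detector.detect(img, 0.25f, false);  // same thresh as darknet.exe
// result_vec = detector.tracking_id(result_vec);   // commented out, as suggested above
```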

prety16 commented 6 years ago

I don't quite understand what you mean.

prety16 commented 6 years ago

```cpp
// Note: the angle-bracket header names were stripped by the issue formatting;
// <iostream>, <vector> and <string> are the likely candidates.
#include <iostream>
#include <vector>
#include <string>
#include "yolo.hpp"

using namespace std;

// Backslashes in Windows paths must be escaped in C++ string literals.
#pragma comment (lib, "F:\\EndUWorkplace\\lib\\yolo_cpp_dll.lib")

int main() {
    char* filename = "E:\\毕业设计\\图片相关\\obj-park\\010000011.jpg";
    char* cfg = "F:\\EndUWorkplace\\ref\\yolo-park.cfg";
    char* weight = "F:\\EndUWorkplace\\ref\\yolo-park_7000.weights";
    Detector detector = Detector(cfg, weight, 0);
    image_t img = detector.load_image(filename);
    vector<bbox_t> vector_b = detector.detect(img, 0.2, false);

    detector.~Detector();
    return 0;
}
```

prety16 commented 6 years ago

this is my code

prety16 commented 6 years ago

```cpp
#pragma once

#ifdef YOLODLL_EXPORTS
#if defined(_MSC_VER)
#define YOLODLL_API __declspec(dllexport)
#else
#define YOLODLL_API __attribute__((visibility("default")))
#endif
#else
#if defined(_MSC_VER)
#define YOLODLL_API __declspec(dllimport)
#else
#define YOLODLL_API
#endif
#endif

struct bbox_t {
    unsigned int x, y, w, h;     // (x,y) - top-left corner, (w, h) - width & height of bounded box
    float prob;                  // confidence - probability that the object was found correctly
    unsigned int obj_id;         // class of object - from range [0, classes-1]
    unsigned int track_id;       // tracking id for video (0 - untracked, 1 - inf - tracked object)
    unsigned int frames_counter; // counter of frames on which the object was detected
};

struct image_t {
    int h;                       // height
    int w;                       // width
    int c;                       // number of channels (3 - for RGB)
    float *data;                 // pointer to the image data
};

#ifdef __cplusplus
// (angle-bracket header names below were stripped by the issue formatting)
#include <memory>
#include <vector>
#include <deque>
#include <algorithm>

#ifdef OPENCV
#include <opencv2/opencv.hpp>           // C++
#include "opencv2/highgui/highgui_c.h"  // C
#include "opencv2/imgproc/imgproc_c.h"  // C
#endif  // OPENCV

class Detector {
    std::shared_ptr<void> detector_gpu_ptr;
    std::deque<std::vector<bbox_t>> prev_bbox_vec_deque;
    const int cur_gpu_id;
public:
    float nms = .4;
    bool wait_stream;

YOLODLL_API Detector(char* cfg_filename, char* weight_filename, int gpu_id = 0);
YOLODLL_API ~Detector();

YOLODLL_API std::vector<bbox_t> detect(char* image_filename, float thresh = 0.2, bool use_mean = false);
YOLODLL_API std::vector<bbox_t> detect(image_t img, float thresh = 0.2, bool use_mean = false);
static YOLODLL_API image_t load_image(char* image_filename);
static YOLODLL_API void free_image(image_t m);
YOLODLL_API int get_net_width() const;
YOLODLL_API int get_net_height() const;

YOLODLL_API std::vector<bbox_t> tracking_id(std::vector<bbox_t> cur_bbox_vec, bool const change_history = true,
    int const frames_story = 10, int const max_dist = 150);

std::vector<bbox_t> detect_resized(image_t img, int init_w, int init_h, float thresh = 0.2, bool use_mean = false)
{
    if (img.data == NULL)
        throw std::runtime_error("Image is empty");
    auto detection_boxes = detect(img, thresh, use_mean);
    float wk = (float)init_w / img.w, hk = (float)init_h / img.h;
    for (auto &i : detection_boxes) i.x *= wk, i.w *= wk, i.y *= hk, i.h *= hk;
    return detection_boxes;
}

#ifdef OPENCV

std::vector<bbox_t> detect(cv::Mat mat, float thresh = 0.2, bool use_mean = false)
{
    if (mat.data == NULL)
        throw std::runtime_error("Image is empty");
    auto image_ptr = mat_to_image_resize(mat);
    return detect_resized(*image_ptr, mat.cols, mat.rows, thresh, use_mean);
}

std::shared_ptr<image_t> mat_to_image_resize(cv::Mat mat) const
{
    if (mat.data == NULL) return std::shared_ptr<image_t>(NULL);
    cv::Mat det_mat;
    cv::resize(mat, det_mat, cv::Size(get_net_width(), get_net_height()));
    return mat_to_image(det_mat);
}

static std::shared_ptr<image_t> mat_to_image(cv::Mat img_src)
{
    cv::Mat img;
    cv::cvtColor(img_src, img, cv::COLOR_RGB2BGR);
    std::shared_ptr<image_t> image_ptr(new image_t, [](image_t *img) { free_image(*img); delete img; });
    std::shared_ptr<IplImage> ipl_small = std::make_shared<IplImage>(img);
    *image_ptr = ipl_to_image(ipl_small.get());
    return image_ptr;
}

private:

static image_t ipl_to_image(IplImage* src)
{
    unsigned char *data = (unsigned char *)src->imageData;
    int h = src->height;
    int w = src->width;
    int c = src->nChannels;
    int step = src->widthStep;
    image_t out = make_image_custom(w, h, c);
    int count = 0;

    for (int k = 0; k < c; ++k) {
        for (int i = 0; i < h; ++i) {
            int i_step = i*step;
            for (int j = 0; j < w; ++j) {
                out.data[count++] = data[i_step + j*c + k] / 255.;
            }
        }
    }

    return out;
}

static image_t make_empty_image(int w, int h, int c)
{
    image_t out;
    out.data = 0;
    out.h = h;
    out.w = w;
    out.c = c;
    return out;
}

static image_t make_image_custom(int w, int h, int c)
{
    image_t out = make_empty_image(w, h, c);
    out.data = (float *)calloc(h*w*c, sizeof(float));
    return out;
}

#endif // OPENCV

};

#if defined(TRACK_OPTFLOW) && defined(OPENCV) && defined(GPU)

#include <opencv2/cudaoptflow.hpp>
#include <opencv2/cudaimgproc.hpp>
#include <opencv2/cudaarithm.hpp>
#include <opencv2/core/cuda.hpp>

class Tracker_optflow {
public:
    const int gpu_count;
    const int gpu_id;
    const int flow_error;

Tracker_optflow(int _gpu_id = 0, int win_size = 9, int max_level = 3, int iterations = 8000, int _flow_error = -1) :
    gpu_count(cv::cuda::getCudaEnabledDeviceCount()), gpu_id(std::min(_gpu_id, gpu_count - 1)),
    flow_error((_flow_error > 0) ? _flow_error : (win_size * 4))
{
    int const old_gpu_id = cv::cuda::getDevice();
    cv::cuda::setDevice(gpu_id);

    stream = cv::cuda::Stream();

    sync_PyrLKOpticalFlow_gpu = cv::cuda::SparsePyrLKOpticalFlow::create();
    sync_PyrLKOpticalFlow_gpu->setWinSize(cv::Size(win_size, win_size));    // 9, 15, 21, 31
    sync_PyrLKOpticalFlow_gpu->setMaxLevel(max_level);      // +- 3 pt
    sync_PyrLKOpticalFlow_gpu->setNumIters(iterations); // 2000, def: 30

    cv::cuda::setDevice(old_gpu_id);
}

// just to avoid extra allocations
cv::cuda::GpuMat src_mat_gpu;
cv::cuda::GpuMat dst_mat_gpu, dst_grey_gpu;
cv::cuda::GpuMat prev_pts_flow_gpu, cur_pts_flow_gpu;
cv::cuda::GpuMat status_gpu, err_gpu;

cv::cuda::GpuMat src_grey_gpu;  // used in both functions
cv::Ptr<cv::cuda::SparsePyrLKOpticalFlow> sync_PyrLKOpticalFlow_gpu;
cv::cuda::Stream stream;

std::vector<bbox_t> cur_bbox_vec;
std::vector<bool> good_bbox_vec_flags;
cv::Mat prev_pts_flow_cpu;

void update_cur_bbox_vec(std::vector<bbox_t> _cur_bbox_vec)
{
    cur_bbox_vec = _cur_bbox_vec;
    good_bbox_vec_flags = std::vector<bool>(cur_bbox_vec.size(), true);
    cv::Mat prev_pts, cur_pts_flow_cpu;

    for (auto &i : cur_bbox_vec) {
        float x_center = (i.x + i.w / 2.0F);
        float y_center = (i.y + i.h / 2.0F);
        prev_pts.push_back(cv::Point2f(x_center, y_center));
    }

    if (prev_pts.rows == 0)
        prev_pts_flow_cpu = cv::Mat();
    else
        cv::transpose(prev_pts, prev_pts_flow_cpu);

    if (prev_pts_flow_gpu.cols < prev_pts_flow_cpu.cols) {
        prev_pts_flow_gpu = cv::cuda::GpuMat(prev_pts_flow_cpu.size(), prev_pts_flow_cpu.type());
        cur_pts_flow_gpu = cv::cuda::GpuMat(prev_pts_flow_cpu.size(), prev_pts_flow_cpu.type());

        status_gpu = cv::cuda::GpuMat(prev_pts_flow_cpu.size(), CV_8UC1);
        err_gpu = cv::cuda::GpuMat(prev_pts_flow_cpu.size(), CV_32FC1);
    }

    prev_pts_flow_gpu.upload(cv::Mat(prev_pts_flow_cpu), stream);
}

void update_tracking_flow(cv::Mat src_mat, std::vector<bbox_t> _cur_bbox_vec)
{
    int const old_gpu_id = cv::cuda::getDevice();
    if (old_gpu_id != gpu_id)
        cv::cuda::setDevice(gpu_id);

    if (src_mat.channels() == 3) {
        if (src_mat_gpu.cols == 0) {
            src_mat_gpu = cv::cuda::GpuMat(src_mat.size(), src_mat.type());
            src_grey_gpu = cv::cuda::GpuMat(src_mat.size(), CV_8UC1);
        }

        update_cur_bbox_vec(_cur_bbox_vec);

        //src_grey_gpu.upload(src_mat, stream); // use BGR
        src_mat_gpu.upload(src_mat, stream);
        cv::cuda::cvtColor(src_mat_gpu, src_grey_gpu, CV_BGR2GRAY, 1, stream);
    }
    if (old_gpu_id != gpu_id)
        cv::cuda::setDevice(old_gpu_id);
}

std::vector<bbox_t> tracking_flow(cv::Mat dst_mat, bool check_error = true)
{
    if (sync_PyrLKOpticalFlow_gpu.empty()) {
        std::cout << "sync_PyrLKOpticalFlow_gpu isn't initialized \n";
        return cur_bbox_vec;
    }

    int const old_gpu_id = cv::cuda::getDevice();
    if (old_gpu_id != gpu_id)
        cv::cuda::setDevice(gpu_id);

    if (dst_mat_gpu.cols == 0) {
        dst_mat_gpu = cv::cuda::GpuMat(dst_mat.size(), dst_mat.type());
        dst_grey_gpu = cv::cuda::GpuMat(dst_mat.size(), CV_8UC1);
    }

    //dst_grey_gpu.upload(dst_mat, stream); // use BGR
    dst_mat_gpu.upload(dst_mat, stream);
    cv::cuda::cvtColor(dst_mat_gpu, dst_grey_gpu, CV_BGR2GRAY, 1, stream);

    if (src_grey_gpu.rows != dst_grey_gpu.rows || src_grey_gpu.cols != dst_grey_gpu.cols) {
        stream.waitForCompletion();
        src_grey_gpu = dst_grey_gpu.clone();
        cv::cuda::setDevice(old_gpu_id);
        return cur_bbox_vec;
    }

    ////sync_PyrLKOpticalFlow_gpu.sparse(src_grey_gpu, dst_grey_gpu, prev_pts_flow_gpu, cur_pts_flow_gpu, status_gpu, &err_gpu);    // OpenCV 2.4.x
    sync_PyrLKOpticalFlow_gpu->calc(src_grey_gpu, dst_grey_gpu, prev_pts_flow_gpu, cur_pts_flow_gpu, status_gpu, err_gpu, stream);  // OpenCV 3.x

    cv::Mat cur_pts_flow_cpu;
    cur_pts_flow_gpu.download(cur_pts_flow_cpu, stream);

    dst_grey_gpu.copyTo(src_grey_gpu, stream);

    cv::Mat err_cpu, status_cpu;
    err_gpu.download(err_cpu, stream);
    status_gpu.download(status_cpu, stream);

    stream.waitForCompletion();

    std::vector<bbox_t> result_bbox_vec;

    if (err_cpu.cols == cur_bbox_vec.size() && status_cpu.cols == cur_bbox_vec.size())
    {
        for (size_t i = 0; i < cur_bbox_vec.size(); ++i)
        {
            cv::Point2f cur_key_pt = cur_pts_flow_cpu.at<cv::Point2f>(0, i);
            cv::Point2f prev_key_pt = prev_pts_flow_cpu.at<cv::Point2f>(0, i);

            float moved_x = cur_key_pt.x - prev_key_pt.x;
            float moved_y = cur_key_pt.y - prev_key_pt.y;

            if (abs(moved_x) < 100 && abs(moved_y) < 100 && good_bbox_vec_flags[i])
                if (err_cpu.at<float>(0, i) < flow_error && status_cpu.at<unsigned char>(0, i) != 0 &&
                    ((float)cur_bbox_vec[i].x + moved_x) > 0 && ((float)cur_bbox_vec[i].y + moved_y) > 0)
                {
                    cur_bbox_vec[i].x += moved_x + 0.5;
                    cur_bbox_vec[i].y += moved_y + 0.5;
                    result_bbox_vec.push_back(cur_bbox_vec[i]);
                }
                else good_bbox_vec_flags[i] = false;
            else good_bbox_vec_flags[i] = false;

            //if(!check_error && !good_bbox_vec_flags[i]) result_bbox_vec.push_back(cur_bbox_vec[i]);
        }
    }

    cur_pts_flow_gpu.swap(prev_pts_flow_gpu);
    cur_pts_flow_cpu.copyTo(prev_pts_flow_cpu);

    if (old_gpu_id != gpu_id)
        cv::cuda::setDevice(old_gpu_id);

    return result_bbox_vec;
}

};

#elif defined(TRACK_OPTFLOW) && defined(OPENCV)

//#include <opencv2/optflow.hpp>
#include <opencv2/video/tracking.hpp>

class Tracker_optflow {
public:
    const int flow_error;

Tracker_optflow(int win_size = 9, int max_level = 3, int iterations = 8000, int _flow_error = -1) :
    flow_error((_flow_error > 0) ? _flow_error : (win_size * 4))
{
    sync_PyrLKOpticalFlow = cv::SparsePyrLKOpticalFlow::create();
    sync_PyrLKOpticalFlow->setWinSize(cv::Size(win_size, win_size));    // 9, 15, 21, 31
    sync_PyrLKOpticalFlow->setMaxLevel(max_level);      // +- 3 pt

}

// just to avoid extra allocations
cv::Mat dst_grey;
cv::Mat prev_pts_flow, cur_pts_flow;
cv::Mat status, err;

cv::Mat src_grey;   // used in both functions
cv::Ptr<cv::SparsePyrLKOpticalFlow> sync_PyrLKOpticalFlow;

std::vector<bbox_t> cur_bbox_vec;
std::vector<bool> good_bbox_vec_flags;

void update_cur_bbox_vec(std::vector<bbox_t> _cur_bbox_vec)
{
    cur_bbox_vec = _cur_bbox_vec;
    good_bbox_vec_flags = std::vector<bool>(cur_bbox_vec.size(), true);
    cv::Mat prev_pts, cur_pts_flow;

    for (auto &i : cur_bbox_vec) {
        float x_center = (i.x + i.w / 2.0F);
        float y_center = (i.y + i.h / 2.0F);
        prev_pts.push_back(cv::Point2f(x_center, y_center));
    }

    if (prev_pts.rows == 0)
        prev_pts_flow = cv::Mat();
    else
        cv::transpose(prev_pts, prev_pts_flow);
}

void update_tracking_flow(cv::Mat new_src_mat, std::vector<bbox_t> _cur_bbox_vec)
{
    if (new_src_mat.channels() == 3) {

        update_cur_bbox_vec(_cur_bbox_vec);

        cv::cvtColor(new_src_mat, src_grey, CV_BGR2GRAY, 1);
    }
}

std::vector<bbox_t> tracking_flow(cv::Mat new_dst_mat, bool check_error = true)
{
    if (sync_PyrLKOpticalFlow.empty()) {
        std::cout << "sync_PyrLKOpticalFlow isn't initialized \n";
        return cur_bbox_vec;
    }

    cv::cvtColor(new_dst_mat, dst_grey, CV_BGR2GRAY, 1);

    if (src_grey.rows != dst_grey.rows || src_grey.cols != dst_grey.cols) {
        src_grey = dst_grey.clone();
        return cur_bbox_vec;
    }

    if (prev_pts_flow.cols < 1) {
        return cur_bbox_vec;
    }

    ////sync_PyrLKOpticalFlow_gpu.sparse(src_grey_gpu, dst_grey_gpu, prev_pts_flow_gpu, cur_pts_flow_gpu, status_gpu, &err_gpu);    // OpenCV 2.4.x
    sync_PyrLKOpticalFlow->calc(src_grey, dst_grey, prev_pts_flow, cur_pts_flow, status, err);  // OpenCV 3.x

    dst_grey.copyTo(src_grey);

    std::vector<bbox_t> result_bbox_vec;

    if (err.rows == cur_bbox_vec.size() && status.rows == cur_bbox_vec.size())
    {
        for (size_t i = 0; i < cur_bbox_vec.size(); ++i)
        {
            cv::Point2f cur_key_pt = cur_pts_flow.at<cv::Point2f>(0, i);
            cv::Point2f prev_key_pt = prev_pts_flow.at<cv::Point2f>(0, i);

            float moved_x = cur_key_pt.x - prev_key_pt.x;
            float moved_y = cur_key_pt.y - prev_key_pt.y;

            if (abs(moved_x) < 100 && abs(moved_y) < 100 && good_bbox_vec_flags[i])
                if (err.at<float>(0, i) < flow_error && status.at<unsigned char>(0, i) != 0 &&
                    ((float)cur_bbox_vec[i].x + moved_x) > 0 && ((float)cur_bbox_vec[i].y + moved_y) > 0)
                {
                    cur_bbox_vec[i].x += moved_x + 0.5;
                    cur_bbox_vec[i].y += moved_y + 0.5;
                    result_bbox_vec.push_back(cur_bbox_vec[i]);
                }
                else good_bbox_vec_flags[i] = false;
            else good_bbox_vec_flags[i] = false;

            //if(!check_error && !good_bbox_vec_flags[i]) result_bbox_vec.push_back(cur_bbox_vec[i]);
        }
    }

    prev_pts_flow = cur_pts_flow.clone();

    return result_bbox_vec;
}

};

#else

class Tracker_optflow {};

#endif // defined(TRACK_OPTFLOW) && defined(OPENCV)

#ifdef OPENCV

cv::Scalar obj_id_to_color(int obj_id) {
    int const colors[6][3] = { { 1,0,1 },{ 0,0,1 },{ 0,1,1 },{ 0,1,0 },{ 1,1,0 },{ 1,0,0 } };
    int const offset = obj_id * 123457 % 6;
    int const color_scale = 150 + (obj_id * 123457) % 100;
    cv::Scalar color(colors[offset][0], colors[offset][1], colors[offset][2]);
    color *= color_scale;
    return color;
}

class preview_boxes_t {
    enum { frames_history = 30 };   // how long to keep the history saved

struct preview_box_track_t {
    unsigned int track_id, obj_id, last_showed_frames_ago;
    bool current_detection;
    bbox_t bbox;
    cv::Mat mat_obj, mat_resized_obj;
    preview_box_track_t() : track_id(0), obj_id(0), last_showed_frames_ago(frames_history), current_detection(false) {}
};
std::vector<preview_box_track_t> preview_box_track_id;
size_t const preview_box_size, bottom_offset;
bool const one_off_detections;

public:
    preview_boxes_t(size_t _preview_box_size = 100, size_t _bottom_offset = 100, bool _one_off_detections = false) :
        preview_box_size(_preview_box_size), bottom_offset(_bottom_offset), one_off_detections(_one_off_detections)
    {}

void set(cv::Mat src_mat, std::vector<bbox_t> result_vec)
{
    size_t const count_preview_boxes = src_mat.cols / preview_box_size;
    if (preview_box_track_id.size() != count_preview_boxes) preview_box_track_id.resize(count_preview_boxes);

    // increment frames history
    for (auto &i : preview_box_track_id)
        i.last_showed_frames_ago = std::min((unsigned)frames_history, i.last_showed_frames_ago + 1);

    // occupy empty boxes
    for (auto &k : result_vec) {
        bool found = false;
        // find the same (track_id)
        for (auto &i : preview_box_track_id) {
            if (i.track_id == k.track_id) {
                if (!one_off_detections) i.last_showed_frames_ago = 0; // for tracked objects
                found = true;
                break;
            }
        }
        if (!found) {
            // find empty box
            for (auto &i : preview_box_track_id) {
                if (i.last_showed_frames_ago == frames_history) {
                    if (!one_off_detections && k.frames_counter == 0) break; // don't show if obj isn't tracked yet
                    i.track_id = k.track_id;
                    i.obj_id = k.obj_id;
                    i.bbox = k;
                    i.last_showed_frames_ago = 0;
                    break;
                }
            }
        }
    }

    // draw preview box (from old or current frame)
    for (size_t i = 0; i < preview_box_track_id.size(); ++i)
    {
        // get object image
        cv::Mat dst = preview_box_track_id[i].mat_resized_obj;
        preview_box_track_id[i].current_detection = false;

        for (auto &k : result_vec) {
            if (preview_box_track_id[i].track_id == k.track_id) {
                if (one_off_detections && preview_box_track_id[i].last_showed_frames_ago > 0) {
                    preview_box_track_id[i].last_showed_frames_ago = frames_history; break;
                }
                bbox_t b = k;
                cv::Rect r(b.x, b.y, b.w, b.h);
                cv::Rect img_rect(cv::Point2i(0, 0), src_mat.size());
                cv::Rect rect_roi = r & img_rect;
                if (rect_roi.width > 1 || rect_roi.height > 1) {
                    cv::Mat roi = src_mat(rect_roi);
                    cv::resize(roi, dst, cv::Size(preview_box_size, preview_box_size), cv::INTER_NEAREST);
                    preview_box_track_id[i].mat_obj = roi.clone();
                    preview_box_track_id[i].mat_resized_obj = dst.clone();
                    preview_box_track_id[i].current_detection = true;
                    preview_box_track_id[i].bbox = k;
                }
                break;
            }
        }
    }
}

void draw(cv::Mat draw_mat, bool show_small_boxes = false)
{
    // draw preview box (from old or current frame)
    for (size_t i = 0; i < preview_box_track_id.size(); ++i)
    {
        auto &prev_box = preview_box_track_id[i];

        // draw object image
        cv::Mat dst = prev_box.mat_resized_obj;
        if (prev_box.last_showed_frames_ago < frames_history &&
            dst.size() == cv::Size(preview_box_size, preview_box_size))
        {
            cv::Rect dst_rect_roi(cv::Point2i(i * preview_box_size, draw_mat.rows - bottom_offset), dst.size());
            cv::Mat dst_roi = draw_mat(dst_rect_roi);
            dst.copyTo(dst_roi);

            cv::Scalar color = obj_id_to_color(prev_box.obj_id);
            int thickness = (prev_box.current_detection) ? 5 : 1;
            cv::rectangle(draw_mat, dst_rect_roi, color, thickness);

            unsigned int const track_id = prev_box.track_id;
            std::string track_id_str = (track_id > 0) ? std::to_string(track_id) : "";
            putText(draw_mat, track_id_str, dst_rect_roi.tl() - cv::Point2i(-4, 5), cv::FONT_HERSHEY_COMPLEX_SMALL, 0.9, cv::Scalar(0, 0, 0), 2);

            std::string size_str = std::to_string(prev_box.bbox.w) + "x" + std::to_string(prev_box.bbox.h);
            putText(draw_mat, size_str, dst_rect_roi.tl() + cv::Point2i(0, 12), cv::FONT_HERSHEY_COMPLEX_SMALL, 0.8, cv::Scalar(0, 0, 0), 1);

            if (!one_off_detections && prev_box.current_detection) {
                cv::line(draw_mat, dst_rect_roi.tl() + cv::Point2i(preview_box_size, 0),
                    cv::Point2i(prev_box.bbox.x, prev_box.bbox.y + prev_box.bbox.h),
                    color);
            }

            if (one_off_detections && show_small_boxes) {
                cv::Rect src_rect_roi(cv::Point2i(prev_box.bbox.x, prev_box.bbox.y),
                    cv::Size(prev_box.bbox.w, prev_box.bbox.h));
                unsigned int const color_history = (255 * prev_box.last_showed_frames_ago) / frames_history;
                color = cv::Scalar(255 - 3 * color_history, 255 - 2 * color_history, 255 - 1 * color_history);
                if (prev_box.mat_obj.size() == src_rect_roi.size()) {
                    prev_box.mat_obj.copyTo(draw_mat(src_rect_roi));
                }
                cv::rectangle(draw_mat, src_rect_roi, color, thickness);
                putText(draw_mat, track_id_str, src_rect_roi.tl() - cv::Point2i(0, 10), cv::FONT_HERSHEY_COMPLEX_SMALL, 0.8, cv::Scalar(0, 0, 0), 1);
            }
        }
    }
}

};

#endif // OPENCV

//extern "C" {

endif // __cplusplus

/ // C - wrappers YOLODLL_API void create_detector(char const cfg_filename, char const weight_filename, int gpu_id); YOLODLL_API void delete_detector(); YOLODLL_API bbox_t detect_custom(image_t img, float thresh, bool use_mean, int result_size); YOLODLL_API bbox_t detect_resized(image_t img, int init_w, int init_h, float thresh, bool use_mean, int result_size); YOLODLL_API bbox_t detect(image_t img, int result_size); YOLODLL_API image_t load_img(char image_filename); YOLODLL_API void free_img(image_t m);

ifdef __cplusplus

} // extern "C"

static std::shared_ptr c_detector_ptr; static std::vector c_result_vec;

void create_detector(char const cfg_filename, char const weight_filename, int gpu_id) { c_detector_ptr = std::make_shared(cfg_filename, weight_filename, gpu_id); }

void delete_detector() { c_detector_ptr.reset(); }

bbox_t detect_custom(image_t img, float thresh, bool use_mean, int result_size) { c_result_vec = static_cast<Detector>(c_detector_ptr.get())->detect(img, thresh, use_mean); result_size = c_result_vec.size(); return c_result_vec.data(); }

bbox_t detect_resized(image_t img, int init_w, int init_h, float thresh, bool use_mean, int result_size) { c_result_vec = static_cast<Detector>(c_detector_ptr.get())->detect_resized(img, init_w, init_h, thresh, use_mean); result_size = c_result_vec.size(); return c_result_vec.data(); }

bbox_t detect(image_t img, int result_size) { return detect_custom(img, 0.24, true, result_size); }

image_t load_img(char image_filename) { return static_cast<Detector>(c_detector_ptr.get())->load_image(image_filename); } void free_img(image_t m) { static_cast<Detector*>(c_detector_ptr.get())->free_image(m); }

endif // __cplusplus

*/

prety16 commented 6 years ago

this is my .hpp file

AlexeyAB commented 6 years ago

Use this code:

```cpp
Detector detector = Detector(cfg, weight, 0);
detector.nms = 0.45;
image_t img = detector.load_image("dogr.jpg");
vector<bbox_t> vector_b = detector.detect(img, 0.25, false);
```

It is taken from:

prety16 commented 6 years ago

It doesn't work. The value of x is 0 and the value of y is 0 too.

AlexeyAB commented 6 years ago

@prety16 Can you show a screenshot?

prety16 commented 6 years ago

Can you give me your email address? I will send you my DLL, the source code used to build the DLL, and my .weights and .cfg files.

prety16 commented 6 years ago

@AlexeyAB Can you give me your email address? I will send you my DLL, the source code used to build the DLL, and my .weights and .cfg files.

prety16 commented 6 years ago

(screenshot attached)

prety16 commented 6 years ago

The same configuration file and weights detect the plate correctly with darknet.exe, but detection fails through yolo-cpp-dll. I don't know where the problem is.

prety16 commented 6 years ago

(screenshot attached)

prety16 commented 6 years ago

(screenshot attached)

When I call ~Detector(), the error shown in the screenshot above appears.

AlexeyAB commented 6 years ago

> It doesn't work. The value of x is 0 and the value of y is 0 too.

I don't see x = 0 or y = 0 in the screenshots.

> When I call ~Detector(), the error shown in the above picture appears.

You shouldn't call ~Detector() explicitly.
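As an illustration only, a minimal sketch (reusing the `cfg` / `weight` / `filename` variables from the code posted earlier in this thread) of letting the detector clean up when it goes out of scope instead of calling the destructor by hand:

```cpp
{
    Detector detector(cfg, weight, 0);        // constructed inside a scope
    image_t img = detector.load_image(filename);
    std::vector<bbox_t> result_vec = detector.detect(img, 0.2f, false);
    detector.free_image(img);                 // release the loaded image buffer
}   // ~Detector() runs automatically here; no explicit destructor call needed
```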


Use this code:

std::string cfg_file = "cfg/yolov3.cfg";
std::string weights_file = "yolov3.weights";
Detector detector = Detector(cfg_file, weights_file, 0);
detector.nms = 0.45;
image_t img=detector.load_image("dogr.jpg");
vector<bbox_t> vector_b = detector.detect(img, 0.25, false);
for (auto &i : vector_b) {
    std::cout << "obj_id = " << i.obj_id << ",  x = " << i.x << ", y = " << i.y 
        << ", w = " << i.w << ", h = " << i.h
        << std::setprecision(3) << ", prob = " << i.prob << std::endl;
}
getchar();