Closed machine52vision closed 1 year ago
How can I do batch inference? `
cv::cuda::GpuMat gpu_frame; // upload image to GPU gpu_frame.upload(img); cv::Size input_size = cv::Size(input_width, input_height); cv::cuda::GpuMat resized; //cv::cuda::resize(gpu_frame, resized, input_size, 0, 0, cv::INTER_NEAREST); cv::cuda::resize(gpu_frame, resized, input_size); // normalize cv::cuda::GpuMat flt_image; resized.convertTo(flt_image, CV_32FC3, 1.f / 255.f); cv::cuda::subtract(flt_image, cv::Scalar(0.0f, 0.0f, 0.0f), flt_image, cv::noArray(), -1); cv::cuda::divide(flt_image, cv::Scalar(1.0f, 1.0f, 1.0f), flt_image, 1, -1); std::vector<cv::cuda::GpuMat> chw; for (size_t i = 0; i < input_channel; ++i) { chw.emplace_back(cv::cuda::GpuMat(input_size, CV_32FC1, _input_data_device + b*i * input_width * input_height)); } cv::cuda::split(flt_image, chw);
`
Can you write a demo? Thanks very much.
I have solved this problem, thanks!
How can I do batch inference? `
`
Can you write a demo? Thanks very much.