Extracting the coordinates of a ball in an image with OpenCV and Canny edge detection
This article shows how to use OpenCV to detect the edges of a ball in an image with the Canny operator, refine those edges to sub-pixel accuracy with Zernike moments, and finally fit an ellipse to obtain the ball's precise center coordinates. The image is first converted to grayscale and smoothed with a Gaussian filter, then edges are detected with the Canny algorithm. The detected edge points are localized at sub-pixel precision using Zernike moments, and an ellipse fit yields the ball's center. This approach noticeably improves the accuracy of ball detection and suits image-processing tasks that require high-precision localization.
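As a quick overview, here is a minimal sketch of that pipeline using only pixel-level Canny edges and cv::fitEllipse, without the Zernike sub-pixel refinement; the file name ball.bmp is just a placeholder, not the path used in this article.

#include <opencv2/opencv.hpp>
#include <iostream>

int main() {
    // Placeholder input image; replace with your own path
    cv::Mat image = cv::imread("ball.bmp", cv::IMREAD_GRAYSCALE);
    if (image.empty()) return -1;

    // Gaussian smoothing to suppress noise before edge detection
    cv::Mat blurred, edges;
    cv::GaussianBlur(image, blurred, cv::Size(5, 5), 0.3);

    // Canny edge detection (thresholds scaled to the 0-255 range)
    cv::Canny(blurred, edges, 0.1 * 255, 0.4 * 255);

    // Collect the edge pixels and fit an ellipse to get the ball center
    std::vector<cv::Point> edgePoints;
    cv::findNonZero(edges, edgePoints);
    if (edgePoints.size() < 5) return -1;      // fitEllipse needs at least 5 points
    cv::RotatedRect e = cv::fitEllipse(edgePoints);
    std::cout << "center: " << e.center.x << ", " << e.center.y << std::endl;
    return 0;
}

The full implementation, including the Zernike-moment sub-pixel refinement, is listed next.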
#include <opencv2/opencv.hpp>
#include <iostream>
#include <vector>
#include <map>
#include <cmath>

#pragma comment(linker, "/subsystem:\"console\" /entry:\"mainCRTStartup\"")
//#pragma comment(linker, "/subsystem:\"windows\" /entry:\"mainCRTStartup\"")

// Global variables storing the display state
double scale = 1.0;          // current zoom factor
int offset_x = 0;            // X-axis offset
int offset_y = 0;            // Y-axis offset
cv::Point mouse_pt(-1, -1);  // mouse position
bool dragging = false;       // dragging state

// Zernike templates (real part, imaginary part, modulus) sampled on a (2N+1)x(2N+1) window
struct ZernikeTemplates {
    cv::Mat real;
    cv::Mat imag;
    cv::Mat mod;
};

ZernikeTemplates generateZernikeTemplates(int N) {
    ZernikeTemplates zt;
    int size = 2 * N + 1;
    zt.real = cv::Mat::zeros(size, size, CV_64F);
    zt.imag = cv::Mat::zeros(size, size, CV_64F);
    zt.mod  = cv::Mat::zeros(size, size, CV_64F);
    for (int y = -N; y <= N; ++y) {
        for (int x = -N; x <= N; ++x) {
            double rho = sqrt(x * x + y * y) / N;
            double theta = atan2(y, x);
            if (rho <= 1.0) {
                zt.real.at<double>(y + N, x + N) = rho * cos(theta);
                zt.imag.at<double>(y + N, x + N) = rho * sin(theta);
                //zt.mod.at<double>(y + N, x + N) = 1.0;
            }
            else
                zt.mod.at<double>(y + N, x + N) = 1.0;
        }
    }
    // Normalize the templates
    double sum_real = sum(abs(zt.real))[0];
    double sum_imag = sum(abs(zt.imag))[0];
    double sum_mod  = sum(zt.mod)[0];
    zt.real /= sum_real;
    zt.imag /= sum_imag;
    zt.mod  /= sum_mod;
    return zt;
}

// Sub-pixel edge localization with Zernike moments, evaluated at the Canny edge pixels
std::vector<cv::Point2d> zernikeEdgeDetection(const cv::Mat& grayImg, const cv::Mat& edges,
                                              int zernikeN, double minEdgeStrength)
{
    // Generate the Zernike templates
    ZernikeTemplates zt = generateZernikeTemplates(zernikeN);

    // Collect the edge pixels
    std::vector<cv::Point> edgePixels;
    findNonZero(edges, edgePixels);

    // Sub-pixel edge detection
    std::vector<cv::Point2d> subPixelEdges;
    for (const cv::Point& p : edgePixels) {
        int x = p.x;
        int y = p.y;

        // Boundary check
        if (x < zernikeN || x >= grayImg.cols - zernikeN ||
            y < zernikeN || y >= grayImg.rows - zernikeN)
            continue;

        // Extract the ROI around the edge pixel
        cv::Rect roi(x - zernikeN, y - zernikeN, 2 * zernikeN + 1, 2 * zernikeN + 1);
        cv::Mat region = grayImg(roi).clone();
        region.convertTo(region, CV_64F, 1.0 / 255.0);

        // Compute the Zernike moments
        double M_real = sum(region.mul(zt.real))[0];
        double M_imag = sum(region.mul(zt.imag))[0];
        double M_mod  = sum(region.mul(zt.mod))[0];

        // Edge parameters
        double theta = atan2(M_imag, M_real);
        double edgeStrength = sqrt(M_real * M_real + M_imag * M_imag) / M_mod;

        // Offset along the edge normal
        double l = (3 * M_mod - 4 * (M_real * cos(theta) + M_imag * sin(theta)))
                   / (2 * edgeStrength + 1e-6);

        // Validity check
        if (abs(l) > zernikeN * 1.5 || edgeStrength < minEdgeStrength)
            continue;

        // Sub-pixel coordinate (+1 to compensate for MATLAB's 1-based indexing)
        cv::Point2d subPixel(p.x + l * cos(theta) + 1, p.y + l * sin(theta) + 1);
        subPixelEdges.push_back(subPixel);
    }
    return subPixelEdges;
}

// Fit an ellipse to the sub-pixel edge points
cv::RotatedRect fitEllipseRobust(const std::vector<cv::Point2d>& points) {
    // Convert the points to Point2f
    std::vector<cv::Point2f> pointsF;
    int i = 0;
    for (const auto& p : points) {
        i++;
        pointsF.emplace_back(p.x, p.y);
        std::cout << i << " point:" << p.x << " " << p.y << std::endl;
    }
    // OpenCV ellipse fitting
    cv::RotatedRect ellipse = fitEllipse(pointsF);
    return ellipse;
}

int main()
{
    // 1. Read the BMP image
    cv::Mat image = cv::imread("G:/tools/matlab2020b/Zernike/left_4.bmp", cv::IMREAD_COLOR);
    if (image.empty()) {
        std::cout << "Could not open or find the image" << std::endl;
        return -1;
    }
    //cv::imshow("Original image", image);

    // 2. Canny edge detection
    // Parameters
    double canny_thresh[2] = { 0.1, 0.4 };   // Canny thresholds
    int zernike_N = 7;
    double min_edge_strength = 0.1;

    // Convert to grayscale
    cv::Mat gray_img;
    if (image.channels() == 3) {
        cv::cvtColor(image, gray_img, cv::COLOR_BGR2GRAY);
    }
    else {
        gray_img = image.clone();
    }

    // Normalize, similar to MATLAB's im2double
    gray_img.convertTo(gray_img, CV_64F, 1.0 / 255.0);
    // Convert back to CV_8U
    cv::Mat gray_img_8u;
    gray_img.convertTo(gray_img_8u, CV_8U, 255.0);

    // Gaussian smoothing
    cv::Mat blurred;
    cv::GaussianBlur(gray_img_8u, blurred, cv::Size(5, 5), 0.3);   // MATLAB default sigma = 0.6
    //cv::GaussianBlur(gray_img_8u, blurred, cv::Size(3, 3), 0);

    // Otsu thresholding
    cv::Mat binary;
    cv::threshold(blurred, binary, 0, 255, cv::THRESH_BINARY + cv::THRESH_OTSU);

    // Step 1: Canny edge detection
    cv::Mat BW;
    double low_thresh = canny_thresh[0] * 255;
    double high_thresh = canny_thresh[1] * 255;
    cv::Canny(binary * 255, BW, low_thresh, high_thresh, 7);
    cv::imshow("Canny", BW);
    cv::imwrite("G:/tools/matlab2020b/Zernike/Canny.bmp", BW);

    // 3. Zernike-moment sub-pixel edge detection
    std::vector<cv::Point2d> subPixelEdges = zernikeEdgeDetection(binary, BW, zernike_N, min_edge_strength);

    // Ellipse fitting
    if (subPixelEdges.size() < 5) {
        std::cerr << "Not enough points for ellipse fitting" << std::endl;
        return -1;
    }
    cv::RotatedRect ellipse = fitEllipseRobust(subPixelEdges);

    // Display the result
    cv::Mat display = image.clone();
    // Draw the sub-pixel edge points
    for (const auto& p : subPixelEdges) {
        circle(display, p, 0.1, cv::Scalar(0, 255, 255), cv::FILLED);
    }
    // Draw the ellipse
    //cv::ellipse(display, ellipse, cv::Scalar(0, 0, 255), 1);
    // Draw the center
    circle(display, ellipse.center, 0.1, cv::Scalar(0, 0, 255), cv::FILLED);
    std::cout << "-------center-------" << ellipse.center.x << " " << ellipse.center.y << std::endl;

    // Create the display window
    cv::namedWindow("Zoomable Window", cv::WINDOW_AUTOSIZE);
    cv::imshow("Zoomable Window", display);

    // 4. Save the ellipse-fitting result
    cv::imwrite("G:/tools/matlab2020b/Zernike/edges.bmp", display);
    //cv::Mat combined;
    //cv::hconcat(image, CannyImage, combined);
    //cv::imshow("Combined", combined);

    cv::waitKey(0);
}
In MATLAB, pixel coordinates start at 1, while OpenCV indexes from 0, so a difference of 1 between the two results is expected. To align with the MATLAB output, this article adds +1 to the extracted sub-pixel coordinates.
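A tiny illustration of that compensation (the coordinate values here are hypothetical, chosen only to show the +1 offset):

#include <opencv2/core.hpp>
#include <iostream>

int main() {
    // Hypothetical sub-pixel result in OpenCV's 0-based convention
    cv::Point2d cvPt(99.25, 49.75);
    // Add 1 to each coordinate to compare against MATLAB's 1-based result
    cv::Point2d matlabPt(cvPt.x + 1.0, cvPt.y + 1.0);
    std::cout << matlabPt.x << ", " << matlabPt.y << std::endl;   // prints 100.25, 50.75
    return 0;
}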
The CMakeLists.txt used for the project is shown below:
cmake_minimum_required(VERSION 3.0)
project(CanDy_Demo)

set(CMAKE_AUTOUIC ON)
set(CMAKE_AUTOMOC ON)
set(CMAKE_AUTORCC ON)

include_directories(
    ${PROJECT_SOURCE_DIR}
    ${PROJECT_SOURCE_DIR}/opencv4/include
    ${PROJECT_BINARY_DIR}
)

set(srcs main.cpp)
set(hdrs)
set(uis)

add_executable(${PROJECT_NAME} ${srcs} ${hdrs} ${wrapUis})
add_compile_definitions(ARMDCOMBINEDAPI_EXPORTS)
add_compile_definitions(NOMINMAX)

# opencv libs
find_library(OPENCV_WORLD_LIBRARY opencv_world452 ${CMAKE_CURRENT_SOURCE_DIR}/Opencv4/lib)

target_link_libraries(${PROJECT_NAME}
    # opencv lib
    ${OPENCV_WORLD_LIBRARY}
)

if(EXISTS ${CMAKE_SOURCE_DIR}/${DIRS_IN_SRC})
    execute_process(COMMAND ${CMAKE_COMMAND} -E copy_directory
        ${CMAKE_SOURCE_DIR}/${DIRS_IN_SRC}
        ${CMAKE_CURRENT_BINARY_DIR}/${DIRS_IN_SRC})
endif()

#SET_TARGET_PROPERTIES(${PROJECT_NAME} PROPERTIES LINK_FLAGS "/MANIFESTUAC:\"level='requireAdministrator' uiAccess='false'\" /SUBSYSTEM:WINDOWS")