#include "DetectPresenter.h"
#include "rodAndBarDetection_Export.h"
#include "AlgoParamConverter.h"
#include "IHandEyeCalib.h"
#include <memory>
#define _USE_MATH_DEFINES
#include <cmath>
#include <algorithm>
#include <string>
#include <vector>
#include <QPainter>
// NOTE(review): the include targets above the class and all template arguments in this
// file were stripped by a text-mangling step; they have been reconstructed from usage.
// TODO confirm each reconstructed type against DetectPresenter.h / the algo headers.

namespace {

// Threshold below which a vector is considered degenerate (zero length).
constexpr double kVectorNormEpsilon = 1e-6;

// Convenience constructor wrapper for HECPoint3D.
HECPoint3D MakeHecPoint(double x, double y, double z)
{
    return HECPoint3D(x, y, z);
}

// Standard 3D cross product a x b.
HECPoint3D CrossProduct(const HECPoint3D& a, const HECPoint3D& b)
{
    return HECPoint3D(
        a.y * b.z - a.z * b.y,
        a.z * b.x - a.x * b.z,
        a.x * b.y - a.y * b.x);
}

// Standard 3D dot product a . b.
double DotProduct(const HECPoint3D& a, const HECPoint3D& b)
{
    return a.x * b.x + a.y * b.y + a.z * b.z;
}

// Normalizes v to unit length in place.
// Returns false (leaving v untouched) when the norm is below kVectorNormEpsilon.
bool NormalizeInPlace(HECPoint3D& v)
{
    const double norm = v.norm();
    if (norm < kVectorNormEpsilon) {
        return false;
    }
    v = v / norm;
    return true;
}

// Flips pairs of frame axes according to the UI-selected invert mode.
// Flipping exactly two axes keeps the frame right-handed.
//   1: invert X and Y;  2: invert X and Z;  3: invert Y and Z;  0/other: no-op.
// Precondition: dirVectors holds exactly three axes (X, Y, Z).
void ApplyDirVectorInvert(std::vector<HECPoint3D>& dirVectors, int dirVectorInvert)
{
    switch (dirVectorInvert) {
    case 1:
        dirVectors[0] = dirVectors[0] * (-1.0);
        dirVectors[1] = dirVectors[1] * (-1.0);
        break;
    case 2:
        dirVectors[0] = dirVectors[0] * (-1.0);
        dirVectors[2] = dirVectors[2] * (-1.0);
        break;
    case 3:
        dirVectors[1] = dirVectors[1] * (-1.0);
        dirVectors[2] = dirVectors[2] * (-1.0);
        break;
    case 0:
    default:
        break;
    }
}

// Builds an orthonormal right-handed frame {X, Y, Z} from an X-axis seed and a
// Z-axis seed (Gram-Schmidt: Z is made orthogonal to X, then Y = Z x X, and Z is
// recomputed as X x Y). Returns false if any seed or intermediate axis is degenerate,
// in which case dirVectors is left unchanged.
bool BuildRightHandedFrameFromXZ(const HECPoint3D& xSeed,
                                 const HECPoint3D& zSeed,
                                 std::vector<HECPoint3D>& dirVectors)
{
    HECPoint3D xAxis = xSeed;
    HECPoint3D zAxis = zSeed;
    if (!NormalizeInPlace(xAxis) || !NormalizeInPlace(zAxis)) {
        return false;
    }
    // Remove the X component from Z so the axes are orthogonal.
    zAxis = zAxis - xAxis * DotProduct(xAxis, zAxis);
    if (!NormalizeInPlace(zAxis)) {
        return false;
    }
    HECPoint3D yAxis = CrossProduct(zAxis, xAxis);
    if (!NormalizeInPlace(yAxis)) {
        return false;
    }
    // Recompute Z from the final X and Y to guarantee a right-handed frame.
    zAxis = CrossProduct(xAxis, yAxis);
    if (!NormalizeInPlace(zAxis)) {
        return false;
    }
    dirVectors = { xAxis, yAxis, zAxis };
    return true;
}

// Assembles a rotation matrix whose COLUMNS are the given X/Y/Z axes.
// Precondition: dirVectors holds exactly three axes.
HECRotationMatrix BuildRotationMatrixFromAxes(const std::vector<HECPoint3D>& dirVectors)
{
    HECRotationMatrix rotation;
    rotation.at(0, 0) = dirVectors[0].x;
    rotation.at(0, 1) = dirVectors[1].x;
    rotation.at(0, 2) = dirVectors[2].x;
    rotation.at(1, 0) = dirVectors[0].y;
    rotation.at(1, 1) = dirVectors[1].y;
    rotation.at(1, 2) = dirVectors[2].y;
    rotation.at(2, 0) = dirVectors[0].z;
    rotation.at(2, 1) = dirVectors[1].z;
    rotation.at(2, 2) = dirVectors[2].z;
    return rotation;
}

// Maps the UI's integer euler-order code (10..15) onto the hand-eye library enum.
// Unsupported codes fall back to ZYX with a warning.
HECEulerOrder ToHandEyeEulerOrder(int eulerOrder)
{
    switch (eulerOrder) {
    case 10: return HECEulerOrder::XYZ;
    case 11: return HECEulerOrder::ZYX;
    case 12: return HECEulerOrder::ZXY;
    case 13: return HECEulerOrder::YXZ;
    case 14: return HECEulerOrder::YZX;
    case 15: return HECEulerOrder::XZY;
    default:
        LOG_WARNING("Unsupported euler order %d, fallback to 11(sZYX)\n", eulerOrder);
        return HECEulerOrder::ZYX;
    }
}

// Splits a row-major 4x4 homogeneous calibration matrix into the 3x3 rotation R
// and 3x1 translation T expected by the hand-eye library.
HECCalibResult ToHandEyeCalibResult(const double clibMatrix[16])
{
    HECCalibResult calibResult;
    for (int i = 0; i < 3; ++i) {
        for (int j = 0; j < 3; ++j) {
            calibResult.R.at(i, j) = clibMatrix[i * 4 + j];
        }
        calibResult.T.at(i) = clibMatrix[i * 4 + 3];
    }
    return calibResult;
}

} // namespace

DetectPresenter::DetectPresenter(/* args */)
{
    LOG_DEBUG("DetectPresenter Init algo ver: %s\n", wd_rodAndBarDetectionVersion());
}

DetectPresenter::~DetectPresenter()
{
}

// Returns the version string reported by the rod/bar detection algorithm library.
QString DetectPresenter::GetAlgoVersion()
{
    return QString(wd_rodAndBarDetectionVersion());
}

// Runs the rod positioning pipeline on one camera's laser-line scan:
//   1. optionally dumps the raw scan for debugging,
//   2. converts the scan to the algorithm's XYZ layout,
//   3. calls sx_rodPositioning,
//   4. renders a projection image with rod axes / segments overlaid,
//   5. transforms each rod pose from eye to robot coordinates (hand-eye calib)
//      and fills detectionResult.
//
// @param cameraIndex      index of the camera the scan came from
// @param laserLines       raw laser-line scan data (one vector per scan line)
// @param algorithmParams  UI-level algorithm parameters (converted via AlgoParamConverter)
// @param debugParam       debug switches (dump point cloud / image, detail logging)
// @param dataLoader       helper used to save and convert the scan data
// @param clibMatrix       row-major 4x4 hand-eye calibration matrix
// @param eulerOrder       UI euler-order code (see ToHandEyeEulerOrder)
// @param dirVectorInvert  axis-pair invert mode (see ApplyDirVectorInvert)
// @param detectionResult  [out] image, per-rod positions and rod info
// @return SUCCESS, or an ERR_CODE on invalid input / algorithm failure
int DetectPresenter::DetectRod(
    int cameraIndex,
    std::vector<std::vector<SVzNLLaserPoint>>& laserLines,  // TODO confirm element type
    const VrAlgorithmParams& algorithmParams,
    const VrDebugParam& debugParam,
    LaserDataLoader& dataLoader,
    const double clibMatrix[16],
    int eulerOrder,
    int dirVectorInvert,
    DetectionResult& detectionResult)
{
    if (laserLines.empty()) {
        LOG_WARNING("No laser lines data available\n");
        return ERR_CODE(DEV_DATA_INVALID);
    }

    // Fetch the plane-calibration parameters for the current camera, if present.
    VrCameraPlaneCalibParam cameraCalibParamValue;
    const VrCameraPlaneCalibParam* cameraCalibParam = nullptr;
    if (algorithmParams.planeCalibParam.GetCameraCalibParam(cameraIndex, cameraCalibParamValue)) {
        cameraCalibParam = &cameraCalibParamValue;
    }

    // Save debug data. Timestamp format: YYYYMMDDHHMMSS.
    std::string timeStamp = CVrDateUtils::GetNowTime();
    if (debugParam.enableDebug && debugParam.savePointCloud) {
        LOG_INFO("[Algo Thread] Debug mode is enabled, saving point cloud data\n");
        std::string fileName = debugParam.debugOutputPath + "/Laserline_"
            + std::to_string(cameraIndex) + "_" + timeStamp + ".txt";
        // Persist the raw scan in the unified format.
        dataLoader.SaveLaserScanData(fileName, laserLines, laserLines.size(), 0.0, 0, 0);
    }

    int nRet = SUCCESS;

    // Convert to the XYZ layout the algorithm expects.
    std::vector<std::vector<SVzNL3DPosition>> xyzData;  // TODO confirm element type
    int convertResult = dataLoader.ConvertToSVzNL3DPosition(laserLines, xyzData);
    if (convertResult != SUCCESS || xyzData.empty()) {
        LOG_WARNING("Failed to convert data to XYZ format or no XYZ data available\n");
        return ERR_CODE(DEV_DATA_INVALID);
    }

    // Translate UI-level parameters into algorithm parameters.
    SSX_rodParam rodParam = AlgoParamConverter::ToAlgoParam(algorithmParams.rodParam);
    SSG_cornerParam cornerParam = AlgoParamConverter::ToAlgoParam(algorithmParams.cornerParam);
    SSG_treeGrowParam growParam = AlgoParamConverter::ToAlgoParam(algorithmParams.growParam);
    SSG_outlierFilterParam filterParam = AlgoParamConverter::ToAlgoParam(algorithmParams.filterParam);

    // Build the plane calibration parameter (cameraCalibParam may be null).
    SSG_planeCalibPara poseCalibPara = AlgoParamConverter::ToAlgoPlaneCalibParam(cameraCalibParam);

    if (debugParam.enableDebug && debugParam.printDetailLog) {
        AlgoParamConverter::LogAlgoParams("[Algo Thread]", rodParam, cornerParam,
                                          filterParam, growParam, clibMatrix);
    }

    int errCode = 0;
    CVrTimeUtils oTimeUtils;
    LOG_DEBUG("before sx_rodPositioning \n");
    // Run the rod positioning algorithm.
    std::vector<SSX_rodInfo> rodInfo;  // TODO confirm result element type
    sx_rodPositioning(
        xyzData,
        poseCalibPara,
        cornerParam,
        filterParam,
        growParam,
        rodParam,
        rodInfo,
        &errCode);
    LOG_DEBUG("after sx_rodPositioning \n");
    LOG_INFO("sx_rodPositioning: detected %zu rods, err=%d runtime=%.3fms\n",
             rodInfo.size(), errCode, oTimeUtils.GetElapsedTimeInMilliSec());
    ERR_CODE_RETURN(errCode);

    // Hand-eye calibration instance with its paired C-API destroy function.
    std::unique_ptr<IHandEyeCalib, decltype(&DestroyHandEyeCalibInstance)> handEyeCalib(
        CreateHandEyeCalibInstance(), DestroyHandEyeCalibInstance);
    if (!handEyeCalib) {
        LOG_ERROR("Failed to create HandEyeCalib instance\n");
        return ERR_CODE(DEV_NOT_FIND);
    }
    const HECCalibResult calibResult = ToHandEyeCalibResult(clibMatrix);
    const HECEulerOrder hecEulerOrder = ToHandEyeEulerOrder(eulerOrder);

    // Build detection result: generate the point-cloud image.
    // 1. Collect every rod center for visualization.
    std::vector<std::vector<SVzNL3DPoint>> objOps;
    std::vector<SVzNL3DPoint> rodCenters;
    for (const auto& rod : rodInfo) {
        SVzNL3DPoint pt;
        pt.x = rod.center.x;
        pt.y = rod.center.y;
        pt.z = rod.center.z;
        rodCenters.push_back(pt);
    }
    if (!rodCenters.empty()) {
        objOps.push_back(rodCenters);
    }

    // Project the point cloud into an image (10 cm boundary margin).
    detectionResult.image =
        PointCloudImageUtils::GeneratePointCloudRetPointImage(xyzData, objOps, 100.0);

    // Overlay each rod's axial direction line on the image.
    if (!detectionResult.image.isNull() && !rodInfo.empty()) {
        QPainter painter(&detectionResult.image);
        painter.setRenderHint(QPainter::Antialiasing);

        // Compute the point-cloud extents (same method as PointCloudImageUtils).
        double xMin = 1e10, xMax = -1e10, yMin = 1e10, yMax = -1e10;
        for (const auto& line : xyzData) {
            for (const auto& pt : line) {
                // Points with near-zero depth are invalid and excluded from the bounds.
                if (pt.pt3D.z < 1e-4) continue;
                xMin = std::min(xMin, (double)pt.pt3D.x);
                xMax = std::max(xMax, (double)pt.pt3D.x);
                yMin = std::min(yMin, (double)pt.pt3D.y);
                yMax = std::max(yMax, (double)pt.pt3D.y);
            }
        }

        // Expand the bounds (same as GeneratePointCloudRetPointImage).
        double margin = 100.0; // 10cm = 100mm
        xMin -= margin;
        xMax += margin;
        yMin -= margin;
        yMax += margin;

        // Same image parameters as GeneratePointCloudRetPointImage.
        int imgRows = detectionResult.image.height();
        int imgCols = detectionResult.image.width();
        int x_skip = 50;
        int y_skip = 50;

        // Projection scale (same method as PointCloudImageUtils).
        double y_rows = (double)(imgRows - y_skip * 2);
        double x_cols = (double)(imgCols - x_skip * 2);
        double x_scale = (xMax - xMin) / x_cols;
        double y_scale = (yMax - yMin) / y_rows;
        // Use a uniform scale on both axes to preserve aspect ratio.
        double scale = std::max(x_scale, y_scale);
        x_scale = scale;
        y_scale = scale;

        // Offsets that center the cloud in the image (consistent with PointCloudImageUtils).
        double cloudWidth = (xMax - xMin) / scale;
        double cloudHeight = (yMax - yMin) / scale;
        int x_offset = x_skip + (int)((x_cols - cloudWidth) / 2);
        int y_offset = y_skip + (int)((y_rows - cloudHeight) / 2);

        // Lambda mapping a 3D point to (centered) image coordinates.
        auto toImageCoord = [&](const SVzNL3DPoint& pt) -> QPointF {
            int px = (int)((pt.x - xMin) / x_scale + x_offset);
            int py = (int)((pt.y - yMin) / y_scale + y_offset);
            return QPointF(px, py);
        };

        // Draw each rod's axial direction line.
        for (const auto& rod : rodInfo) {
            // Axial direction line (red), extended +/- len mm from the center.
            double len = 60;
            QPen axisPen(QColor(255, 0, 0), 2);
            painter.setPen(axisPen);
            SVzNL3DPoint pt0 = { rod.center.x - len * rod.axialDir.x,
                                 rod.center.y - len * rod.axialDir.y,
                                 rod.center.z - len * rod.axialDir.z };
            SVzNL3DPoint pt1 = { rod.center.x + len * rod.axialDir.x,
                                 rod.center.y + len * rod.axialDir.y,
                                 rod.center.z + len * rod.axialDir.z };
            QPointF imgPt0 = toImageCoord(pt0);
            QPointF imgPt1 = toImageCoord(pt1);
            painter.drawLine(imgPt0, imgPt1);

            // Start-to-end segment (green).
            QPen segPen(QColor(0, 255, 0), 2);
            painter.setPen(segPen);
            SVzNL3DPoint startPt = { rod.startPt.x, rod.startPt.y, rod.startPt.z };
            SVzNL3DPoint endPt = { rod.endPt.x, rod.endPt.y, rod.endPt.z };
            QPointF imgStart = toImageCoord(startPt);
            QPointF imgEnd = toImageCoord(endPt);
            painter.drawLine(imgStart, imgEnd);
        }
    }

    // Convert detection results into the UI display format (robot-arm coordinates).
    for (size_t i = 0; i < rodInfo.size(); i++) {
        const auto& rod = rodInfo[i];

        // Transform the rod center from the algorithm (eye) frame to the robot frame.
        SVzNL3DPoint targetObj;
        targetObj.x = rod.center.x;
        targetObj.y = rod.center.y;
        targetObj.z = rod.center.z;
        HECPoint3D eyePoint = MakeHecPoint(targetObj.x, targetObj.y, targetObj.z);
        HECPoint3D robotPoint;
        handEyeCalib->TransformPoint(calibResult.R, calibResult.T, eyePoint, robotPoint);

        // Build the rod's orientation frame in the eye coordinate system.
        std::vector<HECPoint3D> dirVectorsEye;
        bool validPose = BuildRightHandedFrameFromXZ(
            MakeHecPoint(rod.axialDir.x, rod.axialDir.y, rod.axialDir.z),
            MakeHecPoint(rod.normalDir.x, rod.normalDir.y, rod.normalDir.z),
            dirVectorsEye);
        if (!validPose) {
            LOG_WARNING("[Algo Thread] Rod %zu has invalid axial/normal direction, use zero pose\n", i);
            // Fall back to the identity frame.
            dirVectorsEye = { HECPoint3D(1.0, 0.0, 0.0),
                              HECPoint3D(0.0, 1.0, 0.0),
                              HECPoint3D(0.0, 0.0, 1.0) };
        }
        ApplyDirVectorInvert(dirVectorsEye, dirVectorInvert);

        // Rotate each frame axis into the robot coordinate system.
        std::vector<HECPoint3D> dirVectorsRobot(3);
        for (int axisIdx = 0; axisIdx < 3; ++axisIdx) {
            handEyeCalib->RotatePoint(calibResult.R, dirVectorsEye[axisIdx], dirVectorsRobot[axisIdx]);
        }
        const HECRotationMatrix robotPoseR = BuildRotationMatrixFromAxes(dirVectorsRobot);
        HECEulerAngles robotEuler;
        handEyeCalib->RotationMatrixToEuler(robotPoseR, hecEulerOrder, robotEuler);
        double rollDeg = 0.0;
        double pitchDeg = 0.0;
        double yawDeg = 0.0;
        robotEuler.toDegrees(rollDeg, pitchDeg, yawDeg);

        // Position record (robot-arm coordinates).
        RodPosition pos;
        pos.roll = rollDeg;
        pos.pitch = pitchDeg;
        pos.yaw = yawDeg;
        pos.x = robotPoint.x; // robot-arm X
        pos.y = robotPoint.y; // robot-arm Y
        pos.z = robotPoint.z; // robot-arm Z
        detectionResult.positions.push_back(pos);

        // Rod info record: center in robot coords, directions/endpoints in eye coords.
        RodInfo info;
        info.centerX = robotPoint.x;
        info.centerY = robotPoint.y;
        info.centerZ = robotPoint.z;
        info.axialDirX = rod.axialDir.x;
        info.axialDirY = rod.axialDir.y;
        info.axialDirZ = rod.axialDir.z;
        info.normalDirX = rod.normalDir.x;
        info.normalDirY = rod.normalDir.y;
        info.normalDirZ = rod.normalDir.z;
        info.startPtX = rod.startPt.x;
        info.startPtY = rod.startPt.y;
        info.startPtZ = rod.startPt.z;
        info.endPtX = rod.endPt.x;
        info.endPtY = rod.endPt.y;
        info.endPtZ = rod.endPt.z;
        detectionResult.rodInfoList.push_back(info);

        if (debugParam.enableDebug && debugParam.printDetailLog) {
            LOG_INFO("[Algo Thread] Rod %zu Eye Coords: X=%.2f, Y=%.2f, Z=%.2f\n",
                     i, rod.center.x, rod.center.y, rod.center.z);
            LOG_INFO("[Algo Thread] Rod %zu Robot Coords: X=%.2f, Y=%.2f, Z=%.2f, RPY=%.2f, %.2f, %.2f\n",
                     i, pos.x, pos.y, pos.z, pos.roll, pos.pitch, pos.yaw);
            LOG_INFO("[Algo Thread] Rod %zu Axial Dir: X=%.3f, Y=%.3f, Z=%.3f\n",
                     i, rod.axialDir.x, rod.axialDir.y, rod.axialDir.z);
            LOG_INFO("[Algo Thread] Rod %zu Normal Dir: X=%.3f, Y=%.3f, Z=%.3f\n",
                     i, rod.normalDir.x, rod.normalDir.y, rod.normalDir.z);
        }
    }

    if (debugParam.enableDebug && debugParam.saveDebugImage) {
        // Timestamp format: YYYYMMDDHHMMSS (same stamp as the point-cloud dump above).
        std::string fileName = debugParam.debugOutputPath + "/Image_"
            + std::to_string(cameraIndex) + "_" + timeStamp + ".png";
        LOG_INFO("[Algo Thread] Debug image saved image : %s\n", fileName.c_str());
        // Save the detection result image.
        if (!detectionResult.image.isNull()) {
            QString qFileName = QString::fromStdString(fileName);
            detectionResult.image.save(qFileName);
        } else {
            LOG_WARNING("[Algo Thread] No valid image to save for debug\n");
        }
    }

    return nRet;
}