mark检测初步版本

This commit is contained in:
yiyi 2025-12-20 16:18:12 +08:00
parent 73e3e7099d
commit 43a8f4766c
32 changed files with 2240 additions and 669 deletions

View File

@ -1,7 +1,7 @@
TEMPLATE = subdirs
# 拆包项目
SUBDIRS += ./GrabBag/GrabBag.pro
# SUBDIRS += ./GrabBag/GrabBag.pro
# 撕裂项目
# SUBDIRS += ./BeltTearing/BeltTearing.pro
@ -10,7 +10,7 @@ SUBDIRS += ./GrabBag/GrabBag.pro
# SUBDIRS += ./LapWeld/LapWeld.pro
#工件定位
SUBDIRS += ./Workpiece/Workpiece.pro
# SUBDIRS += ./Workpiece/Workpiece.pro
# 颗粒尺寸检测
# SUBDIRS += ./ParticleSize/ParticleSize.pro

View File

@ -10,6 +10,9 @@
#include <cstring>
#include <sstream>
#include <iomanip>
#include <future>
#include "VrTimeUtils.h"
#include "PathManager.h"
BinocularMarkPresenter::BinocularMarkPresenter(QObject *parent)
: QObject(parent)
@ -20,6 +23,8 @@ BinocularMarkPresenter::BinocularMarkPresenter(QObject *parent)
, m_bAutoStartDetection(false)
, m_bIsDetecting(false)
, m_bSendContinuousResult(false)
, m_bSendContinuousImage(false)
, m_bIsProcessingImage(false)
, m_bLeftImageReady(false)
, m_bRightImageReady(false)
, m_nServerPort(5901)
@ -56,7 +61,7 @@ BinocularMarkPresenter::BinocularMarkPresenter(QObject *parent)
m_markInfo.dictType = 1; // DICT_6x6
// 初始化Board配置默认值
m_boardInfo.totalBoardNum = 1; // 默认1个board
m_boardInfo.totalBoardNum = 10; // 默认10个board
m_boardInfo.boardIdInterval = 8; // 间隔8
m_boardInfo.boardChaucoIDNum = 4; // 3x3有4个charuco
@ -185,21 +190,47 @@ int BinocularMarkPresenter::tryConnectCameras()
return ERR_CODE(APP_ERR_EXEC);
}
LOG_INFO("Found %zu cameras\n", deviceList.size());
// 打开左相机
ret = m_pLeftCamera->OpenDeviceByIndex(m_nLeftCameraIndex);
if (ret != SUCCESS)
LOG_INFO("Found %zu cameras:\n", deviceList.size());
for (size_t i = 0; i < deviceList.size(); i++)
{
LOG_ERROR("Failed to open left camera (index: %u)\n", m_nLeftCameraIndex);
delete m_pLeftCamera;
delete m_pRightCamera;
m_pLeftCamera = nullptr;
m_pRightCamera = nullptr;
return ERR_CODE(APP_ERR_EXEC);
LOG_INFO(" [%zu] SN: %s, Model: %s, Name: %s\n",
i,
deviceList[i].serialNumber.c_str(),
deviceList[i].modelName.c_str(),
deviceList[i].displayName.c_str());
}
LOG_INFO("Left camera opened (index: %u)\n", m_nLeftCameraIndex);
// 打开左相机(优先使用序列号)
if (!m_strLeftCameraSerial.empty())
{
LOG_INFO("Opening left camera by serial number: %s\n", m_strLeftCameraSerial.c_str());
ret = m_pLeftCamera->OpenDevice(m_strLeftCameraSerial);
if (ret != SUCCESS)
{
LOG_ERROR("Failed to open left camera by serial number: %s\n", m_strLeftCameraSerial.c_str());
delete m_pLeftCamera;
delete m_pRightCamera;
m_pLeftCamera = nullptr;
m_pRightCamera = nullptr;
return ERR_CODE(APP_ERR_EXEC);
}
LOG_INFO("Left camera opened successfully (SN: %s)\n", m_strLeftCameraSerial.c_str());
}
else
{
LOG_INFO("Opening left camera by index: %u\n", m_nLeftCameraIndex);
ret = m_pLeftCamera->OpenDeviceByIndex(m_nLeftCameraIndex);
if (ret != SUCCESS)
{
LOG_ERROR("Failed to open left camera (index: %u)\n", m_nLeftCameraIndex);
delete m_pLeftCamera;
delete m_pRightCamera;
m_pLeftCamera = nullptr;
m_pRightCamera = nullptr;
return ERR_CODE(APP_ERR_EXEC);
}
LOG_INFO("Left camera opened successfully (index: %u)\n", m_nLeftCameraIndex);
}
// 初始化右相机SDK每个设备对象需要独立初始化
ret = m_pRightCamera->InitSDK();
@ -214,20 +245,39 @@ int BinocularMarkPresenter::tryConnectCameras()
return ERR_CODE(APP_ERR_EXEC);
}
// 打开右相机
ret = m_pRightCamera->OpenDeviceByIndex(m_nRightCameraIndex);
if (ret != SUCCESS)
// 打开右相机(优先使用序列号)
if (!m_strRightCameraSerial.empty())
{
LOG_ERROR("Failed to open right camera (index: %u)\n", m_nRightCameraIndex);
m_pLeftCamera->CloseDevice();
delete m_pLeftCamera;
delete m_pRightCamera;
m_pLeftCamera = nullptr;
m_pRightCamera = nullptr;
return ERR_CODE(APP_ERR_EXEC);
LOG_INFO("Opening right camera by serial number: %s\n", m_strRightCameraSerial.c_str());
ret = m_pRightCamera->OpenDevice(m_strRightCameraSerial);
if (ret != SUCCESS)
{
LOG_ERROR("Failed to open right camera by serial number: %s\n", m_strRightCameraSerial.c_str());
m_pLeftCamera->CloseDevice();
delete m_pLeftCamera;
delete m_pRightCamera;
m_pLeftCamera = nullptr;
m_pRightCamera = nullptr;
return ERR_CODE(APP_ERR_EXEC);
}
LOG_INFO("Right camera opened successfully (SN: %s)\n", m_strRightCameraSerial.c_str());
}
else
{
LOG_INFO("Opening right camera by index: %u\n", m_nRightCameraIndex);
ret = m_pRightCamera->OpenDeviceByIndex(m_nRightCameraIndex);
if (ret != SUCCESS)
{
LOG_ERROR("Failed to open right camera (index: %u)\n", m_nRightCameraIndex);
m_pLeftCamera->CloseDevice();
delete m_pLeftCamera;
delete m_pRightCamera;
m_pLeftCamera = nullptr;
m_pRightCamera = nullptr;
return ERR_CODE(APP_ERR_EXEC);
}
LOG_INFO("Right camera opened successfully (index: %u)\n", m_nRightCameraIndex);
}
LOG_INFO("Right camera opened (index: %u)\n", m_nRightCameraIndex);
// 设置左相机参数
m_pLeftCamera->SetExposureTime(m_fExposureTime);
@ -239,35 +289,7 @@ int BinocularMarkPresenter::tryConnectCameras()
m_pRightCamera->SetGain(m_fGain);
m_pRightCamera->SetTriggerMode(false); // 连续采集模式
// 启动相机采集(使 CaptureImage 可用)
ret = m_pLeftCamera->StartAcquisition();
if (ret != SUCCESS)
{
LOG_ERROR("Failed to start left camera acquisition\n");
m_pLeftCamera->CloseDevice();
m_pRightCamera->CloseDevice();
delete m_pLeftCamera;
delete m_pRightCamera;
m_pLeftCamera = nullptr;
m_pRightCamera = nullptr;
return ERR_CODE(APP_ERR_EXEC);
}
ret = m_pRightCamera->StartAcquisition();
if (ret != SUCCESS)
{
LOG_ERROR("Failed to start right camera acquisition\n");
m_pLeftCamera->StopAcquisition();
m_pLeftCamera->CloseDevice();
m_pRightCamera->CloseDevice();
delete m_pLeftCamera;
delete m_pRightCamera;
m_pLeftCamera = nullptr;
m_pRightCamera = nullptr;
return ERR_CODE(APP_ERR_EXEC);
}
LOG_INFO("Cameras connected and acquisition started successfully\n");
LOG_INFO("Cameras connected successfully\n");
return SUCCESS;
}
@ -335,7 +357,7 @@ int BinocularMarkPresenter::startDetection()
return ERR_CODE(APP_ERR_EXEC);
}
// 注册左相机图像回调使用lambda捕获this指针
// 1. 注册左相机图像回调使用lambda捕获this指针
int ret = m_pLeftCamera->RegisterImageCallback(
[this](const GalaxyImageData& imageData) {
this->leftCameraCallback(imageData);
@ -347,7 +369,7 @@ int BinocularMarkPresenter::startDetection()
return ERR_CODE(APP_ERR_EXEC);
}
// 注册右相机图像回调
// 2. 注册右相机图像回调
ret = m_pRightCamera->RegisterImageCallback(
[this](const GalaxyImageData& imageData) {
this->rightCameraCallback(imageData);
@ -360,13 +382,34 @@ int BinocularMarkPresenter::startDetection()
return ERR_CODE(APP_ERR_EXEC);
}
// 3. 启动左相机采集
ret = m_pLeftCamera->StartAcquisition();
if (ret != SUCCESS)
{
LOG_ERROR("Failed to start left camera acquisition\n");
m_pLeftCamera->UnregisterImageCallback();
m_pRightCamera->UnregisterImageCallback();
return ERR_CODE(APP_ERR_EXEC);
}
// 4. 启动右相机采集
ret = m_pRightCamera->StartAcquisition();
if (ret != SUCCESS)
{
LOG_ERROR("Failed to start right camera acquisition\n");
m_pLeftCamera->StopAcquisition();
m_pLeftCamera->UnregisterImageCallback();
m_pRightCamera->UnregisterImageCallback();
return ERR_CODE(APP_ERR_EXEC);
}
m_bIsDetecting.store(true);
m_bAutoStartDetection.store(false); // 清除自动启动标志
// 启动采集线程
// 5. 启动采集线程
m_captureThread = std::thread(&BinocularMarkPresenter::captureThreadFunc, this);
LOG_INFO("Detection started\n");
LOG_INFO("Detection started (callbacks registered and cameras acquisition started)\n");
return SUCCESS;
}
@ -378,7 +421,23 @@ void BinocularMarkPresenter::stopDetection()
m_bIsDetecting.store(false);
m_bAutoStartDetection.store(false); // 清除自动启动标志(用户主动停止)
// 取消注册图像回调(保持相机采集状态,以便 CaptureImage 可用)
// 1. 等待线程结束
if (m_captureThread.joinable())
{
m_captureThread.join();
}
// 2. 停止相机采集
if (m_pLeftCamera != nullptr)
{
m_pLeftCamera->StopAcquisition();
}
if (m_pRightCamera != nullptr)
{
m_pRightCamera->StopAcquisition();
}
// 3. 取消注册图像回调
if (m_pLeftCamera != nullptr)
{
m_pLeftCamera->UnregisterImageCallback();
@ -388,13 +447,7 @@ void BinocularMarkPresenter::stopDetection()
m_pRightCamera->UnregisterImageCallback();
}
// 等待线程结束
if (m_captureThread.joinable())
{
m_captureThread.join();
}
LOG_INFO("Detection stopped\n");
LOG_INFO("Detection stopped (cameras acquisition stopped and callbacks unregistered)\n");
}
void BinocularMarkPresenter::leftCameraCallback(const GalaxyImageData& imageData)
@ -487,6 +540,23 @@ void BinocularMarkPresenter::captureThreadFunc()
if (leftReady && rightReady)
{
// 帧率限制限制为5fps200ms间隔
auto now = std::chrono::steady_clock::now();
auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(now - m_lastImageSendTime).count();
if (elapsed < 200) {
LOG_DEBUG("Drop frame: frame rate limit (elapsed: %lld ms)\n", elapsed);
// 清除就绪标志
{
QMutexLocker locker(&m_imageMutex);
m_bLeftImageReady = false;
m_bRightImageReady = false;
}
continue;
}
// 更新发送时间
m_lastImageSendTime = now;
// 处理图像并进行检测
processImages();
@ -500,7 +570,7 @@ void BinocularMarkPresenter::captureThreadFunc()
else
{
// 等待图像
std::this_thread::sleep_for(std::chrono::milliseconds(10));
std::this_thread::sleep_for(std::chrono::milliseconds(2));
}
}
@ -509,19 +579,23 @@ void BinocularMarkPresenter::captureThreadFunc()
void BinocularMarkPresenter::processImages()
{
// 直接在互斥锁中转换为 cv::Mat深拷贝,避免多余的临时变量和二次拷贝
// 在互斥锁中转换为 cv::Mat深拷贝
cv::Mat leftImage, rightImage;
{
QMutexLocker locker(&m_imageMutex);
// 直接转换为 cv::MatimageDataToMat 会执行深拷贝)
leftImage = imageDataToMat(m_leftImageData);
rightImage = imageDataToMat(m_rightImageData);
}
// 如果启用持续图像流,直接发送图像,不做检测
if (m_bSendContinuousImage) {
emit detectionResult(std::vector<SWD_charuco3DMark>(), leftImage, rightImage, 0);
LOG_DEBUG("Sent continuous image stream\n");
return;
}
// 调用算法进行检测
std::vector<SWD_charuco3DMark> marks;
// 使用新的算法接口
wd_BQ_getCharuco3DMark(
leftImage,
@ -576,10 +650,17 @@ cv::Mat BinocularMarkPresenter::imageDataToMat(const GalaxyImageData& imageData)
bool BinocularMarkPresenter::loadConfiguration(const QString& configFilePath)
{
m_calibrationFilePath = PathManager::GetInstance().GetAppConfigDirectory() + "/StereoCamera.xml";
QFile file(configFilePath);
if (!file.open(QIODevice::ReadOnly | QIODevice::Text))
{
LOG_WARN("Failed to open config file: %s\n", configFilePath.toStdString().c_str());
// 即使配置文件不存在,也要加载标定文件
if (!loadCalibration(m_calibrationFilePath))
{
LOG_WARN("Failed to load calibration file: %s (will use default values)\n", m_calibrationFilePath.toStdString().c_str());
}
return false;
}
@ -588,8 +669,7 @@ bool BinocularMarkPresenter::loadConfiguration(const QString& configFilePath)
int errorLine, errorColumn;
if (!doc.setContent(&file, &errorMsg, &errorLine, &errorColumn))
{
LOG_ERROR("Failed to parse config XML: %s (Line: %d, Column: %d)\n",
errorMsg.toStdString().c_str(), errorLine, errorColumn);
LOG_ERROR("Failed to parse config XML: %s (Line: %d, Column: %d)\n", errorMsg.toStdString().c_str(), errorLine, errorColumn);
file.close();
return false;
}
@ -625,22 +705,36 @@ bool BinocularMarkPresenter::loadConfiguration(const QString& configFilePath)
while (!camera.isNull())
{
int index = camera.attribute("index", "0").toInt();
QString serialNumber = camera.attribute("serialNumber", "");
double exposureTime = camera.attribute("exposureTime", "10000.0").toDouble();
double gain = camera.attribute("gain", "1.0").toDouble();
LOG_DEBUG(" Camera[%d]: exposureTime=%.2f, gain=%.2f\n", index, exposureTime, gain);
LOG_DEBUG(" Camera[%d]: serialNumber=%s, exposureTime=%.2f, gain=%.2f\n",
index, serialNumber.toStdString().c_str(), exposureTime, gain);
if (index == 0)
{
m_nLeftCameraIndex = 0;
m_strLeftCameraSerial = serialNumber.toStdString();
m_fExposureTime = exposureTime;
m_fGain = gain;
LOG_DEBUG(" -> 设置为左相机\n");
LOG_DEBUG(" -> 设置为左相机");
if (!m_strLeftCameraSerial.empty())
{
LOG_DEBUG(" (序列号: %s)", m_strLeftCameraSerial.c_str());
}
LOG_DEBUG("\n");
}
else if (index == 1)
{
m_nRightCameraIndex = 1;
LOG_DEBUG(" -> 设置为右相机\n");
m_strRightCameraSerial = serialNumber.toStdString();
LOG_DEBUG(" -> 设置为右相机");
if (!m_strRightCameraSerial.empty())
{
LOG_DEBUG(" (序列号: %s)", m_strRightCameraSerial.c_str());
}
LOG_DEBUG("\n");
// 假设左右相机使用相同的曝光和增益
}
@ -704,27 +798,25 @@ bool BinocularMarkPresenter::loadConfiguration(const QString& configFilePath)
LOG_DEBUG(" [AlgorithmParams] 未找到,使用默认值\n");
}
// 读取标定文件路径(必须配置
// 读取标定文件路径(可选,如果没有则使用默认路径
QDomElement calibFileElem = root.firstChildElement("CalibrationFile");
if (calibFileElem.isNull())
QString calibFile;
if (!calibFileElem.isNull())
{
LOG_ERROR("Missing <CalibrationFile> element in config file!\n");
LOG_ERROR("Please add: <CalibrationFile path=\"path/to/StereoCamera.xml\" />\n");
return false;
calibFile = calibFileElem.attribute("path", "");
LOG_DEBUG(" [CalibrationFile] path (原始) = %s\n", calibFile.toStdString().c_str());
}
QString calibFile = calibFileElem.attribute("path", "");
LOG_DEBUG(" [CalibrationFile] path (原始) = %s\n", calibFile.toStdString().c_str());
// 如果没有配置或配置为空,使用默认路径
if (calibFile.isEmpty())
{
LOG_ERROR("CalibrationFile path is empty in config file!\n");
LOG_ERROR("Please set path attribute: <CalibrationFile path=\"../Calib/Mark_13度/StereoCamera.xml\" />\n");
return false;
QFileInfo configFileInfo(configFilePath);
calibFile = configFileInfo.dir().absoluteFilePath("StereoCamera.xml");
LOG_DEBUG(" [CalibrationFile] 使用默认路径: %s\n", calibFile.toStdString().c_str());
}
// 如果是相对路径,则相对于配置文件所在目录
if (QFileInfo(calibFile).isRelative())
else if (QFileInfo(calibFile).isRelative())
{
QFileInfo configFileInfo(configFilePath);
calibFile = configFileInfo.dir().absoluteFilePath(calibFile);
@ -733,11 +825,13 @@ bool BinocularMarkPresenter::loadConfiguration(const QString& configFilePath)
LOG_DEBUG("========== 配置文件读取完成 ==========\n\n");
// 加载标定文件
// 保存标定文件路径
m_calibrationFilePath = calibFile;
// 加载标定文件(如果文件不存在,只警告不返回失败)
if (!loadCalibration(calibFile))
{
LOG_ERROR("Failed to load calibration file: %s\n", calibFile.toStdString().c_str());
return false;
LOG_WARN("Failed to load calibration file: %s (will use default values)\n", calibFile.toStdString().c_str());
}
LOG_INFO("Configuration loaded from %s\n", configFilePath.toStdString().c_str());
@ -977,6 +1071,7 @@ void BinocularMarkPresenter::handleSingleDetection(const TCPClient* pClient)
memset(&leftImageData, 0, sizeof(GalaxyImageData));
memset(&rightImageData, 0, sizeof(GalaxyImageData));
// 顺序取图
int ret1 = m_pLeftCamera->CaptureImage(leftImageData, 5000);
int ret2 = m_pRightCamera->CaptureImage(rightImageData, 5000);
@ -988,10 +1083,17 @@ void BinocularMarkPresenter::handleSingleDetection(const TCPClient* pClient)
return;
}
cv::Mat leftImage = imageDataToMat(leftImageData);
cv::Mat rightImage = imageDataToMat(rightImageData);
LOG_DEBUG("Image captured: left=%dx%d, right=%dx%d\n",
leftImage.cols, leftImage.rows, rightImage.cols, rightImage.rows);
// 并行转换图像
auto leftFuture = std::async(std::launch::async, [&]() {
return imageDataToMat(leftImageData);
});
auto rightFuture = std::async(std::launch::async, [&]() {
return imageDataToMat(rightImageData);
});
cv::Mat leftImage = leftFuture.get();
cv::Mat rightImage = rightFuture.get();
LOG_DEBUG("Image captured: left=%dx%d, right=%dx%d\n", leftImage.cols, leftImage.rows, rightImage.cols, rightImage.rows);
delete[] leftImageData.pData;
delete[] rightImageData.pData;
@ -1001,7 +1103,7 @@ void BinocularMarkPresenter::handleSingleDetection(const TCPClient* pClient)
m_cameraMatrixR, m_distCoeffsR, m_R1, m_R2, m_P1, m_P2,
m_Q, m_markInfo, m_boardInfo, m_disparityOffset, marks);
int errCode = marks.empty() ? -1 : 0;
int errCode = marks.empty() ? ERR_CODE(DEV_RESULT_EMPTY) : 0;
emit singleDetectionResult(pClient, marks, leftImage, rightImage, errCode);
LOG_INFO("Single detection completed, detected %zu marks\n", marks.size());
}
@ -1016,13 +1118,14 @@ void BinocularMarkPresenter::handleSingleImage(const TCPClient* pClient)
return;
}
CVrTimeUtils oTimeUtils;
GalaxyImageData leftImageData, rightImageData;
memset(&leftImageData, 0, sizeof(GalaxyImageData));
memset(&rightImageData, 0, sizeof(GalaxyImageData));
// 顺序取图
int ret1 = m_pLeftCamera->CaptureImage(leftImageData, 5000);
int ret2 = m_pRightCamera->CaptureImage(rightImageData, 5000);
if (ret1 != 0 || ret2 != 0) {
LOG_WARN("Failed to capture images: left=%d, right=%d\n", ret1, ret2);
if (leftImageData.pData) delete[] leftImageData.pData;
@ -1031,17 +1134,24 @@ void BinocularMarkPresenter::handleSingleImage(const TCPClient* pClient)
return;
}
cv::Mat leftImage = imageDataToMat(leftImageData);
cv::Mat rightImage = imageDataToMat(rightImageData);
LOG_DEBUG("Image captured: left=%dx%d, right=%dx%d\n",
leftImage.cols, leftImage.rows, rightImage.cols, rightImage.rows);
// 并行转换图像
auto leftFuture = std::async(std::launch::async, [&]() {
return imageDataToMat(leftImageData);
});
auto rightFuture = std::async(std::launch::async, [&]() {
return imageDataToMat(rightImageData);
});
cv::Mat leftImage = leftFuture.get();
cv::Mat rightImage = rightFuture.get();
delete[] leftImageData.pData;
delete[] rightImageData.pData;
double timeCost = oTimeUtils.GetElapsedTimeInMilliSec();
emit singleImageResult(pClient, leftImage, rightImage);
LOG_INFO("Single image sent, left=%dx%d, right=%dx%d\n",
leftImage.cols, leftImage.rows, rightImage.cols, rightImage.rows);
LOG_DEBUG("Image captured: left=%dx%d, right=%dx%d, getimage:%.3f ms time:%.3f ms\n",
leftImage.cols, leftImage.rows, rightImage.cols, rightImage.rows, timeCost, oTimeUtils.GetElapsedTimeInMilliSec());
}
void BinocularMarkPresenter::handleStartWork()
@ -1074,6 +1184,37 @@ void BinocularMarkPresenter::handleStopWork()
}
}
void BinocularMarkPresenter::handleStartContinuousImage()
{
LOG_INFO("Handle start continuous image request\n");
if (!m_bIsDetecting) {
int ret = startDetection();
if (ret != 0) {
LOG_ERROR("Failed to start detection, error code: %d\n", ret);
return;
}
LOG_INFO("Detection thread started\n");
}
m_bSendContinuousImage = true;
LOG_INFO("Continuous image sending enabled\n");
}
void BinocularMarkPresenter::handleStopContinuousImage()
{
    LOG_INFO("Handle stop continuous image request\n");

    // Turn off streaming mode before tearing the pipeline down.
    m_bSendContinuousImage = false;
    LOG_INFO("Continuous image sending disabled\n");

    // Stopping the continuous image stream unconditionally stops the
    // detection thread as well.
    if (m_bIsDetecting)
    {
        stopDetection();
        LOG_INFO("Detection thread stopped\n");
    }
}
void BinocularMarkPresenter::handleSetExposureTime(double exposureTime)
{
LOG_INFO("Handle set exposure time: %.2f\n", exposureTime);
@ -1105,3 +1246,154 @@ void BinocularMarkPresenter::handleSetGain(double gain)
LOG_INFO("Gain updated successfully\n");
}
void BinocularMarkPresenter::handleSetLeftExposureTime(double exposureTime)
{
    LOG_INFO("Handle set left camera exposure time: %.2f\n", exposureTime);

    // Guard clause: the device object may not exist before connection.
    if (m_pLeftCamera == nullptr)
    {
        LOG_WARN("Left camera not initialized\n");
        return;
    }

    m_pLeftCamera->SetExposureTime(exposureTime);
    LOG_INFO("Left camera exposure time updated successfully\n");
}
void BinocularMarkPresenter::handleSetRightExposureTime(double exposureTime)
{
    LOG_INFO("Handle set right camera exposure time: %.2f\n", exposureTime);

    // Guard clause: the device object may not exist before connection.
    if (m_pRightCamera == nullptr)
    {
        LOG_WARN("Right camera not initialized\n");
        return;
    }

    m_pRightCamera->SetExposureTime(exposureTime);
    LOG_INFO("Right camera exposure time updated successfully\n");
}
void BinocularMarkPresenter::handleSetLeftGain(double gain)
{
    LOG_INFO("Handle set left camera gain: %.2f\n", gain);

    // Guard clause: the device object may not exist before connection.
    if (m_pLeftCamera == nullptr)
    {
        LOG_WARN("Left camera not initialized\n");
        return;
    }

    m_pLeftCamera->SetGain(gain);
    LOG_INFO("Left camera gain updated successfully\n");
}
void BinocularMarkPresenter::handleSetRightGain(double gain)
{
    LOG_INFO("Handle set right camera gain: %.2f\n", gain);

    // Guard clause: the device object may not exist before connection.
    if (m_pRightCamera == nullptr)
    {
        LOG_WARN("Right camera not initialized\n");
        return;
    }

    m_pRightCamera->SetGain(gain);
    LOG_INFO("Right camera gain updated successfully\n");
}
void BinocularMarkPresenter::handleGetCameraInfo(const TCPClient* pClient, const QString& camera)
{
LOG_INFO("Handle get camera info: %s\n", camera.toStdString().c_str());
IGalaxyDevice* pCamera = nullptr;
if (camera == "left") {
pCamera = m_pLeftCamera;
} else if (camera == "right") {
pCamera = m_pRightCamera;
}
if (!pCamera) {
LOG_WARN("Camera not initialized: %s\n", camera.toStdString().c_str());
emit cameraInfoResult(pClient, camera, "", "", "", 0.0, 0.0);
return;
}
// 获取相机信息
GalaxyDeviceInfo deviceInfo;
int ret = pCamera->GetDeviceInfo(deviceInfo);
if (ret != 0) {
LOG_WARN("Failed to get camera info: %s\n", camera.toStdString().c_str());
emit cameraInfoResult(pClient, camera, "", "", "", 0.0, 0.0);
return;
}
// 获取当前曝光时间和增益
double exposureTime = 0.0;
double gain = 0.0;
pCamera->GetExposureTime(exposureTime);
pCamera->GetGain(gain);
// 发送相机信息
emit cameraInfoResult(pClient,
camera,
QString::fromStdString(deviceInfo.serialNumber),
QString::fromStdString(deviceInfo.modelName),
QString::fromStdString(deviceInfo.displayName),
exposureTime,
gain);
LOG_INFO("Camera info sent: %s, SN=%s, Model=%s, Exposure=%.2f, Gain=%.2f\n",
camera.toStdString().c_str(),
deviceInfo.serialNumber.c_str(),
deviceInfo.modelName.c_str(),
exposureTime,
gain);
}
void BinocularMarkPresenter::handleGetCalibration(const TCPClient* pClient)
{
LOG_INFO("Handle get calibration request\n");
if (m_calibrationFilePath.isEmpty()) {
LOG_WARN("Calibration file path not set\n");
emit calibrationMatrixResult(pClient, "");
return;
}
QFile file(m_calibrationFilePath);
if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) {
LOG_WARN("Failed to open calibration file: %s\n", m_calibrationFilePath.toStdString().c_str());
emit calibrationMatrixResult(pClient, "");
return;
}
QTextStream in(&file);
QString calibrationXml = in.readAll();
file.close();
emit calibrationMatrixResult(pClient, calibrationXml);
LOG_INFO("Calibration matrix sent from: %s\n", m_calibrationFilePath.toStdString().c_str());
}
void BinocularMarkPresenter::handleSetCalibration(const TCPClient* pClient, const QString& calibrationXml)
{
LOG_INFO("Handle set calibration request : %s\n", m_calibrationFilePath.toStdString().c_str());
if (m_calibrationFilePath.isEmpty()) {
LOG_ERROR("Calibration file path not set\n");
return;
}
QFile file(m_calibrationFilePath);
if (!file.open(QIODevice::WriteOnly | QIODevice::Text)) {
LOG_ERROR("Failed to open calibration file for writing: %s\n", m_calibrationFilePath.toStdString().c_str());
return;
}
QTextStream out(&file);
out << calibrationXml;
file.close();
LOG_INFO("Calibration matrix saved to: %s\n", m_calibrationFilePath.toStdString().c_str());
// 重新加载标定文件
if (loadCalibration(m_calibrationFilePath)) {
LOG_INFO("Calibration reloaded successfully\n");
} else {
LOG_ERROR("Failed to reload calibration\n");
}
}

View File

@ -131,17 +131,71 @@ public slots:
void handleStopWork();
/**
* @brief
* @brief
*/
void handleStartContinuousImage();
/**
* @brief
*/
void handleStopContinuousImage();
/**
* @brief
* @param exposureTime
*/
void handleSetExposureTime(double exposureTime);
/**
* @brief
* @brief
* @param gain
*/
void handleSetGain(double gain);
/**
* @brief
* @param exposureTime
*/
void handleSetLeftExposureTime(double exposureTime);
/**
* @brief
* @param exposureTime
*/
void handleSetRightExposureTime(double exposureTime);
/**
* @brief
* @param gain
*/
void handleSetLeftGain(double gain);
/**
* @brief
* @param gain
*/
void handleSetRightGain(double gain);
/**
* @brief
* @param pClient
* @param camera ("left""right")
*/
void handleGetCameraInfo(const TCPClient* pClient, const QString& camera);
/**
* @brief
* @param pClient
*/
void handleGetCalibration(const TCPClient* pClient);
/**
* @brief
* @param pClient
* @param calibrationXml XML内容
*/
void handleSetCalibration(const TCPClient* pClient, const QString& calibrationXml);
signals:
/**
* @brief
@ -171,6 +225,31 @@ signals:
cv::Mat leftImage,
cv::Mat rightImage);
/**
* @brief
* @param pClient
* @param camera ("left""right")
* @param serialNumber
* @param modelName
* @param displayName
* @param exposureTime
* @param gain
*/
void cameraInfoResult(const TCPClient* pClient,
const QString& camera,
const QString& serialNumber,
const QString& modelName,
const QString& displayName,
double exposureTime,
double gain);
/**
* @brief
* @param pClient
* @param calibrationXml XML字符串
*/
void calibrationMatrixResult(const TCPClient* pClient, const QString& calibrationXml);
/**
* @brief
* @param connected true-false-
@ -228,6 +307,9 @@ private:
// 采集控制
std::atomic<bool> m_bIsDetecting; // 是否正在检测
std::atomic<bool> m_bSendContinuousResult; // 是否发送持续检测结果
std::atomic<bool> m_bSendContinuousImage; // 是否发送持续图像流
std::atomic<bool> m_bIsProcessingImage; // 是否正在处理图像(用于丢帧)
std::chrono::steady_clock::time_point m_lastImageSendTime; // 上次发送图像时间(用于限制帧率)
std::thread m_captureThread; // 采集线程
// 图像缓存
@ -241,6 +323,8 @@ private:
quint16 m_nServerPort; // TCP服务器端口
unsigned int m_nLeftCameraIndex; // 左相机索引
unsigned int m_nRightCameraIndex; // 右相机索引
std::string m_strLeftCameraSerial; // 左相机序列号(优先使用)
std::string m_strRightCameraSerial; // 右相机序列号(优先使用)
// 相机参数
double m_fExposureTime; // 曝光时间
@ -261,6 +345,9 @@ private:
SWD_BQ_CharucoMarkInfo m_markInfo; // Mark板配置
SWD_BQ_MarkBoardInfo m_boardInfo; // Board配置
double m_disparityOffset; // 视差偏移
// 标定文件路径
QString m_calibrationFilePath; // 标定文件完整路径
};
#endif // BINOCULARMARKPRESENTER_H

View File

@ -5,6 +5,7 @@
#include <QFile>
#include <QTextStream>
#include <cstring>
#include <future>
// 静态实例指针
BinocularMarkTcpProtocol* BinocularMarkTcpProtocol::s_pInstance = nullptr;
@ -15,6 +16,7 @@ BinocularMarkTcpProtocol::BinocularMarkTcpProtocol(QObject *parent)
, m_pHeartbeatTimer(nullptr)
, m_nHeartbeatInterval(30)
, m_nTcpPort(5901)
, m_bIsProcessingFrame(false)
{
s_pInstance = this;
@ -185,13 +187,13 @@ QByteArray BinocularMarkTcpProtocol::buildFrame(const QByteArray& jsonData)
// 帧头8字节
frame.append(FRAME_HEADER, FRAME_HEADER_SIZE);
// 写入数据长度8位字符串格式
// 写入数据长度8位字符串格式64位无符号整数
quint64 dataLength = jsonData.size();
char lengthStr[9]; // 8位数字 + '\0'
#ifdef _WIN32
sprintf_s(lengthStr, "%08u", dataLength);
sprintf_s(lengthStr, 9, "%08llu", dataLength);
#else
sprintf(lengthStr, "%08u", dataLength);
snprintf(lengthStr, 9, "%08llu", dataLength);
#endif
frame.append(lengthStr, FRAME_LENGTH_SIZE);
@ -293,12 +295,18 @@ MarkMessageType BinocularMarkTcpProtocol::parseMessageType(const QString& msgTyp
return MarkMessageType::CMD_START_WORK;
else if (msgTypeStr == "cmd_stop_work")
return MarkMessageType::CMD_STOP_WORK;
else if (msgTypeStr == "cmd_start_continuous_image")
return MarkMessageType::CMD_START_CONTINUOUS_IMAGE;
else if (msgTypeStr == "cmd_stop_continuous_image")
return MarkMessageType::CMD_STOP_CONTINUOUS_IMAGE;
else if (msgTypeStr == "cmd_set_calibration")
return MarkMessageType::CMD_SET_CALIBRATION;
else if (msgTypeStr == "cmd_set_exposure_time")
return MarkMessageType::CMD_SET_EXPOSURE_TIME;
else if (msgTypeStr == "cmd_set_gain")
return MarkMessageType::CMD_SET_GAIN;
else if (msgTypeStr == "cmd_get_camera_info")
return MarkMessageType::CMD_GET_CAMERA_INFO;
else if (msgTypeStr == "cmd_response")
return MarkMessageType::CMD_RESPONSE;
else
@ -356,10 +364,22 @@ void BinocularMarkTcpProtocol::handleJsonMessage(const TCPClient* pClient, const
handleStopWorkCommand(pClient, jsonObj);
break;
case MarkMessageType::CMD_START_CONTINUOUS_IMAGE:
handleStartContinuousImageCommand(pClient, jsonObj);
break;
case MarkMessageType::CMD_STOP_CONTINUOUS_IMAGE:
handleStopContinuousImageCommand(pClient, jsonObj);
break;
case MarkMessageType::CMD_SET_CALIBRATION:
handleSetCalibrationCommand(pClient, jsonObj);
break;
case MarkMessageType::CMD_GET_CALIBRATION:
handleGetCalibrationCommand(pClient, jsonObj);
break;
case MarkMessageType::CMD_SET_EXPOSURE_TIME:
handleSetExposureTimeCommand(pClient, jsonObj);
break;
@ -368,6 +388,10 @@ void BinocularMarkTcpProtocol::handleJsonMessage(const TCPClient* pClient, const
handleSetGainCommand(pClient, jsonObj);
break;
case MarkMessageType::CMD_GET_CAMERA_INFO:
handleGetCameraInfoCommand(pClient, jsonObj);
break;
case MarkMessageType::HEARTBEAT_ACK:
// 心跳应答,不做处理
break;
@ -429,6 +453,24 @@ void BinocularMarkTcpProtocol::handleStopWorkCommand(const TCPClient* pClient, c
sendCommandResponse(pClient, "stop_work", true, 0, "OK");
}
void BinocularMarkTcpProtocol::handleStartContinuousImageCommand(const TCPClient* pClient, const QJsonObject& jsonObj)
{
    // This command carries no payload; jsonObj is intentionally unused.
    (void)jsonObj;

    // Ask the presenter to begin the continuous image stream.
    emit startContinuousImageRequested();

    // Acknowledge immediately; streaming itself starts asynchronously.
    sendCommandResponse(pClient, "start_continuous_image", true, 0, "OK");
}
void BinocularMarkTcpProtocol::handleStopContinuousImageCommand(const TCPClient* pClient, const QJsonObject& jsonObj)
{
    // This command carries no payload; jsonObj is intentionally unused.
    (void)jsonObj;

    // Ask the presenter to stop the continuous image stream.
    emit stopContinuousImageRequested();

    // Acknowledge immediately; teardown happens asynchronously.
    sendCommandResponse(pClient, "stop_continuous_image", true, 0, "OK");
}
void BinocularMarkTcpProtocol::handleSetCalibrationCommand(const TCPClient* pClient, const QJsonObject& jsonObj)
{
QString calibrationXml = jsonObj["calibration_xml"].toString();
@ -438,23 +480,34 @@ void BinocularMarkTcpProtocol::handleSetCalibrationCommand(const TCPClient* pCli
return;
}
// 保存到StereoCamera.xml文件
QFile file("StereoCamera.xml");
if (!file.open(QIODevice::WriteOnly | QIODevice::Text)) {
QString errorMsg = QString("Failed to open file: %1").arg(file.errorString());
sendCommandResponse(pClient, "set_calibration", false, -2, errorMsg.toStdString().c_str());
LOG_ERROR("Failed to open StereoCamera.xml: %s\n", file.errorString().toStdString().c_str());
return;
}
// 发送信号给Presenter处理
emit setCalibrationRequested(pClient, calibrationXml);
QTextStream out(&file);
out << calibrationXml;
file.close();
LOG_INFO("Calibration matrix saved to StereoCamera.xml\n");
// 发送命令应答
sendCommandResponse(pClient, "set_calibration", true, 0, "OK");
}
// Forward a "get calibration" request to the presenter. The calibration XML
// is sent back to this client later via sendCalibrationMatrixResponse().
// NOTE(review): jsonObj carries no payload for this command and is unused here.
void BinocularMarkTcpProtocol::handleGetCalibrationCommand(const TCPClient* pClient, const QJsonObject& jsonObj)
{
emit getCalibrationRequested(pClient);
}
/**
 * @brief Send the calibration XML back to a client as a
 *        "calibration_matrix_response" JSON frame.
 * @param pClient        Target client connection.
 * @param calibrationXml Raw calibration XML text (may be empty on failure).
 *
 * Improvement over the previous version: bail out up front when the TCP
 * server is not running, instead of building the JSON document and frame
 * only to discard them.
 */
void BinocularMarkTcpProtocol::sendCalibrationMatrixResponse(const TCPClient* pClient, const QString& calibrationXml)
{
    // No server, no destination — avoid doing the serialization work.
    if (m_pTcpServer == nullptr)
    {
        return;
    }

    QJsonObject resultObj;
    resultObj["msg_type"] = "calibration_matrix_response";
    resultObj["timestamp"] = QDateTime::currentMSecsSinceEpoch();
    resultObj["calibration_xml"] = calibrationXml;

    QJsonDocument doc(resultObj);
    QByteArray jsonData = doc.toJson(QJsonDocument::Compact);
    QByteArray frameData = buildFrame(jsonData);

    m_pTcpServer->SendData(pClient, frameData.data(), frameData.size());
}
void BinocularMarkTcpProtocol::handleSetExposureTimeCommand(const TCPClient* pClient, const QJsonObject& jsonObj)
{
double exposureTime = jsonObj["exposure_time"].toDouble();
@ -464,9 +517,20 @@ void BinocularMarkTcpProtocol::handleSetExposureTimeCommand(const TCPClient* pCl
return;
}
emit setExposureTimeRequested(exposureTime);
// 检查是否指定了相机
QString camera = jsonObj["camera"].toString();
if (camera == "left") {
emit setLeftExposureTimeRequested(exposureTime);
LOG_INFO("Left camera exposure time set: %.2f\n", exposureTime);
} else if (camera == "right") {
emit setRightExposureTimeRequested(exposureTime);
LOG_INFO("Right camera exposure time set: %.2f\n", exposureTime);
} else {
// 未指定相机,同时设置左右相机
emit setExposureTimeRequested(exposureTime);
LOG_INFO("Both cameras exposure time set: %.2f\n", exposureTime);
}
LOG_INFO("Exposure time set: %.2f\n", exposureTime);
sendCommandResponse(pClient, "set_exposure_time", true, 0, "OK");
}
@ -479,9 +543,20 @@ void BinocularMarkTcpProtocol::handleSetGainCommand(const TCPClient* pClient, co
return;
}
emit setGainRequested(gain);
// 检查是否指定了相机
QString camera = jsonObj["camera"].toString();
if (camera == "left") {
emit setLeftGainRequested(gain);
LOG_INFO("Left camera gain set: %.2f\n", gain);
} else if (camera == "right") {
emit setRightGainRequested(gain);
LOG_INFO("Right camera gain set: %.2f\n", gain);
} else {
// 未指定相机,同时设置左右相机
emit setGainRequested(gain);
LOG_INFO("Both cameras gain set: %.2f\n", gain);
}
LOG_INFO("Gain set: %.2f\n", gain);
sendCommandResponse(pClient, "set_gain", true, 0, "OK");
}
@ -527,6 +602,15 @@ void BinocularMarkTcpProtocol::sendMarkResult(const std::vector<SWD_charuco3DMar
const cv::Mat& rightImage,
int errorCode)
{
// 检查是否有图像数据如果没有marks且没有图像说明是持续图像流模式
bool isContinuousImageMode = marks.empty() && (!leftImage.empty() || !rightImage.empty());
// 丢帧策略:如果上一帧还在处理,跳过本次发送
bool expected = false;
if (!m_bIsProcessingFrame.compare_exchange_strong(expected, true)) {
return; // 静默丢帧,不输出日志
}
QJsonObject resultObj;
resultObj["msg_type"] = "mark_result";
resultObj["timestamp"] = QDateTime::currentMSecsSinceEpoch();
@ -546,17 +630,33 @@ void BinocularMarkTcpProtocol::sendMarkResult(const std::vector<SWD_charuco3DMar
}
resultObj["marks"] = marksArray;
#if 0
// 添加图像可选Base64编码
// 并行编码图像
std::future<QString> leftFuture;
std::future<QString> rightFuture;
if (!leftImage.empty())
{
resultObj["left_image"] = imageToBase64(leftImage);
leftFuture = std::async(std::launch::async, [this, leftImage]() {
return imageToBase64(leftImage);
});
}
if (!rightImage.empty())
{
resultObj["right_image"] = imageToBase64(rightImage);
rightFuture = std::async(std::launch::async, [this, rightImage]() {
return imageToBase64(rightImage);
});
}
#endif
// 获取编码结果
if (!leftImage.empty())
{
resultObj["left_image"] = leftFuture.get();
}
if (!rightImage.empty())
{
resultObj["right_image"] = rightFuture.get();
}
QJsonDocument doc(resultObj);
QByteArray jsonData = doc.toJson(QJsonDocument::Compact);
QByteArray frameData = buildFrame(jsonData);
@ -567,6 +667,9 @@ void BinocularMarkTcpProtocol::sendMarkResult(const std::vector<SWD_charuco3DMar
m_pTcpServer->SendAllData(frameData.data(), frameData.size());
}
// 处理完成,清除标志位
m_bIsProcessingFrame = false;
LOG_INFO("Sent mark result, mark_count: %zu, error_code: %d\n", marks.size(), errorCode);
}
@ -624,14 +727,33 @@ void BinocularMarkTcpProtocol::sendImageData(const TCPClient* pClient, const cv:
resultObj["msg_type"] = "image_data";
resultObj["timestamp"] = QDateTime::currentMSecsSinceEpoch();
// 添加图像Base64编码
if (!leftImage.empty())
// 并行编码图像
std::future<QString> leftFuture;
std::future<QString> rightFuture;
bool hasLeft = !leftImage.empty();
bool hasRight = !rightImage.empty();
if (hasLeft)
{
resultObj["left_image"] = imageToBase64(leftImage);
leftFuture = std::async(std::launch::async, [this, &leftImage]() {
return imageToBase64(leftImage);
});
}
if (!rightImage.empty())
if (hasRight)
{
resultObj["right_image"] = imageToBase64(rightImage);
rightFuture = std::async(std::launch::async, [this, &rightImage]() {
return imageToBase64(rightImage);
});
}
// 获取编码结果
if (hasLeft)
{
resultObj["left_image"] = leftFuture.get();
}
if (hasRight)
{
resultObj["right_image"] = rightFuture.get();
}
QJsonDocument doc(resultObj);
@ -649,9 +771,10 @@ void BinocularMarkTcpProtocol::sendImageData(const TCPClient* pClient, const cv:
QString BinocularMarkTcpProtocol::imageToBase64(const cv::Mat& image)
{
// 将cv::Mat编码为JPEG格式
// 将cv::Mat编码为JPEG格式质量设置为70以减少数据量
std::vector<uchar> buf;
cv::imencode(".jpg", image, buf);
std::vector<int> params = {cv::IMWRITE_JPEG_QUALITY, 70};
cv::imencode(".jpg", image, buf, params);
// 转换为Base64
QByteArray ba(reinterpret_cast<const char*>(buf.data()), buf.size());
@ -662,3 +785,53 @@ QString BinocularMarkTcpProtocol::generateClientId(const TCPClient* pClient)
{
return QString::number(reinterpret_cast<qintptr>(pClient));
}
void BinocularMarkTcpProtocol::handleGetCameraInfoCommand(const TCPClient* pClient, const QJsonObject& jsonObj)
{
QString camera = jsonObj["camera"].toString();
if (camera != "left" && camera != "right") {
sendCommandResponse(pClient, "get_camera_info", false, -1, "Invalid camera parameter (must be 'left' or 'right')");
return;
}
// 触发获取相机信息请求
emit getCameraInfoRequested(pClient, camera);
LOG_INFO("Camera info requested: %s\n", camera.toStdString().c_str());
}
void BinocularMarkTcpProtocol::sendCameraInfoResponse(const TCPClient* pClient,
const QString& camera,
const QString& serialNumber,
const QString& modelName,
const QString& displayName,
double exposureTime,
double gain)
{
QJsonObject responseObj;
responseObj["msg_type"] = "camera_info_response";
responseObj["camera"] = camera;
responseObj["serial_number"] = serialNumber;
responseObj["model_name"] = modelName;
responseObj["display_name"] = displayName;
responseObj["exposure_time"] = exposureTime;
responseObj["gain"] = gain;
responseObj["timestamp"] = QDateTime::currentMSecsSinceEpoch();
QJsonDocument doc(responseObj);
QByteArray jsonData = doc.toJson(QJsonDocument::Compact);
QByteArray frameData = buildFrame(jsonData);
if (m_pTcpServer != nullptr && pClient != nullptr)
{
m_pTcpServer->SendData(pClient, frameData.data(), frameData.size());
}
LOG_INFO("Sent camera info response: %s, SN=%s, Model=%s, Exposure=%.2f, Gain=%.2f\n",
camera.toStdString().c_str(),
serialNumber.toStdString().c_str(),
modelName.toStdString().c_str(),
exposureTime,
gain);
}

View File

@ -36,9 +36,13 @@ enum class MarkMessageType
IMAGE_DATA, // 图像数据
CMD_START_WORK, // 开始持续工作命令
CMD_STOP_WORK, // 停止持续工作命令
CMD_START_CONTINUOUS_IMAGE, // 开始持续图像流命令
CMD_STOP_CONTINUOUS_IMAGE, // 停止持续图像流命令
CMD_SET_CALIBRATION, // 设置标定矩阵命令
CMD_GET_CALIBRATION, // 获取标定矩阵命令
CMD_SET_EXPOSURE_TIME, // 设置曝光时间命令
CMD_SET_GAIN, // 设置增益命令
CMD_GET_CAMERA_INFO, // 获取相机信息命令
CMD_RESPONSE = 200, // 命令应答
};
@ -78,6 +82,101 @@ public:
*/
void stopHeartbeat();
signals:
/**
* @brief
*/
void triggerDetection();
/**
* @brief
* @param pClient
*/
void singleDetectionRequested(const TCPClient* pClient);
/**
* @brief
* @param pClient
*/
void singleImageRequested(const TCPClient* pClient);
/**
* @brief
*/
void startWorkRequested();
/**
* @brief
*/
void stopWorkRequested();
/**
* @brief
*/
void startContinuousImageRequested();
/**
* @brief
*/
void stopContinuousImageRequested();
/**
* @brief
* @param exposureTime
*/
void setExposureTimeRequested(double exposureTime);
/**
* @brief
* @param gain
*/
void setGainRequested(double gain);
/**
* @brief
* @param exposureTime
*/
void setLeftExposureTimeRequested(double exposureTime);
/**
* @brief
* @param exposureTime
*/
void setRightExposureTimeRequested(double exposureTime);
/**
* @brief
* @param gain
*/
void setLeftGainRequested(double gain);
/**
* @brief
* @param gain
*/
void setRightGainRequested(double gain);
/**
* @brief
* @param pClient
* @param camera ("left""right")
*/
void getCameraInfoRequested(const TCPClient* pClient, const QString& camera);
/**
* @brief
* @param pClient
*/
void getCalibrationRequested(const TCPClient* pClient);
/**
* @brief
* @param pClient
* @param calibrationXml XML内容
*/
void setCalibrationRequested(const TCPClient* pClient, const QString& calibrationXml);
public slots:
/**
* @brief
* @param marks 3D标记列表
@ -112,45 +211,30 @@ public:
*/
void sendImageData(const TCPClient* pClient, const cv::Mat& leftImage, const cv::Mat& rightImage);
signals:
/**
* @brief
*/
void triggerDetection();
/**
* @brief
* @param pClient
*/
void singleDetectionRequested(const TCPClient* pClient);
/**
* @brief
* @param pClient
*/
void singleImageRequested(const TCPClient* pClient);
/**
* @brief
*/
void startWorkRequested();
/**
* @brief
*/
void stopWorkRequested();
/**
* @brief
* @brief
* @param pClient
* @param camera ("left""right")
* @param serialNumber
* @param modelName
* @param displayName
* @param exposureTime
*/
void setExposureTimeRequested(double exposureTime);
/**
* @brief
* @param gain
*/
void setGainRequested(double gain);
void sendCameraInfoResponse(const TCPClient* pClient,
const QString& camera,
const QString& serialNumber,
const QString& modelName,
const QString& displayName,
double exposureTime,
double gain);
/**
* @brief
* @param pClient
* @param calibrationXml XML字符串
*/
void sendCalibrationMatrixResponse(const TCPClient* pClient, const QString& calibrationXml);
private slots:
/**
@ -249,6 +333,20 @@ private:
*/
void handleStopWorkCommand(const TCPClient* pClient, const QJsonObject& jsonObj);
/**
* @brief
* @param pClient
* @param jsonObj JSON对象
*/
void handleStartContinuousImageCommand(const TCPClient* pClient, const QJsonObject& jsonObj);
/**
* @brief
* @param pClient
* @param jsonObj JSON对象
*/
void handleStopContinuousImageCommand(const TCPClient* pClient, const QJsonObject& jsonObj);
/**
* @brief
* @param pClient
@ -256,6 +354,13 @@ private:
*/
void handleSetCalibrationCommand(const TCPClient* pClient, const QJsonObject& jsonObj);
/**
* @brief
* @param pClient
* @param jsonObj JSON对象
*/
void handleGetCalibrationCommand(const TCPClient* pClient, const QJsonObject& jsonObj);
/**
* @brief
* @param pClient
@ -270,6 +375,13 @@ private:
*/
void handleSetGainCommand(const TCPClient* pClient, const QJsonObject& jsonObj);
/**
* @brief
* @param pClient
* @param jsonObj JSON对象
*/
void handleGetCameraInfoCommand(const TCPClient* pClient, const QJsonObject& jsonObj);
/**
* @brief
* @param pClient
@ -306,6 +418,7 @@ private:
QTimer* m_pHeartbeatTimer; // 心跳定时器
int m_nHeartbeatInterval; // 心跳间隔(秒)
quint16 m_nTcpPort; // TCP服务器端口
std::atomic<bool> m_bIsProcessingFrame; // 是否正在处理帧(用于丢帧)
// 客户端数据缓冲区(用于处理粘包)
QMap<QString, QByteArray> m_clientBuffers; // 客户端ID -> 数据缓冲区

View File

@ -8,7 +8,7 @@
#define BINOCULAR_MARK_COMPANY_NAME "VisionTech"
#define BINOCULAR_MARK_COPYRIGHT "Copyright (C) 2025"
#define BINOCULAR_MARK_VERSION_STRING "1.0.0"
#define BINOCULAR_MARK_VERSION_BUILD "1"
#define BINOCULAR_MARK_VERSION_BUILD "3"
#endif // VERSION_H

View File

@ -3,10 +3,20 @@
<!-- TCP服务器配置 -->
<ServerConfig port="5901" />
<!-- 相机配置 -->
<!-- 相机配置
index: 相机索引(0=左相机, 1=右相机)
serialNumber: 相机序列号(可选,推荐使用以确保相机不会对调)
name: 相机名称(用于日志显示)
exposureTime: 曝光时间(微秒)
gain: 增益值
注意:
1. 如果配置了serialNumber,将优先使用序列号打开相机
2. 如果未配置serialNumber,将使用index索引打开相机
3. 可以通过枚举设备获取相机序列号 -->
<Cameras>
<Camera index="0" name="LeftCamera" exposureTime="10000.0" gain="1.0" />
<Camera index="1" name="RightCamera" exposureTime="10000.0" gain="1.0" />
<Camera index="0" serialNumber="" name="LeftCamera" exposureTime="10000.0" gain="1.0" />
<Camera index="1" serialNumber="" name="RightCamera" exposureTime="10000.0" gain="1.0" />
</Cameras>
<!-- 双目标定文件路径(必填项)

View File

@ -34,6 +34,10 @@ int main(int argc, char *argv[])
const char* algoVersion = wd_charuco3DMarkVersion();
LOG_INFO("Algorithm Version: %s\n", algoVersion);
// 注册自定义类型用于信号槽
qRegisterMetaType<std::vector<SWD_charuco3DMark>>("std::vector<SWD_charuco3DMark>");
qRegisterMetaType<cv::Mat>("cv::Mat");
// 创建Presenter和TCP协议处理对象
BinocularMarkPresenter presenter;
BinocularMarkTcpProtocol tcpProtocol;
@ -42,13 +46,24 @@ int main(int argc, char *argv[])
QObject::connect(&presenter, &BinocularMarkPresenter::detectionResult, &tcpProtocol, &BinocularMarkTcpProtocol::sendMarkResult);
QObject::connect(&presenter, &BinocularMarkPresenter::singleDetectionResult, &tcpProtocol, &BinocularMarkTcpProtocol::sendSingleDetectionResult);
QObject::connect(&presenter, &BinocularMarkPresenter::singleImageResult, &tcpProtocol, &BinocularMarkTcpProtocol::sendImageData);
QObject::connect(&presenter, &BinocularMarkPresenter::cameraInfoResult, &tcpProtocol, &BinocularMarkTcpProtocol::sendCameraInfoResponse);
QObject::connect(&presenter, &BinocularMarkPresenter::calibrationMatrixResult, &tcpProtocol, &BinocularMarkTcpProtocol::sendCalibrationMatrixResponse);
QObject::connect(&tcpProtocol, &BinocularMarkTcpProtocol::singleDetectionRequested, &presenter, &BinocularMarkPresenter::handleSingleDetection);
QObject::connect(&tcpProtocol, &BinocularMarkTcpProtocol::singleImageRequested, &presenter, &BinocularMarkPresenter::handleSingleImage);
QObject::connect(&tcpProtocol, &BinocularMarkTcpProtocol::startWorkRequested, &presenter, &BinocularMarkPresenter::handleStartWork);
QObject::connect(&tcpProtocol, &BinocularMarkTcpProtocol::stopWorkRequested, &presenter, &BinocularMarkPresenter::handleStopWork);
QObject::connect(&tcpProtocol, &BinocularMarkTcpProtocol::startContinuousImageRequested, &presenter, &BinocularMarkPresenter::handleStartContinuousImage);
QObject::connect(&tcpProtocol, &BinocularMarkTcpProtocol::stopContinuousImageRequested, &presenter, &BinocularMarkPresenter::handleStopContinuousImage);
QObject::connect(&tcpProtocol, &BinocularMarkTcpProtocol::setExposureTimeRequested, &presenter, &BinocularMarkPresenter::handleSetExposureTime);
QObject::connect(&tcpProtocol, &BinocularMarkTcpProtocol::setGainRequested, &presenter, &BinocularMarkPresenter::handleSetGain);
QObject::connect(&tcpProtocol, &BinocularMarkTcpProtocol::setLeftExposureTimeRequested, &presenter, &BinocularMarkPresenter::handleSetLeftExposureTime);
QObject::connect(&tcpProtocol, &BinocularMarkTcpProtocol::setRightExposureTimeRequested, &presenter, &BinocularMarkPresenter::handleSetRightExposureTime);
QObject::connect(&tcpProtocol, &BinocularMarkTcpProtocol::setLeftGainRequested, &presenter, &BinocularMarkPresenter::handleSetLeftGain);
QObject::connect(&tcpProtocol, &BinocularMarkTcpProtocol::setRightGainRequested, &presenter, &BinocularMarkPresenter::handleSetRightGain);
QObject::connect(&tcpProtocol, &BinocularMarkTcpProtocol::getCameraInfoRequested, &presenter, &BinocularMarkPresenter::handleGetCameraInfo);
QObject::connect(&tcpProtocol, &BinocularMarkTcpProtocol::getCalibrationRequested, &presenter, &BinocularMarkPresenter::handleGetCalibration);
QObject::connect(&tcpProtocol, &BinocularMarkTcpProtocol::setCalibrationRequested, &presenter, &BinocularMarkPresenter::handleSetCalibration);
// 获取配置文件路径(通过 PathManager 自动判断加密或明文)
QString configFilePath = PathManager::GetInstance().GetConfigFilePath();
@ -73,8 +88,8 @@ int main(int argc, char *argv[])
return -1;
}
// 启动心跳
tcpProtocol.startHeartbeat(30);
// 不需要向外的心跳,客户端会发送心跳
// tcpProtocol.startHeartbeat(30);
// 初始化双目相机不启动采集线程等待cmd_start_work命令
int cameraResult = presenter.initCameras();

View File

@ -3,10 +3,22 @@
#include <QMessageBox>
#include <QDateTime>
#include <QBuffer>
#include <QPainter>
#include <QPushButton>
#include <QTextEdit>
#include <QVBoxLayout>
#include <QGroupBox>
#include <QFileDialog>
#include <QFile>
#include <QTextStream>
MainWindow::MainWindow(QWidget *parent)
: QMainWindow(parent)
, ui(new Ui::MainWindow)
, m_continuousImageTimer(new QTimer(this))
, m_isContinuousImageRunning(false)
, m_continuousDetectionTimer(new QTimer(this))
, m_isContinuousDetectionRunning(false)
{
ui->setupUi(this);
@ -19,11 +31,24 @@ MainWindow::MainWindow(QWidget *parent)
// 连接信号槽
connect(ui->btn_connect, &QPushButton::clicked, this, &MainWindow::onConnectClicked);
connect(ui->btn_startWork, &QPushButton::clicked, this, &MainWindow::onStartWorkClicked);
connect(ui->btn_stopWork, &QPushButton::clicked, this, &MainWindow::onStopWorkClicked);
connect(ui->btn_singleImage, &QPushButton::clicked, this, &MainWindow::onSingleImageClicked);
connect(ui->btn_singleDetection, &QPushButton::clicked, this, &MainWindow::onSingleDetectionClicked);
connect(ui->btn_setExposureTime, &QPushButton::clicked, this, &MainWindow::onSetExposureTimeClicked);
connect(ui->btn_setGain, &QPushButton::clicked, this, &MainWindow::onSetGainClicked);
connect(ui->btn_continuousImage, &QPushButton::clicked, this, &MainWindow::onContinuousImageClicked);
connect(ui->btn_startContinuousImageStream, &QPushButton::clicked, this, &MainWindow::onContinuousImageStreamClicked);
connect(ui->btn_loadCalibrationFile, &QPushButton::clicked, this, &MainWindow::onLoadCalibrationFileClicked);
connect(ui->btn_getCalibration, &QPushButton::clicked, this, &MainWindow::onGetCalibrationClicked);
connect(ui->btn_setCalibration, &QPushButton::clicked, this, &MainWindow::onSetCalibrationClicked);
// 连接定时器
connect(m_continuousImageTimer, &QTimer::timeout, this, &MainWindow::onContinuousImageTimeout);
connect(m_continuousDetectionTimer, &QTimer::timeout, this, &MainWindow::onContinuousDetectionTimeout);
// 设置三个分组框的拉伸因子
ui->horizontalLayout_bottom->setStretch(0, 2); // groupBox_result
ui->horizontalLayout_bottom->setStretch(1, 1); // groupBox_control
ui->horizontalLayout_bottom->setStretch(2, 1); // groupBox_params
// 设置事件回调
if (m_receiver) {
@ -65,7 +90,7 @@ MainWindow::MainWindow(QWidget *parent)
if (errorCode == 0 && !marks.empty()) {
for (const auto& mark : marks) {
result += QString(" Mark ID=%1: (%.2f, %.2f, %.2f)\n")
result += QString(" Mark ID=%1: (%2, %3, %4)\n")
.arg(mark.markID)
.arg(mark.x, 0, 'f', 2)
.arg(mark.y, 0, 'f', 2)
@ -77,6 +102,14 @@ MainWindow::MainWindow(QWidget *parent)
appendResult(result);
});
});
// 设置图像回调
m_receiver->SetImageCallback([this](const std::string& leftImageBase64, const std::string& rightImageBase64, int64_t timestamp) {
QMetaObject::invokeMethod(this, [this, leftImageBase64, rightImageBase64]() {
displayImage(ui->label_left, leftImageBase64);
displayImage(ui->label_right, rightImageBase64);
});
});
}
}
@ -109,6 +142,13 @@ void MainWindow::onConnectClicked()
ui->btn_connect->setText("断开");
updateConnectionState(true);
appendResult(QString("正在连接 %1:%2...").arg(ip).arg(port));
auto leftInfo = m_receiver->GetCameraInfo(SVrCameraEnum::LEFT);
auto rightInfo = m_receiver->GetCameraInfo(SVrCameraEnum::RIGHT);
m_leftCameraSN = QString::fromStdString(leftInfo.serialNumber);
m_rightCameraSN = QString::fromStdString(rightInfo.serialNumber);
if (m_leftCameraSN.isEmpty()) m_leftCameraSN = "0";
if (m_rightCameraSN.isEmpty()) m_rightCameraSN = "1";
} else {
QMessageBox::warning(this, "错误", QString("连接失败,错误码: %1").arg(ret));
}
@ -119,27 +159,22 @@ void MainWindow::onStartWorkClicked()
{
if (!m_receiver || !m_receiver->IsConnected()) return;
int ret = m_receiver->StartWork();
if (ret == 0) {
appendResult("开始持续检测");
ui->btn_startWork->setEnabled(false);
ui->btn_stopWork->setEnabled(true);
if (ui->btn_startWork->text() == "开始持续检测") {
int ret = m_receiver->StartWork();
if (ret == 0) {
appendResult("开始持续检测");
ui->btn_startWork->setText("停止持续检测");
} else {
QMessageBox::warning(this, "错误", QString("启动失败,错误码: %1").arg(ret));
}
} else {
QMessageBox::warning(this, "错误", QString("启动失败,错误码: %1").arg(ret));
}
}
void MainWindow::onStopWorkClicked()
{
if (!m_receiver || !m_receiver->IsConnected()) return;
int ret = m_receiver->StopWork();
if (ret == 0) {
appendResult("停止持续检测");
ui->btn_startWork->setEnabled(true);
ui->btn_stopWork->setEnabled(false);
} else {
QMessageBox::warning(this, "错误", QString("停止失败,错误码: %1").arg(ret));
int ret = m_receiver->StopWork();
if (ret == 0) {
appendResult("停止持续检测");
ui->btn_startWork->setText("开始持续检测");
} else {
QMessageBox::warning(this, "错误", QString("停止失败,错误码: %1").arg(ret));
}
}
}
@ -177,7 +212,7 @@ void MainWindow::onSingleDetectionClicked()
if (result.errorCode == 0 && !result.marks.empty()) {
for (const auto& mark : result.marks) {
msg += QString(" Mark ID=%1: (%.2f, %.2f, %.2f)\n")
msg += QString(" Mark ID=%1: (%2, %3, %4)\n")
.arg(mark.markID)
.arg(mark.x, 0, 'f', 2)
.arg(mark.y, 0, 'f', 2)
@ -203,7 +238,7 @@ void MainWindow::onSetExposureTimeClicked()
return;
}
int ret = m_receiver->SetExposureTime(exposureTime);
int ret = m_receiver->SetExposureTime(SVrCameraEnum::BOTH, exposureTime);
if (ret == 0) {
appendResult(QString("设置曝光时间成功:%.2f").arg(exposureTime));
} else {
@ -222,7 +257,7 @@ void MainWindow::onSetGainClicked()
return;
}
int ret = m_receiver->SetGain(gain);
int ret = m_receiver->SetGain(SVrCameraEnum::BOTH, gain);
if (ret == 0) {
appendResult(QString("设置增益成功:%.2f").arg(gain));
} else {
@ -237,11 +272,55 @@ void MainWindow::displayImage(QLabel* label, const std::string& base64Data)
QByteArray imageData = QByteArray::fromBase64(QByteArray::fromStdString(base64Data));
QImage image;
if (image.loadFromData(imageData)) {
label->setPixmap(QPixmap::fromImage(image).scaled(
label->size(), Qt::KeepAspectRatio, Qt::SmoothTransformation));
QPixmap pixmap = QPixmap::fromImage(image);
if (label == ui->label_left) {
m_leftPixmap = pixmap;
} else if (label == ui->label_right) {
m_rightPixmap = pixmap;
}
QPixmap scaledPixmap = pixmap.scaled(label->size(), Qt::KeepAspectRatio, Qt::SmoothTransformation);
QPainter painter(&scaledPixmap);
painter.setPen(Qt::yellow);
painter.setFont(QFont("Arial", 12, QFont::Bold));
QString sn = (label == ui->label_left) ? m_leftCameraSN : m_rightCameraSN;
if (!sn.isEmpty()) {
painter.drawText(10, 20, "SN: " + sn);
}
label->setPixmap(scaledPixmap);
}
}
void MainWindow::updateImageDisplay()
{
if (!m_leftPixmap.isNull()) {
QPixmap scaledPixmap = m_leftPixmap.scaled(ui->label_left->size(), Qt::KeepAspectRatio, Qt::SmoothTransformation);
QPainter painter(&scaledPixmap);
painter.setPen(Qt::yellow);
painter.setFont(QFont("Arial", 12, QFont::Bold));
if (!m_leftCameraSN.isEmpty()) {
painter.drawText(10, 20, "SN: " + m_leftCameraSN);
}
ui->label_left->setPixmap(scaledPixmap);
}
if (!m_rightPixmap.isNull()) {
QPixmap scaledPixmap = m_rightPixmap.scaled(ui->label_right->size(), Qt::KeepAspectRatio, Qt::SmoothTransformation);
QPainter painter(&scaledPixmap);
painter.setPen(Qt::yellow);
painter.setFont(QFont("Arial", 12, QFont::Bold));
if (!m_rightCameraSN.isEmpty()) {
painter.drawText(10, 20, "SN: " + m_rightCameraSN);
}
ui->label_right->setPixmap(scaledPixmap);
}
}
/**
 * @brief Qt resize handler: keep displayed images scaled to the new size.
 *
 * Delegates to the base-class handler first, then re-renders the cached
 * camera frames so they match the labels' new geometry.
 *
 * @param event The resize event forwarded to QMainWindow.
 */
void MainWindow::resizeEvent(QResizeEvent* event)
{
    QMainWindow::resizeEvent(event);
    updateImageDisplay();
}
void MainWindow::appendResult(const QString& text)
{
ui->textEdit_result->append(text);
@ -250,11 +329,158 @@ void MainWindow::appendResult(const QString& text)
void MainWindow::updateConnectionState(bool connected)
{
ui->btn_startWork->setEnabled(connected);
ui->btn_stopWork->setEnabled(false);
ui->btn_singleImage->setEnabled(connected);
ui->btn_singleDetection->setEnabled(connected);
ui->btn_continuousImage->setEnabled(connected);
ui->btn_startContinuousImageStream->setEnabled(connected);
ui->btn_setExposureTime->setEnabled(connected);
ui->btn_setGain->setEnabled(connected);
ui->btn_getCalibration->setEnabled(connected);
ui->btn_setCalibration->setEnabled(connected);
ui->lineEdit_ip->setEnabled(!connected);
ui->lineEdit_port->setEnabled(!connected);
if (connected) {
ui->btn_startWork->setText("开始持续检测");
ui->btn_startContinuousImageStream->setText("开始持续图像流");
}
}
/**
 * @brief Toggle client-side continuous image polling.
 *
 * Starts or stops a 100 ms timer that requests single images repeatedly,
 * updating the button caption and logging the state change. Does nothing
 * when not connected.
 */
void MainWindow::onContinuousImageClicked()
{
    if (!m_receiver || !m_receiver->IsConnected()) return;

    const bool startPolling = !m_isContinuousImageRunning;
    if (startPolling) {
        // Poll roughly 10 times per second.
        m_continuousImageTimer->start(100);
        ui->btn_continuousImage->setText("停止连续取图");
        appendResult("开始连续取图");
    } else {
        m_continuousImageTimer->stop();
        ui->btn_continuousImage->setText("连续取图");
        appendResult("停止连续取图");
    }
    m_isContinuousImageRunning = startPolling;
}
/**
 * @brief Timer tick for continuous image polling.
 *
 * Synchronously requests one stereo frame (5 s timeout) and displays it.
 * A non-positive timestamp is treated as "no frame" — presumably the
 * request failed or timed out (TODO confirm against receiver contract).
 */
void MainWindow::onContinuousImageTimeout()
{
    if (!m_receiver || !m_receiver->IsConnected()) {
        return;
    }

    const auto frame = m_receiver->RequestSingleImage(5000);
    if (frame.timestamp <= 0) {
        return;
    }
    displayImage(ui->label_left, frame.leftImageBase64);
    displayImage(ui->label_right, frame.rightImageBase64);
}
/**
 * @brief Toggle client-side continuous single-shot detection polling.
 *
 * Starts or stops a 100 ms timer that issues single-detection requests,
 * logging the state change. Does nothing when not connected.
 */
void MainWindow::onContinuousDetectionClicked()
{
    if (!m_receiver || !m_receiver->IsConnected()) return;

    m_isContinuousDetectionRunning = !m_isContinuousDetectionRunning;
    if (m_isContinuousDetectionRunning) {
        // Poll roughly 10 times per second.
        m_continuousDetectionTimer->start(100);
        appendResult("开始连续单次检测");
    } else {
        m_continuousDetectionTimer->stop();
        appendResult("停止连续单次检测");
    }
}
/**
 * @brief Timer tick for continuous single-shot detection polling.
 *
 * Synchronously requests one detection (5 s timeout) and displays the
 * returned images. A non-positive timestamp is treated as "no result" —
 * presumably failure or timeout (TODO confirm against receiver contract).
 */
void MainWindow::onContinuousDetectionTimeout()
{
    if (!m_receiver || !m_receiver->IsConnected()) {
        return;
    }

    const auto detection = m_receiver->RequestSingleDetection(5000);
    if (detection.timestamp <= 0) {
        return;
    }
    displayImage(ui->label_left, detection.leftImageBase64);
    displayImage(ui->label_right, detection.rightImageBase64);
}
/**
 * @brief Toggle the server-side continuous image stream.
 *
 * Uses the button caption as the current state: when it reads "开始持续图像流"
 * the stream is started, otherwise it is stopped. On failure a warning dialog
 * with the error code is shown and the state is left unchanged.
 */
void MainWindow::onContinuousImageStreamClicked()
{
    if (!m_receiver || !m_receiver->IsConnected()) return;

    const bool wantStart = (ui->btn_startContinuousImageStream->text() == "开始持续图像流");
    if (wantStart) {
        const int ret = m_receiver->StartCapture();
        if (ret != 0) {
            QMessageBox::warning(this, "错误", QString("启动失败,错误码: %1").arg(ret));
            return;
        }
        appendResult("开始持续图像流");
        ui->btn_startContinuousImageStream->setText("停止持续图像流");
    } else {
        const int ret = m_receiver->StopCapture();
        if (ret != 0) {
            QMessageBox::warning(this, "错误", QString("停止失败,错误码: %1").arg(ret));
            return;
        }
        appendResult("停止持续图像流");
        ui->btn_startContinuousImageStream->setText("开始持续图像流");
    }
}
void MainWindow::onLoadCalibrationFileClicked()
{
QString fileName = QFileDialog::getOpenFileName(this, "选择标定文件", "", "XML文件 (*.xml);;所有文件 (*)");
if (fileName.isEmpty()) {
return;
}
QFile file(fileName);
if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) {
QMessageBox::warning(this, "错误", "无法打开文件");
return;
}
QTextStream in(&file);
QString content = in.readAll();
file.close();
ui->textEdit_calibration->setPlainText(content);
appendResult(QString("已加载标定文件: %1").arg(fileName));
}
void MainWindow::onGetCalibrationClicked()
{
if (!m_receiver || !m_receiver->IsConnected()) return;
appendResult("请求获取标定矩阵...");
std::string calibXml = m_receiver->GetCalibrationMatrix(5000);
if (!calibXml.empty()) {
ui->textEdit_calibration->setPlainText(QString::fromStdString(calibXml));
appendResult("获取标定矩阵成功");
} else {
appendResult("获取标定矩阵失败或超时");
QMessageBox::warning(this, "错误", "获取标定矩阵失败");
}
}
void MainWindow::onSetCalibrationClicked()
{
if (!m_receiver || !m_receiver->IsConnected()) return;
QString calibXml = ui->textEdit_calibration->toPlainText();
if (calibXml.isEmpty()) {
QMessageBox::warning(this, "错误", "标定矩阵内容为空");
return;
}
appendResult("设置标定矩阵...");
int ret = m_receiver->SetCalibrationMatrix(calibXml.toStdString());
if (ret == 0) {
appendResult("设置标定矩阵成功");
QMessageBox::information(this, "成功", "标定矩阵已设置");
} else {
appendResult(QString("设置标定矩阵失败,错误码: %1").arg(ret));
QMessageBox::warning(this, "错误", QString("设置标定矩阵失败,错误码: %1").arg(ret));
}
}

View File

@ -3,6 +3,8 @@
#include <QMainWindow>
#include <QImage>
#include <QPixmap>
#include <QTimer>
#include <memory>
#include "IBinocularMarkReceiver.h"
@ -11,6 +13,8 @@ namespace Ui { class MainWindow; }
QT_END_NAMESPACE
class QLabel;
class QPushButton;
class QTextEdit;
class MainWindow : public QMainWindow
{
@ -20,22 +24,41 @@ public:
MainWindow(QWidget *parent = nullptr);
~MainWindow();
protected:
void resizeEvent(QResizeEvent* event) override;
private slots:
void onConnectClicked();
void onStartWorkClicked();
void onStopWorkClicked();
void onSingleImageClicked();
void onSingleDetectionClicked();
void onSetExposureTimeClicked();
void onSetGainClicked();
void onContinuousImageClicked();
void onContinuousImageTimeout();
void onContinuousDetectionClicked();
void onContinuousDetectionTimeout();
void onContinuousImageStreamClicked();
void onLoadCalibrationFileClicked();
void onGetCalibrationClicked();
void onSetCalibrationClicked();
private:
void displayImage(QLabel* label, const std::string& base64Data);
void updateImageDisplay();
void appendResult(const QString& text);
void updateConnectionState(bool connected);
Ui::MainWindow *ui;
std::unique_ptr<IBinocularMarkReceiver> m_receiver;
QPixmap m_leftPixmap;
QPixmap m_rightPixmap;
QString m_leftCameraSN;
QString m_rightCameraSN;
QTimer* m_continuousImageTimer;
bool m_isContinuousImageRunning;
QTimer* m_continuousDetectionTimer;
bool m_isContinuousDetectionRunning;
};
#endif // MAINWINDOW_H

View File

@ -6,7 +6,7 @@
<rect>
<x>0</x>
<y>0</y>
<width>1119</width>
<width>1200</width>
<height>800</height>
</rect>
</property>
@ -14,258 +14,307 @@
<string>BinocularMark 测试工具</string>
</property>
<widget class="QWidget" name="centralwidget">
<widget class="QGroupBox" name="groupBox_images">
<property name="geometry">
<rect>
<x>10</x>
<y>0</y>
<width>1091</width>
<height>371</height>
</rect>
</property>
<property name="title">
<string>图像显示</string>
</property>
<widget class="QWidget" name="layoutWidget">
<property name="geometry">
<rect>
<x>40</x>
<y>40</y>
<width>1021</width>
<height>302</height>
</rect>
</property>
<layout class="QHBoxLayout" name="horizontalLayout">
<item>
<widget class="QLabel" name="label_left">
<property name="minimumSize">
<size>
<width>400</width>
<height>300</height>
</size>
</property>
<property name="frameShape">
<enum>QFrame::Box</enum>
</property>
<property name="text">
<string>左图</string>
</property>
<property name="alignment">
<set>Qt::AlignCenter</set>
</property>
</widget>
</item>
<item>
<widget class="QLabel" name="label_right">
<property name="minimumSize">
<size>
<width>400</width>
<height>300</height>
</size>
</property>
<property name="frameShape">
<enum>QFrame::Box</enum>
</property>
<property name="text">
<string>右图</string>
</property>
<property name="alignment">
<set>Qt::AlignCenter</set>
</property>
</widget>
</item>
</layout>
</widget>
</widget>
<widget class="QGroupBox" name="groupBox_result">
<property name="geometry">
<rect>
<x>10</x>
<y>380</y>
<width>621</width>
<height>381</height>
</rect>
</property>
<property name="title">
<string>持续检测结果</string>
</property>
<layout class="QVBoxLayout" name="verticalLayout_4">
<item>
<widget class="QTextEdit" name="textEdit_result">
<property name="readOnly">
<bool>true</bool>
</property>
</widget>
</item>
</layout>
</widget>
<widget class="QGroupBox" name="groupBox_control">
<property name="geometry">
<rect>
<x>680</x>
<y>400</y>
<width>411</width>
<height>294</height>
</rect>
</property>
<property name="title">
<string>控制面板</string>
</property>
<layout class="QVBoxLayout" name="verticalLayout_3">
<item>
<layout class="QHBoxLayout" name="horizontalLayout_2">
<layout class="QVBoxLayout" name="verticalLayout_main">
<item>
<widget class="QGroupBox" name="groupBox_images">
<property name="title">
<string>图像显示</string>
</property>
<layout class="QVBoxLayout" name="verticalLayout_images">
<item>
<widget class="QLabel" name="label_ip">
<property name="text">
<string>服务器IP:</string>
<layout class="QHBoxLayout" name="horizontalLayout_images">
<item>
<widget class="QLabel" name="label_left">
<property name="minimumSize">
<size>
<width>400</width>
<height>300</height>
</size>
</property>
<property name="frameShape">
<enum>QFrame::Box</enum>
</property>
<property name="text">
<string>左图</string>
</property>
<property name="scaledContents">
<bool>false</bool>
</property>
<property name="alignment">
<set>Qt::AlignCenter</set>
</property>
</widget>
</item>
<item>
<widget class="QLabel" name="label_right">
<property name="minimumSize">
<size>
<width>400</width>
<height>300</height>
</size>
</property>
<property name="frameShape">
<enum>QFrame::Box</enum>
</property>
<property name="text">
<string>右图</string>
</property>
<property name="scaledContents">
<bool>false</bool>
</property>
<property name="alignment">
<set>Qt::AlignCenter</set>
</property>
</widget>
</item>
</layout>
</item>
</layout>
</widget>
</item>
<item>
<widget class="QWidget" name="widget_bottom" native="true">
<property name="maximumSize">
<size>
<width>16777215</width>
<height>350</height>
</size>
</property>
<layout class="QHBoxLayout" name="horizontalLayout_bottom">
<item>
<widget class="QGroupBox" name="groupBox_result">
<property name="title">
<string>检测结果</string>
</property>
<layout class="QVBoxLayout" name="verticalLayout_result">
<item>
<widget class="QTextEdit" name="textEdit_result">
<property name="readOnly">
<bool>true</bool>
</property>
</widget>
</item>
</layout>
</widget>
</item>
<item>
<widget class="QLineEdit" name="lineEdit_ip">
<property name="text">
<string>192.168.10.20</string>
<widget class="QGroupBox" name="groupBox_control">
<property name="title">
<string>控制面板</string>
</property>
<layout class="QVBoxLayout" name="verticalLayout_control">
<item>
<layout class="QHBoxLayout" name="horizontalLayout_ip">
<item>
<widget class="QLabel" name="label_ip">
<property name="text">
<string>服务器IP:</string>
</property>
</widget>
</item>
<item>
<widget class="QLineEdit" name="lineEdit_ip">
<property name="text">
<string>192.168.10.20</string>
</property>
</widget>
</item>
</layout>
</item>
<item>
<layout class="QHBoxLayout" name="horizontalLayout_port">
<item>
<widget class="QLabel" name="label_port">
<property name="text">
<string>端口:</string>
</property>
</widget>
</item>
<item>
<widget class="QLineEdit" name="lineEdit_port">
<property name="text">
<string>5901</string>
</property>
</widget>
</item>
</layout>
</item>
<item>
<widget class="QPushButton" name="btn_connect">
<property name="text">
<string>连接</string>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="btn_startWork">
<property name="enabled">
<bool>false</bool>
</property>
<property name="text">
<string>开始持续检测</string>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="btn_singleImage">
<property name="enabled">
<bool>false</bool>
</property>
<property name="text">
<string>单次取图</string>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="btn_singleDetection">
<property name="enabled">
<bool>false</bool>
</property>
<property name="text">
<string>单次检测</string>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="btn_continuousImage">
<property name="enabled">
<bool>false</bool>
</property>
<property name="text">
<string>连续取图</string>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="btn_startContinuousImageStream">
<property name="enabled">
<bool>false</bool>
</property>
<property name="text">
<string>开始持续图像流</string>
</property>
</widget>
</item>
<item>
<spacer name="verticalSpacer">
<property name="orientation">
<enum>Qt::Vertical</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>20</width>
<height>40</height>
</size>
</property>
</spacer>
</item>
</layout>
</widget>
</item>
<item>
<widget class="QGroupBox" name="groupBox_params">
<property name="title">
<string>参数配置</string>
</property>
<layout class="QVBoxLayout" name="verticalLayout_params">
<item>
<layout class="QHBoxLayout" name="horizontalLayout_exposure">
<item>
<widget class="QLabel" name="label_exposure">
<property name="text">
<string>曝光时间:</string>
</property>
</widget>
</item>
<item>
<widget class="QLineEdit" name="lineEdit_exposure">
<property name="text">
<string>5000</string>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="btn_setExposureTime">
<property name="enabled">
<bool>false</bool>
</property>
<property name="text">
<string>设置曝光</string>
</property>
</widget>
</item>
</layout>
</item>
<item>
<layout class="QHBoxLayout" name="horizontalLayout_gain">
<item>
<widget class="QLabel" name="label_gain">
<property name="text">
<string>增益:</string>
</property>
</widget>
</item>
<item>
<widget class="QLineEdit" name="lineEdit_gain">
<property name="text">
<string>10</string>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="btn_setGain">
<property name="enabled">
<bool>false</bool>
</property>
<property name="text">
<string>设置增益</string>
</property>
</widget>
</item>
</layout>
</item>
<item>
<widget class="QPushButton" name="btn_loadCalibrationFile">
<property name="text">
<string>选择标定文件</string>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="btn_getCalibration">
<property name="enabled">
<bool>false</bool>
</property>
<property name="text">
<string>获取标定矩阵</string>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="btn_setCalibration">
<property name="enabled">
<bool>false</bool>
</property>
<property name="text">
<string>设置标定矩阵</string>
</property>
</widget>
</item>
<item>
<widget class="QTextEdit" name="textEdit_calibration">
<property name="placeholderText">
<string>标定矩阵XML内容...</string>
</property>
</widget>
</item>
</layout>
</widget>
</item>
</layout>
</item>
<item>
<layout class="QHBoxLayout" name="horizontalLayout_3">
<item>
<widget class="QLabel" name="label_port">
<property name="text">
<string>端口:</string>
</property>
</widget>
</item>
<item>
<widget class="QLineEdit" name="lineEdit_port">
<property name="text">
<string>5901</string>
</property>
</widget>
</item>
</layout>
</item>
<item>
<widget class="QPushButton" name="btn_connect">
<property name="text">
<string>连接</string>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="btn_startWork">
<property name="enabled">
<bool>false</bool>
</property>
<property name="text">
<string>开始持续检测</string>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="btn_stopWork">
<property name="enabled">
<bool>false</bool>
</property>
<property name="text">
<string>停止持续检测</string>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="btn_singleImage">
<property name="enabled">
<bool>false</bool>
</property>
<property name="text">
<string>单次取图</string>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="btn_singleDetection">
<property name="enabled">
<bool>false</bool>
</property>
<property name="text">
<string>单次检测</string>
</property>
</widget>
</item>
<item>
<layout class="QHBoxLayout" name="horizontalLayout_4">
<item>
<widget class="QLabel" name="label_exposure">
<property name="text">
<string>曝光时间:</string>
</property>
</widget>
</item>
<item>
<widget class="QLineEdit" name="lineEdit_exposure">
<property name="text">
<string>5000</string>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="btn_setExposureTime">
<property name="enabled">
<bool>false</bool>
</property>
<property name="text">
<string>设置曝光</string>
</property>
</widget>
</item>
</layout>
</item>
<item>
<layout class="QHBoxLayout" name="horizontalLayout_5">
<item>
<widget class="QLabel" name="label_gain">
<property name="text">
<string>增益:</string>
</property>
</widget>
</item>
<item>
<widget class="QLineEdit" name="lineEdit_gain">
<property name="text">
<string>10</string>
</property>
</widget>
</item>
<item>
<widget class="QPushButton" name="btn_setGain">
<property name="enabled">
<bool>false</bool>
</property>
<property name="text">
<string>设置增益</string>
</property>
</widget>
</item>
</layout>
</item>
<item>
<spacer name="verticalSpacer">
<property name="orientation">
<enum>Qt::Vertical</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>20</width>
<height>40</height>
</size>
</property>
</spacer>
</item>
</layout>
</widget>
</widget>
</item>
</layout>
</widget>
<widget class="QStatusBar" name="statusbar"/>
</widget>

View File

@ -81,14 +81,14 @@ QString PathManager::GetConfigDirectory() const
{
QString baseDir;
#ifdef _WIN32
// Windows系统使用程序目录
// Windows系统使用程序目录下的config子目录
baseDir = GetProgramDirectory();
return baseDir + "/Config";
#else
// Linux系统使用用户配置目录
baseDir = GetUserConfigDirectory();
#endif
// 使用类成员变量中存储的应用名称,避免重复获取
return baseDir + "/../" + m_appName + "/Config";
#endif
}
bool PathManager::EnsureConfigDirectoryExists()

View File

@ -7,8 +7,9 @@ TEMPLATE = subdirs
SUBDIRS += \
VrEyeDevice \
EpicEyeDevice \
GalaxyDevice \
..\SDK\EpicEye\EpicEyeSDK.pro
GalaxyDevice
#..\SDK\EpicEye\EpicEyeSDK.pro
# 项目依赖关系(如果有)
# IKapDevice.depends = VrCommon

View File

@ -23,13 +23,15 @@ INCLUDEPATH += $$PWD/../../VrUtils/Inc
# 源文件
SOURCES += \
Src/GalaxyDevice.cpp
Src/GalaxyDevice.cpp \
Src/GalaxySDKManager.cpp
# 头文件
HEADERS += \
Inc/IGalaxyDevice.h \
Inc/GalaxyDevice_global.h \
_Inc/GalaxyDevice.h
_Inc/GalaxyDevice.h \
_Inc/GalaxySDKManager.h
# Windows平台库链接

View File

@ -1,4 +1,5 @@
#include "GalaxyDevice.h"
#include "GalaxySDKManager.h"
#include "VrError.h"
#include "VrLog.h"
#include <cstring>
@ -110,67 +111,28 @@ CGalaxyDevice::~CGalaxyDevice()
// 初始化SDK
int CGalaxyDevice::InitSDK()
{
#ifdef _WIN32
// Windows 平台:使用 C++ API
try {
if (m_bSDKInitialized) {
return SUCCESS;
}
IGXFactory::GetInstance().Init();
m_bSDKInitialized = true;
return SUCCESS;
}
catch (CGalaxyException& e) {
return static_cast<int>(e.GetErrorCode());
}
catch (...) {
return ERR_CODE(DEV_OPEN_ERR);
}
#else
// Linux/ARM 平台:使用 C API
if (m_bSDKInitialized) {
return SUCCESS;
}
GX_STATUS status = GXInitLib();
if (status == GX_STATUS_SUCCESS) {
int ret = GalaxySDKManager::GetInstance().InitSDK();
if (ret == SUCCESS) {
m_bSDKInitialized = true;
return SUCCESS;
}
return static_cast<int>(status);
#endif
return ret;
}
// 反初始化SDK
int CGalaxyDevice::UninitSDK()
{
#ifdef _WIN32
// Windows 平台:使用 C++ API
try {
if (m_bSDKInitialized) {
IGXFactory::GetInstance().Uninit();
if (m_bSDKInitialized) {
int ret = GalaxySDKManager::GetInstance().UninitSDK();
if (ret == SUCCESS) {
m_bSDKInitialized = false;
}
return SUCCESS;
}
catch (CGalaxyException& e) {
return static_cast<int>(e.GetErrorCode());
}
catch (...) {
return ERR_CODE(DEV_CLOSE_ERR);
}
#else
// Linux/ARM 平台:使用 C API
if (m_bSDKInitialized) {
GX_STATUS status = GXCloseLib();
m_bSDKInitialized = false;
if (status != GX_STATUS_SUCCESS) {
return static_cast<int>(status);
}
return ret;
}
return SUCCESS;
#endif
}
// 获取SDK版本
@ -326,6 +288,12 @@ int CGalaxyDevice::OpenDevice(const std::string& serialNumber)
return ERR_CODE(DEV_OPEN_ERR);
}
// 设置采集缓冲区数量为5支持多线程并行取图
CIntFeaturePointer ptrBufferNum = m_objFeatureControlPtr->GetIntFeature("StreamBufferHandlingMode");
if (!ptrBufferNum.IsNull()) {
ptrBufferNum->SetValue(5);
}
m_bDeviceOpen = true;
return SUCCESS;
}
@ -363,15 +331,34 @@ int CGalaxyDevice::OpenDevice(const std::string& serialNumber)
return static_cast<int>(status);
}
// 设置网络相机的流通道包长(提高采集性能)
bool bImplementPacketSize = false;
status = GXIsImplemented(m_hDevice, GX_INT_GEV_PACKETSIZE, &bImplementPacketSize);
if (status == GX_STATUS_SUCCESS && bImplementPacketSize) {
// int64_t nPacketSize = 0;
// status = GXGetOptimalPacketSize(m_hDevice, &nPacketSize);
// if (status == GX_STATUS_SUCCESS) {
// GXSetInt(m_hDevice, GX_INT_GEV_PACKETSIZE, nPacketSize);
// }
// 3. 设置触发模式为连续触发(无需外部触发)
status = GXSetEnum(m_hDevice, GX_ENUM_TRIGGER_MODE, GX_TRIGGER_MODE_OFF);
LOG_DEBUG("Set trigger mode to off %s\n", status == GX_STATUS_SUCCESS ? "success" : "failed");
//使能采集帧率调节模式
status = GXSetEnum(m_hDevice, GX_ENUM_ACQUISITION_FRAME_RATE_MODE, GX_ACQUISITION_FRAME_RATE_MODE_ON);
//设置采集帧率,假设设置为 10.0,用户按照实际需求设置此值
status = GXSetFloat(m_hDevice, GX_FLOAT_ACQUISITION_FRAME_RATE, 5);
LOG_DEBUG("Set acquisition frame rate to 5 %s\n", status == GX_STATUS_SUCCESS ? "success" : "failed");
// 设置数据传输块大小(提高传输性能)
bool bStreamTransferSize = false;
status = GXIsImplemented(m_hDevice, GX_DS_INT_STREAM_TRANSFER_SIZE, &bStreamTransferSize);
if (status == GX_STATUS_SUCCESS && bStreamTransferSize) {
status = GXSetInt(m_hDevice, GX_DS_INT_STREAM_TRANSFER_SIZE, 64 * 1024);
if (status == GX_STATUS_SUCCESS) {
LOG_DEBUG("Set stream transfer size to 64KB\n");
}
}
// 设置数据传输块数量(提高传输性能)
bool bStreamTransferNumberUrb = false;
status = GXIsImplemented(m_hDevice, GX_DS_INT_STREAM_TRANSFER_NUMBER_URB, &bStreamTransferNumberUrb);
if (status == GX_STATUS_SUCCESS && bStreamTransferNumberUrb) {
status = GXSetInt(m_hDevice, GX_DS_INT_STREAM_TRANSFER_NUMBER_URB, 64);
if (status == GX_STATUS_SUCCESS) {
LOG_DEBUG("Set stream transfer number URB to 64\n");
}
}
m_bDeviceOpen = true;
@ -613,13 +600,15 @@ int CGalaxyDevice::StartAcquisition()
return SUCCESS;
}
// 发送开始采集命令
// 1. 先调用 GXStreamOn 开始采集
GX_STATUS status = GXStreamOn(m_hDevice);
if (status == GX_STATUS_SUCCESS) {
m_bAcquisitioning = true;
return SUCCESS;
if (status != GX_STATUS_SUCCESS) {
LOG_DEBUG("GXStreamOn failed: %d\n", status);
return static_cast<int>(status);
}
return static_cast<int>(status);
LOG_DEBUG("GXStreamOn success\n");
m_bAcquisitioning = true;
return SUCCESS;
#endif
}
@ -668,15 +657,19 @@ int CGalaxyDevice::StopAcquisition()
return SUCCESS;
}
// 1. 发送停止采集命令
if (m_hDevice) {
// 2. 调用 GXStreamOff 停止采集
GX_STATUS status = GXStreamOff(m_hDevice);
m_bAcquisitioning = false;
if (status != GX_STATUS_SUCCESS) {
LOG_DEBUG("GXStreamOff failed: %d\n", status);
m_bAcquisitioning = false;
return static_cast<int>(status);
}
}
m_bAcquisitioning = false;
LOG_DEBUG("Acquisition stopped successfully\n");
return SUCCESS;
#endif
}
@ -819,8 +812,22 @@ int CGalaxyDevice::RegisterImageCallback(GalaxyImageCallback callback)
return ERR_CODE(DEV_CTRL_ERR);
}
#else
// Linux/ARM 平台:使用 C API - 简化实现,不支持回调
// Linux/ARM 平台:使用 C API注册回调
if (!m_hDevice) {
return ERR_CODE(DEV_CTRL_ERR);
}
m_imageCallback = callback;
// 注册采集回调函数
GX_STATUS status = GXRegisterCaptureCallback(m_hDevice, this, OnFrameCallbackFun);
if (status != GX_STATUS_SUCCESS) {
LOG_DEBUG("GXRegisterCaptureCallback failed: %d\n", status);
m_imageCallback = nullptr;
return static_cast<int>(status);
}
LOG_DEBUG("GXRegisterCaptureCallback success\n");
return SUCCESS;
#endif
}
@ -851,7 +858,14 @@ int CGalaxyDevice::UnregisterImageCallback()
return ERR_CODE(DEV_CTRL_ERR);
}
#else
// Linux/ARM 平台:使用 C API - 简化实现
// Linux/ARM 平台:使用 C API取消注册回调
if (m_hDevice) {
GX_STATUS status = GXUnregisterCaptureCallback(m_hDevice);
if (status != GX_STATUS_SUCCESS) {
LOG_DEBUG("GXUnregisterCaptureCallback failed: %d\n", status);
}
}
m_imageCallback = nullptr;
return SUCCESS;
#endif
@ -895,6 +909,39 @@ void CGalaxyDevice::ProcessCapturedImage(CImageDataPointer& objImageDataPointer)
// 忽略回调中的异常
}
}
#else
// Linux/ARM platform: frame-capture callback function (invoked from the SDK acquisition thread)
void GX_STDC CGalaxyDevice::OnFrameCallbackFun(GX_FRAME_CALLBACK_PARAM* pFrame)
{
    // Static capture callback for the Galaxy C API (Linux/ARM). The SDK invokes
    // this from its internal acquisition thread for every grabbed frame.
    if (pFrame == nullptr || pFrame->pUserParam == nullptr) {
        return;
    }
    // pUserParam carries the device object pointer registered as `this`
    // in GXRegisterCaptureCallback (see RegisterImageCallback).
    CGalaxyDevice* pDevice = static_cast<CGalaxyDevice*>(pFrame->pUserParam);
    // Forward only successfully acquired frames to the user.
    if (pFrame->status == GX_FRAME_STATUS_SUCCESS) {
        // If a user callback is registered, wrap and forward the frame.
        if (pDevice->m_imageCallback) {
            // Translate the raw SDK frame into the device-neutral image struct.
            GalaxyImageData imageData;
            imageData.width = pFrame->nWidth;
            imageData.height = pFrame->nHeight;
            imageData.pixelFormat = pFrame->nPixelFormat;
            imageData.frameID = pFrame->nFrameID;
            imageData.timestamp = pFrame->nTimestamp;
            imageData.dataSize = pFrame->nImgSize;
            // NOTE(review): pData aliases the SDK-owned frame buffer; it is
            // presumably only valid for the duration of this callback, so the
            // user callback must copy the pixels — confirm against SDK docs.
            imageData.pData = reinterpret_cast<unsigned char*>(const_cast<void*>(pFrame->pImgBuf));
            try {
                pDevice->m_imageCallback(imageData);
            }
            catch (...) {
                // Never let a user-callback exception propagate into the SDK thread.
            }
        }
    }
}
#endif
// 获取图像宽度

View File

@ -0,0 +1,120 @@
#include "GalaxySDKManager.h"
#include "VrError.h"
GalaxySDKManager::GalaxySDKManager()
: m_refCount(0)
, m_bSDKInitialized(false)
{
}
GalaxySDKManager::~GalaxySDKManager()
{
    // Safety net: release the SDK on singleton destruction even if some
    // holder never called UninitSDK().
    if (m_bSDKInitialized) {
#ifdef _WIN32
        try {
            IGXFactory::GetInstance().Uninit();
        }
        catch (...) {
            // Never let an exception escape a destructor.
        }
#else
        GXCloseLib();
#endif
        m_bSDKInitialized = false;
    }
}
GalaxySDKManager& GalaxySDKManager::GetInstance()
{
    // Meyers singleton: function-local static gives thread-safe one-time
    // construction (guaranteed since C++11).
    static GalaxySDKManager instance;
    return instance;
}
int GalaxySDKManager::InitSDK()
{
    // Reference-counted SDK initialization: every caller bumps the count,
    // but the underlying SDK library is initialized only once.
    std::lock_guard<std::mutex> lock(m_mutex);
    // Increment the reference count first; it is rolled back on failure below.
    m_refCount++;
    // Already initialized by an earlier caller — nothing more to do.
    if (m_bSDKInitialized) {
        return SUCCESS;
    }
    // Perform the one-time SDK initialization.
#ifdef _WIN32
    // Windows platform: C++ API
    try {
        IGXFactory::GetInstance().Init();
        m_bSDKInitialized = true;
        return SUCCESS;
    }
    catch (CGalaxyException& e) {
        m_refCount--; // init failed — roll back the reference count
        return static_cast<int>(e.GetErrorCode());
    }
    catch (...) {
        m_refCount--; // init failed — roll back the reference count
        return ERR_CODE(DEV_OPEN_ERR);
    }
#else
    // Linux/ARM platform: C API
    GX_STATUS status = GXInitLib();
    if (status == GX_STATUS_SUCCESS) {
        m_bSDKInitialized = true;
        return SUCCESS;
    }
    m_refCount--; // init failed — roll back the reference count
    return static_cast<int>(status);
#endif
}
int GalaxySDKManager::UninitSDK()
{
    // Reference-counted SDK teardown: the SDK is only really uninitialized
    // once the last holder releases it.
    std::lock_guard<std::mutex> lock(m_mutex);
    // Decrement the reference count (clamped at zero for unbalanced calls).
    if (m_refCount > 0) {
        m_refCount--;
    }
    // Only tear down the SDK when no holders remain.
    if (m_refCount == 0 && m_bSDKInitialized) {
#ifdef _WIN32
        // Windows platform: C++ API
        try {
            IGXFactory::GetInstance().Uninit();
            m_bSDKInitialized = false;
            return SUCCESS;
        }
        catch (CGalaxyException& e) {
            return static_cast<int>(e.GetErrorCode());
        }
        catch (...) {
            return ERR_CODE(DEV_CLOSE_ERR);
        }
#else
        // Linux/ARM platform: C API
        GX_STATUS status = GXCloseLib();
        if (status == GX_STATUS_SUCCESS) {
            m_bSDKInitialized = false;
            return SUCCESS;
        }
        return static_cast<int>(status);
#endif
    }
    return SUCCESS;
}
bool GalaxySDKManager::IsSDKInitialized() const
{
    // NOTE(review): m_bSDKInitialized is a plain bool written under m_mutex
    // (InitSDK/UninitSDK) but read here without synchronization — technically
    // a data race; consider std::atomic<bool> (requires a header change).
    return m_bSDKInitialized;
}
int GalaxySDKManager::GetRefCount() const
{
    // Atomic snapshot of the number of active InitSDK() holders.
    const int currentCount = m_refCount.load();
    return currentCount;
}

View File

@ -112,6 +112,9 @@ private:
void* m_pCaptureEventHandler; // 采集事件处理器(未使用)
GX_DEVICE_BASE_INFO* m_pDeviceInfo; // 设备信息缓存
uint32_t m_nDeviceNum; // 设备数量
// 静态回调函数SDK 回调)
static void GX_STDC OnFrameCallbackFun(GX_FRAME_CALLBACK_PARAM* pFrame);
#endif
GalaxyDeviceInfo m_deviceInfo; // 当前设备信息

View File

@ -0,0 +1,70 @@
#ifndef GALAXYSDKMANAGER_H
#define GALAXYSDKMANAGER_H
#include <mutex>
#include <atomic>
// Galaxy SDK 头文件
#ifdef _WIN32
// Windows 平台使用 C++ API
#define _WINSOCKAPI_ // 防止winsock.h被包含避免与winsock2.h冲突
#include "GXIAPIBase.h"
#include "IGXFactory.h"
using namespace GxIAPICPP;
#else
// Linux/ARM 平台使用 C API
#include "GxIAPI.h"
#endif
/**
 * @brief Process-wide manager for Galaxy SDK global init/uninit.
 *
 * Reference-counts InitSDK()/UninitSDK() calls so that the SDK library is
 * initialized once and only torn down after the last device releases it.
 */
class GalaxySDKManager
{
public:
/**
 * @brief Access the process-wide singleton instance.
 * @return Reference to the single GalaxySDKManager.
 */
static GalaxySDKManager& GetInstance();
/**
 * @brief Initialize the SDK (reference count +1); the SDK library is
 *        initialized only on the first successful call.
 * @return 0 on success, non-zero error code on failure.
 */
int InitSDK();
/**
 * @brief Release the SDK (reference count -1); the SDK library is
 *        uninitialized only when the count reaches 0.
 * @return 0 on success, non-zero error code on failure.
 */
int UninitSDK();
/**
 * @brief Whether the SDK has been initialized.
 * @return true if initialized, false otherwise.
 */
bool IsSDKInitialized() const;
/**
 * @brief Current reference count (number of active InitSDK() holders).
 * @return The reference count.
 */
int GetRefCount() const;
private:
GalaxySDKManager();
~GalaxySDKManager();
// Non-copyable, non-assignable (singleton).
GalaxySDKManager(const GalaxySDKManager&) = delete;
GalaxySDKManager& operator=(const GalaxySDKManager&) = delete;
std::mutex m_mutex; // guards init/uninit state transitions
std::atomic<int> m_refCount; // number of active InitSDK() holders
bool m_bSDKInitialized; // true after successful SDK initialization
};
#endif // GALAXYSDKMANAGER_H

View File

@ -19,6 +19,8 @@ SUBDIRS += ../AppUtils/AppUtils.pro
# 项目(依赖 AppUtils
SUBDIRS += ../App/App.pro
# SUBDIRS += ../SDK/Galaxy/aarch64/sample/GxViewer/GxViewer.pro
# Test 测试
# SUBDIRS += ../Test/Test.pro

View File

@ -23,14 +23,18 @@ make -j6
if [ $? -eq 0 ]; then
END_TIME=$(date +%s)
ELAPSED=$((END_TIME - START_TIME))
MINUTES=$((ELAPSED / 60))
SECONDS=$((ELAPSED % 60))
echo "=========================================="
echo "编译成功!耗时: ${ELAPSED}"
echo "编译成功!耗时: ${MINUTES}${SECONDS}"
echo "=========================================="
else
END_TIME=$(date +%s)
ELAPSED=$((END_TIME - START_TIME))
MINUTES=$((ELAPSED / 60))
SECONDS=$((ELAPSED % 60))
echo "=========================================="
echo "编译失败!耗时: ${ELAPSED}"
echo "编译失败!耗时: ${MINUTES}${SECONDS}"
echo "=========================================="
exit 1
fi

View File

@ -45,6 +45,16 @@ enum class ReceiverEventType
HEARTBEAT_TIMEOUT // 心跳超时
};
/**
 * @brief Camera selection for binocular camera operations.
 */
enum class SVrCameraEnum
{
LEFT, // left camera
RIGHT, // right camera
BOTH // both cameras
};
/**
* @brief Mark数据接收器接口
* BinocularMarkApp接收3D Mark检测结果
@ -74,15 +84,33 @@ public:
int64_t timestamp; // 时间戳
};
/**
 * @brief Descriptive and parameter information for one camera.
 */
struct CameraInfo {
std::string serialNumber; // camera serial number
std::string modelName; // camera model
std::string displayName; // display name
double exposureTime; // current exposure time
double gain; // current gain
};
/**
* @brief Mark结果回调函数类型
* @param marks 3D标记列表
* @param timestamp
* @param errorCode 0
*/
using MarkResultCallback = std::function<void(const std::vector<VrMark3D>& marks,
int64_t timestamp,
int errorCode)>;
using MarkResultCallback = std::function<void(const std::vector<VrMark3D>& marks, int64_t timestamp, int errorCode)>;
/**
* @brief
* @param leftImageBase64 Base64编码
* @param rightImageBase64 Base64编码
* @param timestamp
*/
using ImageCallback = std::function<void(const std::string& leftImageBase64, const std::string& rightImageBase64, int64_t timestamp)>;
/**
* @brief
@ -112,10 +140,42 @@ public:
virtual bool IsConnected() const = 0;
/**
* @brief 使RequestSingleDetection代替
* @brief
* @param target /
* @param timeoutMs 3000ms
* @return
*/
virtual CameraInfo GetCameraInfo(SVrCameraEnum target, int timeoutMs = 3000) = 0;
/**
* @brief
* @param timeoutMs 3000ms
* @return XML字符串
*/
virtual std::string GetCalibrationMatrix(int timeoutMs = 3000) = 0;
/**
* @brief
* @param calibrationXml XML内容
* @return 0--
*/
virtual int TriggerDetection() = 0;
virtual int SetCalibrationMatrix(const std::string& calibrationXml) = 0;
/**
* @brief
* @param target //
* @param exposureTime
* @return 0--
*/
virtual int SetExposureTime(SVrCameraEnum target, double exposureTime) = 0;
/**
* @brief
* @param target //
* @param gain
* @return 0--
*/
virtual int SetGain(SVrCameraEnum target, double gain) = 0;
/**
* @brief Mark结果
@ -144,25 +204,16 @@ public:
virtual int StopWork() = 0;
/**
* @brief
* @param calibrationXml XML内容
* @brief
* @return 0--
*/
virtual int SetCalibrationMatrix(const std::string& calibrationXml) = 0;
virtual int StartCapture() = 0;
/**
* @brief
* @param exposureTime
* @brief
* @return 0--
*/
virtual int SetExposureTime(double exposureTime) = 0;
/**
* @brief
* @param gain
* @return 0--
*/
virtual int SetGain(double gain) = 0;
virtual int StopCapture() = 0;
/**
* @brief Mark结果回调使
@ -170,6 +221,12 @@ public:
*/
virtual void SetMarkResultCallback(MarkResultCallback callback) = 0;
/**
* @brief 使
* @param callback
*/
virtual void SetImageCallback(ImageCallback callback) = 0;
/**
* @brief
* @param callback

View File

@ -25,7 +25,11 @@ BinocularMarkReceiver::BinocularMarkReceiver()
, m_bConnected(false)
, m_bSingleDetectionReady(false)
, m_bImageDataReady(false)
, m_bLeftCameraInfoReady(false)
, m_bRightCameraInfoReady(false)
, m_bCalibrationMatrixReady(false)
, m_bHeartbeatRunning(false)
, m_bHeartbeatEnabled(false)
, m_nHeartbeatInterval(30)
{
m_pTcpClient = IVrTCPClient::CreateInstance();
@ -80,6 +84,7 @@ int BinocularMarkReceiver::Disconnect()
// 停止心跳线程
m_bHeartbeatRunning = false;
m_cvHeartbeat.notify_one(); // 唤醒心跳线程
if (m_heartbeatThread.joinable()) {
m_heartbeatThread.join();
}
@ -92,6 +97,13 @@ int BinocularMarkReceiver::Disconnect()
m_bConnected = false;
m_dataBuffer.clear();
// 通知所有等待的条件变量,避免其他线程卡住
m_cvSingleDetection.notify_all();
m_cvImageData.notify_all();
m_cvLeftCameraInfo.notify_all();
m_cvRightCameraInfo.notify_all();
m_cvCalibrationMatrix.notify_all();
return 0;
}
@ -100,24 +112,18 @@ bool BinocularMarkReceiver::IsConnected() const
return m_bConnected;
}
int BinocularMarkReceiver::TriggerDetection()
{
Json::Value root;
root["timestamp"] = static_cast<Json::Int64>(std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::system_clock::now().time_since_epoch()).count());
Json::FastWriter writer;
std::string jsonData = writer.write(root);
return sendJsonMessage("cmd_trigger", jsonData);
}
void BinocularMarkReceiver::SetMarkResultCallback(MarkResultCallback callback)
{
std::lock_guard<std::mutex> lock(m_mutex);
m_markResultCallback = callback;
}
void BinocularMarkReceiver::SetImageCallback(ImageCallback callback)
{
std::lock_guard<std::mutex> lock(m_mutex);
m_imageCallback = callback;
}
void BinocularMarkReceiver::SetEventCallback(EventCallback callback)
{
std::lock_guard<std::mutex> lock(m_mutex);
@ -132,11 +138,18 @@ void BinocularMarkReceiver::linkEventCallback(IVrTCPClient* pClient, bool connec
pThis->m_bConnected = connected;
// 获取回调函数的拷贝,在锁外调用
EventCallback callback;
{
std::lock_guard<std::mutex> lock(pThis->m_mutex);
callback = pThis->m_eventCallback;
}
if (connected) {
LOG_DEBUG("Connected to server\n");
// 触发连接事件
if (pThis->m_eventCallback) {
pThis->m_eventCallback(ReceiverEventType::CONNECTED, "");
// 在锁外触发连接事件
if (callback) {
callback(ReceiverEventType::CONNECTED, "");
}
// 启动心跳线程
@ -144,9 +157,9 @@ void BinocularMarkReceiver::linkEventCallback(IVrTCPClient* pClient, bool connec
pThis->m_heartbeatThread = std::thread(&BinocularMarkReceiver::heartbeatThreadFunc, pThis);
} else {
LOG_DEBUG("Disconnected from server\n");
// 触发断开连接事件
if (pThis->m_eventCallback) {
pThis->m_eventCallback(ReceiverEventType::DISCONNECTED, "");
// 在锁外触发断开连接事件
if (callback) {
callback(ReceiverEventType::DISCONNECTED, "");
}
}
}
@ -171,10 +184,16 @@ void BinocularMarkReceiver::tcpRecvCallback(IVrTCPClient* pClient, const char* p
void BinocularMarkReceiver::heartbeatThreadFunc()
{
while (m_bHeartbeatRunning) {
std::this_thread::sleep_for(std::chrono::seconds(m_nHeartbeatInterval));
// 使用 condition_variable 实现可中断的等待
std::unique_lock<std::mutex> lock(m_mutexHeartbeat);
m_cvHeartbeat.wait_for(lock, std::chrono::seconds(m_nHeartbeatInterval),
[this] { return !m_bHeartbeatRunning; });
if (!m_bHeartbeatRunning) break;
// 检查是否启用心跳
if (!m_bHeartbeatEnabled) continue;
// 发送心跳消息
Json::Value root;
root["timestamp"] = static_cast<Json::Int64>(std::chrono::duration_cast<std::chrono::milliseconds>(
@ -189,61 +208,71 @@ void BinocularMarkReceiver::heartbeatThreadFunc()
void BinocularMarkReceiver::parseFrames()
{
std::lock_guard<std::mutex> lock(m_mutex);
std::vector<std::string> messagesToProcess;
while (true) {
// 查找帧头
auto it = std::search(m_dataBuffer.begin(), m_dataBuffer.end(), FRAME_HEADER, FRAME_HEADER + FRAME_HEADER_SIZE);
if (it == m_dataBuffer.end()) {
m_dataBuffer.clear();
break;
{
std::lock_guard<std::mutex> lock(m_mutex);
while (true) {
// 查找帧头
auto it = std::search(m_dataBuffer.begin(), m_dataBuffer.end(), FRAME_HEADER, FRAME_HEADER + FRAME_HEADER_SIZE);
if (it == m_dataBuffer.end()) {
m_dataBuffer.clear();
break;
}
// 丢弃帧头之前的数据
if (it != m_dataBuffer.begin()) {
m_dataBuffer.erase(m_dataBuffer.begin(), it);
}
// 检查是否有足够的数据来读取长度字段
if (m_dataBuffer.size() < FRAME_HEADER_SIZE + FRAME_LENGTH_SIZE) {
break;
}
// 读取数据长度8字节ASCII字符串格式
std::string lengthStr(m_dataBuffer.begin() + FRAME_HEADER_SIZE, m_dataBuffer.begin() + FRAME_HEADER_SIZE + FRAME_LENGTH_SIZE);
int64_t dataLength = std::stoll(lengthStr);
// 检查数据长度是否合理最大50MB支持双目高分辨率图像
if (dataLength < 0 || dataLength > 50 * 1024 * 1024) {
LOG_WARN("Invalid data length: %lld, discarding frame header\n", dataLength);
m_dataBuffer.erase(m_dataBuffer.begin(), m_dataBuffer.begin() + FRAME_HEADER_SIZE);
continue;
}
// 计算完整帧的总长度
size_t totalFrameLength = FRAME_HEADER_SIZE + FRAME_LENGTH_SIZE + dataLength + FRAME_TAIL_SIZE;
// 检查是否有完整的帧
if (m_dataBuffer.size() < totalFrameLength) {
break;
}
// 提取JSON数据
std::string jsonData(m_dataBuffer.begin() + FRAME_HEADER_SIZE + FRAME_LENGTH_SIZE,
m_dataBuffer.begin() + FRAME_HEADER_SIZE + FRAME_LENGTH_SIZE + dataLength);
// 验证帧尾
std::string tail(m_dataBuffer.begin() + FRAME_HEADER_SIZE + FRAME_LENGTH_SIZE + dataLength,
m_dataBuffer.begin() + FRAME_HEADER_SIZE + FRAME_LENGTH_SIZE + dataLength + FRAME_TAIL_SIZE);
if (tail != FRAME_TAIL) {
m_dataBuffer.erase(m_dataBuffer.begin(), m_dataBuffer.begin() + FRAME_HEADER_SIZE);
continue;
}
// 收集消息,稍后在锁外处理
messagesToProcess.push_back(jsonData);
// 移除已处理的帧
m_dataBuffer.erase(m_dataBuffer.begin(), m_dataBuffer.begin() + totalFrameLength);
}
} // 释放锁
// 丢弃帧头之前的数据
if (it != m_dataBuffer.begin()) {
m_dataBuffer.erase(m_dataBuffer.begin(), it);
}
// 检查是否有足够的数据来读取长度字段
if (m_dataBuffer.size() < FRAME_HEADER_SIZE + FRAME_LENGTH_SIZE) {
break;
}
// 读取数据长度8字节ASCII字符串格式
std::string lengthStr(m_dataBuffer.begin() + FRAME_HEADER_SIZE, m_dataBuffer.begin() + FRAME_HEADER_SIZE + FRAME_LENGTH_SIZE);
int64_t dataLength = std::stoll(lengthStr);
// 检查数据长度是否合理最大10MB
if (dataLength < 0 || dataLength > 10 * 1024 * 1024) {
m_dataBuffer.erase(m_dataBuffer.begin(), m_dataBuffer.begin() + FRAME_HEADER_SIZE);
continue;
}
// 计算完整帧的总长度
size_t totalFrameLength = FRAME_HEADER_SIZE + FRAME_LENGTH_SIZE + dataLength + FRAME_TAIL_SIZE;
// 检查是否有完整的帧
if (m_dataBuffer.size() < totalFrameLength) {
break;
}
// 提取JSON数据
std::string jsonData(m_dataBuffer.begin() + FRAME_HEADER_SIZE + FRAME_LENGTH_SIZE,
m_dataBuffer.begin() + FRAME_HEADER_SIZE + FRAME_LENGTH_SIZE + dataLength);
// 验证帧尾
std::string tail(m_dataBuffer.begin() + FRAME_HEADER_SIZE + FRAME_LENGTH_SIZE + dataLength,
m_dataBuffer.begin() + FRAME_HEADER_SIZE + FRAME_LENGTH_SIZE + dataLength + FRAME_TAIL_SIZE);
if (tail != FRAME_TAIL) {
m_dataBuffer.erase(m_dataBuffer.begin(), m_dataBuffer.begin() + FRAME_HEADER_SIZE);
continue;
}
// 处理JSON消息
// 在锁外处理所有消息
for (const auto& jsonData : messagesToProcess) {
handleJsonMessage(jsonData);
// 移除已处理的帧
m_dataBuffer.erase(m_dataBuffer.begin(), m_dataBuffer.begin() + totalFrameLength);
}
}
@ -272,10 +301,16 @@ void BinocularMarkReceiver::handleJsonMessage(const std::string& jsonData)
handleSingleDetectionResult(jsonData);
} else if (messageType == "image_data") {
handleImageData(jsonData);
} else if (messageType == "heartbeat") {
handleHeartbeat(jsonData);
} else if (messageType == "heartbeat_ack") {
handleHeartbeatAck(jsonData);
} else if (messageType == "cmd_response") {
handleCommandResponse(jsonData);
} else if (messageType == "camera_info_response") {
handleCameraInfoResponse(jsonData);
} else if (messageType == "calibration_matrix_response") {
handleCalibrationMatrixResponse(jsonData);
} else {
LOG_DEBUG("Unknown message type: %s\n", messageType.c_str());
}
@ -304,9 +339,46 @@ void BinocularMarkReceiver::handleMarkResult(const std::string& jsonStr)
}
}
if (m_markResultCallback) {
m_markResultCallback(marks, timestamp, errorCode);
// 提取图像数据
std::string leftImageBase64 = root.get("left_image", "").asString();
std::string rightImageBase64 = root.get("right_image", "").asString();
// 如果有图像数据且注册了图像回调,触发图像回调
if (!leftImageBase64.empty() && !rightImageBase64.empty()) {
ImageCallback imageCallback;
{
std::lock_guard<std::mutex> lock(m_mutex);
imageCallback = m_imageCallback;
}
if (imageCallback) {
imageCallback(leftImageBase64, rightImageBase64, timestamp);
}
}
// 如果有marks数据且注册了mark回调触发mark回调
if (!marks.empty()) {
MarkResultCallback callback;
{
std::lock_guard<std::mutex> lock(m_mutex);
callback = m_markResultCallback;
}
if (callback) {
callback(marks, timestamp, errorCode);
}
}
}
void BinocularMarkReceiver::handleHeartbeat(const std::string& jsonStr)
{
// 收到服务器心跳,回复心跳应答
Json::Value root;
root["timestamp"] = static_cast<Json::Int64>(std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::system_clock::now().time_since_epoch()).count());
Json::FastWriter writer;
std::string jsonData = writer.write(root);
sendJsonMessage("heartbeat_ack", jsonData);
}
void BinocularMarkReceiver::handleHeartbeatAck(const std::string& jsonStr)
@ -453,6 +525,26 @@ int BinocularMarkReceiver::StopWork()
return sendJsonMessage("cmd_stop_work", jsonData);
}
int BinocularMarkReceiver::StartCapture()
{
Json::Value root;
root["timestamp"] = static_cast<Json::Int64>(std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::system_clock::now().time_since_epoch()).count());
Json::FastWriter writer;
std::string jsonData = writer.write(root);
return sendJsonMessage("cmd_start_continuous_image", jsonData);
}
int BinocularMarkReceiver::StopCapture()
{
Json::Value root;
root["timestamp"] = static_cast<Json::Int64>(std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::system_clock::now().time_since_epoch()).count());
Json::FastWriter writer;
std::string jsonData = writer.write(root);
return sendJsonMessage("cmd_stop_continuous_image", jsonData);
}
int BinocularMarkReceiver::SetCalibrationMatrix(const std::string& calibrationXml)
{
Json::Value root;
@ -464,28 +556,110 @@ int BinocularMarkReceiver::SetCalibrationMatrix(const std::string& calibrationXm
return sendJsonMessage("cmd_set_calibration", jsonData);
}
int BinocularMarkReceiver::SetExposureTime(double exposureTime)
int BinocularMarkReceiver::SetExposureTime(SVrCameraEnum target, double exposureTime)
{
    // Build and send the exposure-time command for the selected camera(s).
    const auto nowMs = std::chrono::duration_cast<std::chrono::milliseconds>(
        std::chrono::system_clock::now().time_since_epoch()).count();
    Json::Value payload;
    payload["timestamp"] = static_cast<Json::Int64>(nowMs);
    payload["exposure_time"] = exposureTime;
    switch (target) {
    case SVrCameraEnum::LEFT:
        payload["camera"] = "left";
        break;
    case SVrCameraEnum::RIGHT:
        payload["camera"] = "right";
        break;
    default:
        // BOTH: no "camera" field is added (matches prior behavior).
        break;
    }
    Json::FastWriter writer;
    return sendJsonMessage("cmd_set_exposure_time", writer.write(payload));
}
int BinocularMarkReceiver::SetGain(double gain)
int BinocularMarkReceiver::SetGain(SVrCameraEnum target, double gain)
{
    // Build and send the gain command for the selected camera(s).
    const auto nowMs = std::chrono::duration_cast<std::chrono::milliseconds>(
        std::chrono::system_clock::now().time_since_epoch()).count();
    Json::Value payload;
    payload["timestamp"] = static_cast<Json::Int64>(nowMs);
    payload["gain"] = gain;
    switch (target) {
    case SVrCameraEnum::LEFT:
        payload["camera"] = "left";
        break;
    case SVrCameraEnum::RIGHT:
        payload["camera"] = "right";
        break;
    default:
        // BOTH: no "camera" field is added (matches prior behavior).
        break;
    }
    Json::FastWriter writer;
    return sendJsonMessage("cmd_set_gain", writer.write(payload));
}
IBinocularMarkReceiver::CameraInfo BinocularMarkReceiver::GetCameraInfo(SVrCameraEnum target, int timeoutMs)
{
    // Synchronously query exposure/gain info for one camera from the server.
    // Returns zeroed defaults when not connected, target is BOTH (the
    // request/response protocol addresses a single camera), the send fails,
    // or the response times out.
    CameraInfo result;
    result.exposureTime = 0.0;
    result.gain = 0.0;
    if (!m_bConnected || target == SVrCameraEnum::BOTH) {
        return result;
    }
    // Build the request payload.
    Json::Value root;
    root["timestamp"] = static_cast<Json::Int64>(std::chrono::duration_cast<std::chrono::milliseconds>(
        std::chrono::system_clock::now().time_since_epoch()).count());
    root["camera"] = (target == SVrCameraEnum::LEFT) ? "left" : "right";
    Json::FastWriter writer;
    std::string jsonData = writer.write(root);
    if (target == SVrCameraEnum::LEFT) {
        // Reset the ready flag while holding m_mutex: the response handler
        // updates state under the same mutex, so the previous unlocked write
        // was a data race and could clobber a concurrently delivered response.
        {
            std::lock_guard<std::mutex> lock(m_mutex);
            m_bLeftCameraInfoReady = false;
        }
        if (sendJsonMessage("cmd_get_camera_info", jsonData) != 0) {
            return result;
        }
        std::unique_lock<std::mutex> lock(m_mutex);
        if (m_cvLeftCameraInfo.wait_for(lock, std::chrono::milliseconds(timeoutMs),
            [this] { return m_bLeftCameraInfoReady; })) {
            return m_pendingLeftCameraInfo;
        }
    } else {
        {
            std::lock_guard<std::mutex> lock(m_mutex);
            m_bRightCameraInfoReady = false;
        }
        if (sendJsonMessage("cmd_get_camera_info", jsonData) != 0) {
            return result;
        }
        std::unique_lock<std::mutex> lock(m_mutex);
        if (m_cvRightCameraInfo.wait_for(lock, std::chrono::milliseconds(timeoutMs),
            [this] { return m_bRightCameraInfoReady; })) {
            return m_pendingRightCameraInfo;
        }
    }
    // Send failed or the wait timed out.
    return result;
}
std::string BinocularMarkReceiver::GetCalibrationMatrix(int timeoutMs)
{
    // Synchronously request the calibration matrix XML from the server.
    // Returns an empty string when not connected, on send failure, or on timeout.
    std::string result;
    if (!m_bConnected) {
        return result;
    }
    // Build the request payload.
    Json::Value root;
    root["timestamp"] = static_cast<Json::Int64>(std::chrono::duration_cast<std::chrono::milliseconds>(
        std::chrono::system_clock::now().time_since_epoch()).count());
    Json::FastWriter writer;
    std::string jsonData = writer.write(root);
    // Reset the ready flag while holding m_mutex: the response handler updates
    // state under the same mutex, so the previous unlocked write was a data
    // race and could discard a response that arrives concurrently.
    {
        std::lock_guard<std::mutex> lock(m_mutex);
        m_bCalibrationMatrixReady = false;
    }
    if (sendJsonMessage("cmd_get_calibration", jsonData) != 0) {
        return result;
    }
    std::unique_lock<std::mutex> lock(m_mutex);
    if (m_cvCalibrationMatrix.wait_for(lock, std::chrono::milliseconds(timeoutMs),
        [this] { return m_bCalibrationMatrixReady; })) {
        return m_pendingCalibrationMatrix;
    }
    // Timed out waiting for the server's response.
    return result;
}
void BinocularMarkReceiver::handleSingleDetectionResult(const std::string& jsonStr)
{
LOG_DEBUG("handleSingleDetectionResult start, JSON size: %zu\n", jsonStr.size());
@ -524,7 +698,7 @@ void BinocularMarkReceiver::handleSingleDetectionResult(const std::string& jsonS
// 存储结果并通知等待的线程
{
// std::lock_guard<std::mutex> lock(m_mutex);
std::lock_guard<std::mutex> lock(m_mutex);
LOG_DEBUG("handleSingleDetectionResult storing data\n");
m_pendingSingleDetectionResult.marks = marks;
m_pendingSingleDetectionResult.leftImageBase64 = leftImageBase64;
@ -533,7 +707,8 @@ void BinocularMarkReceiver::handleSingleDetectionResult(const std::string& jsonS
m_pendingSingleDetectionResult.errorCode = errorCode;
m_bSingleDetectionReady = true;
LOG_DEBUG("handleSingleDetectionResult data stored\n");
}
} // 释放锁后再通知
LOG_DEBUG("handleSingleDetectionResult notifying\n");
m_cvSingleDetection.notify_one();
LOG_DEBUG("handleSingleDetectionResult complete\n");
@ -560,15 +735,91 @@ void BinocularMarkReceiver::handleImageData(const std::string& jsonStr)
// 存储结果并通知等待的线程
{
// std::lock_guard<std::mutex> lock(m_mutex);
std::lock_guard<std::mutex> lock(m_mutex);
LOG_DEBUG("handleImageData storing data\n");
m_pendingImageData.leftImageBase64 = leftImageBase64;
m_pendingImageData.rightImageBase64 = rightImageBase64;
m_pendingImageData.timestamp = timestamp;
m_bImageDataReady = true;
LOG_DEBUG("handleImageData data stored\n");
}
} // 释放锁后再通知
LOG_DEBUG("handleImageData notifying\n");
m_cvImageData.notify_one();
LOG_DEBUG("handleImageData complete\n");
}
void BinocularMarkReceiver::handleCameraInfoResponse(const std::string& jsonStr)
{
LOG_DEBUG("handleCameraInfoResponse start, JSON size: %zu\n", jsonStr.size());
Json::Reader reader;
Json::Value root;
if (!reader.parse(jsonStr, root)) {
LOG_DEBUG("handleCameraInfoResponse JSON parse failed\n");
return;
}
LOG_DEBUG("handleCameraInfoResponse JSON parsed\n");
std::string camera = root.get("camera", "").asString();
std::string serialNumber = root.get("serial_number", "").asString();
std::string modelName = root.get("model_name", "").asString();
std::string displayName = root.get("display_name", "").asString();
double exposureTime = root.get("exposure_time", 0.0).asDouble();
double gain = root.get("gain", 0.0).asDouble();
LOG_DEBUG("handleCameraInfoResponse got data: camera=%s, SN=%s, model=%s, exposure=%.2f, gain=%.2f\n",
camera.c_str(), serialNumber.c_str(), modelName.c_str(), exposureTime, gain);
// 存储结果并通知等待的线程
{
std::lock_guard<std::mutex> lock(m_mutex);
LOG_DEBUG("handleCameraInfoResponse storing data\n");
if (camera == "left") {
m_pendingLeftCameraInfo.serialNumber = serialNumber;
m_pendingLeftCameraInfo.modelName = modelName;
m_pendingLeftCameraInfo.displayName = displayName;
m_pendingLeftCameraInfo.exposureTime = exposureTime;
m_pendingLeftCameraInfo.gain = gain;
m_bLeftCameraInfoReady = true;
LOG_DEBUG("handleCameraInfoResponse left camera data stored\n");
} else if (camera == "right") {
m_pendingRightCameraInfo.serialNumber = serialNumber;
m_pendingRightCameraInfo.modelName = modelName;
m_pendingRightCameraInfo.displayName = displayName;
m_pendingRightCameraInfo.exposureTime = exposureTime;
m_pendingRightCameraInfo.gain = gain;
m_bRightCameraInfoReady = true;
LOG_DEBUG("handleCameraInfoResponse right camera data stored\n");
}
} // 释放锁后再通知
LOG_DEBUG("handleCameraInfoResponse notifying\n");
if (camera == "left") {
m_cvLeftCameraInfo.notify_one();
} else if (camera == "right") {
m_cvRightCameraInfo.notify_one();
}
LOG_DEBUG("handleCameraInfoResponse complete\n");
}
void BinocularMarkReceiver::handleCalibrationMatrixResponse(const std::string& jsonStr)
{
Json::Reader reader;
Json::Value root;
if (!reader.parse(jsonStr, root)) {
return;
}
std::string calibrationXml = root.get("calibration_xml", "").asString();
{
std::lock_guard<std::mutex> lock(m_mutex);
m_pendingCalibrationMatrix = calibrationXml;
m_bCalibrationMatrixReady = true;
}
m_cvCalibrationMatrix.notify_one();
}

View File

@ -24,15 +24,19 @@ public:
int Connect(const std::string& serverIp, uint16_t serverPort) override;
int Disconnect() override;
bool IsConnected() const override;
int TriggerDetection() override;
SingleDetectionResult RequestSingleDetection(int timeoutMs = 5000) override;
ImageData RequestSingleImage(int timeoutMs = 5000) override;
int StartWork() override;
int StopWork() override;
int StartCapture() override;
int StopCapture() override;
std::string GetCalibrationMatrix(int timeoutMs = 3000) override;
int SetCalibrationMatrix(const std::string& calibrationXml) override;
int SetExposureTime(double exposureTime) override;
int SetGain(double gain) override;
int SetExposureTime(SVrCameraEnum target, double exposureTime) override;
int SetGain(SVrCameraEnum target, double gain) override;
CameraInfo GetCameraInfo(SVrCameraEnum target, int timeoutMs = 3000) override;
void SetMarkResultCallback(MarkResultCallback callback) override;
void SetImageCallback(ImageCallback callback) override;
void SetEventCallback(EventCallback callback) override;
private:
@ -65,6 +69,12 @@ private:
*/
void handleImageData(const std::string& jsonStr);
/**
* @brief
* @param jsonStr JSON字符串
*/
void handleHeartbeat(const std::string& jsonStr);
/**
* @brief
* @param jsonStr JSON字符串
@ -77,6 +87,18 @@ private:
*/
void handleCommandResponse(const std::string& jsonStr);
/**
* @brief
* @param jsonStr JSON字符串
*/
void handleCameraInfoResponse(const std::string& jsonStr);
/**
* @brief
* @param jsonStr JSON字符串
*/
void handleCalibrationMatrixResponse(const std::string& jsonStr);
/**
* @brief
* @param jsonData JSON数据
@ -108,20 +130,33 @@ private:
std::vector<char> m_dataBuffer; // 数据缓冲区(处理粘包)
MarkResultCallback m_markResultCallback; // Mark结果回调
ImageCallback m_imageCallback; // 图像回调
EventCallback m_eventCallback; // 事件回调
// 同步等待结果存储
SingleDetectionResult m_pendingSingleDetectionResult;
ImageData m_pendingImageData;
CameraInfo m_pendingLeftCameraInfo;
CameraInfo m_pendingRightCameraInfo;
std::string m_pendingCalibrationMatrix;
std::condition_variable m_cvSingleDetection; // 单次检测条件变量
std::condition_variable m_cvImageData; // 图像数据条件变量
std::condition_variable m_cvLeftCameraInfo; // 左相机信息条件变量
std::condition_variable m_cvRightCameraInfo; // 右相机信息条件变量
std::condition_variable m_cvCalibrationMatrix; // 标定矩阵条件变量
bool m_bSingleDetectionReady;
bool m_bImageDataReady;
bool m_bLeftCameraInfoReady;
bool m_bRightCameraInfoReady;
bool m_bCalibrationMatrixReady;
// 心跳线程
std::thread m_heartbeatThread;
std::atomic<bool> m_bHeartbeatRunning;
std::atomic<bool> m_bHeartbeatEnabled; // 是否启用心跳
int m_nHeartbeatInterval; // 心跳间隔(秒)
std::condition_variable m_cvHeartbeat; // 心跳线程条件变量
std::mutex m_mutexHeartbeat; // 心跳线程互斥锁
mutable std::mutex m_mutex; // 线程安全锁

View File

@ -6,31 +6,30 @@ enum ErrorCode
{
SUCCESS = 0,
CLASS_OBJ_NULL = 1000,
FUN_UNSUPPORT,
//功能执行失败
APP_ERR_EXEC,
APP_ERR_ACK,
FUN_UNSUPPORT = 1100,
//相关加密内容
RUN_ENCRYPT_SUCCESS = 0,
ENCRYPT_ERROR_BASE = 1050,
RUN_ENCRYPT_LOAD,
RUN_ENCRYPT_NOT_LOAD,
RUN_ENCRYPT_RELEASE,
RUN_ENCRYPT_NO_FUN,
RUN_ENCRYPT_ARG,
//文件相关错误
FILE_ERR_EXIST = 1200,
FILE_ERR_EXIST = 1100,
FILE_ERR_NOEXIST,
FILE_ERR_READ,
FILE_ERR_WRITE,
FILE_ERR_FORMAT,
FILE_ERR_DEL,
//数据错误
DATA_ERR_LEN = 1300,
DATA_ERR_INVALID,
DATA_ERR_MD5,
DATA_ERR_KEY,
DATA_ERR_RANGE,
DATA_ERR_MEM,
//功能执行失败
APP_ERR_EXEC = 1400,
APP_ERR_ACK,
//共享内存错误
SHAREMEM_ERR_BASE = 1500,
SHAREMEM_ERR_BASE = 1200,
SHAREMEM_ERR_PARAM, // 参数错误
SHAREMEM_ERR_CREATE, // 创建失败
SHAREMEM_ERR_ALREADY_EXIST, // 已存在
@ -41,63 +40,55 @@ enum ErrorCode
SHAREMEM_ERR_NOT_MAPPED, // 未映射
SHAREMEM_ERR_TIMEOUT, // 超时
//相关加密内容
ENCRYPT_ERROR_BASE = 1600,
// 运行加密算法
RUN_ENCRYPT_SUCCESS = 0,
RUN_ENCRYPT_LOAD = 1700,
RUN_ENCRYPT_NOT_LOAD,
RUN_ENCRYPT_RELEASE,
RUN_ENCRYPT_NO_FUN,
RUN_ENCRYPT_ARG,
//数据错误
DATA_ERR_LEN = 1300,
DATA_ERR_INVALID,
DATA_ERR_MD5,
DATA_ERR_KEY,
DATA_ERR_RANGE,
DATA_ERR_MEM,
//网络错误
NET_SUCCESS = 0,
NET_ERR_CREAT_INIT = 1800,
NET_ERR_CREAT_INIT = 1400,
NET_ERR_CREAT_BIND,
NET_ERR_CREAT_LISTEN,
NET_ERR_CONNECT,
NET_ERR_ACCEPT,
NET_ERR_IP_INVALID,
NET_ERR_NOTINIT,
NET_ERR_SEND_DATA = 1900,
NET_ERR_SEND_DATA,
NET_ERR_RECV_CMD,
NET_ERR_RECV_DATA,
NET_ERR_RECV_DATA_LACK,
NET_ERR_RECV_DATA_LACK, //1410
NET_ERR_ARG,
NET_ERR_CONFIG,
NET_ERR_CONFIG_GET_IP,
NET_ERR_REMOVE_FD_FAILED,
NET_ERR_GET,
NET_DEV_NOT_FIND = 2000,
NET_DEV_NOT_FIND,
NET_DEV_NOT_RIGHT,
NET_DEV_CLIENT_LINK = 2100,
NET_DEV_CLIENT_LINK,
NET_DEV_CLIENT_SENDLEN,
NET_DEV_CLIENT_RECV,
NET_DEV_CLIENT_RECV, //1420
// 设备类型
DEV_TYPE_ERR = 2200,
DEV_TYPE_ERR = 1500,
DEV_NOT_FIND,
DEV_NO_OPEN,
DEV_ID_ERR,
DEV_OPEN_ERR,
DEV_CLOSE_ERR,
DEV_CLOSE_ERR, //1505
DEV_CTRL_ERR,
DEV_SEND_ERR,
DEV_RECV_ERR,
DEV_CTRL_TIMEOUT,
DEV_UNSUPPORT,
DEV_UNSUPPORT, //1510
DEV_ARG_INVAILD,
DEV_BUSY,
DEV_CONFIG_ERR,
DEV_DATA_INVALID,
DEV_RESULT_EMPTY, //1515
};
// Normalize any error code to its negative form for return-value conventions
// (callers treat negative as failure). Argument and full expansion are
// parenthesized per standard macro hygiene: without the inner parens,
// ERR_CODE(a + b) would expand the cast as `(int)a + b`, casting only `a`.
#define ERR_CODE(nCode) (-std::abs((int)(nCode)))