GrabBag/App/GrabBag/GrabBagApp/Presenter/Src/GrabBagPresenter.cpp
2025-11-26 22:38:26 +08:00

2265 lines
87 KiB
C++
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

#include "GrabBagPresenter.h"
#include "VrError.h"
#include "VrLog.h"
#include <QtCore/QCoreApplication>
#include <QtCore/QFileInfo>
#include <QtCore/QDir>
#include <QtCore/QString>
#include <QtCore/QStandardPaths>
#include <QtCore/QFile>
#include <QtCore/QRegularExpression>
#include <cmath>
#include <algorithm>
#include <QImage>
#include <QThread>
#include <QUuid>
#include <QMetaObject>
#include "Version.h"
#include "VrTimeUtils.h"
#include "VrDateUtils.h"
#include "SG_bagPositioning_Export.h"
#include "SG_baseDataType.h"
#include "VrConvert.h"
#include "PointCloudImageUtils.h"
// Constructor: sets member defaults and wires up the camera-reconnect timer.
// Heavy initialization (cameras, protocols, TCP server) is deferred to Init().
// @param parent Qt parent object (owns this via the Qt object tree)
GrabBagPresenter::GrabBagPresenter(QObject *parent)
    : QObject(parent)
    , m_vrConfig(nullptr)            // created in Init() via IVrConfig::CreateInstance
    , m_pStatus(nullptr)             // UI status sink, injected via SetStatusCallback()
    , m_pDetectPresenter(nullptr)
    , m_pRobotProtocol(nullptr)
    , m_pSerialProtocol(nullptr)
    , m_pTcpServer(nullptr)
    , m_port(0)
    , m_bTcpClientConnected(false)
    , m_bCameraConnected(false)
    , m_bRobotConnected(false)
    , m_currentWorkStatus(WorkStatus::Error)  // stays Error until Init() runs
{
    m_detectionDataCache.clear();
    // Create the parameter manager.
    m_pParameterManager = std::make_unique<ParameterManager>();
    // Create the camera reconnect timer (child of this, so Qt frees it).
    m_pCameraReconnectTimer = new QTimer(this);
    // Retry camera connections every 2 seconds.
    m_pCameraReconnectTimer->setInterval(2000);
    // Route timer ticks into the reconnect handler.
    connect(m_pCameraReconnectTimer, &QTimer::timeout, this, &GrabBagPresenter::_OnCameraReconnectTimer);
}
// Destructor: tears down resources in order — timer, config manager,
// worker thread, caches, protocols, TCP server, camera devices.
GrabBagPresenter::~GrabBagPresenter()
{
    // Stop the camera reconnect timer; deleteLater defers Qt-side cleanup.
    _StopCameraReconnectTimer();
    m_pCameraReconnectTimer->deleteLater();
    m_pCameraReconnectTimer = nullptr;
    // Shut down the config manager.
    if (m_pConfigManager) {
        m_pConfigManager->Shutdown();
        m_pConfigManager.reset();
    }
    // Signal the algorithm detection thread to exit and wake it.
    m_bAlgoDetectThreadRunning = false;
    m_algoDetectCondition.notify_all();
    // Wait for the algorithm detection thread to finish.
    // NOTE(review): this join only happens if the thread was never detached;
    // a detached thread reports joinable() == false and is silently skipped,
    // so it could still touch this object after destruction — verify how the
    // thread is started.
    if (m_algoDetectThread.joinable()) {
        m_algoDetectThread.join();
    }
    // Release cached detection data.
    _ClearDetectionDataCache();
    // Release the robot-arm protocol.
    if (m_pRobotProtocol) {
        m_pRobotProtocol->Deinitialize();
        delete m_pRobotProtocol;
        m_pRobotProtocol = nullptr;
    }
    // Release the serial protocol.
    if (m_pSerialProtocol) {
        m_pSerialProtocol->CloseSerial();
        delete m_pSerialProtocol;
        m_pSerialProtocol = nullptr;
    }
    // Release the TCP server.
    if (m_pTcpServer) {
        m_pTcpServer->Stop();
        m_pTcpServer->Close();
        delete m_pTcpServer;
        m_pTcpServer = nullptr;
    }
    // Release camera device resources.
    for(auto it = m_vrEyeDeviceList.begin(); it != m_vrEyeDeviceList.end(); it++)
    {
        if (it->second != nullptr) {
            it->second->CloseDevice();
            delete it->second;
            it->second = nullptr;
        }
    }
    m_vrEyeDeviceList.clear();
    // Release the detection presenter.
    if(m_pDetectPresenter)
    {
        delete m_pDetectPresenter;
        m_pDetectPresenter = nullptr;
    }
    // Release the configuration object.
    if (m_vrConfig) {
        delete m_vrConfig;
        m_vrConfig = nullptr;
    }
}
// Initialize the presenter: configuration, algorithm parameters, cameras,
// robot/serial protocols, TCP server, and the background detection thread.
// Sub-system failures are reported to the UI but do not abort initialization.
// @return SUCCESS always (individual failures are surfaced via m_pStatus)
int GrabBagPresenter::Init()
{
    LOG_DEBUG("Start APP Version: %s\n", GRABBAG_FULL_VERSION_STRING);
    // Reset connection state and show "initializing" in the UI.
    m_bCameraConnected = false;
    m_currentWorkStatus = WorkStatus::InitIng;
    m_pStatus->OnWorkStatusChanged(m_currentWorkStatus);
    m_pDetectPresenter = new DetectPresenter();
    // Create the VrConfig module and subscribe to change notifications.
    IVrConfig::CreateInstance(&m_vrConfig);
    if (m_vrConfig) {
        m_vrConfig->SetConfigChangeNotify(this);
    }
    // Load configuration (camera list, protocol settings, project type).
    QString configPath = PathManager::GetInstance().GetConfigFilePath();
    m_configResult = m_vrConfig->LoadConfig(configPath.toStdString());
    m_projectType = m_configResult.projectType;
    int nRet = SUCCESS;
    // Initialize algorithm parameters.
    nRet = InitAlgorithmParams();
    if (nRet != 0) {
        m_pStatus->OnStatusUpdate("算法参数初始化失败");
        LOG_ERROR("Algorithm parameters initialization failed with error: %d\n", nRet);
    } else {
        m_pStatus->OnStatusUpdate("算法参数初始化成功");
        LOG_INFO("Algorithm parameters initialization successful\n");
    }
    InitCamera(m_configResult.cameraList);
    LOG_INFO("Camera initialization completed. Connected cameras: %zu, default camera index: %d\n",
             m_vrEyeDeviceList.size(), m_currentCameraIndex);
    // Initialize the robot-arm protocol.
    nRet = InitRobotProtocol();
    if (nRet != 0) {
        m_pStatus->OnStatusUpdate("机械臂服务初始化失败");
        m_bRobotConnected = false;
    } else {
        m_pStatus->OnStatusUpdate("机械臂服务初始化成功");
    }
    // Initialize the serial protocol.
    nRet = InitSerialProtocol();
    if (nRet != 0) {
        m_pStatus->OnStatusUpdate("串口服务初始化失败");
        m_bSerialConnected = false;
    } else {
        m_pStatus->OnStatusUpdate("串口服务初始化成功");
        m_bSerialConnected = true;
        m_pStatus->OnSerialConnectionChanged(true);
    }
    // Initialize the TCP server.
    nRet = InitTcpServer(m_configResult.tcpServerConfig.port);
    if (nRet != 0) {
        m_pStatus->OnStatusUpdate("TCP服务器初始化失败");
    } else {
        m_pStatus->OnStatusUpdate("TCP服务器初始化成功");
    }
    // Start the background algorithm detection thread.
    // Bug fix: do NOT detach() this thread. The destructor stops it via
    // m_bAlgoDetectThreadRunning + notify_all() and then join()s it; after a
    // detach(), joinable() is false, the join is skipped, and the detached
    // thread can touch this object after destruction (use-after-free).
    m_bAlgoDetectThreadRunning = true;
    m_algoDetectThread = std::thread(&GrabBagPresenter::_AlgoDetectThread, this);
    m_pStatus->OnStatusUpdate("设备初始化完成");
    CheckAndUpdateWorkStatus();
    // Load the persisted work position and package type selection.
    _LoadCurrentWorkPositionAndPackageType();
    QString str = QString("%1 配置初始化成功").arg(ProjectTypeToString(m_configResult.projectType).c_str());
    m_pStatus->OnStatusUpdate(str.toStdString());
    LOG_INFO("ConfigManager initialized successfully\n");
    return SUCCESS;
}
// Camera protocol related methods
// Initialize all configured cameras (up to two handled explicitly); keeps the
// configuration for later reconnect attempts and starts the reconnect timer
// when not every expected camera came up.
// @param cameraList device descriptions from the loaded configuration
// @return SUCCESS always; per-camera failures are handled via reconnect
int GrabBagPresenter::InitCamera(std::vector<DeviceInfo>& cameraList)
{
    // Save the camera configuration for reconnect retries.
    m_cameraConfigList = cameraList;
    m_expectedCameraCount = cameraList.size();
    // Tell the UI how many cameras exist.
    int cameraCount = cameraList.size();
    // Pre-size the device list; every slot starts disconnected (nullptr).
    m_vrEyeDeviceList.resize(cameraCount, std::make_pair("", nullptr));
    for(int i = 0; i < cameraCount; i++)
    {
        m_vrEyeDeviceList[i] = std::make_pair(cameraList[i].name, nullptr);
    }
    m_pStatus->OnCameraCountChanged(cameraCount);
    // Try to open every configured camera.
    bool allCamerasConnected = true;
    if(cameraCount > 0){
        if (cameraCount >= 1) {
            // Try to open camera 1.
            int nRet = _OpenDevice(1, cameraList[0].name.c_str(), cameraList[0].ip.c_str(), m_projectType);
            m_pStatus->OnCamera1StatusChanged(nRet == SUCCESS);
            if (nRet != SUCCESS) {
                allCamerasConnected = false;
            }
        }
        if (cameraCount >= 2) {
            // Try to open camera 2.
            int nRet = _OpenDevice(2, cameraList[1].name.c_str(), cameraList[1].ip.c_str(), m_projectType);
            m_pStatus->OnCamera2StatusChanged(nRet == SUCCESS);
            if (nRet != SUCCESS) {
                allCamerasConnected = false;
            }
        }
    } else {
        // No cameras configured: fall back to one default slot and let
        // _OpenDevice discover the device (nullptr IP).
        m_vrEyeDeviceList.resize(1, std::make_pair("", nullptr));
        m_expectedCameraCount = 1;
        int nRet = _OpenDevice(1, "相机", nullptr, m_projectType);
        if (nRet != SUCCESS) {
            allCamerasConnected = false;
        }
    }
    // Count how many cameras actually opened.
    int connectedCount = 0;
    for (const auto& device : m_vrEyeDeviceList) {
        if (device.second != nullptr) {
            connectedCount++;
        }
    }
    m_bCameraConnected = (connectedCount > 0); // connected if at least one camera opened
    // Default camera index: the first connected camera (indices are 1-based).
    m_currentCameraIndex = 1; // default starts at 1
    for (int i = 0; i < static_cast<int>(m_vrEyeDeviceList.size()); i++) {
        if (m_vrEyeDeviceList[i].second != nullptr) {
            m_currentCameraIndex = i + 1; // first connected camera
            break;
        }
    }
    // If not all expected cameras connected, start the reconnect timer.
    if (!allCamerasConnected && m_expectedCameraCount > 0) {
        LOG_INFO("Not all cameras connected (%d/%d), starting reconnect timer\n", connectedCount, m_expectedCameraCount);
        m_pStatus->OnStatusUpdate("启动重新连接相机...");
        _StartCameraReconnectTimer();
    } else if (allCamerasConnected) {
        LOG_INFO("All cameras connected successfully \n");
        m_pStatus->OnStatusUpdate("所有相机连接成功");
        // Make sure the reconnect timer is stopped.
        _StopCameraReconnectTimer();
    }
    return SUCCESS;
}
// 初始化机械臂协议
int GrabBagPresenter::InitRobotProtocol()
{
LOG_DEBUG("Start initializing robot protocol\n");
m_pStatus->OnStatusUpdate("机械臂服务初始化...");
// 创建RobotProtocol实例
if(nullptr == m_pRobotProtocol){
m_pRobotProtocol = new RobotProtocol();
}
// 初始化协议服务使用端口502
int nRet = m_pRobotProtocol->Initialize(5020);
// 设置连接状态回调
m_pRobotProtocol->SetConnectionCallback([this](bool connected) {
this->OnRobotConnectionChanged(connected);
});
// 设置工作信号回调
m_pRobotProtocol->SetWorkSignalCallback([this](bool startWork, int cameraIndex) {
return this->OnRobotWorkSignal(startWork, cameraIndex);
});
LOG_INFO("Robot protocol initialization completed successfully\n");
return nRet;
}
// 串口协议相关方法实现
int GrabBagPresenter::InitSerialProtocol()
{
if (m_pSerialProtocol) {
LOG_WARNING("Serial protocol already initialized\n");
return SUCCESS;
}
LOG_INFO("Initializing serial protocol\n");
// 获取串口配置
QString configPath = PathManager::GetInstance().GetConfigFilePath();
ConfigResult configResult = m_vrConfig->LoadConfig(configPath.toStdString());
// 如果串口未启用,则不初始化
if (!configResult.serialConfig.enabled) {
LOG_INFO("Serial communication is disabled in configuration\n");
return ERR_CODE(DEV_ARG_INVAILD);
}
// 创建串口协议实例
m_pSerialProtocol = new SerialProtocol();
// 设置串口回调函数
m_pSerialProtocol->SetConnectionCallback([this](bool connected) {
this->OnSerialConnectionChanged(connected);
});
m_pSerialProtocol->SetWorkSignalCallback([this](bool startWork, int cameraIndex) {
return this->OnSerialWorkSignal(startWork, cameraIndex);
});
// 打开串口
int result = m_pSerialProtocol->OpenSerial(
configResult.serialConfig.portName,
configResult.serialConfig.baudRate,
configResult.serialConfig.dataBits,
configResult.serialConfig.stopBits,
configResult.serialConfig.parity,
configResult.serialConfig.flowControl
);
if (result == SUCCESS) {
LOG_INFO("Serial protocol initialization successful\n");
m_bSerialConnected = true;
return SUCCESS;
} else {
LOG_ERROR("Serial protocol initialization failed with error: %d\n", result);
m_bSerialConnected = false;
return result;
}
}
// 初始化TCP服务器
int GrabBagPresenter::InitTcpServer(int nPort)
{
// 创建TCP服务器
if (!VrCreatYTCPServer(&m_pTcpServer)) {
LOG_ERROR("Failed to create VrNets TCPServer\n");
m_pTcpServer = nullptr;
}
if (!m_pTcpServer) {
LOG_ERROR("TCP server is not initialized\n");
return ERR_CODE(DEV_OPEN_ERR);
}
LOG_INFO("Initializing VrNets TCP server on port %d\n", nPort);
// 初始化TCP服务器
if (!m_pTcpServer->Init(nPort, false)) {
LOG_ERROR("Failed to initialize VrNets TCP server on port %d\n", nPort);
return ERR_CODE(DEV_OPEN_ERR);
}
// 创建lambda回调函数捕获this指针
FunTCPServerRecv recvCallback = [this](const TCPClient* pClient, const char* pData, const unsigned int nLen) {
this->onTcpDataReceivedFromCallback(pClient, pData, nLen);
};
// 创建事件回调函数,用于处理客户端连接和断开事件
FunTCPServerEvent eventCallback = [this](const TCPClient* pClient, TCPServerEventType eventType) {
this->onTcpClientEventFromCallback(pClient, eventType);
};
// 设置事件回调
m_pTcpServer->SetEventCallback(eventCallback);
// 启动TCP服务器使用lambda回调函数
if (!m_pTcpServer->Start(recvCallback, false)) {
LOG_ERROR("Failed to start VrNets TCP server\n");
return ERR_CODE(DEV_OPEN_ERR);
}
m_port = nPort;
LOG_INFO("VrNets TCP server started successfully on port %d\n", nPort);
return SUCCESS;
}
// Thin wrapper kept for the legacy interface: delegates to InitTcpServer
// and reports success as a bool.
bool GrabBagPresenter::startServer(quint16 port)
{
    return SUCCESS == InitTcpServer(port);
}
// Stop the TCP server (if running) and clear the cached port.
void GrabBagPresenter::stopServer()
{
    if (nullptr == m_pTcpServer) {
        return;
    }
    m_pTcpServer->Stop();
    m_port = 0;
    LOG_INFO("VrNets TCP server stopped\n");
}
// TCP server receive handler: parses an incoming command using the three
// supported formats (new -> old -> simple, in priority order) and dispatches
// "Trig" commands to _HandleTrigCommand. Unknown input gets an error reply.
void GrabBagPresenter::onTcpDataReceivedFromCallback(const TCPClient* pClient, const char* pData, const unsigned int nLen)
{
    if (!pClient || !pData || nLen == 0) {
        return;
    }
    std::string receivedData(pData, nLen);
    QString qData = QString::fromStdString(receivedData).trimmed();
    LOG_INFO("TCP data received from client %p: %s\n", pClient, qData.toStdString().c_str());
    if (m_pStatus) {
        m_pStatus->OnStatusUpdate("收到TCP数据: " + qData.toStdString());
    }
    int cameraIndex = -1;   // camera (vision) number
    int templateIndex = -1; // vision template number
    QString triggerCmd;     // trigger keyword, e.g. "Trig"
    // Try the parsers in priority order: new format -> old format -> simple.
    bool parsed = _ParseNewFormatProtocol(qData, cameraIndex, templateIndex, triggerCmd);
    if (!parsed) {
        parsed = _ParseOldFormatProtocol(qData, cameraIndex, templateIndex, triggerCmd);
    }
    if (!parsed) {
        parsed = _ParseSimpleProtocol(qData, cameraIndex, templateIndex, triggerCmd);
    }
    // None of the formats matched: reply with an error.
    if (!parsed) {
        LOG_WARNING("Unknown TCP command received: %s\n", qData.toStdString().c_str());
        if (m_pStatus) {
            m_pStatus->OnStatusUpdate("未知TCP命令: " + qData.toStdString());
        }
        std::string err = "Unknown_Command";
        m_pTcpServer->SendData(pClient, err.c_str(), err.length());
        return;
    }
    // Handle the "Trig" command.
    if (triggerCmd.contains("Trig", Qt::CaseInsensitive)) {
        bool bRet = _HandleTrigCommand(pClient, cameraIndex, templateIndex);
        if(!bRet){
            SetWorkStatus(WorkStatus::Error);
        }
    } else {
        // Other trigger keywords (reserved for future extension).
        LOG_WARNING("Unknown trigger command: %s\n", triggerCmd.toStdString().c_str());
        if (m_pStatus) {
            m_pStatus->OnStatusUpdate("未知触发命令: " + triggerCmd.toStdString());
        }
        std::string err = "Unknown_Trigger_Command";
        m_pTcpServer->SendData(pClient, err.c_str(), err.length());
    }
}
// 解析新格式协议:@,视觉号,视觉模版号,启动信息,$
// 示例:@13Trig$ 表示触发1号视觉相机拍照调用3号视觉模版发出Trig拍照命令
bool GrabBagPresenter::_ParseNewFormatProtocol(const QString& qData, int& cameraIndex, int& templateIndex, QString& triggerCmd)
{
// 正则表达式:@[,]\s*(-?\d+)\s*[,]\s*(-?\d+)\s*[,]\s*(\w+)
// 支持负数(-1表示使用默认相机/模板)
QRegularExpression newFormatRegex("@[,]\\s*(-?\\d+)\\s*[,]\\s*(-?\\d+)\\s*[,]\\s*(\\w+)");
QRegularExpressionMatch newFormatMatch = newFormatRegex.match(qData);
if (!newFormatMatch.hasMatch()) {
return false;
}
// 新格式解析成功(相机编号、模板编号、触发命令)
cameraIndex = newFormatMatch.captured(1).toInt();
templateIndex = newFormatMatch.captured(2).toInt();
triggerCmd = newFormatMatch.captured(3);
LOG_INFO("Parsed new format command: camera=%d, template=%d, trigger=%s\n", cameraIndex, templateIndex, triggerCmd.toStdString().c_str());
// 如果相机索引为-1使用当前默认相机
if (cameraIndex == -1) {
cameraIndex = m_currentCameraIndex;
LOG_INFO("Camera index is -1, using current default camera: %d\n", cameraIndex);
}
// 如果模板索引为-1或0表示使用当前模板不切换
if (templateIndex == -1 || templateIndex == 0) {
LOG_INFO("Template index is %d, will use current template (no switch)\n", templateIndex);
templateIndex = -1; // 标准化为-1表示不切换
}
return true;
}
// 解析旧格式协议:@模板编号Trig$ 或 @模板编号Trig
// 示例:@3Trig$ 表示调用3号视觉模版
bool GrabBagPresenter::_ParseOldFormatProtocol(const QString& qData, int& cameraIndex, int& templateIndex, QString& triggerCmd)
{
QRegularExpression oldFormatRegex("@(\\d+).*Trig");
QRegularExpressionMatch oldFormatMatch = oldFormatRegex.match(qData);
if (!oldFormatMatch.hasMatch()) {
return false;
}
// 旧格式解析成功(只有模板编号)
templateIndex = oldFormatMatch.captured(1).toInt();
cameraIndex = m_currentCameraIndex; // 使用当前默认相机
triggerCmd = "Trig";
LOG_INFO("Parsed old format command: template=%d, using current camera=%d\n", templateIndex, cameraIndex);
return true;
}
// Parse the bare "Trig" command: trigger detection with the current camera
// and current template (no switching).
// @return true when the input contains "Trig" (case-insensitive)
bool GrabBagPresenter::_ParseSimpleProtocol(const QString& qData, int& cameraIndex, int& templateIndex, QString& triggerCmd)
{
    const bool hasTrig = qData.contains("Trig", Qt::CaseInsensitive);
    if (!hasTrig) {
        return false;
    }
    cameraIndex = m_currentCameraIndex; // keep the current camera
    templateIndex = -1;                 // keep the current template
    triggerCmd = "Trig";
    LOG_INFO("Parsed simple Trig command, using current camera=%d and template\n", cameraIndex);
    return true;
}
// Handle a "Trig" command: validate the camera, optionally switch templates
// by package ID, then start a single detection run. Error replies are sent
// to the TCP client on every failure path.
// @param pClient       TCP client that receives success/error responses
// @param cameraIndex   1-based camera number from the parsed command
// @param templateIndex vision template number; <= 0 keeps the current template
// @return true when detection was started successfully
bool GrabBagPresenter::_HandleTrigCommand(const TCPClient* pClient, int cameraIndex, int templateIndex)
{
    SetWorkStatus(WorkStatus::Working);
    // 1. Validate the camera number.
    if (cameraIndex < 1 || cameraIndex > static_cast<int>(m_vrEyeDeviceList.size())) {
        LOG_ERROR("Invalid camera index: %d, valid range: 1-%zu\n", cameraIndex, m_vrEyeDeviceList.size());
        if (m_pStatus) {
            m_pStatus->OnStatusUpdate(QString("无效的相机编号: %1").arg(cameraIndex).toStdString());
        }
        // Reply with an error response.
        QString response = QString("Error_Invalid_Camera_%1").arg(cameraIndex);
        m_pTcpServer->SendData(pClient, response.toStdString().c_str(), response.toStdString().length());
        return false;
    }
    // The camera must be connected.
    if (m_vrEyeDeviceList[cameraIndex - 1].second == nullptr) {
        LOG_ERROR("Camera %d is not connected\n", cameraIndex);
        if (m_pStatus) {
            QString cameraName = QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first);
            m_pStatus->OnStatusUpdate(QString("相机%1未连接").arg(cameraName).toStdString());
        }
        // Reply with an error response.
        QString response = QString("Error_Camera_%1_Not_Connected").arg(cameraIndex);
        m_pTcpServer->SendData(pClient, response.toStdString().c_str(), response.toStdString().length());
        return false;
    }
    // 2. If a template number was given, switch templates.
    if (templateIndex > 0) {
        // Package IDs are globally unique; look up the package, camera, and
        // work position that own it in the loaded configuration.
        std::string packageId = "pkg_" + std::to_string(templateIndex);
        WorkPositionConfig* foundWorkPosition = nullptr;
        CameraConfig* foundCamera = nullptr;
        PackageTypeConfig* foundPackage = nullptr;
        // Walk every work position / camera / package for a matching ID.
        for (auto& wp : m_configResult.workPositions) {
            for (auto& cam : wp.cameras) {
                for (auto& pkg : cam.packages) {
                    if (pkg.id == packageId) {
                        foundWorkPosition = &wp;
                        foundCamera = &cam;
                        foundPackage = &pkg;
                        break;
                    }
                }
                if (foundPackage) break;
            }
            if (foundPackage) break;
        }
        // Was the package found?
        if (!foundPackage || !foundCamera || !foundWorkPosition) {
            LOG_ERROR("Package not found with ID: %s (template index: %d)\n", packageId.c_str(), templateIndex);
            if (m_pStatus) {
                m_pStatus->OnStatusUpdate(QString("未找到包裹ID: %1 (模板编号: %2)")
                                          .arg(QString::fromStdString(packageId))
                                          .arg(templateIndex).toStdString());
            }
            QString response = QString("Error_Package_Not_Found_%1").arg(templateIndex);
            m_pTcpServer->SendData(pClient, response.toStdString().c_str(), response.toStdString().length());
            return false;
        }
        // If the package belongs to a different camera than requested,
        // the package's camera wins.
        if (foundCamera->cameraIndex != cameraIndex) {
            LOG_WARNING("Package %s belongs to camera %d, but requested camera is %d. Using package's camera.\n",
                        packageId.c_str(), foundCamera->cameraIndex, cameraIndex);
            // Switch to the package's camera.
            cameraIndex = foundCamera->cameraIndex;
        }
        // Log what was selected.
        std::string templateName = foundPackage->name;
        LOG_INFO("Switched to package by ID: %s (template index: %d)\n", packageId.c_str(), templateIndex);
        LOG_INFO(" Work Position: %s (ID: %s)\n", foundWorkPosition->name.c_str(), foundWorkPosition->id.c_str());
        LOG_INFO(" Camera: %s (ID: %s, Index: %d)\n", foundCamera->cameraName.c_str(), foundCamera->id.c_str(), foundCamera->cameraIndex);
        LOG_INFO(" Package: %s (ID: %s)\n", templateName.c_str(), foundPackage->id.c_str());
        if (m_pStatus) {
            m_pStatus->OnStatusUpdate(QString("切换到[工位:%1, 相机:%2, 包裹:%3]")
                                      .arg(QString::fromStdString(foundWorkPosition->name))
                                      .arg(QString::fromStdString(foundCamera->cameraName))
                                      .arg(QString::fromStdString(foundPackage->name)).toStdString());
        }
        // Apply the newly selected template configuration.
        _UpdateCurrentExecutionParams(foundWorkPosition, foundCamera, foundPackage, cameraIndex);
    }
    // 3. Log what we are about to do.
    LOG_INFO("Starting detection with camera %d (template %d)\n", cameraIndex, templateIndex);
    // 4. Start detection on the selected camera (non-auto mode).
    // If a template switch already refreshed the parameters (templateIndex > 0)
    // StartDetection must not refresh them again.
    bool needUpdateParams = (templateIndex <= 0);
    int result = StartDetection(cameraIndex, false, needUpdateParams);
    // 5. Report failures back to the client.
    if (result != SUCCESS) {
        QString response = QString("Error_%1").arg(result);
        LOG_ERROR("Failed to start camera %d detection, error: %d\n", cameraIndex, result);
        m_pTcpServer->SendData(pClient, response.toStdString().c_str(), response.toStdString().length());
    }
    return result == SUCCESS;
}
// TCP客户端连接状态回调处理方法
void GrabBagPresenter::onTcpClientEventFromCallback(const TCPClient* pClient, TCPServerEventType eventType)
{
if (!pClient) {
return;
}
QString clientId = QString("Client_%1").arg(reinterpret_cast<intptr_t>(pClient));
switch (eventType) {
case TCP_EVENT_CLIENT_CONNECTED:
{
LOG_INFO("TCP client connected: %s\n", clientId.toStdString().c_str());
m_bTcpClientConnected = true;
if (m_pStatus) {
m_pStatus->OnStatusUpdate("机械臂连接成功");
// 这里可以根据需要更新机械臂连接状态UI
m_pStatus->OnRobotConnectionChanged(true);
}
break;
}
case TCP_EVENT_CLIENT_DISCONNECTED:
{
LOG_INFO("TCP client disconnected: %s\n", clientId.toStdString().c_str());
m_bTcpClientConnected = false;
if (m_pStatus) {
m_pStatus->OnStatusUpdate("机械臂断开连接");
// 这里可以根据需要更新机械臂连接状态UI
m_pStatus->OnRobotConnectionChanged(false);
}
break;
}
case TCP_EVENT_CLIENT_EXCEPTION:
{
LOG_WARNING("TCP client exception: %s\n", clientId.toStdString().c_str());
m_bTcpClientConnected = false;
if (m_pStatus) {
m_pStatus->OnStatusUpdate("机械臂连接异常");
// 这里可以根据需要更新机械臂连接状态UI
m_pStatus->OnRobotConnectionChanged(false);
}
break;
}
default:
LOG_DEBUG("Unknown TCP client event: %d for client: %s\n", eventType, clientId.toStdString().c_str());
break;
}
}
// 初始化算法参数
int GrabBagPresenter::InitAlgorithmParams()
{
LOG_DEBUG("Start initializing algorithm parameters\n");
// 获取手眼标定文件路径
QString clibPath = PathManager::GetInstance().GetCalibrationFilePath();
LOG_INFO("Loading hand-eye matrices from: %s\n", clibPath.toStdString().c_str());
// 使用 ParameterManager 加载所有参数
int nRet = m_pParameterManager->LoadGlobalParams(
&m_configResult,
clibPath.toStdString(),
m_configResult.algorithmParams.planeCalibParam.cameraCalibParams
);
if (nRet != 0) {
LOG_ERROR("ParameterManager failed to load global parameters, error: %d\n", nRet);
return nRet;
}
LOG_INFO("projectType: %s\n", ProjectTypeToString(m_projectType).c_str());
LOG_INFO("Algorithm parameters initialized successfully via ParameterManager\n");
return SUCCESS;
}
// Hand-eye calibration matrix accessor.
// Returns (by value) the matrix for the given camera index; the lookup is
// delegated to the ParameterManager.
const CalibMatrix GrabBagPresenter::GetClibMatrix(int index) const
{
    return m_pParameterManager->GetHandEyeMatrix(index);
}
// Robot protocol callback: record the new connection state, mirror it to the
// UI with a descriptive status line, and recompute the overall work status.
void GrabBagPresenter::OnRobotConnectionChanged(bool connected)
{
    LOG_INFO("Robot connection status changed: %s\n", (connected ? "connected" : "disconnected"));
    m_bRobotConnected = connected;
    if (m_pStatus) {
        m_pStatus->OnRobotConnectionChanged(connected);
        // Push a human-readable status line to the UI.
        m_pStatus->OnStatusUpdate(connected ? "机械臂连接成功,通信正常"
                                            : "机械臂连接断开,请检查网络连接");
    }
    // Recompute the aggregate work status.
    CheckAndUpdateWorkStatus();
}
bool GrabBagPresenter::OnRobotWorkSignal(bool startWork, int cameraIndex)
{
LOG_INFO("Received robot work signal: %s for camera index: %d\n", (startWork ? "start work" : "stop work"), cameraIndex);
if(!_SinglePreDetection(cameraIndex)){
return false;
}
int nRet = _SingleDetection(cameraIndex, startWork);
if(nRet != SUCCESS){
m_currentWorkStatus = WorkStatus::Ready;
if (m_pStatus) {
m_pStatus->OnWorkStatusChanged(WorkStatus::Error);
}
}
return nRet == SUCCESS;
}
// Inject the UI status sink used for all OnStatusUpdate/On*Changed calls.
// The pointer is stored, not owned (it is never deleted here); passing
// nullptr disables status reporting on paths that null-check m_pStatus.
void GrabBagPresenter::SetStatusCallback(IYGrabBagStatus* status)
{
    m_pStatus = status;
}
// Start detection on one camera.
// @param cameraIdx    1-based camera ID, or -1 to use the current default
// @param isAuto       when true, refuse to start unless status is Ready
// @param updateParams when true, refresh execution params for that camera
// @return SUCCESS or an ERR_CODE on failure
int GrabBagPresenter::StartDetection(int cameraIdx, bool isAuto, bool updateParams)
{
    LOG_INFO("--------------------------------\n");
    LOG_INFO("Start detection with camera index: %d, updateParams: %d\n", cameraIdx, updateParams);
    // In automatic mode the device must be Ready before detection may start.
    if (isAuto && m_currentWorkStatus != WorkStatus::Ready) {
        LOG_INFO("Device not ready, cannot start detection\n");
        if (m_pStatus) {
            m_pStatus->OnStatusUpdate("设备未准备就绪,无法开始检测");
        }
        return ERR_CODE(DEV_BUSY);
    }
    // Resolve the camera to use without modifying the default camera index.
    int cameraIndex = (cameraIdx != -1) ? cameraIdx : m_currentCameraIndex;
    LOG_INFO("Using camera index: %d (default: %d)\n", cameraIndex, m_currentCameraIndex);
    m_currentWorkStatus = WorkStatus::Detecting;
    if (m_pStatus) {
        m_pStatus->OnWorkStatusChanged(m_currentWorkStatus);
    }
    // Mark the robot side busy while we detect.
    if (m_pRobotProtocol) {
        m_pRobotProtocol->SetWorkStatus(RobotProtocol::WORK_STATUS_BUSY);
    }
    if(m_vrEyeDeviceList.empty()){
        LOG_ERROR("No camera device found\n");
        if (m_pStatus) {
            m_pStatus->OnStatusUpdate("未找到相机设备");
        }
        return ERR_CODE(DEV_NOT_FIND);
    }
    // Release previously cached detection data.
    _ClearDetectionDataCache();
    // Refresh the execution parameters if requested.
    if (updateParams) {
        // Temporarily swap in the requested camera index for the update.
        int savedDefaultCameraIndex = m_currentCameraIndex;
        m_currentCameraIndex = cameraIndex;
        _UpdateCurrentExecutionParams();
        m_currentCameraIndex = savedDefaultCameraIndex; // restore the default
    }
    // Push camera parameters to the device.
    int nRet = _ApplyCameraParameters();
    if (nRet != SUCCESS) {
        LOG_ERROR("Failed to apply camera parameters, error: %d\n", nRet);
        if (m_pStatus) {
            m_pStatus->OnStatusUpdate("配置相机参数失败");
        }
        m_currentWorkStatus = WorkStatus::Ready;
        // Bug fix: the original updated only the cached status here, leaving
        // the UI stuck on "Detecting"; notify the UI as every other path does.
        if (m_pStatus) {
            m_pStatus->OnWorkStatusChanged(m_currentWorkStatus);
        }
        return nRet;
    }
    LOG_INFO("Camera parameters applied successfully, starting detection\n");
    // Camera IDs are 1-based; convert to the 0-based array index.
    int arrayIndex = cameraIndex - 1;
    // Validate the index and the connection state in one check.
    if (arrayIndex < 0 || arrayIndex >= static_cast<int>(m_vrEyeDeviceList.size()) || m_vrEyeDeviceList[arrayIndex].second == nullptr) {
        LOG_ERROR("Camera %d is not connected\n", cameraIndex);
        QString cameraName = (arrayIndex >= 0 && arrayIndex < static_cast<int>(m_vrEyeDeviceList.size())) ?
            QString::fromStdString(m_vrEyeDeviceList[arrayIndex].first) : QString("相机%1").arg(cameraIndex);
        m_pStatus->OnStatusUpdate(QString("%1 未连接").arg(cameraName).toStdString());
        // TODO(review): this early return leaves m_currentWorkStatus at
        // Detecting — confirm whether it should fall back to Ready/Error.
        return ERR_CODE(DEV_NOT_FIND);
    }
    // The index was fully validated above, so the original's second range
    // check (and its unreachable "invalid ID" else-branch) is gone.
    IVrEyeDevice* pDevice = m_vrEyeDeviceList[arrayIndex].second;
    // DirectBag projects stream colored point clouds; others stream positions.
    EVzResultDataType eDataType = keResultDataType_Position;
    if(m_projectType == ProjectType::DirectBag){
        eDataType = keResultDataType_PointXYZRGBA;
    }
    pDevice->SetStatusCallback(&GrabBagPresenter::_StaticCameraNotify, this);
    // Start detection with the line-data callback.
    nRet = pDevice->StartDetect(&GrabBagPresenter::_StaticDetectionCallback, eDataType, this);
    LOG_INFO("Camera ID %d start detection nRet: %d\n", cameraIndex, nRet);
    QString cameraName = QString::fromStdString(m_vrEyeDeviceList[arrayIndex].first);
    if (nRet == SUCCESS) {
        m_pStatus->OnStatusUpdate(QString("启动%1检测成功").arg(cameraName).toStdString());
    } else {
        LOG_ERROR("Camera ID %d start detection failed with error: %d\n", cameraIndex, nRet);
        // Bug fix: "[%d]" is not a QString place marker, so .arg(nRet) had no
        // slot to fill and the literal "[%d]" leaked into the UI; use "%2".
        m_pStatus->OnStatusUpdate(QString("启动%1检测失败[%2]").arg(cameraName).arg(nRet).toStdString());
    }
    return nRet;
}
int GrabBagPresenter::StopDetection()
{
LOG_INFO("Stop detection\n");
// 停止所有相机的检测
for (size_t i = 0; i < m_vrEyeDeviceList.size(); ++i) {
IVrEyeDevice* pDevice = m_vrEyeDeviceList[i].second;
if (pDevice) {
int ret = pDevice->StopDetect();
if (ret == 0) {
LOG_INFO("Camera %zu stop detection successfully\n", i + 1);
} else {
LOG_WARNING("Camera %zu stop detection failed, error code: %d\n", i + 1, ret);
}
}
}
// 通知UI工作状态变更为"就绪"(如果设备连接正常)
if (m_pStatus) {
// 检查设备连接状态,决定停止后的状态
if (m_bCameraConnected && (m_bRobotConnected || m_bSerialConnected)) {
m_currentWorkStatus = WorkStatus::Ready;
m_pStatus->OnWorkStatusChanged(WorkStatus::Ready);
} else {
m_currentWorkStatus = WorkStatus::Error;
m_pStatus->OnWorkStatusChanged(WorkStatus::Error);
}
m_pStatus->OnStatusUpdate("检测已停止");
}
// 设置机械臂工作状态为空闲
if (m_pRobotProtocol) {
m_pRobotProtocol->SetWorkStatus(RobotProtocol::WORK_STATUS_IDLE);
}
return 0;
}
// Load a recorded laser-scan file and run detection on it (debug replay).
// @param filePath path of the recorded scan data file
// @return SUCCESS or the loader/detection error code
int GrabBagPresenter::LoadDebugDataAndDetect(const std::string& filePath)
{
    LOG_INFO("Loading debug data from file: %s\n", filePath.c_str());
    m_currentWorkStatus = WorkStatus::Working;
    if (m_pStatus) {
        m_pStatus->OnWorkStatusChanged(WorkStatus::Working);
        const std::string fileName = QFileInfo(QString::fromStdString(filePath)).fileName().toStdString();
        m_pStatus->OnStatusUpdate(QString("加载文件:%1").arg(fileName.c_str()).toStdString());
    }
    // The robot side is busy while we replay the recorded scan.
    if (m_pRobotProtocol) {
        m_pRobotProtocol->SetWorkStatus(RobotProtocol::WORK_STATUS_BUSY);
    }
    int lineNum = 0;
    float scanSpeed = 0.0f;
    int maxTimeStamp = 0;
    int clockPerSecond = 0;
    int result = SUCCESS;
    // 1. Drop any previously cached detection data, then load under the lock.
    _ClearDetectionDataCache();
    {
        std::lock_guard<std::mutex> lock(m_detectionDataMutex);
        // LoadLaserScanData auto-detects the file format.
        result = m_dataLoader.LoadLaserScanData(filePath, m_detectionDataCache, lineNum, scanSpeed, maxTimeStamp, clockPerSecond);
    }
    if (result != SUCCESS) {
        LOG_ERROR("Failed to load debug data: %s\n", m_dataLoader.GetLastError().c_str());
        if (m_pStatus) {
            m_pStatus->OnStatusUpdate("调试数据加载失败: " + m_dataLoader.GetLastError());
        }
        return result;
    }
    LOG_INFO("Successfully loaded %d lines of debug data\n", lineNum);
    if (m_pStatus) {
        m_pStatus->OnStatusUpdate(QString("成功加载 %1 行调试数据").arg(lineNum).toStdString());
    }
    // 2. Run the detection task synchronously on the loaded data.
    result = _DetectTask();
    return result;
}
// Register the same status callback on every connected camera device.
// @param fNotify status callback function
// @param param   opaque user data forwarded to the callback
void GrabBagPresenter::SetCameraStatusCallback(VzNL_OnNotifyStatusCBEx fNotify, void* param)
{
    size_t cameraId = 0;
    for (auto& entry : m_vrEyeDeviceList) {
        ++cameraId;
        if (entry.second) {
            entry.second->SetStatusCallback(fNotify, param);
            LOG_DEBUG("Status callback set for camera %zu\n", cameraId);
        }
    }
}
// Open a single camera device and store it in the device list.
// @param cameraIndex 1-based camera slot (1 or 2)
// @param cameraName  display name stored alongside the device
// @param cameraIp    device IP, or nullptr to auto-discover
// @param projectType current project type (DirectBag enables color mode)
// @return SUCCESS on success, otherwise the failing step's error code
int GrabBagPresenter::_OpenDevice(int cameraIndex, const char* cameraName, const char* cameraIp, ProjectType& projectType)
{
    IVrEyeDevice* pDevice = nullptr;
    IVrEyeDevice::CreateObject(&pDevice);
    int nRet = pDevice->InitDevice();
    if(nRet != SUCCESS){
        delete pDevice;
        LOG_ERROR("InitDevice failed, error code: %d\n", nRet);
        ERR_CODE_RETURN(nRet);
    }
    // Register the status callback before opening the device.
    nRet = pDevice->SetStatusCallback(&GrabBagPresenter::_StaticCameraNotify, this);
    if(nRet != SUCCESS){
        delete pDevice;
        pDevice = nullptr; // guard against reuse if the macro does not return
        LOG_ERROR("SetStatusCallback failed, error code: %d\n", nRet);
    }
    // NOTE(review): ERR_CODE_RETURN is assumed to return only when nRet is an
    // error (matching its use inside the if-block above) — confirm the macro.
    ERR_CODE_RETURN(nRet);
    // Try to open the camera (DirectBag projects open in color mode).
    nRet = pDevice->OpenDevice(cameraIp, ProjectType::DirectBag == projectType);
    bool cameraConnected = (SUCCESS == nRet);
    if(!cameraConnected){
        delete pDevice; // discard the device that failed to open
        pDevice = nullptr;
    }
    // Store the (name, device) pair in the slot for this camera.
    int arrIdx = cameraIndex - 1;
    if(arrIdx >= 0 && arrIdx < static_cast<int>(m_vrEyeDeviceList.size())){
        m_vrEyeDeviceList[arrIdx] = std::make_pair(cameraName, pDevice);
    }
    // Bug fix: report the status of the camera that was actually opened.
    // The original always called OnCamera1StatusChanged, so opening camera 2
    // (e.g. from the reconnect path) misreported camera 1's indicator.
    if (cameraIndex == 2) {
        m_pStatus->OnCamera2StatusChanged(cameraConnected);
    } else {
        m_pStatus->OnCamera1StatusChanged(cameraConnected);
    }
    // NOTE(review): this overwrites the aggregate connected flag with the
    // last opened camera's result; InitCamera() recomputes it afterwards.
    m_bCameraConnected = cameraConnected;
    return nRet;
}
// Check whether detection may start on the given camera:
// a device list exists, the 1-based index is in range, and the camera
// in that slot is connected.
// @return true when detection can proceed
bool GrabBagPresenter::_SinglePreDetection(int cameraIndex)
{
    // Any devices at all?
    if (m_vrEyeDeviceList.empty()) {
        LOG_ERROR("No camera device found\n");
        if (nullptr != m_pStatus) {
            m_pStatus->OnStatusUpdate("未找到相机设备");
        }
        return false;
    }
    // Index must fall within the 1-based device range.
    const int deviceCount = static_cast<int>(m_vrEyeDeviceList.size());
    if (cameraIndex < 1 || cameraIndex > deviceCount) {
        LOG_ERROR("Invalid camera index: %d, valid range: 1-%zu\n", cameraIndex, m_vrEyeDeviceList.size());
        return false;
    }
    // The selected camera must actually be connected.
    if (nullptr == m_vrEyeDeviceList[cameraIndex - 1].second) {
        LOG_ERROR("Camera %d is not connected\n", cameraIndex);
        return false;
    }
    return true;
}
int GrabBagPresenter::_SingleDetection(int cameraIndex, bool isStart)
{
int nRet = SUCCESS;
if (isStart) {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
QString message = QString("收到信号,启动%1检测").arg(cameraName);
if (nullptr != m_pStatus) {
m_pStatus->OnStatusUpdate(message.toStdString());
}
nRet = StartDetection(cameraIndex);
} else {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
QString message = QString("收到信号,停止%1检测").arg(cameraName);
if (nullptr != m_pStatus) {
m_pStatus->OnStatusUpdate(message.toStdString());
}
nRet = StopDetection();
}
return nRet;
}
// Static trampoline for the camera status callback: recovers the presenter
// instance from the user-data pointer and forwards the notification.
void GrabBagPresenter::_StaticCameraNotify(EVzDeviceWorkStatus eStatus, void* pExtData, unsigned int nDataLength, void* pInfoParam)
{
    auto* pThis = static_cast<GrabBagPresenter*>(pInfoParam);
    if (pThis == nullptr) {
        return;
    }
    pThis->_CameraNotify(eStatus, pExtData, nDataLength, pInfoParam);
}
// Instance-side camera status handler: tracks the connected flag, mirrors
// state to the UI, and wakes the algorithm thread when a scan finishes.
void GrabBagPresenter::_CameraNotify(EVzDeviceWorkStatus eStatus, void *pExtData, unsigned int nDataLength, void *pInfoParam)
{
    LOG_DEBUG("[Camera Notify] received: status=%d\n", (int)eStatus);
    if (eStatus == EVzDeviceWorkStatus::keDeviceWorkStatus_Offline) {
        LOG_WARNING("[Camera Notify] Camera device offline/disconnected\n");
        m_bCameraConnected = false;
        if (m_pStatus) {
            // NOTE(review): the notification carries no device id, so camera
            // 1's indicator is updated regardless of which camera went
            // offline — distinguishing would need pInfoParam or similar.
            m_pStatus->OnCamera1StatusChanged(false);
            m_pStatus->OnStatusUpdate("相机设备离线");
        }
        // Recompute the aggregate work status.
        CheckAndUpdateWorkStatus();
    } else if (eStatus == EVzDeviceWorkStatus::keDeviceWorkStatus_Eye_Reconnect) {
        LOG_INFO("[Camera Notify] Camera device online/connected\n");
        m_bCameraConnected = true;
        if (m_pStatus) {
            m_pStatus->OnCamera1StatusChanged(true);
            m_pStatus->OnStatusUpdate("相机设备已连接");
        }
        // Recompute the aggregate work status.
        CheckAndUpdateWorkStatus();
    } else if (eStatus == EVzDeviceWorkStatus::keDeviceWorkStatus_Device_Swing_Finish) {
        LOG_INFO("[Camera Notify] Received scan finish signal from camera\n");
        if (m_pStatus) {
            m_pStatus->OnStatusUpdate("相机扫描完成,开始数据处理...");
        }
        // Wake the algorithm detection thread to process the finished scan.
        m_algoDetectCondition.notify_one();
    }
    // Every other status code is ignored.
}
// Static trampoline for the per-line detection data callback: recovers the
// presenter instance from pUserData and forwards the scan line to it.
void GrabBagPresenter::_StaticDetectionCallback(EVzResultDataType eDataType, SVzLaserLineData* pLaserLinePoint, void* pUserData)
{
    GrabBagPresenter* pPresenter = reinterpret_cast<GrabBagPresenter*>(pUserData);
    if (nullptr == pPresenter) {
        return;
    }
    pPresenter->_DetectionCallback(eDataType, pLaserLinePoint, pUserData);
}
// Per-line detection data callback (instance version).
// Deep-copies one laser scan line delivered by the SDK into the shared
// detection cache (m_detectionDataCache) so the algorithm thread can
// consume it later. The cache entry owns its point buffers; they are
// released via _ClearDetectionDataCache().
void GrabBagPresenter::_DetectionCallback(EVzResultDataType eDataType, SVzLaserLineData* pLaserLinePoint, void* pUserData)
{
    if (!pLaserLinePoint) {
        LOG_WARNING("[Detection Callback] pLaserLinePoint is null\n");
        return;
    }
    if (pLaserLinePoint->nPointCount <= 0) {
        LOG_WARNING("[Detection Callback] Point count is zero or negative: %d\n", pLaserLinePoint->nPointCount);
        return;
    }
    if (!pLaserLinePoint->p3DPoint) {
        LOG_WARNING("[Detection Callback] p3DPoint is null\n");
        return;
    }
    // Build a self-owned copy of the incoming line.
    SVzLaserLineData lineData;
    memset(&lineData, 0, sizeof(SVzLaserLineData));
    const int nCount = pLaserLinePoint->nPointCount;
    // Allocate and copy the point buffers according to the payload type.
    // (nCount > 0 and p3DPoint != null are already guaranteed by the
    // early returns above, so the original redundant re-checks are gone.)
    if (eDataType == keResultDataType_Position) {
        // SVzNL3DPosition / SVzNL2DPosition payload.
        lineData.p3DPoint = new SVzNL3DPosition[nCount];
        memcpy(lineData.p3DPoint, pLaserLinePoint->p3DPoint, sizeof(SVzNL3DPosition) * nCount);
        // Value-initialize so the buffer is deterministic even when the source
        // 2D array is absent. Fix: the original memcpy'd from p2DPoint without
        // a null check (p3DPoint IS checked above) — undefined behavior if the
        // SDK ever delivers a line without 2D points.
        lineData.p2DPoint = new SVzNL2DPosition[nCount]();
        if (pLaserLinePoint->p2DPoint) {
            memcpy(lineData.p2DPoint, pLaserLinePoint->p2DPoint, sizeof(SVzNL2DPosition) * nCount);
        }
    } else if (eDataType == keResultDataType_PointXYZRGBA) {
        // SVzNLPointXYZRGBA / SVzNL2DLRPoint payload.
        lineData.p3DPoint = new SVzNLPointXYZRGBA[nCount];
        memcpy(lineData.p3DPoint, pLaserLinePoint->p3DPoint, sizeof(SVzNLPointXYZRGBA) * nCount);
        lineData.p2DPoint = new SVzNL2DLRPoint[nCount]();
        if (pLaserLinePoint->p2DPoint) {
            memcpy(lineData.p2DPoint, pLaserLinePoint->p2DPoint, sizeof(SVzNL2DLRPoint) * nCount);
        }
    }
    // NOTE(review): for any other eDataType the entry is cached with null
    // buffers but a non-zero nPointCount — preserved from the original
    // behavior; confirm downstream consumers tolerate this.
    // Scalar metadata is copied verbatim.
    lineData.nPointCount = pLaserLinePoint->nPointCount;
    lineData.llTimeStamp = pLaserLinePoint->llTimeStamp;
    lineData.llFrameIdx = pLaserLinePoint->llFrameIdx;
    lineData.nEncodeNo = pLaserLinePoint->nEncodeNo;
    lineData.fSwingAngle = pLaserLinePoint->fSwingAngle;
    lineData.bEndOnceScan = pLaserLinePoint->bEndOnceScan;
    std::lock_guard<std::mutex> lock(m_detectionDataMutex);
    m_detectionDataCache.push_back(std::make_pair(eDataType, lineData));
}
void GrabBagPresenter::CheckAndUpdateWorkStatus()
{
if (m_bCameraConnected && (m_bRobotConnected || m_bSerialConnected)) {
m_currentWorkStatus = WorkStatus::Ready;
m_pStatus->OnWorkStatusChanged(WorkStatus::Ready);
} else {
m_currentWorkStatus = WorkStatus::Error;
m_pStatus->OnWorkStatusChanged(WorkStatus::Error);
}
}
// Publish an explicit work status to the UI layer.
// @param status  the status to report.
void GrabBagPresenter::SetWorkStatus(WorkStatus status)
{
m_currentWorkStatus = status;
if (m_pStatus) {
m_pStatus->OnWorkStatusChanged(m_currentWorkStatus);
}
// NOTE(review): after reporting Error the internal state is immediately
// reset to Ready, i.e. Error is treated as a transient UI-only event and
// does not latch. Confirm this is intentional — CheckAndUpdateWorkStatus()
// by contrast leaves the state at Error.
if(WorkStatus::Error == status){
m_currentWorkStatus = WorkStatus::Ready;
}
}
void GrabBagPresenter::_AlgoDetectThread()
{
while(m_bAlgoDetectThreadRunning)
{
std::unique_lock<std::mutex> lock(m_algoDetectMutex);
m_algoDetectCondition.wait(lock, [this]() {
return m_currentWorkStatus == WorkStatus::Working || m_currentWorkStatus == WorkStatus::Detecting;
});
if(!m_bAlgoDetectThreadRunning){
break;
}
// 检查设备状态是否准备就绪
int nRet = _DetectTask();
LOG_ERROR("DetectTask result: %d\n", nRet);
if(nRet != SUCCESS){
m_pStatus->OnWorkStatusChanged(WorkStatus::Error);
}
LOG_DEBUG("Algo Thread end\n");
m_currentWorkStatus = WorkStatus::Ready;
}
}
// Run one full detection pass over the cached scan lines:
// gathers algorithm/calibration parameters, invokes DetectBag, forwards the
// result to the UI and to the robot/serial/TCP consumers, and updates the
// work status. Returns SUCCESS or an ERR_CODE on failure.
int GrabBagPresenter::_DetectTask()
{
LOG_INFO("[Algo Thread] Start real detection task using algorithm\n");
// NOTE(review): this lock is held for the whole detection run, which also
// blocks _DetectionCallback from appending new lines until the algorithm
// finishes — confirm this serialization is intended.
std::lock_guard<std::mutex> lock(m_detectionDataMutex);
// 1. Verify there is cached point-cloud data to work on.
if (m_detectionDataCache.empty()) {
LOG_WARNING("No cached detection data available\n");
if (m_pStatus) {
m_pStatus->OnStatusUpdate("无缓存的检测数据");
}
return ERR_CODE(DEV_DATA_INVALID);
}
LOG_INFO("[Algo Thread] Detection data cache size: %zu lines\n", m_detectionDataCache.size());
// Validate the parameter manager before reading any parameters from it.
if (!m_pParameterManager) {
LOG_ERROR("[Algo Thread] ParameterManager is null!\n");
if (m_pStatus) {
m_pStatus->OnStatusUpdate("参数管理器未初始化");
}
return ERR_CODE(DEV_ARG_INVAILD);
}
// 2. Assemble algorithm parameters from m_currentExecutionParams plus the
// global (XML) configuration.
SG_bagPositionParam detectionAlgoParam;
SSG_planeCalibPara detectionCalibParam;
CalibMatrix detectionHandEyeMatrix;
// 2.1 Zero-initialize the algorithm parameter struct.
memset(&detectionAlgoParam, 0, sizeof(SG_bagPositionParam));
// Pull algorithm parameters from the global configuration.
const VrAlgorithmParams& xmlParams = m_configResult.algorithmParams;
// Filtering parameters.
detectionAlgoParam.filterParam.continuityTh = xmlParams.filterParam.continuityTh;
detectionAlgoParam.filterParam.outlierTh = xmlParams.filterParam.outlierTh;
// Corner-detection parameters.
detectionAlgoParam.cornerParam.cornerTh = xmlParams.cornerParam.cornerTh;
detectionAlgoParam.cornerParam.scale = xmlParams.cornerParam.scale;
detectionAlgoParam.cornerParam.minEndingGap = xmlParams.cornerParam.minEndingGap;
detectionAlgoParam.cornerParam.minEndingGap_z = xmlParams.cornerParam.minEndingGap_z;
detectionAlgoParam.cornerParam.jumpCornerTh_1 = xmlParams.cornerParam.jumpCornerTh_1;
detectionAlgoParam.cornerParam.jumpCornerTh_2 = xmlParams.cornerParam.jumpCornerTh_2;
// Region-growing parameters.
detectionAlgoParam.growParam.maxLineSkipNum = xmlParams.growParam.maxLineSkipNum;
detectionAlgoParam.growParam.yDeviation_max = xmlParams.growParam.yDeviation_max;
detectionAlgoParam.growParam.maxSkipDistance = xmlParams.growParam.maxSkipDistance;
detectionAlgoParam.growParam.zDeviation_max = xmlParams.growParam.zDeviation_max;
detectionAlgoParam.growParam.minLTypeTreeLen = xmlParams.growParam.minLTypeTreeLen;
detectionAlgoParam.growParam.minVTypeTreeLen = xmlParams.growParam.minVTypeTreeLen;
// Rotation support flag.
detectionAlgoParam.supportRotate = xmlParams.supportRotate;
// Bag dimensions come from the current execution parameters.
detectionAlgoParam.bagParam.bagL = m_currentExecutionParams.bagParam.bagL;
detectionAlgoParam.bagParam.bagW = m_currentExecutionParams.bagParam.bagW;
detectionAlgoParam.bagParam.bagH = m_currentExecutionParams.bagParam.bagH;
// 2.2 Plane-leveling and hand-eye calibration also come from the current
// execution parameters.
detectionCalibParam = m_currentExecutionParams.planeCalibParam;
detectionHandEyeMatrix = m_currentExecutionParams.handEyeCalibMatrix;
// 3. Report the number of scan lines about to be processed.
unsigned int lineNum = 0;
lineNum = m_detectionDataCache.size();
if(m_pStatus){
m_pStatus->OnStatusUpdate("扫描线数:" + std::to_string(lineNum) + ",正在算法检测...");
}
CVrTimeUtils oTimeUtils;
DetectionResult detectionResult;
LOG_INFO("[Algo Thread] About to call DetectBag with %zu lines\n", m_detectionDataCache.size());
// Validate the detection presenter before invoking the algorithm.
if (!m_pDetectPresenter) {
LOG_ERROR("[Algo Thread] DetectPresenter is null!\n");
if (m_pStatus) {
m_pStatus->OnStatusUpdate("检测器未初始化");
}
return ERR_CODE(DEV_ARG_INVAILD);
}
// 4. Execute the detection with the assembled parameters; exceptions from
// the algorithm are converted to an error return instead of escaping the
// worker thread.
int nRet = SUCCESS;
try {
LOG_INFO("[Algo Thread] Calling DetectBag...\n");
nRet = m_pDetectPresenter->DetectBag(m_currentCameraIndex, m_detectionDataCache,
m_projectType, detectionAlgoParam, detectionCalibParam,
m_pParameterManager->GetDebugParam(),
m_dataLoader, detectionHandEyeMatrix.clibMatrix,
m_pParameterManager->GetHsvCmpParam(),
m_pParameterManager->GetRgbColorPattern(),
m_pParameterManager->GetFrontColorTemplate(),
m_pParameterManager->GetBackColorTemplate(), detectionResult);
LOG_INFO("[Algo Thread] DetectBag returned: %d\n", nRet);
} catch (const std::exception& e) {
LOG_ERROR("[Algo Thread] Exception in DetectBag: %s\n", e.what());
if (m_pStatus) {
m_pStatus->OnStatusUpdate(std::string("检测算法异常: ") + e.what());
}
return ERR_CODE(DEV_DATA_INVALID);
} catch (...) {
LOG_ERROR("[Algo Thread] Unknown exception in DetectBag\n");
if (m_pStatus) {
m_pStatus->OnStatusUpdate("检测算法未知异常");
}
return ERR_CODE(DEV_DATA_INVALID);
}
// Report success/failure of the algorithm call to the UI.
if (m_pStatus) {
QString err = QString("错误:%1").arg(nRet);
m_pStatus->OnStatusUpdate(QString("检测%1").arg(SUCCESS == nRet ? "成功": err).toStdString());
}
// Early-return macro: propagates nRet if it is an error code.
ERR_CODE_RETURN(nRet);
LOG_INFO("[Algo Thread] algo detected %zu objects time : %.2f ms\n", detectionResult.positions.size(), oTimeUtils.GetElapsedTimeInMilliSec());
// 5. Publish the detection result.
detectionResult.cameraIndex = m_currentCameraIndex;
// NOTE(review): m_pStatus is dereferenced without a null check from here
// on, although it is guarded everywhere above — verify the sink is always
// attached by the time a detection can run.
m_pStatus->OnDetectionResult(detectionResult);
// Update the UI status line.
QString statusMsg = QString("检测完成,发现%1个目标").arg(detectionResult.positions.size());
m_pStatus->OnStatusUpdate(statusMsg.toStdString());
// Forward the (already robot-frame) target positions to the consumers.
_SendDetectionResultToRobot(detectionResult, m_currentCameraIndex);
// 6. Mark the work status as Completed.
if (m_pStatus) {
m_currentWorkStatus = WorkStatus::Completed;
m_pStatus->OnWorkStatusChanged(WorkStatus::Completed);
}
// Tell the robot which camera finished (camera IDs are 1-based).
if (m_pRobotProtocol) {
uint16_t workStatus = (m_currentCameraIndex == 1) ?
RobotProtocol::WORK_STATUS_CAMERA1_DONE : RobotProtocol::WORK_STATUS_CAMERA2_DONE;
m_pRobotProtocol->SetWorkStatus(workStatus);
}
// Return to the Ready state.
m_currentWorkStatus = WorkStatus::Ready;
return SUCCESS;
}
// Release every cached scan line and the point buffers it owns.
// Thread-safe: takes the same mutex the data callback uses.
void GrabBagPresenter::_ClearDetectionDataCache()
{
    std::lock_guard<std::mutex> cacheGuard(m_detectionDataMutex);
    m_dataLoader.FreeLaserScanData(m_detectionDataCache);
    LOG_DEBUG("Detection data cache cleared successfully\n");
}
// Fan the detection result out to all configured consumers:
// robot ModbusTCP, serial, and the raw TCP server. Positions in
// detectionResult are expected to already be in robot coordinates.
// @param detectionResult  result of the last DetectBag run
// @param cameraIndex      1-based camera index the result came from
void GrabBagPresenter::_SendDetectionResultToRobot(const DetectionResult& detectionResult, int cameraIndex)
{
LOG_INFO("Sending detection result for camera %d\n", cameraIndex);
// Build the multi-target payload shared by robot TCP and serial.
MultiTargetData multiTargetData;
// An empty result is still sent (count = 0) so consumers see "no bag".
if (detectionResult.positions.empty()) {
LOG_INFO("No objects detected, sending empty result\n");
// Empty payload.
multiTargetData.count = 0;
multiTargetData.targets.clear();
} else {
// Targets are already expressed in the robot coordinate frame.
const auto& positions = detectionResult.positions;
multiTargetData.count = static_cast<uint16_t>(positions.size());
// Copy each position verbatim; no further transformation is applied.
for (size_t i = 0; i < positions.size(); i++) {
const GrabBagPosition& pos = positions[i];
TargetPosition robotTarget;
robotTarget.x = pos.x; // robot-frame X
robotTarget.y = pos.y; // robot-frame Y
robotTarget.z = pos.z; // robot-frame Z
robotTarget.rz = pos.yaw; // yaw angle
multiTargetData.targets.push_back(robotTarget);
}
}
// 1) Robot over ModbusTCP.
// NOTE(review): the guard also accepts m_bSerialConnected — presumably so a
// serial-only setup still writes through the robot protocol object; confirm.
bool robotSent = false;
if (m_pRobotProtocol && (m_bRobotConnected || m_bSerialConnected)) {
int result = m_pRobotProtocol->SetMultiTargetData(multiTargetData, cameraIndex);
robotSent = (result == SUCCESS);
LOG_INFO("[Robot TCP] SetMultiTargetData result: %d for camera ID: %d\n", result, cameraIndex);
if (m_pStatus) {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
if (result != SUCCESS) {
m_pStatus->OnStatusUpdate(QString("写坐标给机械臂失败,%1").arg(cameraName).toStdString());
} else {
m_pStatus->OnStatusUpdate(QString("写坐标给机械臂成功,%1").arg(cameraName).toStdString());
}
}
} else {
LOG_WARNING("Robot TCP protocol not available\n");
}
// 2) Serial channel.
bool serialSent = false;
if (m_pSerialProtocol && m_bSerialConnected) {
int result = m_pSerialProtocol->SendMultiTargetData(multiTargetData, cameraIndex);
serialSent = (result == SUCCESS);
LOG_INFO("[Serial] SendMultiTargetData result: %d for camera ID: %d\n", result, cameraIndex);
if (m_pStatus) {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
if (result != SUCCESS) {
m_pStatus->OnStatusUpdate(QString("写坐标给串口失败,%1").arg(cameraName).toStdString());
} else {
m_pStatus->OnStatusUpdate(QString("写坐标给串口成功,%1").arg(cameraName).toStdString());
}
}
} else {
LOG_WARNING("Serial protocol not available\n");
}
// 3) Raw TCP server, ASCII protocol. Only the FIRST target is sent:
//    with a bag:    @,1,X,Y,Z,U,1,$
//    without a bag: @,1,0,0,0,0,0,$
bool tcpipSent = false;
if(m_pTcpServer){
std::string result = "";
result.append("@,1,"); // protocol header
if(!multiTargetData.targets.empty()){
result.append(std::to_string(multiTargetData.targets[0].x));
result.append(",");
result.append(std::to_string(multiTargetData.targets[0].y));
result.append(",");
result.append(std::to_string(multiTargetData.targets[0].z));
result.append(",");
result.append(std::to_string(multiTargetData.targets[0].rz));
result.append(",1,$"); // has-bag flag = 1, protocol trailer
tcpipSent = true;
}else {
result.append("0,0,0,0,0,$"); // zeroed coords, has-bag flag = 0, trailer
tcpipSent = true;
}
m_pTcpServer->SendAllData(result.c_str(), result.length());
}
// Summarize the delivery outcome (TCP server delivery is not counted here).
if (robotSent || serialSent) {
LOG_INFO("Detection result sent successfully (Robot TCP: %s, Serial: %s)\n",
robotSent ? "Yes" : "No", serialSent ? "Yes" : "No");
} else {
LOG_WARNING("Failed to send detection result via any protocol\n");
}
}
// 实现配置改变通知接口
void GrabBagPresenter::OnConfigChanged(const ConfigResult& configResult)
{
LOG_INFO("Configuration changed notification received, reloading algorithm parameters\n");
// 重新初始化算法参数
int result = InitAlgorithmParams();
if (result == SUCCESS) {
LOG_INFO("Algorithm parameters reloaded successfully after config change\n");
if (m_pStatus) {
m_pStatus->OnStatusUpdate("配置已更新,算法参数重新加载成功");
}
} else {
LOG_ERROR("Failed to reload algorithm parameters after config change, error: %d\n", result);
if (m_pStatus) {
m_pStatus->OnStatusUpdate("配置更新后算法参数重新加载失败");
}
}
}
// Look up the plane-leveling calibration for the given camera index,
// delegating entirely to the parameter manager.
SSG_planeCalibPara GrabBagPresenter::_GetCameraCalibParam(int cameraIndex)
{
    SSG_planeCalibPara calibParam = m_pParameterManager->GetCameraCalibParam(cameraIndex);
    return calibParam;
}
// IConfigChangeListener: system-level configuration changed.
// Re-initializes algorithm parameters when a config object is present
// and informs the UI.
void GrabBagPresenter::OnSystemConfigChanged(const SystemConfig& config)
{
    LOG_INFO("System configuration changed, applying new configuration\n");
    if (nullptr != m_vrConfig) {
        // Refresh derived algorithm parameters from the new configuration.
        InitAlgorithmParams();
    }
    if (nullptr != m_pStatus) {
        m_pStatus->OnStatusUpdate("系统配置已更新");
    }
}
// UI-driven camera parameter change: push exposure, gain, frame rate,
// swing speed and swing angle to the physical device. Each setter failure
// is logged but does not abort the remaining setters (best-effort apply).
// @param cameraIndex  1-based camera index
// @param cameraParam  new parameter values from the UI
void GrabBagPresenter::OnCameraParamChanged(int cameraIndex, const CameraUIParam& cameraParam)
{
LOG_INFO("Camera %d parameters changed: expose=%.2f, gain=%.2f, frameRate=%.2f\n",
cameraIndex, cameraParam.exposeTime, cameraParam.gain, cameraParam.frameRate);
// Apply the parameters to the actual device (if the index is valid).
if (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) {
IVrEyeDevice* device = m_vrEyeDeviceList[cameraIndex - 1].second;
if (device) {
// Exposure time (only applied when positive).
if (cameraParam.exposeTime > 0) {
unsigned int exposeTime = static_cast<unsigned int>(cameraParam.exposeTime);
int ret = device->SetEyeExpose(exposeTime);
if (ret == SUCCESS) {
LOG_INFO("Set expose time %.2f for camera %d successfully\n", cameraParam.exposeTime, cameraIndex);
} else {
LOG_ERROR("Failed to set expose time for camera %d, error: %d\n", cameraIndex, ret);
}
}
// Gain (only applied when positive).
if (cameraParam.gain > 0) {
unsigned int gain = static_cast<unsigned int>(cameraParam.gain);
int ret = device->SetEyeGain(gain);
if (ret == SUCCESS) {
LOG_INFO("Set gain %.2f for camera %d successfully\n", cameraParam.gain, cameraIndex);
} else {
LOG_ERROR("Failed to set gain for camera %d, error: %d\n", cameraIndex, ret);
}
}
// Frame rate (only applied when positive).
if (cameraParam.frameRate > 0) {
int frameRate = static_cast<int>(cameraParam.frameRate);
int ret = device->SetFrame(frameRate);
if (ret == SUCCESS) {
LOG_INFO("Set frame rate %.2f for camera %d successfully\n", cameraParam.frameRate, cameraIndex);
} else {
LOG_ERROR("Failed to set frame rate for camera %d, error: %d\n", cameraIndex, ret);
}
}
// Swing speed (only applied when positive).
if (cameraParam.swingSpeed > 0) {
float swingSpeed = static_cast<float>(cameraParam.swingSpeed);
int ret = device->SetSwingSpeed(swingSpeed);
if (ret == SUCCESS) {
LOG_INFO("Set swing speed %.2f for camera %d successfully\n", cameraParam.swingSpeed, cameraIndex);
} else {
LOG_ERROR("Failed to set swing speed for camera %d, error: %d\n", cameraIndex, ret);
}
}
// Swing angle range — skipped when start == stop (treated as "unset").
if (cameraParam.swingStartAngle != cameraParam.swingStopAngle) {
float startAngle = static_cast<float>(cameraParam.swingStartAngle);
float stopAngle = static_cast<float>(cameraParam.swingStopAngle);
int ret = device->SetSwingAngle(startAngle, stopAngle);
if (ret == SUCCESS) {
LOG_INFO("Set swing angle %.2f-%.2f for camera %d successfully\n",
cameraParam.swingStartAngle, cameraParam.swingStopAngle, cameraIndex);
} else {
LOG_ERROR("Failed to set swing angle for camera %d, error: %d\n", cameraIndex, ret);
}
}
}
}
// Report completion to the UI regardless of individual setter outcomes.
if (m_pStatus) {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
m_pStatus->OnStatusUpdate(QString("%1参数已更新").arg(cameraName).toStdString());
}
}
// Algorithm-parameter change notification: the payload itself is not
// consumed here — parameters are re-read from configuration by
// InitAlgorithmParams().
void GrabBagPresenter::OnAlgorithmParamChanged(const VrAlgorithmParams& algorithmParams)
{
    LOG_INFO("Algorithm parameters changed, updating internal configuration\n");
    InitAlgorithmParams();
    if (nullptr != m_pStatus) {
        m_pStatus->OnStatusUpdate("算法参数已更新");
    }
}
// Serial-link connection change: record the new state and re-evaluate
// the aggregate work status.
void GrabBagPresenter::OnSerialConnectionChanged(bool connected)
{
    m_bSerialConnected = connected;
    if (connected) {
        LOG_INFO("Serial connection established\n");
    } else {
        LOG_INFO("Serial connection lost\n");
    }
    CheckAndUpdateWorkStatus();
}
// Handle a start/stop work signal received over the serial link.
// Returns true when the pre-check and the detection call both succeed.
bool GrabBagPresenter::OnSerialWorkSignal(bool startWork, int cameraIndex)
{
    LOG_INFO("Serial work signal received: %s, camera index: %d\n", startWork ? "start" : "stop", cameraIndex);
    if (!_SinglePreDetection(cameraIndex)) {
        return false;
    }
    const int detectRet = _SingleDetection(cameraIndex, startWork);
    if (SUCCESS == detectRet) {
        return true;
    }
    // Detection failed: fall back to Ready internally but surface Error to the UI.
    m_currentWorkStatus = WorkStatus::Ready;
    if (nullptr != m_pStatus) {
        m_pStatus->OnWorkStatusChanged(WorkStatus::Error);
    }
    return false;
}
// 设置默认相机索引
void GrabBagPresenter::SetDefaultCameraIndex(int cameraIndex)
{
LOG_INFO("Setting default camera index from %d to %d\n", m_currentCameraIndex, cameraIndex);
// 验证相机索引的有效性cameraIndex是配置中的索引从1开始
if (cameraIndex < 1 || cameraIndex > static_cast<int>(m_vrEyeDeviceList.size())) {
LOG_WARNING("Invalid camera index: %d, valid range: 1-%zu\n", cameraIndex, m_vrEyeDeviceList.size());
if (m_pStatus) {
m_pStatus->OnStatusUpdate(QString("无效的相机索引: %1有效范围: 1-%2").arg(cameraIndex).arg(m_vrEyeDeviceList.size()).toStdString());
}
return;
}
// 更新默认相机索引
m_currentCameraIndex = cameraIndex;
LOG_INFO("Default camera index updated to %d\n", m_currentCameraIndex);
if (m_pStatus) {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
m_pStatus->OnStatusUpdate(QString("设置%1为默认相机").arg(cameraName).toStdString());
}
}
// 保存检测数据到文件(默认实现)
int GrabBagPresenter::SaveDetectionDataToFile(const std::string& filePath)
{
LOG_INFO("Saving detection data to file: %s\n", filePath.c_str());
if (m_detectionDataCache.empty()) {
LOG_WARNING("No detection data available for saving\n");
return ERR_CODE(DEV_DATA_INVALID);
}
// 保存数据到文件
int lineNum = static_cast<int>(m_detectionDataCache.size());
float scanSpeed = 0.0f;
int maxTimeStamp = 0;
int clockPerSecond = 0;
int result = m_dataLoader.SaveLaserScanData(filePath, m_detectionDataCache, lineNum, scanSpeed, maxTimeStamp, clockPerSecond);
if (result == SUCCESS) {
LOG_INFO("Successfully saved %d lines of detection data to file: %s\n", lineNum, filePath.c_str());
} else {
LOG_ERROR("Failed to save detection data, error: %s\n", m_dataLoader.GetLastError().c_str());
}
return result;
}
// Start (or restart) the 2-second camera reconnect timer.
// QTimer must be driven from the thread it lives in, so the actual
// start is queued onto this object's event loop.
void GrabBagPresenter::_StartCameraReconnectTimer()
{
    QMetaObject::invokeMethod(this, [this]() {
        if (nullptr == m_pCameraReconnectTimer) {
            return;
        }
        // Restart cleanly if the timer is already running.
        if (m_pCameraReconnectTimer->isActive()) {
            m_pCameraReconnectTimer->stop();
        }
        m_pCameraReconnectTimer->start();
        LOG_INFO("Camera reconnect timer started with 2 second interval\n");
    }, Qt::QueuedConnection);
}
// Stop the camera reconnect timer, queued onto this object's thread so
// the QTimer is only touched from its owning event loop.
void GrabBagPresenter::_StopCameraReconnectTimer()
{
    QMetaObject::invokeMethod(this, [this]() {
        const bool bRunning = (m_pCameraReconnectTimer != nullptr) && m_pCameraReconnectTimer->isActive();
        if (bRunning) {
            m_pCameraReconnectTimer->stop();
            LOG_INFO("Camera reconnect timer stopped\n");
        }
    }, Qt::QueuedConnection);
}
// Periodic reconnect tick: retry all offline cameras and stop the timer
// once every expected camera is back online.
void GrabBagPresenter::_OnCameraReconnectTimer()
{
    if (_TryReconnectCameras()) {
        LOG_INFO("All cameras reconnected successfully, stopping timer\n");
        // Fix: guard the status sink — every other call site null-checks
        // m_pStatus before use; the original dereferenced it unconditionally.
        if (m_pStatus) {
            m_pStatus->OnStatusUpdate("所有相机重连成功");
        }
        _StopCameraReconnectTimer();
    }
}
// Attempt to reopen every configured camera that is currently offline.
// Updates m_bCameraConnected, the default camera index, and the UI.
// @return true only when ALL expected cameras are connected.
bool GrabBagPresenter::_TryReconnectCameras()
{
    bool allConnected = true;
    int connectedCount = 0;
    // Walk the configured camera list and retry the ones without a device.
    for (int i = 0; i < static_cast<int>(m_cameraConfigList.size()); i++) {
        // Skip slots that already hold an open device.
        if (i < static_cast<int>(m_vrEyeDeviceList.size()) && m_vrEyeDeviceList[i].second != nullptr) {
            connectedCount++;
            continue;
        }
        // Retry this camera (indices are 1-based).
        int cameraIndex = i + 1;
        const DeviceInfo& cameraInfo = m_cameraConfigList[i];
        LOG_DEBUG("Attempting to reconnect camera %d (%s, %s)\n", cameraIndex, cameraInfo.name.c_str(), cameraInfo.ip.c_str());
        int nRet = _OpenDevice(cameraIndex, cameraInfo.name.c_str(), cameraInfo.ip.c_str(), m_projectType);
        if (nRet == SUCCESS) {
            LOG_INFO("Camera %d (%s) reconnected successfully\n", cameraIndex, cameraInfo.name.c_str());
            connectedCount++;
            // Fix: guard the status sink — the original dereferenced
            // m_pStatus unconditionally in this branch, unlike the rest
            // of the class.
            if (m_pStatus) {
                if (cameraIndex == 1) {
                    m_pStatus->OnCamera1StatusChanged(true);
                } else if (cameraIndex == 2) {
                    m_pStatus->OnCamera2StatusChanged(true);
                }
                m_pStatus->OnStatusUpdate(QString("相机%1重连成功").arg(cameraInfo.name.c_str()).toStdString());
            }
        } else {
            LOG_DEBUG("Camera %d (%s) reconnection failed with error: %d\n", cameraIndex, cameraInfo.name.c_str(), nRet);
            allConnected = false;
        }
    }
    // At least one open camera means the camera subsystem is usable.
    m_bCameraConnected = (connectedCount > 0);
    // Point the default camera index at the first connected device.
    for (int i = 0; i < static_cast<int>(m_vrEyeDeviceList.size()); i++) {
        if (m_vrEyeDeviceList[i].second != nullptr) {
            m_currentCameraIndex = i + 1;
            break;
        }
    }
    // Re-evaluate the aggregate work status.
    CheckAndUpdateWorkStatus();
    LOG_DEBUG("Camera reconnection attempt completed: %d/%d cameras connected\n", connectedCount, m_expectedCameraCount);
    return (connectedCount == m_expectedCameraCount && allConnected);
}
// Resolve the startup selection of work position, camera, and package type
// from the loaded configuration, then apply it to the execution parameters.
// Resolution order: first work position -> its default (or first) camera ->
// that camera's default (or first) package. Aborts with an error log if any
// level of the hierarchy is empty.
void GrabBagPresenter::_LoadCurrentWorkPositionAndPackageType()
{
LOG_INFO("Loading current work position, camera, and package type configuration\n");
// 1. Take the first configured work position.
WorkPositionConfig* defaultWorkPosition = nullptr;
if (!m_configResult.workPositions.empty()) {
defaultWorkPosition = &m_configResult.workPositions[0];
LOG_INFO("Using first work position: %s\n", defaultWorkPosition->name.c_str());
} else {
LOG_ERROR("No work positions configured!\n");
return;
}
// 2. Take that work position's default camera, falling back to the first.
CameraConfig* defaultCamera = defaultWorkPosition->GetDefaultCamera();
if (defaultCamera) {
LOG_INFO("Using default camera: %s (index: %d)\n",
defaultCamera->cameraName.c_str(), defaultCamera->cameraIndex);
} else if (!defaultWorkPosition->cameras.empty()) {
// No explicit default: fall back to the first camera.
defaultCamera = &defaultWorkPosition->cameras[0];
LOG_WARNING("No default camera configured for work position %s, using first camera: %s\n",
defaultWorkPosition->name.c_str(), defaultCamera->cameraName.c_str());
} else {
LOG_ERROR("No cameras configured for work position %s!\n", defaultWorkPosition->name.c_str());
return;
}
// 3. Take that camera's default package, falling back to the first.
PackageTypeConfig* defaultPackage = defaultCamera->GetDefaultPackage();
if (defaultPackage) {
LOG_INFO("Using default package: %s\n", defaultPackage->name.c_str());
} else if (!defaultCamera->packages.empty()) {
// No explicit default: fall back to the first package.
defaultPackage = &defaultCamera->packages[0];
LOG_WARNING("No default package configured for camera %s, using first package: %s\n",
defaultCamera->cameraName.c_str(), defaultPackage->name.c_str());
} else {
LOG_ERROR("No packages configured for camera %s!\n", defaultCamera->cameraName.c_str());
return;
}
// 4. Apply the resolved configuration to the current execution parameters.
_UpdateCurrentExecutionParams(defaultWorkPosition, defaultCamera, defaultPackage, defaultCamera->cameraIndex);
}
// Push the current execution parameters' camera settings (exposure, gain,
// frame rate, swing speed, swing angle) to the physical device selected by
// m_currentExecutionParams.cameraIndex.
// @return SUCCESS, or the first setter's error code / an ERR_CODE when the
//         index is invalid or the device is not connected.
int GrabBagPresenter::_ApplyCameraParameters()
{
LOG_INFO("Applying camera parameters for camera %d\n", m_currentExecutionParams.cameraIndex);
int cameraIndex = m_currentExecutionParams.cameraIndex;
// Validate the 1-based camera index.
if (cameraIndex < 1 || cameraIndex > static_cast<int>(m_vrEyeDeviceList.size())) {
LOG_ERROR("Invalid camera index: %d\n", cameraIndex);
return ERR_CODE(DEV_ARG_INVAILD);
}
// Fetch the camera device for that index.
IVrEyeDevice* device = m_vrEyeDeviceList[cameraIndex - 1].second;
if (!device) {
LOG_ERROR("Camera %d is not connected\n", cameraIndex);
return ERR_CODE(DEV_NOT_FIND);
}
const VrCameraParams& cameraParam = m_currentExecutionParams.cameraParam;
// Exposure time (only applied when positive); failure aborts the sequence.
if (cameraParam.exposure > 0) {
unsigned int exposeTime = static_cast<unsigned int>(cameraParam.exposure);
int ret = device->SetEyeExpose(exposeTime);
if (ret == SUCCESS) {
LOG_INFO("Set expose time %.2f for camera %d successfully\n", cameraParam.exposure, cameraIndex);
} else {
LOG_ERROR("Failed to set expose time for camera %d, error: %d\n", cameraIndex, ret);
return ret;
}
}
// Gain (only applied when positive); failure aborts the sequence.
if (cameraParam.gain > 0) {
unsigned int gain = static_cast<unsigned int>(cameraParam.gain);
int ret = device->SetEyeGain(gain);
if (ret == SUCCESS) {
LOG_INFO("Set gain %.2f for camera %d successfully\n", cameraParam.gain, cameraIndex);
} else {
LOG_ERROR("Failed to set gain for camera %d, error: %d\n", cameraIndex, ret);
return ret;
}
}
// Frame rate (only applied when positive); failure aborts the sequence.
if (cameraParam.frameRate > 0) {
int frameRate = static_cast<int>(cameraParam.frameRate);
int ret = device->SetFrame(frameRate);
if (ret == SUCCESS) {
LOG_INFO("Set frame rate %.2f for camera %d successfully\n", cameraParam.frameRate, cameraIndex);
} else {
LOG_ERROR("Failed to set frame rate for camera %d, error: %d\n", cameraIndex, ret);
return ret;
}
}
// Swing speed (only applied when positive); failure aborts the sequence.
if (cameraParam.swingSpeed > 0) {
float swingSpeed = static_cast<float>(cameraParam.swingSpeed);
int ret = device->SetSwingSpeed(swingSpeed);
if (ret == SUCCESS) {
LOG_INFO("Set swing speed %.2f for camera %d successfully\n", cameraParam.swingSpeed, cameraIndex);
} else {
LOG_ERROR("Failed to set swing speed for camera %d, error: %d\n", cameraIndex, ret);
return ret;
}
}
// Swing angle range — skipped when start == stop (treated as "unset").
// NOTE(review): unlike the setters above, a SetSwingAngle failure is only
// logged and does NOT abort with an error return — confirm this asymmetry
// is intentional.
if (cameraParam.swingStartAngle != cameraParam.swingStopAngle) {
float startAngle = static_cast<float>(cameraParam.swingStartAngle);
float stopAngle = static_cast<float>(cameraParam.swingStopAngle);
int ret = device->SetSwingAngle(startAngle, stopAngle);
LOG_INFO("Set swing angle %.2f-%.2f for camera %d ret : %d\n", cameraParam.swingStartAngle, cameraParam.swingStopAngle, cameraIndex, ret);
}
LOG_INFO("Camera parameters applied successfully for camera %d\n", cameraIndex);
return SUCCESS;
}
// Rebuild m_currentExecutionParams from the configuration matching
// m_currentCameraIndex: camera/bag parameters from the resolved package, and
// plane-leveling / hand-eye calibration from the resolved camera. Identity
// matrices are installed first so the parameters are always usable even when
// no matching configuration (or no calibration) is found.
void GrabBagPresenter::_UpdateCurrentExecutionParams()
{
LOG_INFO("Updating current execution parameters using m_currentCameraIndex: %d\n", m_currentCameraIndex);
// 1. Camera index (current default camera).
m_currentExecutionParams.cameraIndex = m_currentCameraIndex;
// 2. Default plane-leveling parameters: 3x3 identity, height sentinel -1.
double identityMatrix9[9] = {1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0};
for (int i = 0; i < 9; i++) {
m_currentExecutionParams.planeCalibParam.planeCalib[i] = identityMatrix9[i];
m_currentExecutionParams.planeCalibParam.invRMatrix[i] = identityMatrix9[i];
}
m_currentExecutionParams.planeCalibParam.planeHeight = -1.0;
// 3. Default hand-eye calibration: 4x4 identity.
double identityMatrix16[16] = {
1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0
};
for (int i = 0; i < 16; i++) {
m_currentExecutionParams.handEyeCalibMatrix.clibMatrix[i] = identityMatrix16[i];
}
// 4. Locate the configuration matching m_currentCameraIndex.
const WorkPositionConfig* currentWorkPosition = nullptr;
const CameraConfig* currentCamera = nullptr;
const PackageTypeConfig* currentPackage = nullptr;
// Scan every work position / camera for a matching camera index.
for (const auto& wp : m_configResult.workPositions) {
for (const auto& cam : wp.cameras) {
if (cam.cameraIndex == m_currentCameraIndex) {
currentWorkPosition = &wp;
currentCamera = &cam;
// Prefer the camera's default package, else its first package.
if (!cam.packages.empty()) {
currentPackage = cam.GetDefaultPackage();
if (!currentPackage) {
currentPackage = &cam.packages[0];
}
}
break;
}
}
if (currentCamera) break;
}
// Keep the identity defaults when no full configuration chain was found.
if (!currentWorkPosition || !currentCamera || !currentPackage) {
LOG_WARNING("Failed to find configuration for camera index %d, using default parameters\n", m_currentCameraIndex);
return;
}
// 5. Camera parameters come from the package configuration.
const VrCameraParams* cameraParam = currentPackage->GetCameraParam(m_currentCameraIndex);
if (cameraParam) {
m_currentExecutionParams.cameraParam = *cameraParam;
LOG_INFO("Using camera parameters from package %s for camera %d: exposure=%.2f, gain=%.2f, frameRate=%.2f\n",
currentPackage->name.c_str(), m_currentCameraIndex,
cameraParam->exposure, cameraParam->gain, cameraParam->frameRate);
} else {
LOG_WARNING("No camera parameters found for camera %d in package %s, using defaults\n",
m_currentCameraIndex, currentPackage->name.c_str());
// Hard-coded fallback camera parameters.
m_currentExecutionParams.cameraParam.cameraIndex = m_currentCameraIndex;
m_currentExecutionParams.cameraParam.exposure = 100.0;
m_currentExecutionParams.cameraParam.gain = 1.0;
m_currentExecutionParams.cameraParam.frameRate = 30.0;
m_currentExecutionParams.cameraParam.swingSpeed = 10.0;
m_currentExecutionParams.cameraParam.swingStartAngle = 0.0;
m_currentExecutionParams.cameraParam.swingStopAngle = 180.0;
}
// 5.1 Bag dimensions come from the package configuration.
m_currentExecutionParams.bagParam = currentPackage->bagParam;
LOG_INFO("Using bag parameters: L=%.1f, W=%.1f, H=%.1f\n",
currentPackage->bagParam.bagL, currentPackage->bagParam.bagW, currentPackage->bagParam.bagH);
// 6. Plane-leveling calibration from the camera configuration, only when
// it is flagged calibrated AND tagged with this camera's index.
if (currentCamera->planeCalibParam.isCalibrated &&
currentCamera->planeCalibParam.cameraIndex == m_currentCameraIndex) {
// Install the actual calibration matrices.
for (int i = 0; i < 9; i++) {
m_currentExecutionParams.planeCalibParam.planeCalib[i] = currentCamera->planeCalibParam.planeCalib[i];
m_currentExecutionParams.planeCalibParam.invRMatrix[i] = currentCamera->planeCalibParam.invRMatrix[i];
}
m_currentExecutionParams.planeCalibParam.planeHeight = currentCamera->planeCalibParam.planeHeight;
LOG_INFO("Using calibrated plane parameters from camera %s (height=%.3f)\n",
currentCamera->cameraName.c_str(), currentCamera->planeCalibParam.planeHeight);
} else {
LOG_INFO("Camera %s is not calibrated, using identity matrix for plane calibration\n",
currentCamera->cameraName.c_str());
}
// 7. Hand-eye calibration from the camera configuration, same gating.
if (currentCamera->handEyeCalibParam.isCalibrated &&
currentCamera->handEyeCalibParam.cameraIndex == m_currentCameraIndex) {
// Install the actual hand-eye transform.
for (int i = 0; i < 16; i++) {
m_currentExecutionParams.handEyeCalibMatrix.clibMatrix[i] = currentCamera->handEyeCalibParam.transformMatrix[i];
}
LOG_INFO("Using calibrated hand-eye matrix from camera %s\n", currentCamera->cameraName.c_str());
} else {
LOG_INFO("Camera %s hand-eye is not calibrated, using identity matrix\n",
currentCamera->cameraName.c_str());
}
LOG_INFO("Current execution parameters updated successfully\n");
LOG_INFO(" Work Position: %s\n", currentWorkPosition->name.c_str());
LOG_INFO(" Camera: %s (Index: %d)\n", currentCamera->cameraName.c_str(), m_currentCameraIndex);
LOG_INFO(" Package: %s\n", currentPackage->name.c_str());
// Propagate the calibration selection to the parameter manager.
m_pParameterManager->SetCurrentExecutionParams(
m_currentExecutionParams.planeCalibParam,
m_currentExecutionParams.handEyeCalibMatrix
);
}
// 重载版本:直接使用配置指针更新执行参数
void GrabBagPresenter::_UpdateCurrentExecutionParams(const WorkPositionConfig* workPosition, const CameraConfig* camera, const PackageTypeConfig* package, int cameraIndex)
{
LOG_INFO("Updating current execution parameters with provided configuration\n");
if (!workPosition || !camera || !package) {
LOG_ERROR("Invalid configuration pointers provided\n");
return;
}
// 1. 设置相机序号
m_currentExecutionParams.cameraIndex = cameraIndex;
// 2. 初始化调平参数为单位矩阵(默认值)
double identityMatrix9[9] = {1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0};
for (int i = 0; i < 9; i++) {
m_currentExecutionParams.planeCalibParam.planeCalib[i] = identityMatrix9[i];
m_currentExecutionParams.planeCalibParam.invRMatrix[i] = identityMatrix9[i];
}
m_currentExecutionParams.planeCalibParam.planeHeight = -1.0;
// 3. 初始化手眼标定矩阵为单位矩阵(默认值)
double identityMatrix16[16] = {
1.0, 0.0, 0.0, 0.0,
0.0, 1.0, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0
};
for (int i = 0; i < 16; i++) {
m_currentExecutionParams.handEyeCalibMatrix.clibMatrix[i] = identityMatrix16[i];
}
// 4. 设置相机参数(从包裹配置中获取)
const VrCameraParams* cameraParam = package->GetCameraParam(cameraIndex);
if (cameraParam) {
m_currentExecutionParams.cameraParam = *cameraParam;
LOG_INFO("Using camera parameters from package %s for camera %d: exposure=%.2f, gain=%.2f, frameRate=%.2f\n",
package->name.c_str(), cameraIndex,
cameraParam->exposure, cameraParam->gain, cameraParam->frameRate);
} else {
LOG_WARNING("No camera parameters found for camera %d in package %s, using defaults\n",
cameraIndex, package->name.c_str());
// 使用默认参数
m_currentExecutionParams.cameraParam.cameraIndex = cameraIndex;
m_currentExecutionParams.cameraParam.exposure = 100.0;
m_currentExecutionParams.cameraParam.gain = 1.0;
m_currentExecutionParams.cameraParam.frameRate = 30.0;
m_currentExecutionParams.cameraParam.swingSpeed = 10.0;
m_currentExecutionParams.cameraParam.swingStartAngle = 0.0;
m_currentExecutionParams.cameraParam.swingStopAngle = 180.0;
}
// 4.1 设置包裹参数(从包裹配置中获取)
m_currentExecutionParams.bagParam = package->bagParam;
LOG_INFO("Using bag parameters: L=%.1f, W=%.1f, H=%.1f\n",
package->bagParam.bagL, package->bagParam.bagW, package->bagParam.bagH);
// 5. 设置调平参数(从相机配置中获取)
if (camera->planeCalibParam.isCalibrated &&
camera->planeCalibParam.cameraIndex == cameraIndex) {
// 使用实际标定矩阵
for (int i = 0; i < 9; i++) {
m_currentExecutionParams.planeCalibParam.planeCalib[i] = camera->planeCalibParam.planeCalib[i];
m_currentExecutionParams.planeCalibParam.invRMatrix[i] = camera->planeCalibParam.invRMatrix[i];
}
m_currentExecutionParams.planeCalibParam.planeHeight = camera->planeCalibParam.planeHeight;
LOG_INFO("Using calibrated plane parameters from camera %s (height=%.3f)\n",
camera->cameraName.c_str(), camera->planeCalibParam.planeHeight);
} else {
LOG_INFO("Camera %s is not calibrated, using identity matrix for plane calibration\n",
camera->cameraName.c_str());
}
// 6. 设置手眼标定参数(从相机配置中获取)
if (camera->handEyeCalibParam.isCalibrated &&
camera->handEyeCalibParam.cameraIndex == cameraIndex) {
// 使用实际标定矩阵
for (int i = 0; i < 16; i++) {
m_currentExecutionParams.handEyeCalibMatrix.clibMatrix[i] = camera->handEyeCalibParam.transformMatrix[i];
}
LOG_INFO("Using calibrated hand-eye matrix from camera %s\n", camera->cameraName.c_str());
} else {
LOG_INFO("Camera %s hand-eye is not calibrated, using identity matrix\n",
camera->cameraName.c_str());
}
LOG_INFO("Current execution parameters updated successfully\n");
LOG_INFO(" Work Position: %s\n", workPosition->name.c_str());
LOG_INFO(" Camera: %s (Index: %d)\n", camera->cameraName.c_str(), cameraIndex);
LOG_INFO(" Package: %s\n", package->name.c_str());
// 更新 ParameterManager 的当前执行参数
m_pParameterManager->SetCurrentExecutionParams(
m_currentExecutionParams.planeCalibParam,
m_currentExecutionParams.handEyeCalibMatrix
);
}