#include <Calibration/StereoCalibration.h>
// Free-function wrappers matching the LibMultiSense callback signatures; they receive the
// provider instance through userDataP.
void chromaCallback(const crl::multisense::image::Header& header, void* userDataP)

void lumaCallback(const crl::multisense::image::Header& header, void* userDataP)

void lidarCallback(const crl::multisense::lidar::Header& header, void* userDataP)
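// A minimal sketch of what such a wrapper usually does (an assumption; the wrapper bodies and
// the class name are not part of this excerpt):
//
//     void chromaCallback(const crl::multisense::image::Header& header, void* userDataP)
//     {
//         reinterpret_cast<MultiSensePointCloudProvider*>(userDataP)->chromaCallback(header);
//     }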
crl::multisense::image::Calibration c;   // camera calibration read from the device
crl::multisense::lidar::Calibration l;   // laser calibration read from the device
crl::multisense::Status status;
driver = crl::multisense::Channel::Create(getProperty<std::string>("ipAddress").getValue());

// Channel::Create() returns a null pointer when the device cannot be reached.
if (driver == NULL)
{
    throw std::runtime_error("Unable to connect to multisense device");
}
status = driver->getImageCalibration(c);
if (status != crl::multisense::Status_Ok)
{
    throw std::runtime_error("Unable to retrieve image calibration.");
}
if (getProperty<bool>("useLidar").getValue())
{
    status = driver->getLidarCalibration(l);
    if (status != crl::multisense::Status_Ok)
    {
        throw std::runtime_error("Unable to retrieve lidar calibration.");
    }
}
catch (std::exception& e)
{
    ARMARX_WARNING << e.what()
                   << " Try entering 'roslaunch multisense_bringup multisense.launch "
                      "mtu:=1500' in your console";
    crl::multisense::Channel::Destroy(driver);
}
// 4x4 disparity-to-depth reprojection matrix, filled in below.
q_matrix = cv::Mat::zeros(4, 4, cv::DataType<double>::type);

const int width = 1024;
const int height = 544;

rgbImage = cv::Mat::zeros(height, width, CV_8UC3);
disparityImage = cv::Mat::zeros(height, width, CV_16SC1);

// Stereo image pair in IVT format: [0] holds the left color image, [1] the right image.
rgbImages = new CByteImage*[2];
rgbImages[0] = new CByteImage(width, height, CByteImage::eRGB24);
rgbImages[1] = new CByteImage(width, height, CByteImage::eRGB24);
// The device calibration refers to the full 2048x1088 sensor resolution; rescale it to the
// resolution used here.
double xScale = 2048.0 / width;
double yScale = 1088.0 / height;
double Fx = c.right.P[0][0] / xScale;
double Cx = c.right.P[0][2] / xScale;
double Fy = c.right.P[1][1] / yScale;
double Cy = c.right.P[1][2] / yScale;
double Tx = (c.right.P[0][3] / xScale) / Fx;   // translation term of the right projection matrix

q_matrix.at<double>(0, 0) = Fy * Tx;
q_matrix.at<double>(0, 3) = -Fy * Cx * Tx;
q_matrix.at<double>(1, 1) = Fx * Tx;
q_matrix.at<double>(1, 3) = -Fx * Cy * Tx;
q_matrix.at<double>(2, 3) = Fx * Fy * Tx;
q_matrix.at<double>(3, 2) = -Fy;
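// For reference, the matrix assembled above is
//
//         [ Fy*Tx    0      0    -Fy*Cx*Tx ]
//     Q = [   0    Fx*Tx    0    -Fx*Cy*Tx ]
//         [   0      0      0     Fx*Fy*Tx ]
//         [   0      0    -Fy        0     ]
//
// cv::reprojectImageTo3D (used in the disparity handler below) computes, per pixel,
// [X Y Z W]^T = Q * [u v d 1]^T and returns (X/W, Y/W, Z/W), which here works out to
//
//     X = -Tx * (u - Cx) / d,   Y = -(Fx/Fy) * Tx * (v - Cy) / d,   Z = -Fx * Tx / d,
//
// i.e. the usual stereo back-projection with unequal focal lengths Fx and Fy handled.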
std::string calibrationFileName = getProperty<std::string>("CalibrationFileName").getValue();

CStereoCalibration ivtStereoCalibration;
if (ivtStereoCalibration.LoadCameraParameters(calibrationFileName.c_str(), true))
{
    // (use of the successfully loaded IVT calibration is not part of this excerpt)
}
else
{
    ARMARX_WARNING << "Failed loading calibration file " << calibrationFileName;
}

ARMARX_WARNING << "Calibration file with file name " << calibrationFileName;
calibration.calibrationLeft.cameraParam.width = width;
calibration.calibrationLeft.cameraParam.height = height;
calibration.calibrationLeft.cameraParam.principalPoint.push_back(Cx);
calibration.calibrationLeft.cameraParam.principalPoint.push_back(Cy);
calibration.calibrationLeft.cameraParam.focalLength.push_back(Fx);
calibration.calibrationLeft.cameraParam.focalLength.push_back(Fy);
Eigen::Vector4f minPoint = getProperty<Eigen::Vector4f>("minPoint").getValue();
Eigen::Vector4f maxPoint = getProperty<Eigen::Vector4f>("maxPoint").getValue();

cropBoxFilter.setMin(minPoint);
cropBoxFilter.setMax(maxPoint);

mask = crl::multisense::Source_Unknown;

if (getProperty<bool>("useLidar").getValue())
{
    mask |= crl::multisense::Source_Lidar_Scan;
}
// Register the disparity handler (the argument list is reconstructed here; it mirrors the
// luma and chroma registrations below).
status = driver->addIsolatedCallback(disparityCallback, crl::multisense::Source_Disparity, this);
mask |= crl::multisense::Source_Disparity;

if (status != crl::multisense::Status_Ok)
{
    throw std::runtime_error("Unable to attach isolated callback");
}
status = driver->addIsolatedCallback(lumaCallback,
                                     crl::multisense::Source_Luma_Left |
                                     crl::multisense::Source_Luma_Right,
                                     this);
mask |= crl::multisense::Source_Luma_Left | crl::multisense::Source_Luma_Right;

if (status != crl::multisense::Status_Ok)
{
    throw std::runtime_error("Unable to attach isolated callback");
}
status = driver->addIsolatedCallback(chromaCallback, crl::multisense::Source_Chroma_Left, this);
mask |= crl::multisense::Source_Chroma_Left;

if (status != crl::multisense::Status_Ok)
{
    throw std::runtime_error("Unable to attach isolated callback");
}
// Optionally switch the on-board lights on at maximum duty cycle.
if (getProperty<bool>("enableLight").getValue())
{
    crl::multisense::lighting::Config lightingConfig;
    lightingConfig.setDutyCycle(crl::multisense::lighting::MAX_DUTY_CYCLE);

    if (driver->setLightingConfig(lightingConfig) != crl::multisense::Status_Ok)
    {
        // (error handling not included in this excerpt)
    }
}
crl::multisense::Status status;

// Shutdown path: the individual driver calls are not part of this excerpt; each returned
// status is checked before the channel is destroyed.
if (getProperty<bool>("useLidar").getValue())
{
}

if (status != crl::multisense::Status_Ok)
{
}

if (status != crl::multisense::Status_Ok)
{
}

if (status != crl::multisense::Status_Ok)
{
}

crl::multisense::Channel::Destroy(driver);
crl::multisense::Status status;
crl::multisense::image::Config config;

status = driver->getImageConfig(config);
if (status != crl::multisense::Status_Ok)
{
    throw std::runtime_error("Unable to read image config");
}

status = driver->setImageConfig(config);
if (status != crl::multisense::Status_Ok)
{
    throw std::runtime_error("Unable to set image config");
}

// Allocate an organized point cloud matching the configured image resolution.
pointCloudData.reset(new pcl::PointCloud<pcl::PointXYZRGBA>());
pointCloudData->width = config.width();
pointCloudData->height = config.height();
pointCloudData->resize(config.width() * config.height());

status = driver->startStreams(mask);

if (status != crl::multisense::Status_Ok)
{
    throw std::runtime_error("Unable to start image streams");
}
catch (std::exception& e)
{
    crl::multisense::Channel::Destroy(driver);
}

hasNewColorData = false;
hasNewLumaData = false;
hasNewDisparityData = false;
crl::multisense::Status status = driver->stopStreams(mask);

if (status != crl::multisense::Status_Ok)
{
    ARMARX_WARNING << crl::multisense::Channel::statusString(status);
}

crl::multisense::Channel::Destroy(driver);
if (hasNewDisparityData && hasNewColorData)
{
    pcl::PointCloud<pcl::PointXYZRGBA>::Ptr pointCloudTemp(new pcl::PointCloud<pcl::PointXYZRGBA>());

    std::unique_lock lock(dataMutex);

    // Crop the current cloud to the configured box and drop invalid points.
    cropBoxFilter.setInputCloud(pointCloudData);
    cropBoxFilter.filter(*pointCloudTemp);

    hasNewColorData = false;
    hasNewDisparityData = false;

    std::vector<int> index;
    pcl::removeNaNFromPointCloud(*pointCloudTemp, *pointCloudTemp, index);

    // Debug output: dump the filtered cloud to disk.
    std::string outputPCDName = "/home/peter/output_" + boost::lexical_cast<std::string>(x) + ".pcd";
    pcl::io::savePCDFileASCII(outputPCDName.data(), *pointCloudTemp);

    ARMARX_INFO << "Providing point cloud of size " << pointCloudTemp->points.size();
}
// Luma handler: the right luma image is expanded to grey RGB for the stereo pair; the raw luma
// plane is kept for colorization in the chroma handler.
const uint32_t imageSize = header.width * header.height;

std::unique_lock lock(dataMutex);

if (header.source == crl::multisense::Source_Luma_Right)
{
    uint32_t height = header.height;
    uint32_t width = header.width;

    const uint8_t* rightLumaData = reinterpret_cast<const uint8_t*>(header.imageDataP);

    for (size_t j = 0; j < height; j++)
    {
        for (size_t i = 0; i < width; i++)
        {
            const uint32_t index = (j * width) + i;

            // Replicate the grey value into all three channels of the right RGB image.
            rgbImages[1]->pixels[3 * index + 0] = rightLumaData[index];
            rgbImages[1]->pixels[3 * index + 1] = rightLumaData[index];
            rgbImages[1]->pixels[3 * index + 2] = rightLumaData[index];
        }
    }
}

// Keep a copy of the luma plane; the chroma callback combines it with the chroma planes.
lumaData.resize(imageSize);
memcpy(lumaData.data(), header.imageDataP, imageSize);

hasNewLumaData = true;
// Chroma handler: combine the stored luma plane with the half-resolution chroma planes into RGB.
std::unique_lock lock(dataMutex);

const uint8_t* chromaP = reinterpret_cast<const uint8_t*>(header.imageDataP);

// The chroma image is subsampled by two in both directions relative to the luma image.
uint32_t height = header.height * 2;
uint32_t width = header.width * 2;

for (size_t j = 0; j < height; j++)
{
    for (size_t i = 0; i < width; i++)
    {
        const uint32_t lumaOffset = (j * width) + i;
        const uint32_t chromaOffset = 2 * (((j / 2) * (width / 2)) + (i / 2));

        const float px_y = static_cast<float>(lumaData[lumaOffset]);
        const float px_cb = static_cast<float>(chromaP[chromaOffset + 0]) - 128.0f;
        const float px_cr = static_cast<float>(chromaP[chromaOffset + 1]) - 128.0f;

        float px_r = px_y + 1.402f * px_cr;
        float px_g = px_y - 0.34414f * px_cb - 0.71414f * px_cr;
        float px_b = px_y + 1.772f * px_cb;
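        // The three lines above are the standard full-range YCbCr -> RGB conversion
        // (JPEG / ITU-R BT.601 coefficients). Note that px_r, px_g and px_b can leave the
        // range [0, 255] for saturated colors; a clamp before the uint8_t casts below is
        // assumed to happen in the lines omitted from this excerpt.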
        int index = j * width + i;

        // Write the color to the point cloud, the IVT left image, and the OpenCV image.
        pcl::PointXYZRGBA& targetPoint = pointCloudData->points[index];

        targetPoint.r = static_cast<uint8_t>(px_r);
        targetPoint.g = static_cast<uint8_t>(px_g);
        targetPoint.b = static_cast<uint8_t>(px_b);

        rgbImages[0]->pixels[3 * index + 0] = static_cast<uint8_t>(px_r);
        rgbImages[0]->pixels[3 * index + 1] = static_cast<uint8_t>(px_g);
        rgbImages[0]->pixels[3 * index + 2] = static_cast<uint8_t>(px_b);

        rgbImage.at<cv::Vec3b>(j, i)[0] = static_cast<uint8_t>(px_r);
        rgbImage.at<cv::Vec3b>(j, i)[1] = static_cast<uint8_t>(px_g);
        rgbImage.at<cv::Vec3b>(j, i)[2] = static_cast<uint8_t>(px_b);
    }
}

hasNewLumaData = false;
hasNewColorData = true;
// Lidar handler: convert each laser return into a 3D point in the camera frame and color it
// from the most recent left color image.
std::unique_lock lock(dataMutex);

// Without a current color image there is nothing to colorize the scan with.
if (!hasNewColorData)
{
    return;
}

for (size_t i = 0; i < header.pointCount; i++)
{
    const uint32_t range = header.rangesP[i];
    const double percent = static_cast<double>(i) / static_cast<double>(header.pointCount - 1);
    const double mirrorTheta = -header.scanArc / 2000.0 + percent * header.scanArc / 1000.0;
    const double spindleTheta = header.spindleAngleStart / 1000.0 +
                                percent * (header.spindleAngleEnd - header.spindleAngleStart) / 1000.0;

    spindleToMotor.block<3, 3>(0, 0) =
        Eigen::AngleAxisf(spindleTheta, Eigen::Vector3f::UnitZ()).matrix();

    // Laser return in the mirror frame, transformed into the camera frame via the
    // laser-to-spindle, spindle-to-motor and camera-to-spindle calibration matrices.
    Eigen::Vector4f point(range * std::sin(mirrorTheta), 0.0, range * std::cos(mirrorTheta), 1.0);
    Eigen::Vector4f pointCamera = laserToSpindle * point;
    pointCamera = spindleToMotor * pointCamera;
    pointCamera = cameraToSpindle * pointCamera;
    pcl::PointXYZRGBA& targetPoint = pointCloudData->points[i];

    // Pinhole projection of the laser point into the left camera image.
    visionx::CameraParameters p = calibration.calibrationLeft.cameraParam;

    int u = (p.focalLength[0] * pointCamera[0] + p.principalPoint[0] * pointCamera[2]) /
            pointCamera[2];
    int v = (p.focalLength[1] * pointCamera[1] + p.principalPoint[1] * pointCamera[2]) /
            pointCamera[2];

    if (u < p.width && v < p.height && u >= 0 && v >= 0)
    {
        int index = v * p.width + u;
        // The point projects into the image: store its position and look up its color.
        targetPoint.getArray4fMap() = pointCamera;

        targetPoint.r = static_cast<uint8_t>(rgbImages[0]->pixels[3 * index + 0]);
        targetPoint.g = static_cast<uint8_t>(rgbImages[0]->pixels[3 * index + 1]);
        targetPoint.b = static_cast<uint8_t>(rgbImages[0]->pixels[3 * index + 2]);
    }
    else
    {
        // Laser returns outside the camera's field of view are marked invalid.
        targetPoint.x = std::numeric_limits<float>::quiet_NaN();
        targetPoint.y = std::numeric_limits<float>::quiet_NaN();
        targetPoint.z = std::numeric_limits<float>::quiet_NaN();
    }
}
// Point cloud timestamp in microseconds, taken from the start of the scan.
pointCloudData->header.stamp =
    static_cast<uint64_t>(header.timeStartSeconds) * 1000 * 1000 + header.timeStartMicroSeconds;

hasNewDisparityData = true;
// Disparity handler: convert the fixed-point disparity image into 3D points via the Q matrix.
const crl::multisense::image::Header& header)

if (header.bitsPerPixel != 16)
{
    return;
}

// Wrap the raw 16-bit disparity buffer without copying it.
cv::Mat_<uint16_t> disparityOrigP(
    header.height, header.width,
    const_cast<uint16_t*>(reinterpret_cast<const uint16_t*>(header.imageDataP)));

std::unique_lock lock(dataMutex);

// LibMultiSense delivers disparities in fixed point with 1/16 pixel resolution.
cv::Mat_<float> disparity(header.height, header.width);
disparity = disparityOrigP / 16.0f;
disparity.copyTo(disparityImage);

cv::Mat_<cv::Vec3f> points(disparityImage.rows, disparityImage.cols);

cv::reprojectImageTo3D(disparity, points, q_matrix, false);
for (size_t j = 0; j < header.height; j++)
{
    for (size_t i = 0; i < header.width; i++)
    {
        int index = j * header.width + i;

        pcl::PointXYZRGBA& targetPoint = pointCloudData->points[index];
        const cv::Vec3f& sourcePoint = points(j, i);

        // Valid reprojections are stored in the cloud, scaled to millimeters; the exact
        // validity test is not part of this excerpt, a finite-depth check is assumed here.
        if (std::isfinite(sourcePoint[2]))
        {
            targetPoint.x = sourcePoint[0] * 1000.0;
            targetPoint.y = sourcePoint[1] * 1000.0;
            targetPoint.z = sourcePoint[2] * 1000.0;
        }
        else
        {
            targetPoint.x = std::numeric_limits<float>::quiet_NaN();
            targetPoint.y = std::numeric_limits<float>::quiet_NaN();
            targetPoint.z = std::numeric_limits<float>::quiet_NaN();
        }
    }
}
pointCloudData->header.frame_id = header.frameId;
// Point cloud timestamp in microseconds.
pointCloudData->header.stamp =
    static_cast<uint64_t>(header.timeSeconds) * 1000 * 1000 + header.timeMicroSeconds;

hasNewDisparityData = true;
setImageFormat(visionx::ImageDimension(1024, 544), visionx::eRgb);

visionx::StereoCalibration

visionx::MonocularCalibration

return calibration.calibrationLeft;

visionx::MetaPointCloudFormatPtr

visionx::MetaPointCloudFormatPtr info = new visionx::MetaPointCloudFormat();
info->capacity = 1024 * 544 * sizeof(visionx::ColoredPoint3D);
info->size = info->capacity;
info->type = visionx::PointContentType::eColoredPoints;