MultiSensePointCloudProvider.cpp
/*
 * This file is part of ArmarX.
 *
 * Copyright (C) 2011-2016, High Performance Humanoid Technologies (H2T), Karlsruhe Institute of Technology (KIT), all rights reserved.
 *
 * ArmarX is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * ArmarX is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 *
 * @package    VisionX::ArmarXObjects::MultiSensePointCloudProvider
 * @author     Markus Grotz ( markus dot grotz at kit dot edu )
 * @date       2016
 * @copyright  http://www.gnu.org/licenses/gpl-2.0.txt
 *             GNU General Public License
 */

#include "MultiSensePointCloudProvider.h"

#include <ArmarXCore/core/system/ArmarXDataPath.h>

#include <VisionX/tools/TypeMapping.h>

// IVT
#include <Calibration/StereoCalibration.h>

namespace armarx
{

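    // The LibMultiSense driver expects plain C-style callbacks. These free functions
    // act as trampolines: the provider instance is passed as userDataP when the
    // callback is registered, and each call is forwarded to the corresponding
    // member function.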
    void
    disparityCallback(const crl::multisense::image::Header& header, void* userDataP)
    {
        reinterpret_cast<MultiSensePointCloudProvider*>(userDataP)->disparityImageCallback(header);
    }

    void
    chromaCallback(const crl::multisense::image::Header& header, void* userDataP)
    {
        reinterpret_cast<MultiSensePointCloudProvider*>(userDataP)->chromaImageCallback(header);
    }

    void
    lumaCallback(const crl::multisense::image::Header& header, void* userDataP)
    {
        reinterpret_cast<MultiSensePointCloudProvider*>(userDataP)->lumaImageCallback(header);
    }

    void
    lidarCallback(const crl::multisense::lidar::Header& header, void* userDataP)
    {
        reinterpret_cast<MultiSensePointCloudProvider*>(userDataP)->lidarScanCallback(header);
    }

    void
    MultiSensePointCloudProvider::onInitCapturingPointCloudProvider()
    {
        crl::multisense::image::Calibration c;
        crl::multisense::lidar::Calibration l;
        crl::multisense::Status status;

        try
        {
            driver =
                crl::multisense::Channel::Create(getProperty<std::string>("ipAddress").getValue());

            if (!driver)
            {
                throw std::runtime_error("Unable to connect to MultiSense device");
            }

            status = driver->getImageCalibration(c);
            if (status != crl::multisense::Status_Ok)
            {
                throw std::runtime_error("Unable to retrieve image calibration.");
            }

            if (getProperty<bool>("useLidar").getValue())
            {
                status = driver->getLidarCalibration(l);
                if (status != crl::multisense::Status_Ok)
                {
                    throw std::runtime_error("Unable to retrieve lidar calibration.");
                }

                //cameraToSpindle = Eigen::Matrix4f(l.cameraToSpindleFixed);
                //laserToSpindle = Eigen::Matrix4f(l.laserToSpindle);
            }
        }
        catch (std::exception& e)
        {
            ARMARX_FATAL << "MultiSense error: " << e.what()
                         << " Try entering 'roslaunch multisense_bringup multisense.launch "
                            "mtu:=1500' in your console";
            crl::multisense::Channel::Destroy(driver);
        }

        // see http://carnegierobotics.com/support-updates/2015/10/27/multisense-q-reprojection-matrix

        q_matrix = cv::Mat::zeros(4, 4, cv::DataType<double>::type);

        const int width = 1024;
        const int height = 544;

        rgbImage = cv::Mat::zeros(height, width, CV_8UC3);
        disparityImage = cv::Mat::zeros(height, width, CV_16SC1);

        rgbImages = new CByteImage*[2];

        rgbImages[0] = new CByteImage(width, height, CByteImage::eRGB24);
        rgbImages[1] = new CByteImage(width, height, CByteImage::eRGB24);

        /*
            Q = [ FyTx,    0,   0, -FyCxTx ]
                [    0, FxTx,   0, -FxCyTx ]
                [    0,    0,   0,  FxFyTx ]
                [    0,    0, -Fy,       0 ]

            P_right = [ Fx,  0, Cx, FxTx ]
                      [  0, Fy, Cy,    0 ]
                      [  0,  0,  1,    0 ]
        */

        // Scale the calibration, which refers to the full 2048x1088 sensor
        // resolution, down to the 1024x544 operating resolution used here.
        double xScale = 2048.0 / width;
        double yScale = 1088.0 / height;
        double Fx = c.right.P[0][0] / xScale;
        double Cx = c.right.P[0][2] / xScale;
        double Fy = c.right.P[1][1] / yScale;
        double Cy = c.right.P[1][2] / yScale;
        double Tx = (c.right.P[0][3] / xScale) / Fx;

        q_matrix.at<double>(0, 0) = Fy * Tx;
        q_matrix.at<double>(0, 3) = -Fy * Cx * Tx;
        q_matrix.at<double>(1, 1) = Fx * Tx;
        q_matrix.at<double>(1, 3) = -Fx * Cy * Tx;
        q_matrix.at<double>(2, 3) = Fx * Fy * Tx;
        q_matrix.at<double>(3, 2) = -Fy;

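        // Note on the Q matrix assembled above (CRL's convention, see the link above):
        // for a pixel (u, v) with disparity d, cv::reprojectImageTo3D computes
        //   [X Y Z W]^T = Q * [u v d 1]^T
        // and returns (X/W, Y/W, Z/W). With this Q that works out to
        //   x = -Tx * (u - Cx) / d,   y = -Tx * (Fx / Fy) * (v - Cy) / d,   z = -Fx * Tx / d,
        // i.e. depth is focal length times baseline over disparity (Tx is negative, so z > 0).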
        std::string calibrationFileName =
            getProperty<std::string>("CalibrationFileName").getValue();
        if (armarx::ArmarXDataPath::getAbsolutePath(calibrationFileName, calibrationFileName))
        {
            CStereoCalibration ivtStereoCalibration;
            if (ivtStereoCalibration.LoadCameraParameters(calibrationFileName.c_str(), true))
            {
                calibration = visionx::tools::convert(ivtStereoCalibration);
            }
            else
            {
                ARMARX_WARNING << "Failed to load calibration file " << calibrationFileName;
            }
        }
        else
        {
            ARMARX_WARNING << "Calibration file " << calibrationFileName << " not found";

            // Fall back to the intrinsics derived from the device calibration.
            calibration.calibrationLeft.cameraParam.width = width;
            calibration.calibrationLeft.cameraParam.height = height;
            calibration.calibrationLeft.cameraParam.principalPoint.push_back(Cx);
            calibration.calibrationLeft.cameraParam.principalPoint.push_back(Cy);
            calibration.calibrationLeft.cameraParam.focalLength.push_back(Fx);
            calibration.calibrationLeft.cameraParam.focalLength.push_back(Fy);
            //::visionx::types::Vec distortion;
            //::visionx::types::Mat rotation;
            //::visionx::types::Vec translation;
            //calibration.calibrationLeft.cameraParam.distortion;
            //calibration.calibrationLeft.cameraParam.rotation;
            //calibration.calibrationLeft.cameraParam.translation;
        }

        Eigen::Vector4f minPoint = getProperty<Eigen::Vector4f>("minPoint").getValue();
        Eigen::Vector4f maxPoint = getProperty<Eigen::Vector4f>("maxPoint").getValue();

        cropBoxFilter.setMin(minPoint);
        cropBoxFilter.setMax(maxPoint);

        mask = crl::multisense::Source_Unknown;

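        // Register the data callbacks and build up the source mask that is later
        // passed to startStreams(): either lidar scans or disparity images, plus
        // left/right luma and left chroma for colorization and the provided images.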
        if (getProperty<bool>("useLidar").getValue())
        {
            status = driver->addIsolatedCallback(lidarCallback, this);
            mask |= crl::multisense::Source_Lidar_Scan;
        }
        else
        {
            status = driver->addIsolatedCallback(
                disparityCallback, crl::multisense::Source_Disparity, this);
            mask |= crl::multisense::Source_Disparity;
        }

        if (status != crl::multisense::Status_Ok)
        {
            throw std::runtime_error("Unable to attach lidar/disparity callback");
        }

        status = driver->addIsolatedCallback(lumaCallback,
                                             crl::multisense::Source_Luma_Left |
                                                 crl::multisense::Source_Luma_Right,
                                             this);
        mask |= crl::multisense::Source_Luma_Left | crl::multisense::Source_Luma_Right;

        if (status != crl::multisense::Status_Ok)
        {
            throw std::runtime_error("Unable to attach luma callback");
        }

        status =
            driver->addIsolatedCallback(chromaCallback, crl::multisense::Source_Chroma_Left, this);
        mask |= crl::multisense::Source_Chroma_Left;

        if (status != crl::multisense::Status_Ok)
        {
            throw std::runtime_error("Unable to attach chroma callback");
        }

        if (getProperty<bool>("enableLight").getValue())
        {
            crl::multisense::lighting::Config lightingConfig;
            lightingConfig.setDutyCycle(crl::multisense::lighting::MAX_DUTY_CYCLE);

            if (driver->setLightingConfig(lightingConfig) != crl::multisense::Status_Ok)
            {
                ARMARX_WARNING << "Failed to switch on LED light";
            }
            else
            {
                ARMARX_INFO << "Switched on LED light";
            }
        }
    }

    void
    MultiSensePointCloudProvider::onExitCapturingPointCloudProvider()
    {
        crl::multisense::Status status;

        if (getProperty<bool>("useLidar").getValue())
        {
            status = driver->removeIsolatedCallback(lidarCallback);
        }
        else
        {
            status = driver->removeIsolatedCallback(disparityCallback);
        }

        if (status != crl::multisense::Status_Ok)
        {
            ARMARX_FATAL << "Unable to remove lidar/disparity callback";
        }

        status = driver->removeIsolatedCallback(chromaCallback);

        if (status != crl::multisense::Status_Ok)
        {
            ARMARX_FATAL << "Unable to remove chroma callback";
        }

        status = driver->removeIsolatedCallback(lumaCallback);

        if (status != crl::multisense::Status_Ok)
        {
            ARMARX_FATAL << "Unable to remove luma callback";
        }

        crl::multisense::Channel::Destroy(driver);

        delete rgbImages[0];
        delete rgbImages[1];

        delete[] rgbImages;
    }

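    // onStartCapture: configure the requested frame rate, allocate an organized
    // point cloud matching the configured image resolution, and start the streams
    // selected in the source mask.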
    void
    MultiSensePointCloudProvider::onStartCapture(float frameRate)
    {
        crl::multisense::Status status;
        crl::multisense::image::Config config;

        try
        {
            status = driver->getImageConfig(config);
            if (status != crl::multisense::Status_Ok)
            {
                throw std::runtime_error("Unable to read image config");
            }

            config.setFps(frameRate);
            status = driver->setImageConfig(config);
            if (status != crl::multisense::Status_Ok)
            {
                throw std::runtime_error("Unable to set image config");
            }

            pointCloudData.reset(new pcl::PointCloud<pcl::PointXYZRGBA>());

            pointCloudData->width = config.width();
            pointCloudData->height = config.height();
            pointCloudData->resize(config.width() * config.height());

            status = driver->startStreams(mask);

            if (status != crl::multisense::Status_Ok)
            {
                throw std::runtime_error("Unable to start image streams");
            }
        }
        catch (std::exception& e)
        {
            ARMARX_FATAL << "MultiSense error: " << e.what();
            crl::multisense::Channel::Destroy(driver);
        }

        hasNewColorData = false;
        hasNewLumaData = false;
        hasNewDisparityData = false;
    }

    void
    MultiSensePointCloudProvider::onStopCapture()
    {
        crl::multisense::Status status = driver->stopStreams(mask);

        if (status != crl::multisense::Status_Ok)
        {
            ARMARX_FATAL << "Unable to stop streams. Status was "
                         << crl::multisense::Channel::statusString(status);
            crl::multisense::Channel::Destroy(driver);
        }
    }

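    // doCapture: once both geometry (disparity or lidar) and color have arrived,
    // crop the cloud to the configured bounding box, drop NaN points, and publish
    // the point cloud together with the current stereo image pair.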
    bool
    MultiSensePointCloudProvider::doCapture()
    {
        if (hasNewDisparityData && hasNewColorData)
        {
            pcl::PointCloud<pcl::PointXYZRGBA>::Ptr pointCloudTemp(
                new pcl::PointCloud<pcl::PointXYZRGBA>());

            std::unique_lock lock(dataMutex);

            cropBoxFilter.setInputCloud(pointCloudData);
            cropBoxFilter.filter(*pointCloudTemp);

            hasNewColorData = false;
            hasNewDisparityData = false;

            lock.unlock();

            std::vector<int> index;
            pcl::removeNaNFromPointCloud(*pointCloudTemp, *pointCloudTemp, index);

#if 0
            static int x = 0;
            x++;
            std::string outputPCDName = "/home/peter/output_" + boost::lexical_cast<std::string>(x) + ".pcd";
            pcl::io::savePCDFileASCII(outputPCDName.data(), *pointCloudTemp);
#endif

            ARMARX_INFO << "Providing point cloud of size " << pointCloudTemp->points.size();
            providePointCloud(pointCloudTemp);

            provideImages(rgbImages);
            return true;
        }

        return false;
    }

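    // lumaImageCallback: right luma frames are expanded into the grayscale right
    // output image; left luma frames are buffered until the matching chroma frame
    // arrives in chromaImageCallback().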
    void
    MultiSensePointCloudProvider::lumaImageCallback(const crl::multisense::image::Header& header)
    {
        const uint32_t imageSize = header.width * header.height;

        std::unique_lock lock(dataMutex);

        if (header.source == crl::multisense::Source_Luma_Right)
        {
            uint32_t height = header.height;
            uint32_t width = header.width;

            const uint8_t* rightLumaData = reinterpret_cast<const uint8_t*>(header.imageDataP);

            for (size_t j = 0; j < height; j++)
            {
                for (size_t i = 0; i < width; i++)
                {
                    const uint32_t index = (j * width) + i;
                    rgbImages[1]->pixels[3 * index + 0] = rightLumaData[index];
                    rgbImages[1]->pixels[3 * index + 1] = rightLumaData[index];
                    rgbImages[1]->pixels[3 * index + 2] = rightLumaData[index];
                }
            }
        }
        else
        {
            lumaData.resize(imageSize);

            memcpy((void*)&lumaData.data()[0], header.imageDataP, imageSize);

            hasNewLumaData = true;
        }
    }

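    // chromaImageCallback: the left image arrives as YCbCr 4:2:0 (full-resolution
    // luma, one interleaved Cb/Cr pair per 2x2 pixel block). Combine the buffered
    // luma with the chroma plane and convert to RGB with the usual BT.601 factors:
    //   R = Y + 1.402 * Cr,  G = Y - 0.344 * Cb - 0.714 * Cr,  B = Y + 1.772 * Cb
    // The result colors both the point cloud and the left output image.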
    void
    MultiSensePointCloudProvider::chromaImageCallback(const crl::multisense::image::Header& header)
    {
        std::unique_lock lock(dataMutex);

        if (!hasNewLumaData)
        {
            return;
        }

        const uint8_t* chromaP = reinterpret_cast<const uint8_t*>(header.imageDataP);

        uint32_t height = header.height * 2;
        uint32_t width = header.width * 2;

        for (size_t j = 0; j < height; j++)
        {
            for (size_t i = 0; i < width; i++)
            {
                const uint32_t lumaOffset = (j * width) + i;
                const uint32_t chromaOffset = 2 * (((j / 2) * (width / 2)) + (i / 2));

                const float px_y = static_cast<float>(lumaData[lumaOffset]);
                const float px_cb = static_cast<float>(chromaP[chromaOffset + 0]) - 128.0f;
                const float px_cr = static_cast<float>(chromaP[chromaOffset + 1]) - 128.0f;

                float px_r = px_y + 1.402f * px_cr;
                float px_g = px_y - 0.34414f * px_cb - 0.71414f * px_cr;
                float px_b = px_y + 1.772f * px_cb;

                px_r = std::max(0.0f, std::min(255.0f, px_r));
                px_g = std::max(0.0f, std::min(255.0f, px_g));
                px_b = std::max(0.0f, std::min(255.0f, px_b));

                int index = j * width + i;

                pcl::PointXYZRGBA& targetPoint = pointCloudData->points[index];

                targetPoint.r = static_cast<uint8_t>(px_r);
                targetPoint.g = static_cast<uint8_t>(px_g);
                targetPoint.b = static_cast<uint8_t>(px_b);

                rgbImages[0]->pixels[3 * index + 0] = static_cast<uint8_t>(px_r);
                rgbImages[0]->pixels[3 * index + 1] = static_cast<uint8_t>(px_g);
                rgbImages[0]->pixels[3 * index + 2] = static_cast<uint8_t>(px_b);

                rgbImage.at<cv::Vec3b>(j, i)[0] = static_cast<uint8_t>(px_r);
                rgbImage.at<cv::Vec3b>(j, i)[1] = static_cast<uint8_t>(px_g);
                rgbImage.at<cv::Vec3b>(j, i)[2] = static_cast<uint8_t>(px_b);
            }
        }

        hasNewLumaData = false;
        hasNewColorData = true;
    }

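    // lidarScanCallback: each range sample is converted from the rotating-mirror
    // frame into the camera frame via the chain laser -> spindle -> motor -> camera,
    // then projected back into the left image with the pinhole model
    //   u = Fx * x / z + Cx,  v = Fy * y / z + Cy
    // to look up its color in the current left RGB image.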
    void
    MultiSensePointCloudProvider::lidarScanCallback(const crl::multisense::lidar::Header& header)
    {
        std::unique_lock lock(dataMutex);

        if (!hasNewColorData)
        {
            return;
        }

        for (size_t i = 0; i < header.pointCount; i++)
        {
            const uint32_t range = header.rangesP[i];
            const double percent =
                static_cast<double>(i) / static_cast<double>(header.pointCount - 1);
            const double mirrorTheta = -header.scanArc / 2000.0 + percent * header.scanArc / 1000.0;
            const double spindleTheta =
                header.spindleAngleStart / 1000.0 +
                percent * (header.spindleAngleEnd - header.spindleAngleStart) / 1000.0;

            Eigen::Matrix4f spindleToMotor = Eigen::Matrix4f::Identity();
            spindleToMotor.block<3, 3>(0, 0) =
                Eigen::AngleAxisf(spindleTheta, Eigen::Vector3f::UnitZ()).matrix();

            Eigen::Vector4f point(
                range * std::sin(mirrorTheta), 0.0, range * std::cos(mirrorTheta), 1.0);
            Eigen::Vector4f pointCamera = laserToSpindle * point;
            pointCamera = spindleToMotor * pointCamera;
            pointCamera = cameraToSpindle * pointCamera;

            pcl::PointXYZRGBA& targetPoint = pointCloudData->points[i];

            if (std::isfinite(pointCamera[2]))
            {
                visionx::CameraParameters p = calibration.calibrationLeft.cameraParam;

                int u = (p.focalLength[0] * pointCamera[0] + p.principalPoint[0] * pointCamera[2]) /
                        pointCamera[2];
                int v = (p.focalLength[1] * pointCamera[1] + p.principalPoint[1] * pointCamera[2]) /
                        pointCamera[2];

                if (u < p.width && v < p.height && u >= 0 && v >= 0)
                {
                    int index = v * p.width + u;

                    targetPoint.getArray4fMap() = pointCamera;

                    targetPoint.r = static_cast<uint8_t>(rgbImages[0]->pixels[3 * index + 0]);
                    targetPoint.g = static_cast<uint8_t>(rgbImages[0]->pixels[3 * index + 1]);
                    targetPoint.b = static_cast<uint8_t>(rgbImages[0]->pixels[3 * index + 2]);
                }
            }
            else
            {
                targetPoint.x = std::numeric_limits<float>::quiet_NaN();
                targetPoint.y = std::numeric_limits<float>::quiet_NaN();
                targetPoint.z = std::numeric_limits<float>::quiet_NaN();
            }
        }

        // pointCloudData->header.frame_id = header.frameId;
        // PCL timestamps are in microseconds.
        pointCloudData->header.stamp =
            static_cast<uint64_t>(header.timeStartSeconds) * 1000 * 1000 +
            header.timeStartMicroSeconds;

        hasNewDisparityData = true;
    }

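    // disparityImageCallback: the MultiSense delivers disparity as 16-bit fixed
    // point with 1/16 pixel resolution. Divide by 16, reproject through the Q
    // matrix (which yields coordinates in meters), and scale to millimeters for
    // the organized point cloud.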
    void
    MultiSensePointCloudProvider::disparityImageCallback(
        const crl::multisense::image::Header& header)
    {
        if (header.bitsPerPixel != 16)
        {
            ARMARX_ERROR << "Invalid disparity data: expected 16 bits per pixel";
            return;
        }

        cv::Mat_<uint16_t> disparityOrigP(
            header.height,
            header.width,
            const_cast<uint16_t*>(reinterpret_cast<const uint16_t*>(header.imageDataP)));

        std::unique_lock lock(dataMutex);

        cv::Mat_<float> disparity(header.height, header.width);
        disparity = disparityOrigP / 16.0f;
        disparity.copyTo(disparityImage);

        cv::Mat_<cv::Vec3f> points(disparityImage.rows, disparityImage.cols);

        cv::reprojectImageTo3D(disparity, points, q_matrix, false);

        for (size_t j = 0; j < header.height; j++)
        {
            for (size_t i = 0; i < header.width; i++)
            {
                int index = j * header.width + i;
                cv::Vec3f sourcePoint = points.at<cv::Vec3f>(j, i);
                pcl::PointXYZRGBA& targetPoint = pointCloudData->points[index];

                //uint16_t value = disparityOrigP.at<uint16_t>(j, i);

                //rgbImages[1]->pixels[3 * index + 0] = value & 0xFF;
                //rgbImages[1]->pixels[3 * index + 1] = (value >> 8) & 0xFF;

                if (std::isfinite(sourcePoint[2]))
                {
                    targetPoint.x = sourcePoint[0] * 1000.0;
                    targetPoint.y = sourcePoint[1] * 1000.0;
                    targetPoint.z = sourcePoint[2] * 1000.0;
                }
                else
                {
                    targetPoint.x = std::numeric_limits<float>::quiet_NaN();
                    targetPoint.y = std::numeric_limits<float>::quiet_NaN();
                    targetPoint.z = std::numeric_limits<float>::quiet_NaN();
                }

                //point.getVector3fMap() =
            }
        }

        pointCloudData->header.frame_id = std::to_string(header.frameId);
        // PCL timestamps are in microseconds.
        pointCloudData->header.stamp =
            static_cast<uint64_t>(header.timeSeconds) * 1000 * 1000 + header.timeMicroSeconds;

        hasNewDisparityData = true;
    }

    void
    MultiSensePointCloudProvider::onInitImageProvider()
    {
        setImageFormat(visionx::ImageDimension(1024, 544), visionx::eRgb);
        setNumberImages(2);
    }

    visionx::StereoCalibration
    MultiSensePointCloudProvider::getStereoCalibration(const Ice::Current& c)
    {
        std::unique_lock lock(captureMutex);

        return calibration;
    }

    visionx::MonocularCalibration
    MultiSensePointCloudProvider::getMonocularCalibration(const ::Ice::Current& c)
    {
        std::unique_lock lock(captureMutex);

        return calibration.calibrationLeft;
    }

    visionx::MetaPointCloudFormatPtr
    MultiSensePointCloudProvider::getDefaultPointCloudFormat()
    {
        visionx::MetaPointCloudFormatPtr info = new visionx::MetaPointCloudFormat();
        info->capacity = 1024 * 544 * sizeof(visionx::ColoredPoint3D);
        info->size = info->capacity;
        info->type = visionx::PointContentType::eColoredPoints;
        return info;
    }

    armarx::PropertyDefinitionsPtr
    MultiSensePointCloudProvider::createPropertyDefinitions()
    {
        return armarx::PropertyDefinitionsPtr(
            new MultiSensePointCloudProviderPropertyDefinitions(getConfigIdentifier()));
    }
} // namespace armarx