MultiSensePointCloudProvider.cpp
/*
 * This file is part of ArmarX.
 *
 * Copyright (C) 2011-2016, High Performance Humanoid Technologies (H2T), Karlsruhe Institute of Technology (KIT), all rights reserved.
 *
 * ArmarX is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * ArmarX is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 *
 * @package    VisionX::ArmarXObjects::MultiSensePointCloudProvider
 * @author     Markus Grotz ( markus dot grotz at kit dot edu )
 * @date       2016
 * @copyright  http://www.gnu.org/licenses/gpl-2.0.txt
 *             GNU General Public License
 */

#include "MultiSensePointCloudProvider.h"

#include <ArmarXCore/core/system/ArmarXDataPath.h>

#include <VisionX/tools/TypeMapping.h>

// IVT
#include <Calibration/StereoCalibration.h>

namespace armarx
{

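    // Free-function trampolines: the LibMultiSense channel expects plain C-style
    // callbacks, so each of these forwards the received header to the corresponding
    // member function of the provider instance passed in via userDataP.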
    void disparityCallback(const crl::multisense::image::Header& header, void* userDataP)
    {
        reinterpret_cast<MultiSensePointCloudProvider*>(userDataP)->disparityImageCallback(header);
    }

    void chromaCallback(const crl::multisense::image::Header& header, void* userDataP)
    {
        reinterpret_cast<MultiSensePointCloudProvider*>(userDataP)->chromaImageCallback(header);
    }

    void lumaCallback(const crl::multisense::image::Header& header, void* userDataP)
    {
        reinterpret_cast<MultiSensePointCloudProvider*>(userDataP)->lumaImageCallback(header);
    }

    void lidarCallback(const crl::multisense::lidar::Header& header, void* userDataP)
    {
        reinterpret_cast<MultiSensePointCloudProvider*>(userDataP)->lidarScanCallback(header);
    }

    void MultiSensePointCloudProvider::onInitCapturingPointCloudProvider()
    {
        crl::multisense::image::Calibration c;
        crl::multisense::lidar::Calibration l;
        crl::multisense::Status status;

        try
        {
            driver = crl::multisense::Channel::Create(getProperty<std::string>("ipAddress").getValue());

            if (!driver)
            {
                throw std::runtime_error("Unable to connect to multisense device");
            }

            status = driver->getImageCalibration(c);
            if (status != crl::multisense::Status_Ok)
            {
                throw std::runtime_error("Unable to retrieve image calibration.");
            }

            if (getProperty<bool>("useLidar").getValue())
            {
                status = driver->getLidarCalibration(l);
                if (status != crl::multisense::Status_Ok)
                {
                    throw std::runtime_error("Unable to retrieve lidar calibration.");
                }

                //cameraToSpindle = Eigen::Matrix4f(l.cameraToSpindleFixed);
                //laserToSpindle = Eigen::Matrix4f(l.laserToSpindle);
            }
        }
        catch (std::exception& e)
        {
            ARMARX_FATAL << "Multisense error: " << e.what()
                         << " Try entering 'roslaunch multisense_bringup multisense.launch mtu:=1500' in your console";
            crl::multisense::Channel::Destroy(driver);
        }

        // see http://carnegierobotics.com/support-updates/2015/10/27/multisense-q-reprojection-matrix

        q_matrix = cv::Mat::zeros(4, 4, cv::DataType<double>::type);

        const int width = 1024;
        const int height = 544;

        rgbImage = cv::Mat::zeros(height, width, CV_8UC3);
        disparityImage = cv::Mat::zeros(height, width, CV_16SC1);

        rgbImages = new CByteImage*[2];

        rgbImages[0] = new CByteImage(width, height, CByteImage::eRGB24);
        rgbImages[1] = new CByteImage(width, height, CByteImage::eRGB24);

        /*
            Q = [ FyTx,    0,   0, -FyCxTx ]
                [    0, FxTx,   0, -FxCyTx ]
                [    0,    0,   0,  FxFyTx ]
                [    0,    0, -Fy,       0 ]

            P_right = [ Fx,  0, Cx, FxTx ]
                      [  0, Fy, Cy,    0 ]
                      [  0,  0,  1,    0 ]
        */

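        // The device calibration (c.right.P) is given for the full 2048 x 1088 resolution,
        // so the intrinsics are scaled down to the 1024 x 544 streaming resolution used here.
        // Tx is the baseline term recovered from the right projection matrix entry P[0][3] = Fx * Tx.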
        double xScale = 2048.0 / width;
        double yScale = 1088.0 / height;
        double Fx = c.right.P[0][0] / xScale;
        double Cx = c.right.P[0][2] / xScale;
        double Fy = c.right.P[1][1] / yScale;
        double Cy = c.right.P[1][2] / yScale;
        double Tx = (c.right.P[0][3] / xScale) / Fx;

        q_matrix.at<double>(0, 0) = Fy * Tx;
        q_matrix.at<double>(0, 3) = -Fy * Cx * Tx;
        q_matrix.at<double>(1, 1) = Fx * Tx;
        q_matrix.at<double>(1, 3) = -Fx * Cy * Tx;
        q_matrix.at<double>(2, 3) = Fx * Fy * Tx;
        q_matrix.at<double>(3, 2) = -Fy;

        std::string calibrationFileName = getProperty<std::string>("CalibrationFileName").getValue();
        if (armarx::ArmarXDataPath::getAbsolutePath(calibrationFileName, calibrationFileName))
        {
            CStereoCalibration ivtStereoCalibration;
            if (ivtStereoCalibration.LoadCameraParameters(calibrationFileName.c_str(), true))
            {
                calibration = visionx::tools::convert(ivtStereoCalibration);
            }
            else
            {
                ARMARX_WARNING << "Failed to load calibration file " << calibrationFileName;
            }
        }
        else
        {
            ARMARX_WARNING << "Calibration file " << calibrationFileName << " not found";

            calibration.calibrationLeft.cameraParam.width = width;
            calibration.calibrationLeft.cameraParam.height = height;
            calibration.calibrationLeft.cameraParam.principalPoint.push_back(Cx);
            calibration.calibrationLeft.cameraParam.principalPoint.push_back(Cy);
            calibration.calibrationLeft.cameraParam.focalLength.push_back(Fx);
            calibration.calibrationLeft.cameraParam.focalLength.push_back(Fy);
            //::visionx::types::Vec distortion;
            //::visionx::types::Mat rotation;
            //::visionx::types::Vec translation;
            //calibration.calibrationLeft.cameraParam.distortion;
            //calibration.calibrationLeft.cameraParam.rotation;
            //calibration.calibrationLeft.cameraParam.translation;
        }

        Eigen::Vector4f minPoint = getProperty<Eigen::Vector4f>("minPoint").getValue();
        Eigen::Vector4f maxPoint = getProperty<Eigen::Vector4f>("maxPoint").getValue();

        cropBoxFilter.setMin(minPoint);
        cropBoxFilter.setMax(maxPoint);

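        // Collect all data sources to be streamed; the combined mask is passed to
        // startStreams()/stopStreams() in onStartCapture()/onStopCapture().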
        mask = crl::multisense::Source_Unknown;

        if (getProperty<bool>("useLidar").getValue())
        {
            status = driver->addIsolatedCallback(lidarCallback, this);
            mask |= crl::multisense::Source_Lidar_Scan;
        }
        else
        {
            status = driver->addIsolatedCallback(disparityCallback, crl::multisense::Source_Disparity, this);
            mask |= crl::multisense::Source_Disparity;
        }

        if (status != crl::multisense::Status_Ok)
        {
            throw std::runtime_error("Unable to attach lidar/disparity callback");
        }

        status = driver->addIsolatedCallback(lumaCallback, crl::multisense::Source_Luma_Left | crl::multisense::Source_Luma_Right, this);
        mask |= crl::multisense::Source_Luma_Left | crl::multisense::Source_Luma_Right;

        if (status != crl::multisense::Status_Ok)
        {
            throw std::runtime_error("Unable to attach luma callback");
        }

        status = driver->addIsolatedCallback(chromaCallback, crl::multisense::Source_Chroma_Left, this);
        mask |= crl::multisense::Source_Chroma_Left;

        if (status != crl::multisense::Status_Ok)
        {
            throw std::runtime_error("Unable to attach chroma callback");
        }

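        // Optionally drive the on-board LED lights at the maximum duty cycle while capturing.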
        if (getProperty<bool>("enableLight").getValue())
        {
            crl::multisense::lighting::Config lightingConfig;
            lightingConfig.setDutyCycle(crl::multisense::lighting::MAX_DUTY_CYCLE);

            if (driver->setLightingConfig(lightingConfig) != crl::multisense::Status_Ok)
            {
                ARMARX_WARNING << "Failed to switch on LED light";
            }
            else
            {
                ARMARX_INFO << "Switched on LED light";
            }
        }
    }

    void MultiSensePointCloudProvider::onExitCapturingPointCloudProvider()
    {
        crl::multisense::Status status;

        if (getProperty<bool>("useLidar").getValue())
        {
            status = driver->removeIsolatedCallback(lidarCallback);
        }
        else
        {
            status = driver->removeIsolatedCallback(disparityCallback);
        }

        if (status != crl::multisense::Status_Ok)
        {
            ARMARX_FATAL << "Unable to remove lidar/disparity callback";
        }

        status = driver->removeIsolatedCallback(chromaCallback);

        if (status != crl::multisense::Status_Ok)
        {
            ARMARX_FATAL << "Unable to remove chroma callback";
        }

        status = driver->removeIsolatedCallback(lumaCallback);

        if (status != crl::multisense::Status_Ok)
        {
            ARMARX_FATAL << "Unable to remove luma callback";
        }

        crl::multisense::Channel::Destroy(driver);

        delete rgbImages[0];
        delete rgbImages[1];

        delete[] rgbImages;
    }

    void MultiSensePointCloudProvider::onStartCapture(float frameRate)
    {
        crl::multisense::Status status;
        crl::multisense::image::Config config;

        try
        {
            status = driver->getImageConfig(config);
            if (status != crl::multisense::Status_Ok)
            {
                throw std::runtime_error("Unable to read image config");
            }

            config.setFps(frameRate);
            status = driver->setImageConfig(config);
            if (status != crl::multisense::Status_Ok)
            {
                throw std::runtime_error("Unable to set image config");
            }

            pointCloudData.reset(new pcl::PointCloud<pcl::PointXYZRGBA>());

            pointCloudData->width = config.width();
            pointCloudData->height = config.height();
            pointCloudData->resize(config.width() * config.height());

            status = driver->startStreams(mask);

            if (status != crl::multisense::Status_Ok)
            {
                throw std::runtime_error("Unable to start image streams");
            }
        }
        catch (std::exception& e)
        {
            ARMARX_FATAL << "Multisense error: " << e.what();
            crl::multisense::Channel::Destroy(driver);
        }

        hasNewColorData = false;
        hasNewLumaData = false;
        hasNewDisparityData = false;
    }

    void MultiSensePointCloudProvider::onStopCapture()
    {
        crl::multisense::Status status = driver->stopStreams(mask);

        if (status != crl::multisense::Status_Ok)
        {
            ARMARX_FATAL << "Unable to stop streams. Status was " << crl::multisense::Channel::statusString(status);
            crl::multisense::Channel::Destroy(driver);
        }
    }

    bool MultiSensePointCloudProvider::doCapture()
    {
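        // Publish only when a complete frame is available, i.e. both geometry
        // (disparity or lidar) and color have been updated since the last capture.
        // Filtering runs on a copy so the driver callbacks are not blocked for long.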
        if (hasNewDisparityData && hasNewColorData)
        {
            pcl::PointCloud<pcl::PointXYZRGBA>::Ptr pointCloudTemp(new pcl::PointCloud<pcl::PointXYZRGBA>());

            std::unique_lock lock(dataMutex);

            cropBoxFilter.setInputCloud(pointCloudData);
            cropBoxFilter.filter(*pointCloudTemp);

            hasNewColorData = false;
            hasNewDisparityData = false;

            lock.unlock();

            std::vector<int> index;
            pcl::removeNaNFromPointCloud(*pointCloudTemp, *pointCloudTemp, index);

#if 0
            static int x = 0;
            x++;
            std::string outputPCDName = "/home/peter/output_" + boost::lexical_cast<std::string>(x) + ".pcd";
            pcl::io::savePCDFileASCII(outputPCDName.data(), *pointCloudTemp);
#endif

            ARMARX_INFO << "Providing point cloud of size " << pointCloudTemp->points.size();
            providePointCloud(pointCloudTemp);

            provideImages(rgbImages);
            return true;
        }

        return false;
    }

    void MultiSensePointCloudProvider::lumaImageCallback(const crl::multisense::image::Header& header)
    {
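        // Luma arrives for both cameras. The right image stays grayscale, so its luma is
        // replicated into all three channels of rgbImages[1]; the left luma plane is
        // buffered until the matching chroma callback converts it to RGB.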
        const uint32_t imageSize = header.width * header.height;

        std::unique_lock lock(dataMutex);

        if (header.source == crl::multisense::Source_Luma_Right)
        {
            uint32_t height = header.height;
            uint32_t width = header.width;

            const uint8_t* rightLumaData = reinterpret_cast<const uint8_t*>(header.imageDataP);

            for (size_t j = 0; j < height; j++)
            {
                for (size_t i = 0; i < width; i++)
                {
                    const uint32_t index = (j * width) + i;
                    rgbImages[1]->pixels[3 * index + 0] = rightLumaData[index];
                    rgbImages[1]->pixels[3 * index + 1] = rightLumaData[index];
                    rgbImages[1]->pixels[3 * index + 2] = rightLumaData[index];
                }
            }
        }
        else
        {
            lumaData.resize(imageSize);

            memcpy((void*)&lumaData.data()[0], header.imageDataP, imageSize);

            hasNewLumaData = true;
        }
    }

    void MultiSensePointCloudProvider::chromaImageCallback(const crl::multisense::image::Header& header)
    {
        std::unique_lock lock(dataMutex);

        if (!hasNewLumaData)
        {
            return;
        }

        const uint8_t* chromaP = reinterpret_cast<const uint8_t*>(header.imageDataP);

        uint32_t height = header.height * 2;
        uint32_t width = header.width * 2;

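        // The chroma plane is subsampled 2x2 with interleaved Cb/Cr samples, while the
        // buffered luma plane has full resolution; hence the chroma offset is computed at
        // half resolution. The conversion below uses the standard BT.601 YCbCr-to-RGB
        // coefficients with clamping to [0, 255].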
        for (size_t j = 0; j < height; j++)
        {
            for (size_t i = 0; i < width; i++)
            {
                const uint32_t lumaOffset = (j * width) + i;
                const uint32_t chromaOffset = 2 * (((j / 2) * (width / 2)) + (i / 2));

                const float px_y = static_cast<float>(lumaData[lumaOffset]);
                const float px_cb = static_cast<float>(chromaP[chromaOffset + 0]) - 128.0f;
                const float px_cr = static_cast<float>(chromaP[chromaOffset + 1]) - 128.0f;

                float px_r = px_y + 1.402f * px_cr;
                float px_g = px_y - 0.34414f * px_cb - 0.71414f * px_cr;
                float px_b = px_y + 1.772f * px_cb;

                px_r = std::max(0.0f, std::min(255.0f, px_r));
                px_g = std::max(0.0f, std::min(255.0f, px_g));
                px_b = std::max(0.0f, std::min(255.0f, px_b));

                int index = j * width + i;

                pcl::PointXYZRGBA& targetPoint = pointCloudData->points[index];

                targetPoint.r = static_cast<uint8_t>(px_r);
                targetPoint.g = static_cast<uint8_t>(px_g);
                targetPoint.b = static_cast<uint8_t>(px_b);

                rgbImages[0]->pixels[3 * index + 0] = static_cast<uint8_t>(px_r);
                rgbImages[0]->pixels[3 * index + 1] = static_cast<uint8_t>(px_g);
                rgbImages[0]->pixels[3 * index + 2] = static_cast<uint8_t>(px_b);

                rgbImage.at<cv::Vec3b>(j, i)[0] = static_cast<uint8_t>(px_r);
                rgbImage.at<cv::Vec3b>(j, i)[1] = static_cast<uint8_t>(px_g);
                rgbImage.at<cv::Vec3b>(j, i)[2] = static_cast<uint8_t>(px_b);
            }
        }

        hasNewLumaData = false;
        hasNewColorData = true;
    }

    void MultiSensePointCloudProvider::lidarScanCallback(const crl::multisense::lidar::Header& header)
    {
        std::unique_lock lock(dataMutex);

        if (!hasNewColorData)
        {
            return;
        }

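        // For every range sample, interpolate the mirror and spindle angles over the scan,
        // build the spindle rotation about Z, and chain laser -> spindle -> motor -> camera
        // to express the point in the left camera frame. The point is then projected with
        // the left camera intrinsics to look up its color in the current RGB image.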
        for (size_t i = 0; i < header.pointCount; i++)
        {
            const uint32_t range = header.rangesP[i];
            const double percent = static_cast<double>(i) / static_cast<double>(header.pointCount - 1);
            const double mirrorTheta = -header.scanArc / 2000.0 + percent * header.scanArc / 1000.0;
            const double spindleTheta = header.spindleAngleStart / 1000.0 + percent * (header.spindleAngleEnd - header.spindleAngleStart) / 1000.0;

            Eigen::Matrix4f spindleToMotor = Eigen::Matrix4f::Identity();
            spindleToMotor.block<3, 3>(0, 0) = Eigen::AngleAxisf(spindleTheta, Eigen::Vector3f::UnitZ()).matrix();

            Eigen::Vector4f point(range * std::sin(mirrorTheta), 0.0, range * std::cos(mirrorTheta), 1.0);
            Eigen::Vector4f pointCamera = laserToSpindle * point;
            pointCamera = spindleToMotor * pointCamera;
            pointCamera = cameraToSpindle * pointCamera;

            pcl::PointXYZRGBA& targetPoint = pointCloudData->points[i];

            if (std::isfinite(pointCamera[2]))
            {
                visionx::CameraParameters p = calibration.calibrationLeft.cameraParam;

                int u = (p.focalLength[0] * pointCamera[0] + p.principalPoint[0] * pointCamera[2]) / pointCamera[2];
                int v = (p.focalLength[1] * pointCamera[1] + p.principalPoint[1] * pointCamera[2]) / pointCamera[2];

                if (u < p.width && v < p.height && u >= 0 && v >= 0)
                {
                    int index = v * p.width + u;

                    targetPoint.getArray4fMap() = pointCamera;

                    targetPoint.r = static_cast<uint8_t>(rgbImages[0]->pixels[3 * index + 0]);
                    targetPoint.g = static_cast<uint8_t>(rgbImages[0]->pixels[3 * index + 1]);
                    targetPoint.b = static_cast<uint8_t>(rgbImages[0]->pixels[3 * index + 2]);
                }
            }
            else
            {
                targetPoint.x = std::numeric_limits<float>::quiet_NaN();
                targetPoint.y = std::numeric_limits<float>::quiet_NaN();
                targetPoint.z = std::numeric_limits<float>::quiet_NaN();
            }
        }

        // pointCloudData->header.frame_id = header.frameId;
        pointCloudData->header.stamp = static_cast<uint64_t>(header.timeStartSeconds) * 1000 * 1000 + header.timeStartMicroSeconds;

        hasNewDisparityData = true;
    }

    void MultiSensePointCloudProvider::disparityImageCallback(const crl::multisense::image::Header& header)
    {
        if (header.bitsPerPixel != 16)
        {
            ARMARX_ERROR << "Invalid disparity data: expected 16 bits per pixel, got " << header.bitsPerPixel;
            return;
        }

        cv::Mat_<uint16_t> disparityOrigP(header.height, header.width, const_cast<uint16_t*>(reinterpret_cast<const uint16_t*>(header.imageDataP)));

        std::unique_lock lock(dataMutex);

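        // The sensor delivers disparity as 16-bit fixed point with 1/16 pixel resolution.
        // Convert to float pixels, reproject to 3D with the Q matrix, and scale the result
        // by 1000 (meters to millimeters); invalid disparities become NaN points.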
        cv::Mat_<float> disparity(header.height, header.width);
        disparity = disparityOrigP / 16.0f;
        disparity.copyTo(disparityImage);

        cv::Mat_<cv::Vec3f> points(disparityImage.rows, disparityImage.cols);

        cv::reprojectImageTo3D(disparity, points, q_matrix, false);

        for (size_t j = 0; j < header.height; j++)
        {
            for (size_t i = 0; i < header.width; i++)
            {
                int index = j * header.width + i;
                cv::Vec3f sourcePoint = points.at<cv::Vec3f>(j, i);
                pcl::PointXYZRGBA& targetPoint = pointCloudData->points[index];

                //uint16_t value = disparityOrigP.at<uint16_t>(j, i);

                //rgbImages[1]->pixels[3 * index + 0] = value & 0xFF;
                //rgbImages[1]->pixels[3 * index + 1] = (value >> 8) & 0xFF;

                if (std::isfinite(sourcePoint[2]))
                {
                    targetPoint.x = sourcePoint[0] * 1000.0;
                    targetPoint.y = sourcePoint[1] * 1000.0;
                    targetPoint.z = sourcePoint[2] * 1000.0;
                }
                else
                {
                    targetPoint.x = std::numeric_limits<float>::quiet_NaN();
                    targetPoint.y = std::numeric_limits<float>::quiet_NaN();
                    targetPoint.z = std::numeric_limits<float>::quiet_NaN();
                }

                //point.getVector3fMap() =
            }
        }

        pointCloudData->header.frame_id = std::to_string(header.frameId);
        pointCloudData->header.stamp = static_cast<uint64_t>(header.timeSeconds) * 1000 * 1000 + header.timeMicroSeconds;

        hasNewDisparityData = true;
    }

    void MultiSensePointCloudProvider::onInitImageProvider()
    {
        setImageFormat(visionx::ImageDimension(1024, 544), visionx::eRgb);
        setNumberImages(2);
    }

    visionx::StereoCalibration MultiSensePointCloudProvider::getStereoCalibration(const Ice::Current&)
    {
        std::unique_lock lock(captureMutex);

        return calibration;
    }

    visionx::MonocularCalibration MultiSensePointCloudProvider::getMonocularCalibration(const Ice::Current& c)
    {
        std::unique_lock lock(captureMutex);

        return calibration.calibrationLeft;
    }

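    // The default format sizes the shared memory for a full organized 1024 x 544 colored point cloud.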
    visionx::MetaPointCloudFormatPtr MultiSensePointCloudProvider::getDefaultPointCloudFormat()
    {
        visionx::MetaPointCloudFormatPtr info = new visionx::MetaPointCloudFormat();
        info->capacity = 1024 * 544 * sizeof(visionx::ColoredPoint3D);
        info->size = info->capacity;
        info->type = visionx::PointContentType::eColoredPoints;
        return info;
    }

    armarx::PropertyDefinitionsPtr MultiSensePointCloudProvider::createPropertyDefinitions()
    {
        return armarx::PropertyDefinitionsPtr(
            new MultiSensePointCloudProviderPropertyDefinitions(getConfigIdentifier()));
    }
}