AzureKinectPointCloudProvider.cpp
2 
3 #include <cstdio>
4 #include <filesystem>
5 #include <functional>
6 #include <mutex>
7 #include <utility>
8 
9 #include <opencv2/core/hal/interface.h>
10 #include <opencv2/imgproc/imgproc.hpp>
11 
12 #include <IceUtil/Time.h>
13 
23 
25 
31 
32 #include <Calibration/Calibration.h>
33 #include <Image/ImageProcessor.h>
34 
35 #ifdef INCLUDE_BODY_TRACKING
36 #include <k4abt.hpp>
37 #endif
38 
39 
40 namespace
41 {
42  /**
43  * @brief Converts a k4a image to an IVT image.
44  * @param color_image Reference to a k4a image containing the color data that should be converted.
45  * @param result IVT image the converted image is stored in. The sizes of this image and color_image must match.
46  */
47  [[maybe_unused]] void
48  k4aToIvtImage(const k4a::image& color_image, ::CByteImage& result)
49  {
50  if (color_image.get_format() == K4A_IMAGE_FORMAT_COLOR_YUY2)
51  {
52  // Convert YUY2 to RGB using OpenCV
53  cv::Mat yuy2_image(color_image.get_height_pixels(),
54  color_image.get_width_pixels(),
55  CV_8UC2,
56  const_cast<uint8_t*>(color_image.get_buffer()));
57  cv::Mat rgb_image;
58  cv::cvtColor(yuy2_image, rgb_image, cv::COLOR_YUV2RGB_YUY2);
59 
60  // Convert the OpenCV Mat to the IVT CByteImage format
61  visionx::imrec::convert(rgb_image, result);
62  }
63  else if (color_image.get_format() == K4A_IMAGE_FORMAT_COLOR_BGRA32)
64  {
65  auto cw = static_cast<unsigned int>(color_image.get_width_pixels());
66  auto ch = static_cast<unsigned int>(color_image.get_height_pixels());
67  ARMARX_CHECK_EQUAL(static_cast<unsigned int>(result.width), cw);
68  ARMARX_CHECK_EQUAL(static_cast<unsigned int>(result.height), ch);
69 
70  auto color_buffer = color_image.get_buffer();
71  auto rgb_buffer_ivt = result.pixels;
72 
73  // Index in the IVT image. This value will be increased by 3 per pixel.
74  int index_ivt = 0;
75 
76  // Index in the k4a image. This value increases by 4 per pixel.
77  int index_k4a = 0;
78 
79  for (unsigned int y = 0; y < ch; ++y)
80  {
81  for (unsigned int x = 0; x < cw; ++x)
82  {
83  rgb_buffer_ivt[index_ivt] = color_buffer[index_k4a + 2];
84  rgb_buffer_ivt[index_ivt + 1] = color_buffer[index_k4a + 1];
85  rgb_buffer_ivt[index_ivt + 2] = color_buffer[index_k4a + 0];
86  index_ivt += 3;
87  index_k4a += 4;
88  }
89  }
90  }
91  else
92  {
93  throw std::runtime_error("Unsupported color format in k4a image");
94  }
95  }
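 // Worked example for the BGRA32 branch above (illustrative values): a pixel
 // stored as {B=0x10, G=0x20, R=0x30, A=0xFF} at k4a buffer offset 0 is written
 // to the IVT buffer as {R=0x30, G=0x20, B=0x10}; the alpha byte is dropped,
 // which is why index_k4a advances by 4 while index_ivt advances by 3.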
96 
97 
98 #ifdef INCLUDE_BODY_TRACKING
99  void
100  printBodyInformation(k4abt_body_t body)
101  {
102  ARMARX_VERBOSE << "Body ID: " << body.id;
103  for (int i = 0; i < static_cast<int>(K4ABT_JOINT_COUNT); i++)
104  {
105  const k4a_float3_t position = body.skeleton.joints[i].position;
106  const k4a_quaternion_t orientation = body.skeleton.joints[i].orientation;
107  const k4abt_joint_confidence_level_t confidence_level =
108  body.skeleton.joints[i].confidence_level;
109 
 110  ARMARX_VERBOSE << "Joint " << i << ": " << "Position[mm] (" << position.v[0] << ","
111  << position.v[1] << "," << position.v[2] << "); " << "Orientation ("
112  << orientation.v[0] << "," << orientation.v[1] << "," << orientation.v[2]
113  << "," << orientation.v[3] << "); " << "Confidence Level "
114  << confidence_level;
115  }
116  }
117 #endif
118 
119  const char*
120  k4aImageFormatToString(k4a_image_format_t format)
121  {
122  switch (format)
123  {
124  case K4A_IMAGE_FORMAT_COLOR_MJPG:
125  return "COLOR_MJPG";
126  case K4A_IMAGE_FORMAT_COLOR_NV12:
127  return "COLOR_NV12";
128  case K4A_IMAGE_FORMAT_COLOR_YUY2:
129  return "COLOR_YUY2";
130  case K4A_IMAGE_FORMAT_COLOR_BGRA32:
131  return "COLOR_BGRA32";
132  case K4A_IMAGE_FORMAT_DEPTH16:
133  return "DEPTH16";
134  case K4A_IMAGE_FORMAT_IR16:
135  return "IR16";
136  case K4A_IMAGE_FORMAT_CUSTOM:
137  return "CUSTOM";
138  default:
139  return "UNKNOWN";
140  }
141  }
142 
143 } // namespace
144 
145 namespace visionx
146 {
147 
148  std::function<void(armarx::Duration)>
149  AzureKinectPointCloudProvider::createSwCallback(const std::string& description)
150  {
151  return [=](armarx::Duration duration)
152  {
153  std::string name = "duration " + description + " in [ms]";
154  {
155  std::lock_guard g{metaInfoMtx};
156  setMetaInfo(name, new armarx::Variant{duration.toMilliSecondsDouble()});
157  }
158 
159  {
160  std::lock_guard g{debugObserverMtx};
161  setDebugObserverDatafield(name, duration.toMilliSecondsDouble());
162  }
163  };
164  }
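 // Usage sketch (mirroring the call sites in doCapture() below): pairing the
 // returned callback with a ScopedStopWatch publishes the elapsed time of a
 // scope both as component meta info and as a debug observer datafield:
 //
 //     {
 //         ScopedStopWatch sw{createSwCallback("some step")};  // "some step" is a placeholder
 //         // ... timed work ...
 //     }  // on destruction, sw invokes the callback with the elapsed Duration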
165 
 166  AzureKinectPointCloudProviderPropertyDefinitions::
 167  AzureKinectPointCloudProviderPropertyDefinitions(std::string prefix) :
 168  CapturingPointCloudProviderPropertyDefinitions(std::move(prefix))
 169  {
170  defineOptionalProperty<k4a_color_resolution_t>(
171  "ColorResolution", K4A_COLOR_RESOLUTION_720P, "Resolution of the RGB camera image.")
172  .map("0", K4A_COLOR_RESOLUTION_OFF) /** Color camera will be turned off */
173  .map("720", K4A_COLOR_RESOLUTION_720P) /** 1280x720 16:9 */
174  .map("1080", K4A_COLOR_RESOLUTION_1080P) /** 1920x1080 16:9 */
175  .map("1440", K4A_COLOR_RESOLUTION_1440P) /** 2560x1440 16:9 */
176  .map("1536", K4A_COLOR_RESOLUTION_1536P) /** 2048x1536 4:3 */
177  .map("2160", K4A_COLOR_RESOLUTION_2160P) /** 3840x2160 16:9 */
178  .map("3072", K4A_COLOR_RESOLUTION_3072P); /** 4096x3072 4:3 */
179  defineOptionalProperty<k4a_depth_mode_t>(
180  "DepthMode", K4A_DEPTH_MODE_NFOV_UNBINNED, "Resolution/mode of the depth camera image.")
181  .setCaseInsensitive(true)
182  .map("OFF", K4A_DEPTH_MODE_OFF)
183  .map("NFOV_2X2BINNED", K4A_DEPTH_MODE_NFOV_2X2BINNED)
184  .map("NFOV_UNBINNED", K4A_DEPTH_MODE_NFOV_UNBINNED)
185  .map("WFOV_2X2BINNED", K4A_DEPTH_MODE_WFOV_2X2BINNED)
186  .map("WFOV_UNBINNED", K4A_DEPTH_MODE_WFOV_UNBINNED)
187  .map("PASSIVE_IR", K4A_DEPTH_MODE_PASSIVE_IR);
188 
189  defineOptionalProperty<float>("MaxDepth",
190  6000.0,
191  "Max. allowed depth value in mm. Depth values above this "
192  "threshold will be set to nan.");
193 
194  defineOptionalProperty<float>("CaptureTimeOffset",
195  16.0f,
196  "In Milliseconds. Time offset between capturing the image on "
197  "the hardware and receiving the image in this process.",
 198  armarx::PropertyDefinitionBase::eModifiable);
 199 
200  defineOptionalProperty<k4a_image_format_t>(
201  "ColorFormat", K4A_IMAGE_FORMAT_COLOR_BGRA32, "Color format of the RGB camera image.")
202  .map("BGRA", K4A_IMAGE_FORMAT_COLOR_BGRA32)
203  .map("YUY2", K4A_IMAGE_FORMAT_COLOR_YUY2);
204  }
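 // Example (hypothetical property file entries; the exact prefix depends on the
 // application configuration):
 //
 //     ArmarX.AzureKinectPointCloudProvider.ColorResolution = 1080
 //     ArmarX.AzureKinectPointCloudProvider.DepthMode       = NFOV_UNBINNED
 //     ArmarX.AzureKinectPointCloudProvider.ColorFormat     = BGRA
 //
 // With the .map() calls above, these tokens resolve to K4A_COLOR_RESOLUTION_1080P,
 // K4A_DEPTH_MODE_NFOV_UNBINNED and K4A_IMAGE_FORMAT_COLOR_BGRA32, respectively.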
205 
 206  armarx::PropertyDefinitionsPtr
 207  AzureKinectPointCloudProvider::createPropertyDefinitions()
 208  {
 209  armarx::PropertyDefinitionsPtr defs =
 210  new AzureKinectPointCloudProviderPropertyDefinitions(getConfigIdentifier());
 211 
212  defs->optional(
213  enableColorUndistortion,
214  "EnableColorUndistortion",
215  "Undistort the color images using the full 8 radial and tangential distortion "
216  "parameters provided by the Azure Kinect.\n"
217  "This can help for processing tasks which cannot handle radial parameters k3-k6.\n"
218  "Note that this drastically reduces the FPS (to something like 3).");
219  defs->optional(externalCalibrationFilePath,
220  "ExternalCalibrationFilePath",
221  "Path to an optional external"
222  " calibration file, which has a"
223  " camera matrix and distortion"
224  " parameters.");
225  defs->optional(mDeviceId, "device_id", "ID of the device.");
226 
227  defs->optional(robotName, "robotName");
228  defs->optional(bodyCameraFrameName, "bodyCameraFrameName");
229  defs->optional(framerate.value,
230  "framerate.images",
231  "The framerate of RGB-D images [frames per second]."
232  "\nNote that the point cloud and body tracking frame rates are controlled by"
233  " the properties 'framerate' (point cloud) and 'framerate.bodyTracking'"
234  " (body tracking), respectively.")
235  .setMin(5)
236  .setMax(30);
237 
238 #ifdef INCLUDE_BODY_TRACKING
239  defs->optional(bodyTrackingEnabled,
240  "bodyTrackingEnabled",
241  "Whether the Azure Kinect Body Tracking SDK should be enabled or not.");
242  defs->optional(bodyTrackingRunAtStart,
243  "bodyTrackingRunAtStart",
244  "Whether the Azure Kinect Body Tracking SDK should directly run when the "
245  "component is startet."
246  "Otherwise it has to be activated by the ice interface.");
247  defs->optional(bodyTrackingModelFilename,
248  "bodyTrackingModelPath",
249  "Path where the .onnx DNN files can be found");
250  defs->optional(bodyTrackingGPUDeviceID, "bodyTrackingGPUDeviceID", "GPU Device ID.");
251  defs->optional(bodyTrackingTemporalSmoothingFactor,
252  "bodyTrackingTemporalSmoothingFactor",
253  "Temporal smoothing factor for Azure Kinect body tracking.");
254 
255  defs->optional(bodyTrackingDepthMaskMinX, "bodyTrackingDepthMaskMinX");
256  defs->optional(bodyTrackingDepthMaskMaxX, "bodyTrackingDepthMaskMaxX");
257  defs->optional(bodyTrackingDepthMaskMaxZ, "bodyTrackingDepthMaskMaxZ");
258  defs->optional(framerate.bodyTracking.value,
259  "framerate.bodyTracking",
260  "The framerate with with the body tracking is run [frames per second]."
261  "\nNote that the RGB-D image and point cloud frame rates are controlled by"
262  " the properties 'framerate.image' (RGB-D images) and 'framerate'"
263  " (point cloud), respectively.")
264  .setMin(1.)
265  .setMax(30.);
266 
267  defs->optional(startIMU, "startIMU");
268 
269  humanPoseWriter.registerPropertyDefinitions(defs);
270 #endif
271 
272  defs->optional(enableHeartbeat, "enableHeartbeat");
273 
274  return defs;
275  }
276 
277  void
 278  AzureKinectPointCloudProvider::onInitImageProvider()
 279  {
280  config = K4A_DEVICE_CONFIG_INIT_DISABLE_ALL;
281  if (framerate.value == 5)
282  {
283  config.camera_fps = K4A_FRAMES_PER_SECOND_5;
284  }
285  else if (framerate.value == 15)
286  {
287  config.camera_fps = K4A_FRAMES_PER_SECOND_15;
288  }
289  else if (framerate.value == 30)
290  {
291  config.camera_fps = K4A_FRAMES_PER_SECOND_30;
292  }
293  else
294  {
295  throw armarx::LocalException("Invalid image framerate (property 'framerate.images'): ")
296  << framerate.value << ". Only framerates 5, 15 and 30 are "
297  << "supported by Azure Kinect.";
298  }
299 
300  framerate.pointCloud.value =
301  getProperty<float>("framerate"); // point cloud provider framerate
302  framerate.pointCloud.update(framerate.value);
303 
304 #ifdef INCLUDE_BODY_TRACKING
305  framerate.bodyTracking.update(framerate.value);
306 #endif
307 
308  config.depth_mode = getProperty<k4a_depth_mode_t>("DepthMode");
309  config.color_format = getProperty<k4a_image_format_t>("ColorFormat");
310  config.color_resolution = getProperty<k4a_color_resolution_t>("ColorResolution");
311 
312  // This means that we'll only get captures that have both color and depth images, so we don't
313  // need to check if the capture contains a particular type of image.
314  config.synchronized_images_only = true;
315 
316  // Set number of images per frame.
317  setNumberImages(2);
318 
319  auto depth_dim = GetDepthDimensions(config.depth_mode);
320  auto color_dim = GetColorDimensions(config.color_resolution);
321 
322  ARMARX_INFO << "Depth image size: " << depth_dim.first << "x" << depth_dim.second;
323  ARMARX_INFO << "Color image size: " << color_dim.first << "x" << color_dim.second;
324 
325  alignedDepthImage =
326  k4a::image::create(K4A_IMAGE_FORMAT_DEPTH16,
327  color_dim.first,
328  color_dim.second,
329  color_dim.first * 2 * static_cast<int32_t>(sizeof(uint8_t)));
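 // Stride note: DEPTH16 stores one little-endian uint16_t per pixel, so the row
 // stride is width * 2 bytes (the sizeof(uint8_t) factor only makes the byte
 // unit explicit). For 720p color (1280x720), this is 1280 * 2 = 2560 bytes.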
330 
331  setImageFormat(visionx::ImageDimension(color_dim.first, color_dim.second),
332  visionx::eRgb,
333  visionx::eBayerPatternGr);
334 
335  resultDepthImage.reset(visionx::tools::createByteImage(getImageFormat(), visionx::eRgb));
336  resultColorImage =
337  std::make_unique<CByteImage>(color_dim.first, color_dim.second, CByteImage::eRGB24);
338 
339  xyzImage = k4a::image::create(K4A_IMAGE_FORMAT_CUSTOM,
340  color_dim.first,
341  color_dim.second,
342  color_dim.first * 3 * static_cast<int32_t>(sizeof(int16_t)));
343 
344  pointcloud = std::make_unique<pcl::PointCloud<CloudPointType>>();
345  }
346 
347  void
 348  AzureKinectPointCloudProvider::onConnectImageProvider()
 349  {
350  ARMARX_DEBUG << "Connecting " << getName();
351 
 352  pointcloudTask = new armarx::RunningTask<AzureKinectPointCloudProvider>(
 353  this, &AzureKinectPointCloudProvider::runPointcloudPublishing);
357  pointcloudTask->start();
358 
359 #ifdef INCLUDE_BODY_TRACKING
360  if (bodyTrackingEnabled)
361  {
362  ARMARX_INFO << "Connecting to human memory ...";
363  humanPoseWriter.connect(memoryNameSystem());
364  ARMARX_INFO << "Connected to human memory.";
365 
366  bodyTrackingPublishTask = new armarx::RunningTask<AzureKinectPointCloudProvider>(
367  this, &AzureKinectPointCloudProvider::runPublishBodyTrackingResults);
368  }
369 #endif
370 
371  if (enableHeartbeat)
372  {
373  ARMARX_CHECK_NOT_NULL(heartbeatPlugin);
374  heartbeatPlugin->signUp(armarx::Duration::MilliSeconds(500),
376  {"Vision", "Camera"},
377  "AzureKinectPointCloudProvider");
378  }
379 
380  ARMARX_VERBOSE << "Connected " << getName();
381  }
382 
383  void
 384  AzureKinectPointCloudProvider::onDisconnectImageProvider()
 385  {
386  ARMARX_DEBUG << "Disconnecting " << getName();
387 
388  // Stop task
389  {
390  std::lock_guard<std::mutex> lock(pointcloudProcMutex);
391  ARMARX_DEBUG << "Stopping pointcloud processing thread...";
392  const bool WAIT_FOR_JOIN = false;
393  pointcloudTask->stop(WAIT_FOR_JOIN);
394  pointcloudProcSignal.notify_all();
395  ARMARX_DEBUG << "Waiting for pointcloud processing thread to stop...";
396  pointcloudTask->waitForStop();
397  ARMARX_DEBUG << "Pointcloud processing thread stopped";
398  }
399 
400 #ifdef INCLUDE_BODY_TRACKING
401  if (bodyTrackingIsRunning)
402  {
403  bodyTrackingPublishTask->stop();
404  }
405 #endif
406 
407  ARMARX_DEBUG << "Disconnected " << getName();
408  }
409 
410  void
 411  AzureKinectPointCloudProvider::onInitCapturingPointCloudProvider()
 412  {
413  this->setPointCloudSyncMode(eCaptureSynchronization);
414  ARMARX_TRACE;
415  }
416 
417  void
 418  AzureKinectPointCloudProvider::onExitCapturingPointCloudProvider()
 419  {
420  ARMARX_INFO << "Closing Azure Kinect device";
421  device.close();
422  ARMARX_INFO << "Closed Azure Kinect device";
423  }
424 
425  void
 426  AzureKinectPointCloudProvider::onStartCapture(float framesPerSecond)
 427  {
428  ARMARX_TRACE;
429 
430  cloudFormat = getPointCloudFormat();
431 
432  // Check for devices.
433  const uint32_t DEVICE_COUNT = k4a::device::get_installed_count();
434  if (DEVICE_COUNT == 0)
435  {
436  getArmarXManager()->asyncShutdown();
437  throw armarx::LocalException("No Azure Kinect devices detected!");
438  }
439 
440  ARMARX_TRACE;
441 
442  device = k4a::device::open(static_cast<uint32_t>(mDeviceId));
443  ARMARX_DEBUG << "Opened device id #" << mDeviceId << " with serial number "
444  << device.get_serialnum() << ".";
445 
446  ARMARX_TRACE;
447 
448  auto depthDim = GetDepthDimensions(config.depth_mode);
449  auto colorDim = GetColorDimensions(config.color_resolution);
450  (void)depthDim;
451  (void)colorDim;
452 
453  k4aCalibration = device.get_calibration(config.depth_mode, config.color_resolution);
455  << "Color camera calibration:" << "\n"
456  << "cx: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.cx
457  << "\n"
458  << "cy: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.cy
459  << "\n"
460  << "fx: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.fx
461  << "\n"
462  << "fy: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.fy
463  << "\n"
464  << "k1: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k1
465  << "\n"
466  << "k2: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k2
467  << "\n"
468  << "p1: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.p1
469  << "\n"
470  << "p2: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.p2
471  << "\n"
472  << "k3: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k3
473  << "\n"
474  << "k4: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k4
475  << "\n"
476  << "k5: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k5
477  << "\n"
478  << "k6: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k6;
479 
480 
481  auto c = getStereoCalibration(Ice::Current());
482  CCalibration* c_left = tools::convert(c.calibrationLeft);
483  c_left->PrintCameraParameters();
484  delete c_left;
485  CCalibration* c_right = tools::convert(c.calibrationRight);
486  c_right->PrintCameraParameters();
487  delete c_right;
488 
489  transformation = k4a::transformation(k4aCalibration);
490 
491 #ifdef INCLUDE_BODY_TRACKING
492  if (bodyTrackingEnabled)
493  {
 494  // if present, resolve environment variables
495  armarx::ArmarXDataPath::ReplaceEnvVars(bodyTrackingModelFilename);
496 
497  const bool found = armarx::ArmarXDataPath::getAbsolutePath(bodyTrackingModelFilename,
498  bodyTrackingModelFilename);
499  ARMARX_CHECK(found) << "Body tracking DNN model could not be found/resolved at `"
500  << bodyTrackingModelFilename << "`.";
501 
502  ARMARX_INFO << "Using body tracking DNN model from directory `"
503  << bodyTrackingModelFilename << "`.";
504 
505  ARMARX_CHECK(std::filesystem::exists(bodyTrackingModelFilename))
506  << "The path `" << bodyTrackingModelFilename << "` does not exist!";
507 
508  k4abt_tracker_configuration_t const bodyTrackingConfig{
509  .sensor_orientation = K4ABT_SENSOR_ORIENTATION_DEFAULT,
510  .processing_mode = K4ABT_TRACKER_PROCESSING_MODE_GPU_CUDA,
511  .gpu_device_id = bodyTrackingGPUDeviceID,
512  .model_path = bodyTrackingModelFilename.c_str()};
513 
514  bodyTracker = k4abt::tracker::create(k4aCalibration, bodyTrackingConfig);
515  bodyTracker.set_temporal_smoothing(bodyTrackingTemporalSmoothingFactor);
516 
517  if (bodyTrackingRunAtStart and not bodyTrackingIsRunning)
518  {
519  bodyTrackingIsRunning = true;
520  bodyTrackingPublishTask->start();
521  }
522  }
523 #endif
524 
525  ARMARX_TRACE;
526  device.set_color_control(K4A_COLOR_CONTROL_BRIGHTNESS, K4A_COLOR_CONTROL_MODE_MANUAL, 128);
527 
528  device.start_cameras(&config);
529 
530  ARMARX_TRACE;
531 
532  if (startIMU)
533  {
534  device.start_imu();
535  ARMARX_INFO << "IMU is active" << std::endl;
536 
537  ARMARX_TRACE;
538  }
539 
540  setMetaInfo("serialNumber", new armarx::Variant(device.get_serialnum()));
541  setMetaInfo("rgbVersion", new armarx::Variant(VersionToString(device.get_version().rgb)));
542  setMetaInfo("depthVersion",
543  new armarx::Variant(VersionToString(device.get_version().depth)));
544  setMetaInfo("depthSensorVersion",
545  new armarx::Variant(VersionToString(device.get_version().depth_sensor)));
546  setMetaInfo("audioVersion",
547  new armarx::Variant(VersionToString(device.get_version().audio)));
548 
549  ARMARX_TRACE;
550 
551 
552  // Color image calibration
553  {
554  // Load intrinsics from camera
555  const k4a_calibration_camera_t calibration{k4aCalibration.color_camera_calibration};
556  const k4a_calibration_intrinsic_parameters_t::_param param =
557  calibration.intrinsics.parameters.param;
558 
559  // // Scale intrinsics according to image scale
560  // param.fx *= image_scale_;
561  // param.fy *= image_scale_;
562 
563  // Calculate distortion map
564  cv::Mat1f camera_matrix(3, 3);
565  camera_matrix << param.fx, 0, param.cx, 0, param.fy, param.cy, 0, 0, 1;
566  cv::Mat1f new_camera_matrix(3, 3);
567  new_camera_matrix << param.fx, 0, param.cx, 0, param.fx, param.cy, 0, 0, 1;
568  cv::Mat1f distortion_coeff(1, 8);
569  distortion_coeff << param.k1, param.k2, param.p1, param.p2, param.k3, param.k4,
570  param.k5, param.k6;
571  cv::Mat map1, map2, map3;
572  cv::initUndistortRectifyMap(
573  camera_matrix,
574  distortion_coeff,
575  cv::Mat{},
576  new_camera_matrix,
577  cv::Size{calibration.resolution_width, calibration.resolution_height},
578  CV_32FC1,
579  map1,
580  map2);
581  cv::convertMaps(map1, map2, colorDistortionMap, map3, CV_16SC2, true);
582  }
583  }
584 
585  void
 586  AzureKinectPointCloudProvider::onStopCapture()
 587  {
588  if (startIMU)
589  {
590  device.stop_imu();
591  }
592  }
593 
594  bool
 595  AzureKinectPointCloudProvider::doCapture()
 596  {
 598  setDebugObserverBatchModeEnabled(true);
 599 
600  ScopedStopWatch sw_total{createSwCallback("doCapture")};
601 
602  k4a::capture capture;
603  const std::chrono::milliseconds TIMEOUT{1000};
604 
605  StopWatch sw_get_capture;
606  bool status = false;
607  try
608  {
609  ARMARX_DEBUG << "Try capture.";
610  status = device.get_capture(&capture, TIMEOUT);
611  ARMARX_DEBUG << "Got capture.";
612  }
613  catch (const std::exception&)
614  {
615  ARMARX_WARNING << "Failed to get capture from device (#" << ++mDiagnostics.num_crashes
616  << "). Restarting camera.";
617  StopWatch sw;
618  device.stop_cameras();
619  device.start_cameras(&config);
620  ARMARX_INFO << "Restarting took " << sw.stop() << ".";
621  return false;
622  }
623 
624  if (status)
625  {
626  createSwCallback("waiting for get_capture")(sw_get_capture.stop());
627 
628  {
629  std::lock_guard g{metaInfoMtx};
630  setMetaInfo("temperature", new armarx::Variant(capture.get_temperature_c()));
631  }
632 
 633  // See the ROS driver: this will give INCORRECT timestamps until the first image.
634  const bool mustInitializeTimestampOffset = [&]()
635  {
636  std::lock_guard g{deviceToRealtimeOffsetMtx};
637  return device_to_realtime_offset_.count() == 0;
638  }();
639  if (mustInitializeTimestampOffset)
640  {
641  initializeTimestampOffset(capture.get_depth_image().get_device_timestamp());
642  }
643 
644  // next, we update the timestamp offset continuously
645  updateTimestampOffset(capture.get_ir_image().get_device_timestamp(),
646  capture.get_ir_image().get_system_timestamp());
647 
648  const k4a::image DEPTH_IMAGE = capture.get_depth_image();
649 
650  // This function assumes that the image is made of depth pixels (i.e. uint16_t's),
651  // which is only true for IR/depth images.
652  const k4a_image_format_t IMAGE_FORMAT = DEPTH_IMAGE.get_format();
653  if (IMAGE_FORMAT != K4A_IMAGE_FORMAT_DEPTH16 && IMAGE_FORMAT != K4A_IMAGE_FORMAT_IR16)
654  {
655  const char* format_str = ::k4aImageFormatToString(IMAGE_FORMAT);
656  std::stringstream error_msg;
657  error_msg << "Attempted to colorize a non-depth image with format: " << format_str;
658  throw std::logic_error(error_msg.str());
659  }
660 
661  // Provide data for pointcloud processing thread and signal to start processing.
662  if (not framerate.pointCloud.skip())
663  {
664  ScopedStopWatch sw{createSwCallback("transform depth image to camera")};
665  // Acquire lock and write data needed by pointcloud thread (i.e.,
666  // alignedDepthImageScaled and depthImageReady).
667  {
668  std::lock_guard lock{pointcloudProcMutex};
669  transformation.depth_image_to_color_camera(DEPTH_IMAGE, &alignedDepthImage);
670 
671  // Signal that point cloud processing may proceed and reset processed flag.
672  depthImageReady = true;
673  }
674 
675  ARMARX_DEBUG << "Notifying pointcloud thread.";
676  pointcloudProcSignal.notify_one();
677  }
678 
679 #ifdef INCLUDE_BODY_TRACKING
680  if (bodyTrackingIsRunning)
681  {
682  ARMARX_DEBUG << "body Tracking Is Running.";
683  std::scoped_lock lock(bodyTrackingParameterMutex);
684 
685  if (not framerate.bodyTracking.skip())
686  {
687  k4a::image ir_image = capture.get_ir_image();
688  std::uint8_t* ir_image_buffer = ir_image.get_buffer();
689 
690  k4a::image depth_image = capture.get_depth_image();
691  std::uint8_t* depth_image_buffer = depth_image.get_buffer();
692 
693  ARMARX_CHECK_EQUAL(ir_image.get_width_pixels(), depth_image.get_width_pixels());
694  ARMARX_CHECK_EQUAL(ir_image.get_height_pixels(),
695  depth_image.get_height_pixels());
696 
697  const int stride = ir_image.get_stride_bytes() / ir_image.get_width_pixels();
698 
699  for (int x = 0; x < ir_image.get_width_pixels(); x++)
700  {
701  for (int y = 0; y < ir_image.get_height_pixels(); y++)
702  {
703  const int i = (y * ir_image.get_width_pixels() * stride) + (x * stride);
704  const int z = (static_cast<int>(depth_image_buffer[i])) +
705  (static_cast<int>(depth_image_buffer[i + 1]) << 8);
706 
707  if ((bodyTrackingDepthMaskMinX > 0 and x < bodyTrackingDepthMaskMinX) or
708  (bodyTrackingDepthMaskMaxX > 0 and x > bodyTrackingDepthMaskMaxX) or
709  (bodyTrackingDepthMaskMaxZ > 0 and z > bodyTrackingDepthMaskMaxZ))
710  {
711  ir_image_buffer[i] = std::numeric_limits<std::uint8_t>::max();
712  ir_image_buffer[i + 1] = std::numeric_limits<std::uint8_t>::max();
713  depth_image_buffer[i] = std::numeric_limits<std::uint8_t>::max();
 714  depth_image_buffer[i + 1] =
 715  std::numeric_limits<std::uint8_t>::max();
 716  }
717  }
718  }
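 // Note on the masking above: DEPTH16/IR16 pixels are little-endian uint16_t
 // values, so the depth in mm is reassembled from two bytes as
 // z = buffer[i] + (buffer[i + 1] << 8). Masked pixels get both bytes set to
 // 0xFF (i.e. 65535 mm), pushing them far outside the configured range so the
 // tracker effectively ignores them.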
719 
720  if (not bodyTracker.enqueue_capture(capture))
721  {
722  ARMARX_WARNING << "Add capture to tracker process queue timeout";
723  }
724  }
725  }
726 #endif
727 
728  const k4a::image COLOR_IMAGE = capture.get_color_image();
729  ARMARX_DEBUG << "Got COLOR image.";
730 
731  auto real_time = IceUtil::Time::now();
732  auto monotonic_time = IceUtil::Time::now(IceUtil::Time::Monotonic);
733  auto clock_diff = real_time - monotonic_time;
734 
735  auto image_monotonic_time =
736  IceUtil::Time::microSeconds(std::chrono::duration_cast<std::chrono::microseconds>(
737  DEPTH_IMAGE.get_system_timestamp())
738  .count());
739  long offset = long(getProperty<float>("CaptureTimeOffset").getValue() * 1000.0f);
740 
741  imagesTime = image_monotonic_time + clock_diff - IceUtil::Time::microSeconds(offset);
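 // In short: clock_diff maps the image's monotonic system timestamp onto the
 // realtime clock, and CaptureTimeOffset (default 16 ms) shifts the result back
 // to approximate the moment the hardware actually captured the frame.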
742 
743  {
744  std::lock_guard g{metaInfoMtx};
745  setMetaInfo("image age in [ms]",
746  new armarx::Variant{(real_time - imagesTime).toMilliSecondsDouble()});
747  }
748 
749  {
750  std::lock_guard g{debugObserverMtx};
751  setDebugObserverDatafield("image age in [ms]",
752  (real_time - imagesTime).toMilliSecondsDouble());
753  }
754 
755  if (enableColorUndistortion)
756  {
757  // only the color image needs to be rectified
758  cv::Mat tmp_rgb_image;
759 
760  if (COLOR_IMAGE.get_format() == K4A_IMAGE_FORMAT_COLOR_YUY2)
761  {
762  ARMARX_DEBUG << "Converting YUY2 image.";
763  cv::Mat yuy2_image(COLOR_IMAGE.get_height_pixels(),
764  COLOR_IMAGE.get_width_pixels(),
765  CV_8UC2,
766  const_cast<uint8_t*>(COLOR_IMAGE.get_buffer()));
767 
768  cv::cvtColor(yuy2_image, tmp_rgb_image, cv::COLOR_YUV2RGB_YUY2);
769  }
770  else if (COLOR_IMAGE.get_format() == K4A_IMAGE_FORMAT_COLOR_BGRA32)
771  {
772  cv::cvtColor(cv::Mat{cv::Size{COLOR_IMAGE.get_width_pixels(),
773  COLOR_IMAGE.get_height_pixels()},
774  CV_8UC4,
775  (void*)COLOR_IMAGE.get_buffer(),
776  cv::Mat::AUTO_STEP},
777  tmp_rgb_image,
778  cv::COLOR_BGRA2RGB);
779  }
780  else
781  {
782  throw std::runtime_error("Unsupported color format in k4a image");
783  }
784 
 785  cv::Mat cv_color_image_undistorted(
 786  COLOR_IMAGE.get_height_pixels(), COLOR_IMAGE.get_width_pixels(), CV_8UC3);
787 
788  cv::remap(tmp_rgb_image,
789  cv_color_image_undistorted,
790  colorDistortionMap,
791  cv::Mat{},
792  cv::INTER_NEAREST,
793  cv::BORDER_CONSTANT);
794 
795  visionx::imrec::convert_rgb2cbi(cv_color_image_undistorted, *resultColorImage);
796  }
797  else
798  {
799  // Convert the k4a image to an IVT image.
800  ScopedStopWatch sw{createSwCallback("convert k4a image to IVT")};
801  ::k4aToIvtImage(COLOR_IMAGE, *resultColorImage);
802  }
803 
804 
805  // Prepare result depth image.
806  {
807  ScopedStopWatch sw{createSwCallback("prepare result depth image")};
808 
809  const int DW = alignedDepthImage.get_width_pixels();
810  const int DH = alignedDepthImage.get_height_pixels();
811 
812  ARMARX_CHECK_EXPRESSION(resultDepthImage);
813  ARMARX_CHECK_EQUAL(resultDepthImage->width, DW);
814  ARMARX_CHECK_EQUAL(resultDepthImage->height, DH);
815 
816  auto result_depth_buffer = resultDepthImage->pixels;
817  const auto* depth_buffer =
818  reinterpret_cast<const uint16_t*>(alignedDepthImage.get_buffer());
819 
820  int index = 0;
821  int index_2 = 0;
822  for (int y = 0; y < DH; ++y)
823  {
824  for (int x = 0; x < DW; ++x)
825  {
 826  uint16_t depth_value = depth_buffer[index_2];
 827  visionx::tools::depthValueToRGB(depth_value,
 828  result_depth_buffer[index],
 829  result_depth_buffer[index + 1],
 830  result_depth_buffer[index + 2]);
831  index += 3;
832  index_2 += 1;
833  }
834  }
835  }
836 
837  // Broadcast RGB-D image.
838  {
839  ScopedStopWatch sw{createSwCallback("broadcast RGB-D image")};
840  CByteImage* images[2] = {resultColorImage.get(), resultDepthImage.get()};
841  provideImages(images, imagesTime);
842  }
843 
844  // Wait until `alignedDepthImage` was processed and can be overridden again.
845  {
846  std::unique_lock signal_lock{pointcloudProcMutex};
847  ARMARX_DEBUG << "Capturing thread waiting for signal...";
848  pointcloudProcSignal.wait(signal_lock, [&] { return depthImageProcessed; });
849  ARMARX_DEBUG << "Capturing thread received signal.";
850  }
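 // Handshake summary: doCapture() writes alignedDepthImage under
 // pointcloudProcMutex, sets depthImageReady and notifies; the point cloud
 // thread consumes the image, sets depthImageProcessed and notifies back.
 // The wait above thus guarantees that alignedDepthImage is not overwritten
 // while the point cloud thread is still reading it.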
851 
852  {
853  std::lock_guard g{debugObserverMtx};
 854  sendDebugObserverBatch();
 855  }
856 
857  if (enableHeartbeat)
858  {
859  heartbeatPlugin->heartbeat();
860  }
861 
862  // return true;
863  }
864  else
865  {
866  ARMARX_INFO << deactivateSpam(30) << "Did not get frame until timeout of " << TIMEOUT;
867 
868  {
869  std::lock_guard g{debugObserverMtx};
 870  sendDebugObserverBatch();
 871  }
872 
873  return false;
874  }
875 
876  if (startIMU)
877  {
878  try
879  {
880  Eigen::Vector3f accMean = Eigen::Vector3f::Zero();
881  std::size_t cnt = 0;
882 
883  k4a_imu_sample_t imu_sample;
884  while (device.get_imu_sample(&imu_sample, std::chrono::milliseconds(0)))
885  {
886  const Eigen::Vector3f acceleration{imu_sample.acc_sample.xyz.x,
887  imu_sample.acc_sample.xyz.y,
888  imu_sample.acc_sample.xyz.z};
889 
890  accMean += acceleration;
 891  cnt++;
892  }
893 
894  accMean /= static_cast<float>(cnt);
895 
896  ARMARX_INFO << "Got IMU sample";
897 
898  {
899  std::lock_guard g{debugObserverMtx};
900 
901  setDebugObserverDatafield("acceleration.x", accMean.x());
902  setDebugObserverDatafield("acceleration.y", accMean.y());
903  setDebugObserverDatafield("acceleration.z", accMean.z());
904 
 905  sendDebugObserverBatch();
 906  }
907  }
908  catch (const std::exception&)
909  {
910  ARMARX_WARNING << "Failed to get imu samples from device (#"
911  << ++mDiagnostics.num_crashes << ").";
912  }
913  }
914 
915  return true;
916  }
917 
918 #ifdef INCLUDE_BODY_TRACKING
919  void
920  AzureKinectPointCloudProvider::runPublishBodyTrackingResults()
921  {
923  while (bodyTrackingIsRunning)
924  {
925 
926  // body frames might just not be available.
927  const k4abt::frame body_frame = [&]() -> k4abt::frame
928  {
929  try
930  {
931  auto result = bodyTracker.pop_result();
932  return result;
933  }
934  catch (...)
935  {
 936  ARMARX_VERBOSE << deactivateSpam(1) << "Exception in body tracking publishing: "
 937  << armarx::GetHandledExceptionString();
938  return {};
939  }
940  }();
941 
942  if (body_frame != nullptr)
943  {
 944  ScopedStopWatch sw{
 945  createSwCallback("publish body tracking result")};
946  // see https://github.com/microsoft/Azure_Kinect_ROS_Driver/blob/melodic/src/k4a_ros_device.cpp
947  const armarx::DateTime timestamp =
948  timestampToArmarX(body_frame.get_device_timestamp());
949 
950  {
951  auto real_time = IceUtil::Time::now();
952  auto monotonic_time = IceUtil::Time::now(IceUtil::Time::Monotonic);
953  auto clock_diff = real_time - monotonic_time;
954 
955  auto image_monotonic_time = IceUtil::Time::microSeconds(
956  std::chrono::duration_cast<std::chrono::microseconds>(
957  body_frame.get_system_timestamp())
958  .count());
959  // long offset = long(getProperty<float>("CaptureTimeOffset").getValue() * 1000.0f);
960 
961  // IceUtil::Time imageTime = image_monotonic_time + clock_diff - IceUtil::Time::microSeconds(offset);
962  IceUtil::Time imageTime = image_monotonic_time + clock_diff;
963 
964  {
965  std::lock_guard g{debugObserverMtx};
966  setDebugObserverDatafield("ros_vs_ice_timestamp [µs]",
967  imageTime.toMicroSeconds());
968  }
969  }
970 
971  {
972  const armarx::Clock realtimeClock = armarx::Clock(armarx::ClockType::Realtime);
973  const armarx::Clock monotonicClock =
974  armarx::Clock(armarx::ClockType::Monotonic);
975 
976  auto real_time = realtimeClock.now();
977  auto monotonic_time = monotonicClock.now();
978  auto clock_diff = real_time - monotonic_time;
979 
980  auto image_monotonic_time = armarx::Duration::MicroSeconds(
981  std::chrono::duration_cast<std::chrono::microseconds>(
982  body_frame.get_system_timestamp())
983  .count());
984  // long offset = long(getProperty<float>("CaptureTimeOffset").getValue() * 1000.0f);
985 
986  // IceUtil::Time imageTime = image_monotonic_time + clock_diff - IceUtil::Time::microSeconds(offset);
987  auto imageTime = image_monotonic_time + clock_diff;
988 
989  armarx::DateTime imageTimestamp = armarx::DateTime(imageTime);
990 
991  {
992  std::lock_guard g{debugObserverMtx};
993  setDebugObserverDatafield("ros_vs_armarx_timestamp [µs]",
994  imageTime.toMicroSeconds());
995  }
996  }
997 
998 
999  std::uint32_t num_bodies = body_frame.get_num_bodies();
1000  {
1001  std::lock_guard g{debugObserverMtx};
1002  setDebugObserverDatafield("n_bodies_detected", num_bodies);
1003  }
1004 
1005  std::vector<armarx::armem::human::HumanPose> humanPoses;
1006  humanPoses.reserve(num_bodies);
1007 
1008  for (std::uint32_t i = 0; i < num_bodies; i++)
1009  {
1010  k4abt_body_t body = body_frame.get_body(i);
1011  printBodyInformation(body);
1012 
1014  humanPose.humanTrackingId = std::to_string(body.id);
1015  humanPose.cameraFrameName = bodyCameraFrameName;
 1016  humanPose.poseModelId = armarx::human::pose::model::k4a_bt_body_32::ModelId;
 1017 
1018  for (int i = 0; i < static_cast<int>(K4ABT_JOINT_COUNT); i++)
1019  {
 1020  const auto joint =
 1021  static_cast<armarx::human::pose::model::k4a_bt_body_32::Joints>(i);
 1022  const auto name =
 1023  armarx::human::pose::model::k4a_bt_body_32::JointNames.to_name(joint);
 1024 
1025  k4a_float3_t position = body.skeleton.joints[i].position;
1026  k4a_quaternion_t orientation = body.skeleton.joints[i].orientation;
1027  k4abt_joint_confidence_level_t confidence_level =
1028  body.skeleton.joints[i].confidence_level;
 1029 
 1030  humanPose.keypoints[name] = armarx::armem::human::PoseKeypoint{
1031  .label = name,
1032  .confidence = static_cast<float>(static_cast<int>(confidence_level)),
1033  .positionCamera = armarx::FramedPosition(
1034  Eigen::Vector3f{position.v[0], position.v[1], position.v[2]},
1035  bodyCameraFrameName,
1036  robotName),
1037  .orientationCamera =
1038  armarx::FramedOrientation(Eigen::Quaternionf(orientation.wxyz.w,
1039  orientation.wxyz.x,
1040  orientation.wxyz.y,
1041  orientation.wxyz.z)
1042  .toRotationMatrix(),
1043  bodyCameraFrameName,
1044  robotName)};
1045  }
1046 
1047  humanPoses.push_back(humanPose);
1048  }
1049 
1050  {
1051  std::lock_guard g{debugObserverMtx};
1052  setDebugObserverDatafield("bodyTrackingCaptureUntilPublish [ms]",
1053  (armarx::Clock::Now() - timestamp).toMilliSeconds());
1054  }
1055 
1056  humanPoseWriter.commitHumanPosesInCameraFrame(humanPoses, getName(), timestamp);
1057 
1058 
1059  // k4a::image body_index_map = body_frame.get_body_index_map();
1060  // if (body_index_map != nullptr)
1061  // {
1062  // //print_body_index_map_middle_line(body_index_map);
1063  // }
1064  // else
1065  // {
1066  // ARMARX_WARNING << "Error: Failed to generate bodyindex map!";
1067  // }
1068  }
1069  else
1070  {
1071  // It should never hit timeout when K4A_WAIT_INFINITE is set.
 1072  ARMARX_VERBOSE << "Error: popping body frame result timed out!";
1073  }
1074 
1075  metronome.waitForNextTick();
1076  }
1077  }
1078 #endif
1079 
1080  void
 1081  AzureKinectPointCloudProvider::runPointcloudPublishing()
 1082  {
1083  ARMARX_DEBUG << "Started pointcloud processing task.";
1084 
1085  IceUtil::Time time;
1086 
1087  // Main pointcloud processing loop.
1088  while (not pointcloudTask->isStopped())
1089  {
1090  // Wait for data.
1091  std::unique_lock signal_lock{pointcloudProcMutex};
1092  ARMARX_DEBUG << "Pointcloud thread waiting for signal...";
1093  pointcloudProcSignal.wait(
1094  signal_lock, [&] { return pointcloudTask->isStopped() or depthImageReady; });
1095  ARMARX_DEBUG << "Pointcloud thread received signal.";
1096 
1097  // Assess timings, reset flags.
1098  const IceUtil::Time TIMESTAMP = imagesTime;
1099  depthImageReady = false;
1100  depthImageProcessed = false;
1101 
1102  // Transform depth image to pointcloud.
 1103  {
 1104  time = armarx::TimeUtil::GetTime();
 1105  transformation.depth_image_to_point_cloud(
 1106  alignedDepthImage, K4A_CALIBRATION_TYPE_COLOR, &xyzImage);
 1107  ARMARX_DEBUG << "Transforming depth image to point cloud took "
 1108  << (armarx::TimeUtil::GetTime() - time).toMilliSecondsDouble() << " ms";
 1109  }
1110 
1111  // Construct PCL pointcloud.
 1112  {
 1113  time = armarx::TimeUtil::GetTime();
 1114 
1115  ARMARX_CHECK_EXPRESSION(pointcloud);
1116 
1117  pointcloud->width = static_cast<uint32_t>(xyzImage.get_width_pixels());
1118  pointcloud->height = static_cast<uint32_t>(xyzImage.get_height_pixels());
1119 
1120  ARMARX_TRACE;
1121 
1122  pointcloud->is_dense = false;
1123  pointcloud->points.resize(pointcloud->width * pointcloud->height);
1124 
1125  auto k4a_cloud_buffer = reinterpret_cast<const int16_t*>(xyzImage.get_buffer());
1126 
1127  ARMARX_CHECK_EXPRESSION(k4a_cloud_buffer);
1128 
1129  unsigned char* color_buffer = resultColorImage->pixels;
1130  while (color_buffer == nullptr)
1131  {
1132  ARMARX_WARNING << "color_buffer is null. This should never happen. There is "
1133  "probably a race condition somewhere that needs to be fixed. "
1134  "Temporarily we ignore this and continue.\n Timestamp: "
1136  color_buffer = resultColorImage->pixels;
1137  }
1138  ARMARX_CHECK_NOT_NULL(color_buffer);
1139 
1140  ARMARX_CHECK_EQUAL(xyzImage.get_width_pixels(), resultColorImage->width);
1141  ARMARX_CHECK_EQUAL(xyzImage.get_height_pixels(), resultColorImage->height);
1142  ARMARX_CHECK_EQUAL(pointcloud->points.size(),
1143  pointcloud->width * pointcloud->height);
1144 
1145  ARMARX_TRACE;
1146 
1147  size_t index = 0;
1148  float max_depth = getProperty<float>("MaxDepth").getValue();
1149  for (auto& p : pointcloud->points)
1150  {
1151  p.r = color_buffer[index];
1152  p.x = k4a_cloud_buffer[index];
1153 
1154  index++;
1155 
1156  p.g = color_buffer[index];
1157  p.y = k4a_cloud_buffer[index];
1158 
1159  index++;
1160 
1161  p.b = color_buffer[index];
1162  auto z = k4a_cloud_buffer[index];
1163 
1164  index++;
1165 
1166  if (z <= max_depth and z != 0)
1167  {
1168  p.z = z;
1169  }
1170  else
1171  {
1172  p.z = std::numeric_limits<float>::quiet_NaN();
1173  }
1174  }
1175 
 1176  ARMARX_DEBUG << "Constructing point cloud took "
 1177  << (armarx::TimeUtil::GetTime() - time).toMilliSecondsDouble() << " ms";
 1178  }
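 // Layout note: xyzImage holds interleaved int16_t (x, y, z) triplets in mm in
 // the color camera frame, and resultColorImage holds interleaved RGB bytes, so
 // a single running index walks both buffers in lockstep (3 elements per pixel
 // each). Invalid measurements (z == 0) and points beyond MaxDepth become NaN,
 // which is why is_dense is set to false above.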
1179 
1180  // Notify capturing thread that data was processed and may be overridden.
1181  depthImageProcessed = true;
1182  signal_lock.unlock();
1183  ARMARX_DEBUG << "Notifying capturing thread...";
1184  pointcloudProcSignal.notify_all();
1185 
1186  ARMARX_TRACE;
1187 
1188  // Broadcast PCL pointcloud.
 1189  {
 1190  time = armarx::TimeUtil::GetTime();
 1191  pointcloud->header.stamp = static_cast<unsigned long>(TIMESTAMP.toMicroSeconds());
 1192  providePointCloud(pointcloud);
 1193  ARMARX_DEBUG << "Broadcasting pointcloud took "
 1194  << (armarx::TimeUtil::GetTime() - time).toMilliSecondsDouble() << " ms";
 1195  }
1196  }
1197 
1198  ARMARX_DEBUG << "Stopped pointcloud processing task.";
1199  }
1200 
1201  std::string
 1202  AzureKinectPointCloudProvider::getDefaultName() const
 1203  {
1204  return "AzureKinectPointCloudProvider";
1205  }
1206 
 1207  void
 1208  AzureKinectPointCloudProvider::onInitComponent()
 1209  {
 1210  ImageProvider::onInitComponent();
 1211  PointCloudProvider::onInitComponent();
 1212  }
1213 
 1214  void
 1215  AzureKinectPointCloudProvider::onConnectComponent()
 1216  {
 1217  ImageProvider::onConnectComponent();
 1218  PointCloudProvider::onConnectComponent();
 1219  }
1220 
 1221  void
 1222  AzureKinectPointCloudProvider::onDisconnectComponent()
 1223  {
 1224  captureEnabled = false;
 1225 
 1226  ImageProvider::onDisconnectComponent();
 1227  PointCloudProvider::onDisconnectComponent();
 1228  }
1229 
 1230  void
 1231  AzureKinectPointCloudProvider::onExitComponent()
 1232  {
 1233  ImageProvider::onExitComponent();
 1234  PointCloudProvider::onExitComponent();
 1235  }
1236 
1237  StereoCalibration
 1238  AzureKinectPointCloudProvider::getStereoCalibration(const Ice::Current& c)
 1239  {
1240  using namespace Eigen;
1241  using Matrix3FRowMajor = Matrix<float, 3, 3, StorageOptions::RowMajor>;
1243 
1244  // TODO: use the externally used cameraMatrix and distCoeffs
1245  const auto convert_calibration =
1246  [](const k4a_calibration_camera_t& k4a_calib, float scale = 1.f)
1247  {
1248  MonocularCalibration monocular_calibration;
1249 
1250  const k4a_calibration_intrinsic_parameters_t& params = k4a_calib.intrinsics.parameters;
1251  monocular_calibration.cameraParam.principalPoint = {params.param.cx * scale,
1252  params.param.cy * scale};
1253  monocular_calibration.cameraParam.focalLength = {params.param.fx * scale,
1254  params.param.fy * scale};
 1255  // TODO: Figure out conversions. IVT (Calibration.h) expects 4 parameters:
1256  // - The first radial lens distortion parameter.
1257  // - The second radial lens distortion parameter.
1258  // - The first tangential lens distortion parameter.
1259  // - The second tangential lens distortion parameter.
1260  // However, the Kinect offers k1-k6 radial distortion coefficients and 2 p1-p2
1261  // tangential distortion parameters, which means that k3-k6 are unused.
1262  // It is even unclear whether this is correct now, as previously it was a vector of all
1263  // 6 k-params, which resulted in failed assertions in TypeMapping.cpp in the function
1264  // CStereoCalibration* visionx::tools::convert(const visionx::StereoCalibration& stereoCalibration)
 1265  // line 314 at the time of this commit.
1266  // See: https://microsoft.github.io/Azure-Kinect-Sensor-SDK/master/structk4a__calibration__intrinsic__parameters__t_1_1__param.html
1267  monocular_calibration.cameraParam.distortion = {
1268  params.param.k1,
1269  params.param.k2,
1270  params.param.p1,
1271  params.param.p2,
1272  };
1273 
 1274  // TODO: this needs to be scaled! Or why shouldn't it be?
1275  monocular_calibration.cameraParam.width =
1276  std::floor(float(k4a_calib.resolution_width) * scale);
1277  monocular_calibration.cameraParam.height =
1278  std::floor(float(k4a_calib.resolution_height) * scale);
1279 
1280  const Matrix3FRowMajor rotation =
1281  Map<const Matrix3FRowMajor>{k4a_calib.extrinsics.rotation};
1282 
1283  monocular_calibration.cameraParam.rotation = convertEigenMatToVisionX(rotation);
1284  monocular_calibration.cameraParam.translation = {k4a_calib.extrinsics.translation,
1285  k4a_calib.extrinsics.translation + 3};
1286 
1287  return monocular_calibration;
1288  };
1289 
1290  StereoCalibration stereo_calibration;
1291 
1292  stereo_calibration.calibrationLeft =
1293  convert_calibration(k4aCalibration.color_camera_calibration);
1294 
1295  // if the camera images are rectified, the distortion params are 0
1296  if (enableColorUndistortion)
1297  {
1298  auto& colorDistortionParams = stereo_calibration.calibrationLeft.cameraParam.distortion;
1299  std::fill(colorDistortionParams.begin(), colorDistortionParams.end(), 0);
1300  }
1301 
1302  // the depth image is rectified by default
1303  {
 1304  // The depth image has been warped to the color camera frame. See depth_image_to_color_camera() above.
1305  // Therefore we use the calibration of the color camera (focal length etc) and set the distortion parameters to 0.
1306  stereo_calibration.calibrationRight =
1307  convert_calibration(k4aCalibration.color_camera_calibration);
1308 
1309  auto& depthDistortionParams =
1310  stereo_calibration.calibrationRight.cameraParam.distortion;
1311  std::fill(depthDistortionParams.begin(), depthDistortionParams.end(), 0);
1312  }
1313 
 1314  stereo_calibration.rectificationHomographyLeft =
 1315  convertEigenMatToVisionX(Matrix3FRowMajor::Identity());
 1316  stereo_calibration.rectificationHomographyRight =
 1317  convertEigenMatToVisionX(Matrix3FRowMajor::Identity());
 1318 
1319  return stereo_calibration;
1320  }
1321 
1322  bool
 1323  AzureKinectPointCloudProvider::getImagesAreUndistorted(const ::Ice::Current& c)
 1324  {
1325  return enableColorUndistortion;
1326  }
1327 
1328  std::string
 1329  AzureKinectPointCloudProvider::getReferenceFrame(const Ice::Current& c)
 1330  {
1331  return getProperty<std::string>("frameName");
1332  }
1333 
1334  std::vector<imrec::ChannelPreferences>
 1335  AzureKinectPointCloudProvider::getImageRecordingChannelPreferences(const Ice::Current&)
 1336  {
1337  ARMARX_TRACE;
1338 
1339  imrec::ChannelPreferences rgb;
1340  rgb.requiresLossless = false;
1341  rgb.name = "rgb";
1342 
1343  imrec::ChannelPreferences depth;
1344  depth.requiresLossless = true;
1345  depth.name = "depth";
1346 
1347  return {rgb, depth};
1348  }
1349 
1350  void
 1351  AzureKinectPointCloudProvider::enableHumanPoseEstimation(
 1352  const armarx::EnableHumanPoseEstimationInput& input,
1353  const Ice::Current&)
1354  {
1355 #ifndef INCLUDE_BODY_TRACKING
1356  {
1357  ARMARX_ERROR << "INCLUDE_BODY_TRACKING is not defined.";
1358  return;
1359  }
1360 #endif
1361 
1362 #ifdef INCLUDE_BODY_TRACKING
1363  if (bodyTrackingEnabled)
1364  {
1365  // should not require a mutex
1366  if (not bodyTrackingIsRunning and input.enable3d)
1367  {
1368  bodyTrackingIsRunning = true;
1369  bodyTrackingPublishTask->start();
1370  }
1371  else if (bodyTrackingIsRunning and not input.enable3d)
1372  {
1373  bodyTrackingIsRunning = false;
1374  bodyTrackingPublishTask->stop();
1375  }
1376  }
1377  else
1378  {
1379  ARMARX_ERROR << "Azure Kinect Body Tracking is not enabled";
1380  }
1381 #endif
1382  }
1383 
1384  void
1385  AzureKinectPointCloudProvider::setMaxDepthBodyTracking(int maxDepthInMM, const Ice::Current&)
1386  {
1387  std::scoped_lock lock(bodyTrackingParameterMutex);
1388  bodyTrackingDepthMaskMaxZ = maxDepthInMM;
1389  }
1390 
1391  void
 1392  AzureKinectPointCloudProvider::setWidthBodyTracking(int minXinPixel,
 1393  int maxXinPixel,
1394  const Ice::Current&)
1395  {
1396  std::scoped_lock lock(bodyTrackingParameterMutex);
1397  bodyTrackingDepthMaskMinX = minXinPixel;
1398  bodyTrackingDepthMaskMaxX = maxXinPixel;
1399  }
1400 
1401  // the code below is taken from https://github.com/microsoft/Azure_Kinect_ROS_Driver
1402  // -> MIT license
1403 
1404  void
 1405  AzureKinectPointCloudProvider::updateTimestampOffset(
 1406  const std::chrono::microseconds& k4a_device_timestamp_us,
1407  const std::chrono::nanoseconds& k4a_system_timestamp_ns)
1408  {
1409  // System timestamp is on monotonic system clock.
1410  // Device time is on AKDK hardware clock.
1411  // We want to continuously estimate diff between realtime and AKDK hardware clock as low-pass offset.
1412  // This consists of two parts: device to monotonic, and monotonic to realtime.
1413 
1414  // First figure out realtime to monotonic offset. This will change to keep updating it.
1415  std::chrono::nanoseconds realtime_clock =
1416  std::chrono::system_clock::now().time_since_epoch();
1417  std::chrono::nanoseconds monotonic_clock =
1418  std::chrono::steady_clock::now().time_since_epoch();
1419 
1420  std::chrono::nanoseconds monotonic_to_realtime = realtime_clock - monotonic_clock;
1421 
1422  // Next figure out the other part (combined).
1423  std::chrono::nanoseconds device_to_realtime =
1424  k4a_system_timestamp_ns - k4a_device_timestamp_us + monotonic_to_realtime;
1425 
1426  {
1427  std::lock_guard g{deviceToRealtimeOffsetMtx};
1428 
1429  {
1430  std::lock_guard g{debugObserverMtx};
1431  setDebugObserverDatafield("device_to_realtime_offset [ms]",
1432  device_to_realtime_offset_.count() /
1433  1'000'000.f); // [ns] -> [ms]
1435  "clock_error",
1436  std::abs<float>((device_to_realtime_offset_ - device_to_realtime).count()) /
1437  1'000.f); // [ns] -> [µs]
1438  }
1439 
1440  const std::int64_t timeOffsetThreshold = 1e7; // 10 ms
1441 
1442  // If we're over a certain time off, just snap into place.
1443  if (device_to_realtime_offset_.count() == 0 ||
1444  std::abs((device_to_realtime_offset_ - device_to_realtime).count()) >
1445  timeOffsetThreshold)
1446  {
1447  ARMARX_VERBOSE << deactivateSpam()
1448  << "Initializing or re-initializing the device to realtime offset: "
1449  << device_to_realtime.count() << " ns";
1450  device_to_realtime_offset_ = device_to_realtime;
1451  }
1452  else
1453  {
1454  // Low-pass filter!
1455  constexpr double alpha = 0.10;
1456 
1457  const std::chrono::nanoseconds timeCorrection(static_cast<int64_t>(
1458  std::floor(alpha * (device_to_realtime - device_to_realtime_offset_).count())));
1459  device_to_realtime_offset_ = device_to_realtime_offset_ + timeCorrection;
1460 
1461  {
1462  std::lock_guard g{debugObserverMtx};
1463  setDebugObserverDatafield("timeCorrection [µs]",
1464  timeCorrection.count() / 1'000); // [ns] -> [µs]
1465  }
1466  }
1467  }
1468  }
1469 
1470  armarx::DateTime
1471  AzureKinectPointCloudProvider::timestampToArmarX(
1472  const std::chrono::microseconds& k4a_timestamp_us)
1473  {
1474  std::lock_guard g{deviceToRealtimeOffsetMtx};
1475 
1476  // must be initialized beforehand
1477  ARMARX_CHECK(device_to_realtime_offset_.count() != 0);
1478 
1479  std::chrono::nanoseconds timestamp_in_realtime =
1480  k4a_timestamp_us + device_to_realtime_offset_;
1481 
1482  return armarx::Duration::MicroSeconds(timestamp_in_realtime.count() /
1483  1'000); // [ns] -> [µs]
1484  }
1485 
1486  void
 1487  AzureKinectPointCloudProvider::initializeTimestampOffset(
 1488  const std::chrono::microseconds& k4a_device_timestamp_us)
1489  {
1490  std::lock_guard g{deviceToRealtimeOffsetMtx};
1491 
1492  // We have no better guess than "now".
1493  std::chrono::nanoseconds realtime_clock =
1494  std::chrono::system_clock::now().time_since_epoch();
1495 
1496  device_to_realtime_offset_ = realtime_clock - k4a_device_timestamp_us;
1497 
1498  ARMARX_INFO << "Initializing the device to realtime offset based on wall clock: "
1499  << device_to_realtime_offset_.count() << " ns";
1500  }
1501 
 1502  AzureKinectPointCloudProvider::AzureKinectPointCloudProvider()
 1503  {
1504  addPlugin(heartbeatPlugin);
1505  }
1506 
1507  void
 1508  AzureKinectPointCloudProvider::Framerate::Subordinate::update(int higherFramerate)
 1509  {
1510  if (std::abs(std::fmod(higherFramerate, value)) > 1e-6)
1511  {
1512  std::stringstream ss;
1513  ss << "Invalid value (" << value << " fps) for framerate of '" << name << "'."
1514  << " Reason: The framerate has to be a divider of the property framerate.image ("
1515  << higherFramerate << " fps).";
1516  throw armarx::LocalException() << ss.str();
1517  }
1518  skipFrames = std::round(higherFramerate / value) - 1;
1519  if (skipFrames != 0)
1520  {
1521  ARMARX_INFO_S << "Only publishing every " << (skipFrames + 1) << "'th " << name
1522  << " result!";
1523  }
1524  }
1525 
1526  bool
 1527  AzureKinectPointCloudProvider::Framerate::Subordinate::skip()
 1528  {
1529  bool skip = false;
1530  if (skipFrames > 0)
1531  {
1532  skip = skipFramesCount;
1533  skipFramesCount++;
1534  skipFramesCount %= (skipFrames + 1);
1535  }
1536  return skip;
1537  }
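 // Worked example: with 'framerate.images' = 30 and a subordinate framerate of
 // 10, update() sets skipFrames = 30 / 10 - 1 = 2, so skip() returns
 // false, true, true, false, true, true, ... and only every third frame is
 // published.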
1538 
1539 } // namespace visionx
armarx::plugins::HeartbeatComponentPlugin::heartbeat
void heartbeat()
Sends out a heartbeat using the default config.
Definition: HeartbeatComponentPlugin.cpp:80
visionx::AzureKinectPointCloudProviderPropertyDefinitions
Definition: AzureKinectPointCloudProvider.h:73
armarx::Variant
The Variant class is described here: Variants.
Definition: Variant.h:224
ARMARX_VERBOSE
#define ARMARX_VERBOSE
Definition: Logging.h:180
Eigen
Definition: Elements.h:36
visionx::PointCloudProvider::onConnectComponent
void onConnectComponent() override
Definition: PointCloudProvider.cpp:77
visionx::AzureKinectPointCloudProvider::GetDepthDimensions
static std::pair< int, int > GetDepthDimensions(const k4a_depth_mode_t depth_mode)
Returns the dimension of the depth images that will be produced for a certain resolution.
Definition: AzureKinectPointCloudProvider.h:223
armarx::plugins::HeartbeatComponentPlugin::signUp
void signUp(const std::string &channelName="", const std::vector< std::string > &aliases={}, const std::string &description="")
register component to heartbeat
Definition: HeartbeatComponentPlugin.cpp:14
armarx::human::pose::model::k4a_bt_body_32::Joints
Joints
Joints with index as defined in the body model.
Definition: k4a_bt_body_32.h:39
armarx::ManagedIceObject::setMetaInfo
void setMetaInfo(const std::string &id, const VariantBasePtr &value)
Allows to set meta information that can be queried live via Ice interface on the ArmarXManager.
Definition: ManagedIceObject.cpp:749
LocalException.h
visionx
ArmarX headers.
Definition: OpenPoseStressTest.h:38
visionx::AzureKinectPointCloudProvider::Framerate::Subordinate::update
void update(int higherFramerate)
Definition: AzureKinectPointCloudProvider.cpp:1508
AzureKinectPointCloudProvider.h
index
uint8_t index
Definition: EtherCATFrame.h:59
StopWatch.h
visionx::AzureKinectPointCloudProvider::doCapture
bool doCapture() override
Main capturing function.
Definition: AzureKinectPointCloudProvider.cpp:595
armarx::DebugObserverComponentPluginUser::setDebugObserverBatchModeEnabled
void setDebugObserverBatchModeEnabled(bool enable)
Definition: DebugObserverComponentPlugin.cpp:118
visionx::AzureKinectPointCloudProvider::getStereoCalibration
visionx::StereoCalibration getStereoCalibration(const Ice::Current &c) override
Definition: AzureKinectPointCloudProvider.cpp:1238
ARMARX_CHECK_NOT_NULL
#define ARMARX_CHECK_NOT_NULL(ptr)
This macro evaluates whether ptr is not null and if it turns out to be false it will throw an Express...
Definition: ExpressionException.h:206
visionx::CapturingPointCloudProvider::setPointCloudSyncMode
void setPointCloudSyncMode(ImageSyncMode pointCloudSyncMode)
Sets the point cloud synchronization mode.
Definition: CapturingPointCloudProvider.cpp:190
visionx::AzureKinectPointCloudProvider::VersionToString
static std::string VersionToString(const k4a_version_t &version)
Creates a string from a k4a_version_t.
Definition: AzureKinectPointCloudProvider.h:249
armarx::ManagedIceObject::getArmarXManager
ArmarXManagerPtr getArmarXManager() const
Returns the ArmarX manager used to add and remove components.
Definition: ManagedIceObject.cpp:348
trace.h
armarx::armem::human::PoseKeypoint::label
std::string label
Definition: types.h:19
visionx::imrec::convert_rgb2cbi
void convert_rgb2cbi(const cv::Mat &in, CByteImage &out)
Converts an OpenCV RGB Mat to IVT's CByteImage.
Definition: helper.cpp:65
visionx::ImageProvider::onExitComponent
void onExitComponent() override
Definition: ImageProvider.cpp:152
c
constexpr T c
Definition: UnscentedKalmanFilterTest.cpp:43
visionx::AzureKinectPointCloudProvider::initializeTimestampOffset
void initializeTimestampOffset(const std::chrono::microseconds &k4a_device_timestamp_us)
Definition: AzureKinectPointCloudProvider.cpp:1487
armarx::ManagedIceObject::addPlugin
PluginT * addPlugin(const std::string prefix="", ParamsT &&...params)
Definition: ManagedIceObject.h:182
visionx::AzureKinectPointCloudProvider::GetColorDimensions
static std::pair< int, int > GetColorDimensions(const k4a_color_resolution_t resolution)
Returns the dimension of the color images that will be produced for a certain resolution.
Definition: AzureKinectPointCloudProvider.h:195
visionx::ImageProvider::setImageFormat
void setImageFormat(ImageDimension imageDimension, ImageType imageType, BayerPatternType bayerPatternType=visionx::eBayerPatternRg)
Sets the image basic format data.
Definition: ImageProvider.cpp:275
visionx::AzureKinectPointCloudProvider::getImageRecordingChannelPreferences
std::vector< imrec::ChannelPreferences > getImageRecordingChannelPreferences(const Ice::Current &) override
Definition: AzureKinectPointCloudProvider.cpp:1335
visionx::AzureKinectPointCloudProvider::updateTimestampOffset
void updateTimestampOffset(const std::chrono::microseconds &k4a_device_timestamp_us, const std::chrono::nanoseconds &k4a_system_timestamp_ns)
Definition: AzureKinectPointCloudProvider.cpp:1405
visionx::AzureKinectPointCloudProvider::createSwCallback
std::function< void(armarx::Duration)> createSwCallback(const std::string &description)
Definition: AzureKinectPointCloudProvider.cpp:149
visionx::ImageProvider::getImageFormat
ImageFormatInfo getImageFormat(const Ice::Current &c=Ice::emptyCurrent) override
Returns the entire image format info struct via Ice.
Definition: ImageProvider.cpp:75
armarx::RunningTask
Definition: ArmarXMultipleObjectsScheduler.h:35
armarx::core::time::Clock
Clock to get date/time from a specific clock type or wait for certain durations or until certain date...
Definition: Clock.h:26
k4a_bt_body_32.h
ARMARX_CHECK
#define ARMARX_CHECK(expression)
Shortcut for ARMARX_CHECK_EXPRESSION.
Definition: ExpressionException.h:82
visionx::tools::createByteImage
CByteImage * createByteImage(const ImageFormatInfo &imageFormat, const ImageType imageType)
Creates a ByteImage for the destination type specified in the given imageProviderInfo.
visionx::PointCloudProvider::onDisconnectComponent
void onDisconnectComponent() override
Hook for subclass.
Definition: PointCloudProvider.cpp:95
visionx::PointCloudProvider::onExitComponent
void onExitComponent() override
Definition: PointCloudProvider.cpp:106
armarx::TimeMode::SystemTime
@ SystemTime
visionx::ImageProvider::onDisconnectComponent
void onDisconnectComponent() override
Hook for subclass.
Definition: ImageProvider.cpp:141
visionx::tools::convert
CByteImage::ImageType convert(const ImageType visionxImageType)
Converts a VisionX image type into an image type of IVT's ByteImage.
Definition: TypeMapping.cpp:95
visionx::AzureKinectPointCloudProvider::setWidthBodyTracking
void setWidthBodyTracking(int minXinPixel, int maxXinPixel, const Ice::Current &=Ice::emptyCurrent) override
Definition: AzureKinectPointCloudProvider.cpp:1392
visionx::CapturingPointCloudProvider::capture
virtual void capture()
Definition: CapturingPointCloudProvider.cpp:147
visionx::AzureKinectPointCloudProvider::onStopCapture
void onStopCapture() override
This is called when the point cloud provider capturing has been stopped.
Definition: AzureKinectPointCloudProvider.cpp:586
armarx::armem::human::HumanPose::poseModelId
std::string poseModelId
Definition: types.h:32
armarx::GetHandledExceptionString
std::string GetHandledExceptionString()
Definition: Exception.cpp:147
ARMARX_TRACE
#define ARMARX_TRACE
Definition: trace.h:69
armarx::status
status
Definition: FiniteStateMachine.h:259
GfxTL::Identity
void Identity(MatrixXX< N, N, T > *a)
Definition: MatrixXX.h:523
visionx::CapturingPointCloudProvider::captureEnabled
std::atomic_bool captureEnabled
Indicates that capturing is enabled and running.
Definition: CapturingPointCloudProvider.h:224
armarx::ctrlutil::a
double a(double t, double a0, double j)
Definition: CtrlUtil.h:45
armarx::armem::human::PoseKeypoint
Definition: types.h:17
cxxopts::value
std::shared_ptr< Value > value()
Definition: cxxopts.hpp:926
visionx::imrec::convert
void convert(const CByteImage &in, cv::Mat &out)
Converts an IVT CByteImage to OpenCV's BGR Mat.
Definition: helper.cpp:41
types.h
armarx::abs
std::vector< T > abs(const std::vector< T > &v)
Definition: VectorHelpers.h:253
armarx::human::pose::model::k4a_bt_body_32::JointNames
const simox::meta::EnumNames< Joints > JointNames
Names of the joints as defined in the body model.
Definition: k4a_bt_body_32.h:78
visionx::AzureKinectPointCloudProvider::timestampToArmarX
armarx::DateTime timestampToArmarX(const std::chrono::microseconds &k4a_timestamp_us)
Definition: AzureKinectPointCloudProvider.cpp:1471
if
if(!yyvaluep)
Definition: Grammar.cpp:724
FramedPose.h
visionx::CapturingPointCloudProviderPropertyDefinitions
Definition: CapturingPointCloudProvider.h:38
visionx::PointCloudProvider::getPointCloudFormat
MetaPointCloudFormatPtr getPointCloudFormat(const Ice::Current &c=Ice::emptyCurrent) override
Returns the point cloud format info struct via Ice.
Definition: PointCloudProvider.cpp:46
visionx::AzureKinectPointCloudProvider::runPointcloudPublishing
void runPointcloudPublishing()
Definition: AzureKinectPointCloudProvider.cpp:1081
ARMARX_DEBUG
#define ARMARX_DEBUG
Definition: Logging.h:177
visionx::tools::depthValueToRGB
void depthValueToRGB(unsigned int depthInMM, unsigned char &r, unsigned char &g, unsigned char &b, bool noiseResistant=false)
Definition: ImageUtil.h:176
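A sketch of how this helper is typically applied, false-coloring a 16-bit depth map into an RGB CByteImage (3 bytes per pixel assumed):

    void colorizeDepth(const uint16_t* depthInMM, ::CByteImage& rgbOut)
    {
        unsigned char* dst = rgbOut.pixels;
        const int numPixels = rgbOut.width * rgbOut.height;
        for (int i = 0; i < numPixels; ++i)
        {
            // Maps each depth value to an RGB triple for visualization.
            visionx::tools::depthValueToRGB(depthInMM[i], dst[3 * i], dst[3 * i + 1], dst[3 * i + 2]);
        }
    }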
visionx::AzureKinectPointCloudProvider::onExitCapturingPointCloudProvider
void onExitCapturingPointCloudProvider() override
This is called when Component::onExitComponent() is called.
Definition: AzureKinectPointCloudProvider.cpp:418
armarx::aron::input
ReaderT::InputType & input
Definition: rw.h:19
visionx::AzureKinectPointCloudProviderPropertyDefinitions::AzureKinectPointCloudProviderPropertyDefinitions
AzureKinectPointCloudProviderPropertyDefinitions(std::string prefix)
Definition: AzureKinectPointCloudProvider.cpp:167
armarx::armem::human::HumanPose::cameraFrameName
std::string cameraFrameName
Definition: types.h:38
armarx::armem::server::ltm::mongodb::constantes::TIMESTAMP
const std::string TIMESTAMP
Definition: operations.h:16
visionx::AzureKinectPointCloudProvider::setMaxDepthBodyTracking
void setMaxDepthBodyTracking(int maxDepthInMM, const Ice::Current &=Ice::emptyCurrent) override
Definition: AzureKinectPointCloudProvider.cpp:1385
visionx::AzureKinectPointCloudProvider::getReferenceFrame
std::string getReferenceFrame(const Ice::Current &c) override
Definition: AzureKinectPointCloudProvider.cpp:1329
visionx::PointCloudProvider::onInitComponent
void onInitComponent() override
Definition: PointCloudProvider.cpp:59
visionx::AzureKinectPointCloudProvider::onDisconnectComponent
void onDisconnectComponent() override
Hook for subclass.
Definition: AzureKinectPointCloudProvider.cpp:1222
visionx::AzureKinectPointCloudProvider::AzureKinectPointCloudProvider
AzureKinectPointCloudProvider()
Definition: AzureKinectPointCloudProvider.cpp:1502
Metronome.h
max
T max(T t1, T t2)
Definition: gdiam.h:48
ARMARX_ERROR
#define ARMARX_ERROR
Definition: Logging.h:189
armarx::FramedOrientation
The FramedOrientation class.
Definition: FramedPose.h:199
visionx::AzureKinectPointCloudProvider::getImagesAreUndistorted
bool getImagesAreUndistorted(const ::Ice::Current &c) override
Definition: AzureKinectPointCloudProvider.cpp:1323
visionx::ImageProvider::provideImages
void provideImages(void **inputBuffers, const IceUtil::Time &imageTimestamp=IceUtil::Time())
Sends the raw images.
Definition: ImageProvider.cpp:319
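A usage sketch from within a provider, assuming two images (RGB and colorized depth) were registered via setNumberImages(2) and both CByteImage buffers are allocated:

    void* buffers[2] = {rgbImage->pixels, depthVisuImage->pixels};
    provideImages(buffers, armarx::TimeUtil::GetTime());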
armarx::armem::human::HumanPose
Definition: types.h:30
visionx::AzureKinectPointCloudProvider::onDisconnectImageProvider
void onDisconnectImageProvider() override
Definition: AzureKinectPointCloudProvider.cpp:384
armarx::armem::Time
armarx::core::time::DateTime Time
Definition: forward_declarations.h:13
armarx::to_string
const std::string & to_string(const std::string &s)
Definition: StringHelpers.h:40
forward_declarations.h
visionx::AzureKinectPointCloudProvider::onInitCapturingPointCloudProvider
void onInitCapturingPointCloudProvider() override
This is called when the Component::onInitComponent() is called.
Definition: AzureKinectPointCloudProvider.cpp:411
armarx::armem::human::HumanPose::humanTrackingId
std::optional< std::string > humanTrackingId
Definition: types.h:36
armarx::human::pose::model::k4a_bt_body_32::ModelId
const std::string ModelId
Definition: k4a_bt_body_32.h:34
visionx::AzureKinectPointCloudProvider::createPropertyDefinitions
armarx::PropertyDefinitionsPtr createPropertyDefinitions() override
Definition: AzureKinectPointCloudProvider.cpp:207
visionx::ImageProvider::setNumberImages
void setNumberImages(int numberImages)
Sets the number of images on each capture.
Definition: ImageProvider.cpp:313
armarx::TimeUtil::GetTime
static IceUtil::Time GetTime(TimeMode timeMode=TimeMode::VirtualTime)
Get the current time.
Definition: TimeUtil.cpp:42
armarx::core::time::DateTime::toMilliSecondsSinceEpoch
std::int64_t toMilliSecondsSinceEpoch() const
Definition: DateTime.cpp:102
armarx::core::time::DateTime
Represents a point in time.
Definition: DateTime.h:24
armarx::Component::getConfigIdentifier
std::string getConfigIdentifier()
Retrieves the config identifier for this component, as set in the constructor.
Definition: Component.cpp:74
visionx::AzureKinectPointCloudProvider::onInitComponent
void onInitComponent() override
Pure virtual hook for the subclass.
Definition: AzureKinectPointCloudProvider.cpp:1208
armarx::TimeUtil::GetTimeSince
static IceUtil::Time GetTimeSince(IceUtil::Time referenceTime, TimeMode timeMode=TimeMode::VirtualTime)
Get the difference between the current time and a reference time.
Definition: TimeUtil.cpp:66
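The usual timing pattern with these two helpers, as a sketch:

    const IceUtil::Time start = armarx::TimeUtil::GetTime();
    // ... capture and process one frame ...
    const IceUtil::Time elapsed = armarx::TimeUtil::GetTimeSince(start);
    ARMARX_INFO << "Frame took " << elapsed.toMilliSecondsDouble() << " ms";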
visionx::AzureKinectPointCloudProvider::enableHumanPoseEstimation
void enableHumanPoseEstimation(const armarx::EnableHumanPoseEstimationInput &input, const Ice::Current &=Ice::emptyCurrent) override
Definition: AzureKinectPointCloudProvider.cpp:1351
ARMARX_CHECK_EXPRESSION
#define ARMARX_CHECK_EXPRESSION(expression)
This macro evaluates the expression and, if it turns out to be false, throws an ExpressionException.
Definition: ExpressionException.h:73
visionx::PointCloudProvider::providePointCloud
void providePointCloud(PointCloudPtrT pointCloudPtr)
Offers the new point cloud.
Definition: PointCloudProvider.h:122
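A sketch, assuming the provider is configured for pcl::PointXYZRGBA clouds; depthWidth, depthHeight, and timestamp (an armarx::core::time::DateTime) are illustrative names:

    pcl::PointCloud<pcl::PointXYZRGBA>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZRGBA>());
    cloud->width = depthWidth;
    cloud->height = depthHeight;
    cloud->points.resize(std::size_t(cloud->width) * cloud->height);
    // PCL headers carry microseconds; derive them from the millisecond export.
    cloud->header.stamp = timestamp.toMilliSecondsSinceEpoch() * 1000;
    providePointCloud(cloud);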
visionx::ImageProvider::onConnectComponent
void onConnectComponent() override
Definition: ImageProvider.cpp:104
TimeUtil.h
ARMARX_INFO
#define ARMARX_INFO
Definition: Logging.h:174
std
Definition: Application.h:66
armarx::core::time::ScopedStopWatch
Measures the time this stop watch spent inside the current scope.
Definition: ScopedStopWatch.h:32
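A sketch, assuming the constructor accepts a callback that receives the measured Duration when the scope ends:

    {
        armarx::core::time::ScopedStopWatch sw(
            [](const armarx::core::time::Duration& duration)
            { ARMARX_VERBOSE << "Processing took " << duration.toMilliSecondsDouble() << " ms"; });
        // ... work being measured ...
    } // the callback fires here, when sw is destroyed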
HumanPoseWriter.h
armarx::Quaternion< float, 0 >
visionx::AzureKinectPointCloudProvider::onConnectImageProvider
void onConnectImageProvider() override
This is called as part of the Component::onConnectComponent() setup.
Definition: AzureKinectPointCloudProvider.cpp:348
armarx::DebugObserverComponentPluginUser::sendDebugObserverBatch
void sendDebugObserverBatch()
Definition: DebugObserverComponentPlugin.cpp:122
IceUtil::Handle< class PropertyDefinitionContainer >
armarx::core::time::Metronome
Simple rate limiter for use in loops to maintain a certain frequency given a clock.
Definition: Metronome.h:35
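A sketch of rate-limiting a publishing loop at 30 Hz, assuming construction from a Frequency and a waitForNextTick() method:

    armarx::core::time::Metronome metronome(armarx::core::time::Frequency::Hertz(30));
    while (captureEnabled)
    {
        // ... capture, convert, and publish one frame ...
        metronome.waitForNextTick(); // sleeps for the remainder of the period
    }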
visionx::AzureKinectPointCloudProvider::Framerate::Subordinate::skip
bool skip()
Definition: AzureKinectPointCloudProvider.cpp:1527
visionx::AzureKinectPointCloudProvider::getDefaultName
std::string getDefaultName() const override
Definition: AzureKinectPointCloudProvider.cpp:1202
visionx::ImageProvider::onInitComponent
void onInitComponent() override
Definition: ImageProvider.cpp:88
visionx::tools::convertEigenMatToVisionX
visionx::types::Mat convertEigenMatToVisionX(Eigen::MatrixXf m)
Definition: TypeMapping.cpp:677
armarx::ArmarXDataPath::ReplaceEnvVars
static bool ReplaceEnvVars(std::string &string)
ReplaceEnvVars replaces environment variables in a string with their values, if the environment variables are defined.
Definition: ArmarXDataPath.cpp:483
armarx::core::time::Duration
Represents a duration.
Definition: Duration.h:17
ImageUtil.h
visionx::AzureKinectPointCloudProvider::onExitComponent
void onExitComponent() override
Hook for subclass.
Definition: AzureKinectPointCloudProvider.cpp:1231
armarx::core::time::Clock::Now
static DateTime Now()
Current time on the virtual clock.
Definition: Clock.cpp:97
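A short sketch combining Clock::Now() with the millisecond export listed above:

    const armarx::core::time::DateTime now = armarx::core::time::Clock::Now();
    const std::int64_t stampMs = now.toMilliSecondsSinceEpoch();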
armarx::Logging::deactivateSpam
SpamFilterDataPtr deactivateSpam(float deactivationDurationSec=10.0f, const std::string &identifier="", bool deactivate=true) const
Disables logging for the current line for the given number of seconds.
Definition: Logging.cpp:92
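The usual pattern streams the filter into a log macro; sketch:

    // Emit this warning at most once every 5 seconds.
    ARMARX_WARNING << deactivateSpam(5.0) << "Dropped a frame (processing too slow).";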
Eigen::Matrix
Definition: EigenForwardDeclarations.h:27
armarx::ManagedIceObject::getName
std::string getName() const
Retrieves the name of the object.
Definition: ManagedIceObject.cpp:107
armarx::RunningTask::pointer_type
IceUtil::Handle< RunningTask< T > > pointer_type
Shared pointer type for convenience.
Definition: RunningTask.h:178
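A sketch of the common RunningTask pattern, using this provider's runPointcloudPublishing() as the task body:

    armarx::RunningTask<AzureKinectPointCloudProvider>::pointer_type task =
        new armarx::RunningTask<AzureKinectPointCloudProvider>(
            this, &AzureKinectPointCloudProvider::runPointcloudPublishing);
    task->start();
    // ... later, e.g. during disconnect:
    task->stop(); // signals the task and joins its thread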
visionx::AzureKinectPointCloudProvider::onStartCapture
void onStartCapture(float frames_per_second) override
This is called when the point cloud provider capturing has been started.
Definition: AzureKinectPointCloudProvider.cpp:426
armarx::core::time::StopWatch
Measures the elapsed time between construction (or the last call to reset()) and stop().
Definition: StopWatch.h:42
TypeMapping.h
armarx::core::time::Frequency::Hertz
static Frequency Hertz(std::int64_t hertz)
Definition: Frequency.cpp:23
ARMARX_INFO_S
#define ARMARX_INFO_S
Definition: Logging.h:195
armarx::ArmarXDataPath::getAbsolutePath
static bool getAbsolutePath(const std::string &relativeFilename, std::string &storeAbsoluteFilename, const std::vector< std::string > &additionalSearchPaths={}, bool verbose=true)
Definition: ArmarXDataPath.cpp:111
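A usage sketch; the file path is hypothetical:

    std::string absolutePath;
    if (armarx::ArmarXDataPath::getAbsolutePath("VisionX/data/azure_kinect_calibration.yaml", absolutePath))
    {
        ARMARX_INFO << "Using calibration file " << absolutePath;
    }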
armarx::armem::human::HumanPose::keypoints
KeyPointMap keypoints
Definition: types.h:35
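Taken together, the HumanPose entries above suggest the following assembly sketch for one detected body; only the field population is shown, and keypoint contents are omitted:

    armarx::armem::human::HumanPose pose;
    pose.poseModelId = armarx::human::pose::model::k4a_bt_body_32::ModelId;
    pose.cameraFrameName = "AzureKinectDepthCamera"; // hypothetical frame name
    pose.humanTrackingId = std::to_string(body.id); // body: a k4abt_body_t
    // pose.keypoints[...] = armarx::armem::human::PoseKeypoint{...}; // one entry per joint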
Logging.h
armarx::VariantType::FramedPosition
const VariantTypeId FramedPosition
Definition: FramedPose.h:39
ARMARX_CHECK_EQUAL
#define ARMARX_CHECK_EQUAL(lhs, rhs)
This macro evaluates whether lhs is equal (==) to rhs and, if it turns out to be false, throws an ExpressionException.
Definition: ExpressionException.h:130
ARMARX_WARNING
#define ARMARX_WARNING
Definition: Logging.h:186
ScopedStopWatch.h
helper.h
armarx::core::time::Duration::MicroSeconds
static Duration MicroSeconds(std::int64_t microSeconds)
Constructs a duration in microseconds.
Definition: Duration.cpp:27
armarx::PropertyDefinitionBase::eModifiable
@ eModifiable
Definition: PropertyDefinitionInterface.h:57
Variant.h
stopwatch::Clock
std::chrono::system_clock Clock
Definition: Stopwatch.h:10
visionx::AzureKinectPointCloudProvider::onInitImageProvider
void onInitImageProvider() override
This is called when the Component::onInitComponent() is called.
Definition: AzureKinectPointCloudProvider.cpp:278
armarx::core::time::Clock::now
DateTime now() const
Current date/time of the clock.
Definition: Clock.cpp:23
armarx::core::time::Duration::MilliSeconds
static Duration MilliSeconds(std::int64_t milliSeconds)
Constructs a duration in milliseconds.
Definition: Duration.cpp:55
armarx::DebugObserverComponentPluginUser::setDebugObserverDatafield
void setDebugObserverDatafield(Ts &&...ts) const
Definition: DebugObserverComponentPlugin.h:97
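A sketch of batching per-frame metrics; the channel names and values are illustrative:

    setDebugObserverDatafield("fps", measuredFps);
    setDebugObserverDatafield("droppedFrames", numDropped);
    sendDebugObserverBatch(); // flushes all datafields set since the last batch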
visionx::AzureKinectPointCloudProvider::onConnectComponent
void onConnectComponent() override
Pure virtual hook for the subclass.
Definition: AzureKinectPointCloudProvider.cpp:1215