AzureKinectPointCloudProvider.cpp
2 
3 #include <cstdio>
4 #include <filesystem>
5 #include <functional>
6 #include <mutex>
7 #include <utility>
8 
9 #include <opencv2/core/hal/interface.h>
10 #include <opencv2/imgproc/imgproc.hpp>
11 
12 #include <IceUtil/Time.h>
13 
23 
25 
31 
32 #include <Calibration/Calibration.h>
33 #include <Image/ImageProcessor.h>
34 
35 #ifdef INCLUDE_BODY_TRACKING
37 #endif
38 
39 
namespace
{
    /**
     * @brief Converts a k4a image to an IVT image.
     * @param color_image Reference to a k4a image containing the color data to convert.
     * @param result IVT image to store the converted image in. Its size must match that of color_image.
     */
    [[maybe_unused]] void
    k4aToIvtImage(const k4a::image& color_image, ::CByteImage& result)
    {
        if (color_image.get_format() == K4A_IMAGE_FORMAT_COLOR_YUY2)
        {
            // Convert YUY2 to RGB using OpenCV.
            cv::Mat yuy2_image(color_image.get_height_pixels(),
                               color_image.get_width_pixels(),
                               CV_8UC2,
                               const_cast<uint8_t*>(color_image.get_buffer()));
            cv::Mat rgb_image;
            cv::cvtColor(yuy2_image, rgb_image, cv::COLOR_YUV2RGB_YUY2);

            // Convert the OpenCV Mat to the IVT CByteImage format.
            visionx::imrec::convert(rgb_image, result);
        }
        else if (color_image.get_format() == K4A_IMAGE_FORMAT_COLOR_BGRA32)
        {
            auto cw = static_cast<unsigned int>(color_image.get_width_pixels());
            auto ch = static_cast<unsigned int>(color_image.get_height_pixels());
            ARMARX_CHECK_EQUAL(static_cast<unsigned int>(result.width), cw);
            ARMARX_CHECK_EQUAL(static_cast<unsigned int>(result.height), ch);

            auto color_buffer = color_image.get_buffer();
            auto rgb_buffer_ivt = result.pixels;

            // Index in the IVT image. This value will be increased by 3 per pixel.
            int index_ivt = 0;

            // Index in the k4a image. This value increases by 4 per pixel.
            int index_k4a = 0;

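            // Note: this loop assumes a densely packed BGRA32 buffer (stride == 4 * width),
            // which holds for BGRA32 images produced by the Azure Kinect SDK.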
            for (unsigned int y = 0; y < ch; ++y)
            {
                for (unsigned int x = 0; x < cw; ++x)
                {
                    rgb_buffer_ivt[index_ivt] = color_buffer[index_k4a + 2];
                    rgb_buffer_ivt[index_ivt + 1] = color_buffer[index_k4a + 1];
                    rgb_buffer_ivt[index_ivt + 2] = color_buffer[index_k4a + 0];
                    index_ivt += 3;
                    index_k4a += 4;
                }
            }
        }
        else
        {
            throw std::runtime_error("Unsupported color format in k4a image");
        }
    }


#ifdef INCLUDE_BODY_TRACKING
    void
    printBodyInformation(k4abt_body_t body)
    {
        ARMARX_VERBOSE << "Body ID: " << body.id;
        for (int i = 0; i < static_cast<int>(K4ABT_JOINT_COUNT); i++)
        {
            const k4a_float3_t position = body.skeleton.joints[i].position;
            const k4a_quaternion_t orientation = body.skeleton.joints[i].orientation;
            const k4abt_joint_confidence_level_t confidence_level =
                body.skeleton.joints[i].confidence_level;

            ARMARX_VERBOSE << "Joint " << i << ": "
                           << "Position[mm] (" << position.v[0] << "," << position.v[1] << ","
                           << position.v[2] << "); "
                           << "Orientation (" << orientation.v[0] << "," << orientation.v[1] << ","
                           << orientation.v[2] << "," << orientation.v[3] << "); "
                           << "Confidence Level " << confidence_level;
        }
    }
#endif

    const char*
    k4aImageFormatToString(k4a_image_format_t format)
    {
        switch (format)
        {
            case K4A_IMAGE_FORMAT_COLOR_MJPG:
                return "COLOR_MJPG";
            case K4A_IMAGE_FORMAT_COLOR_NV12:
                return "COLOR_NV12";
            case K4A_IMAGE_FORMAT_COLOR_YUY2:
                return "COLOR_YUY2";
            case K4A_IMAGE_FORMAT_COLOR_BGRA32:
                return "COLOR_BGRA32";
            case K4A_IMAGE_FORMAT_DEPTH16:
                return "DEPTH16";
            case K4A_IMAGE_FORMAT_IR16:
                return "IR16";
            case K4A_IMAGE_FORMAT_CUSTOM:
                return "CUSTOM";
            default:
                return "UNKNOWN";
        }
    }

} // namespace

namespace visionx
{

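    /**
     * Creates a callback that publishes a measured duration under the given
     * description, both as component meta info and as a debug observer datafield.
     */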
    std::function<void(armarx::Duration)>
    AzureKinectPointCloudProvider::createSwCallback(const std::string& description)
    {
        return [=](armarx::Duration duration)
        {
            std::string name = "duration " + description + " in [ms]";
            {
                std::lock_guard g{metaInfoMtx};
                setMetaInfo(name, new armarx::Variant{duration.toMilliSecondsDouble()});
            }

            {
                std::lock_guard g{debugObserverMtx};
                setDebugObserverDatafield(name, duration.toMilliSecondsDouble());
            }
        };
    }

    AzureKinectPointCloudProviderPropertyDefinitions::AzureKinectPointCloudProviderPropertyDefinitions(
        std::string prefix) :
        visionx::CapturingPointCloudProviderPropertyDefinitions(std::move(prefix))
    {
        defineOptionalProperty<k4a_color_resolution_t>(
            "ColorResolution", K4A_COLOR_RESOLUTION_720P, "Resolution of the RGB camera image.")
            .map("0", K4A_COLOR_RESOLUTION_OFF) /** Color camera will be turned off */
            .map("720", K4A_COLOR_RESOLUTION_720P) /** 1280x720 16:9 */
            .map("1080", K4A_COLOR_RESOLUTION_1080P) /** 1920x1080 16:9 */
            .map("1440", K4A_COLOR_RESOLUTION_1440P) /** 2560x1440 16:9 */
            .map("1536", K4A_COLOR_RESOLUTION_1536P) /** 2048x1536 4:3 */
            .map("2160", K4A_COLOR_RESOLUTION_2160P) /** 3840x2160 16:9 */
            .map("3072", K4A_COLOR_RESOLUTION_3072P); /** 4096x3072 4:3 */
        defineOptionalProperty<k4a_depth_mode_t>(
            "DepthMode", K4A_DEPTH_MODE_NFOV_UNBINNED, "Resolution/mode of the depth camera image.")
            .setCaseInsensitive(true)
            .map("OFF", K4A_DEPTH_MODE_OFF)
            .map("NFOV_2X2BINNED", K4A_DEPTH_MODE_NFOV_2X2BINNED)
            .map("NFOV_UNBINNED", K4A_DEPTH_MODE_NFOV_UNBINNED)
            .map("WFOV_2X2BINNED", K4A_DEPTH_MODE_WFOV_2X2BINNED)
            .map("WFOV_UNBINNED", K4A_DEPTH_MODE_WFOV_UNBINNED)
            .map("PASSIVE_IR", K4A_DEPTH_MODE_PASSIVE_IR);

        defineOptionalProperty<float>("MaxDepth",
                                      6000.0,
                                      "Max. allowed depth value in mm. Depth values above this "
                                      "threshold will be set to NaN.");

        defineOptionalProperty<float>("CaptureTimeOffset",
                                      16.0f,
                                      "In milliseconds. Time offset between capturing the image on "
                                      "the hardware and receiving the image in this process.",
                                      armarx::PropertyDefinitionBase::eModifiable);

        defineOptionalProperty<k4a_image_format_t>(
            "ColorFormat", K4A_IMAGE_FORMAT_COLOR_BGRA32, "Color format of the RGB camera image.")
            .map("BGRA", K4A_IMAGE_FORMAT_COLOR_BGRA32)
            .map("YUY2", K4A_IMAGE_FORMAT_COLOR_YUY2);
    }

    armarx::PropertyDefinitionsPtr
    AzureKinectPointCloudProvider::createPropertyDefinitions()
    {
        armarx::PropertyDefinitionsPtr defs =
            new AzureKinectPointCloudProviderPropertyDefinitions(getConfigIdentifier());

        defs->optional(
            enableColorUndistortion,
            "EnableColorUndistortion",
            "Undistort the color images using the full 8 radial and tangential distortion "
            "parameters provided by the Azure Kinect.\n"
            "This can help for processing tasks which cannot handle radial parameters k3-k6.\n"
            "Note that this drastically reduces the FPS (to something like 3).");
        defs->optional(externalCalibrationFilePath,
                       "ExternalCalibrationFilePath",
                       "Path to an optional external calibration file, which has a camera matrix "
                       "and distortion parameters.");
        defs->optional(mDeviceId, "device_id", "ID of the device.");

        defs->optional(robotName, "robotName");
        defs->optional(bodyCameraFrameName, "bodyCameraFrameName");
        defs->optional(framerate.value,
                       "framerate.images",
                       "The framerate of RGB-D images [frames per second]."
                       "\nNote that the point cloud and body tracking frame rates are controlled by"
                       " the properties 'framerate' (point cloud) and 'framerate.bodyTracking'"
                       " (body tracking), respectively.")
            .setMin(5)
            .setMax(30);

#ifdef INCLUDE_BODY_TRACKING
        defs->optional(bodyTrackingEnabled,
                       "bodyTrackingEnabled",
                       "Whether the Azure Kinect Body Tracking SDK should be enabled or not.");
        defs->optional(bodyTrackingRunAtStart,
                       "bodyTrackingRunAtStart",
                       "Whether the Azure Kinect Body Tracking SDK should directly run when the "
                       "component is started. "
                       "Otherwise, it has to be activated via the Ice interface.");
        defs->optional(bodyTrackingModelFilename,
                       "bodyTrackingModelPath",
                       "Path where the .onnx DNN files can be found");
        defs->optional(bodyTrackingGPUDeviceID, "bodyTrackingGPUDeviceID", "GPU Device ID.");
        defs->optional(bodyTrackingTemporalSmoothingFactor,
                       "bodyTrackingTemporalSmoothingFactor",
                       "Temporal smoothing factor for Azure Kinect body tracking.");

        defs->optional(bodyTrackingDepthMaskMinX, "bodyTrackingDepthMaskMinX");
        defs->optional(bodyTrackingDepthMaskMaxX, "bodyTrackingDepthMaskMaxX");
        defs->optional(bodyTrackingDepthMaskMaxZ, "bodyTrackingDepthMaskMaxZ");
        defs->optional(framerate.bodyTracking.value,
                       "framerate.bodyTracking",
                       "The framerate with which the body tracking is run [frames per second]."
                       "\nNote that the RGB-D image and point cloud frame rates are controlled by"
                       " the properties 'framerate.images' (RGB-D images) and 'framerate'"
                       " (point cloud), respectively.")
            .setMin(1.)
            .setMax(30.);

        defs->optional(startIMU, "startIMU");

        humanPoseWriter.registerPropertyDefinitions(defs);
#endif

        defs->optional(enableHeartbeat, "enableHeartbeat");

        return defs;
    }

    void
    AzureKinectPointCloudProvider::onInitImageProvider()
    {
        config = K4A_DEVICE_CONFIG_INIT_DISABLE_ALL;
        if (framerate.value == 5)
        {
            config.camera_fps = K4A_FRAMES_PER_SECOND_5;
        }
        else if (framerate.value == 15)
        {
            config.camera_fps = K4A_FRAMES_PER_SECOND_15;
        }
        else if (framerate.value == 30)
        {
            config.camera_fps = K4A_FRAMES_PER_SECOND_30;
        }
        else
        {
            throw armarx::LocalException("Invalid image framerate (property 'framerate.images'): ")
                << framerate.value << ". Only framerates 5, 15 and 30 are "
                << "supported by Azure Kinect.";
        }

        framerate.pointCloud.value =
            getProperty<float>("framerate"); // point cloud provider framerate
        framerate.pointCloud.update(framerate.value);

#ifdef INCLUDE_BODY_TRACKING
        framerate.bodyTracking.update(framerate.value);
#endif

        config.depth_mode = getProperty<k4a_depth_mode_t>("DepthMode");
        config.color_format = getProperty<k4a_image_format_t>("ColorFormat");
        config.color_resolution = getProperty<k4a_color_resolution_t>("ColorResolution");

        // This means that we'll only get captures that have both color and depth images, so we
        // don't need to check if the capture contains a particular type of image.
        config.synchronized_images_only = true;

        // Set number of images per frame.
        setNumberImages(2);

        auto depth_dim = GetDepthDimensions(config.depth_mode);
        auto color_dim = GetColorDimensions(config.color_resolution);

        ARMARX_INFO << "Depth image size: " << depth_dim.first << "x" << depth_dim.second;
        ARMARX_INFO << "Color image size: " << color_dim.first << "x" << color_dim.second;

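        // DEPTH16 stores one uint16_t per pixel, so the row stride of the aligned
        // depth image is width * 2 bytes.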
        alignedDepthImage =
            k4a::image::create(K4A_IMAGE_FORMAT_DEPTH16,
                               color_dim.first,
                               color_dim.second,
                               color_dim.first * 2 * static_cast<int32_t>(sizeof(uint8_t)));

        setImageFormat(visionx::ImageDimension(color_dim.first, color_dim.second),
                       visionx::eRgb,
                       visionx::eBayerPatternGr);

        resultDepthImage.reset(visionx::tools::createByteImage(getImageFormat(), visionx::eRgb));
        resultColorImage =
            std::make_unique<CByteImage>(color_dim.first, color_dim.second, CByteImage::eRGB24);

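        // The XYZ image is a custom-format image holding three int16_t values
        // (x, y, z in millimeters) per pixel, hence a stride of width * 3 * 2 bytes.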
        xyzImage = k4a::image::create(K4A_IMAGE_FORMAT_CUSTOM,
                                      color_dim.first,
                                      color_dim.second,
                                      color_dim.first * 3 * static_cast<int32_t>(sizeof(int16_t)));

        pointcloud = std::make_unique<pcl::PointCloud<CloudPointType>>();
    }

    void
    AzureKinectPointCloudProvider::onConnectImageProvider()
    {
        ARMARX_DEBUG << "Connecting " << getName();

        setDebugObserverBatchModeEnabled(true);

        pointcloudTask = new armarx::RunningTask<AzureKinectPointCloudProvider>(
            this, &AzureKinectPointCloudProvider::runPointcloudPublishing);
        pointcloudTask->start();

#ifdef INCLUDE_BODY_TRACKING
        if (bodyTrackingEnabled)
        {
            ARMARX_INFO << "Connecting to human memory ...";
            humanPoseWriter.connect(memoryNameSystem());
            ARMARX_INFO << "Connected to human memory.";

            bodyTrackingPublishTask = new armarx::RunningTask<AzureKinectPointCloudProvider>(
                this, &AzureKinectPointCloudProvider::runPublishBodyTrackingResults);
        }
#endif

        if (enableHeartbeat)
        {
            ARMARX_CHECK_NOT_NULL(heartbeatPlugin);
            heartbeatPlugin->signUp(armarx::Duration::MilliSeconds(500),
                                    {"Vision", "Camera"},
                                    "AzureKinectPointCloudProvider");
        }

        ARMARX_VERBOSE << "Connected " << getName();
    }

    void
    AzureKinectPointCloudProvider::onDisconnectImageProvider()
    {
        ARMARX_DEBUG << "Disconnecting " << getName();

        // Stop task
        {
            std::lock_guard<std::mutex> lock(pointcloudProcMutex);
            ARMARX_DEBUG << "Stopping pointcloud processing thread...";
            const bool WAIT_FOR_JOIN = false;
            pointcloudTask->stop(WAIT_FOR_JOIN);
            pointcloudProcSignal.notify_all();
            ARMARX_DEBUG << "Waiting for pointcloud processing thread to stop...";
            pointcloudTask->waitForStop();
            ARMARX_DEBUG << "Pointcloud processing thread stopped";
        }

#ifdef INCLUDE_BODY_TRACKING
        if (bodyTrackingIsRunning)
        {
            bodyTrackingPublishTask->stop();
        }
#endif

        ARMARX_DEBUG << "Disconnected " << getName();
    }

    void
    AzureKinectPointCloudProvider::onInitCapturingPointCloudProvider()
    {
        this->setPointCloudSyncMode(eCaptureSynchronization);
        ARMARX_TRACE;
    }
417 
418  void
420  {
421  ARMARX_INFO << "Closing Azure Kinect device";
422  device.close();
423  ARMARX_INFO << "Closed Azure Kinect device";
424  }

    void
    AzureKinectPointCloudProvider::onStartCapture(float framesPerSecond)
    {
        ARMARX_TRACE;

        cloudFormat = getPointCloudFormat();

        // Check for devices.
        const uint32_t DEVICE_COUNT = k4a::device::get_installed_count();
        if (DEVICE_COUNT == 0)
        {
            getArmarXManager()->asyncShutdown();
            throw armarx::LocalException("No Azure Kinect devices detected!");
        }

        ARMARX_TRACE;

        device = k4a::device::open(static_cast<uint32_t>(mDeviceId));
        ARMARX_DEBUG << "Opened device id #" << mDeviceId << " with serial number "
                     << device.get_serialnum() << ".";

        ARMARX_TRACE;

        auto depthDim = GetDepthDimensions(config.depth_mode);
        auto colorDim = GetColorDimensions(config.color_resolution);
        (void)depthDim;
        (void)colorDim;

        k4aCalibration = device.get_calibration(config.depth_mode, config.color_resolution);
        ARMARX_INFO
            << "Color camera calibration:"
            << "\n"
            << "cx: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.cx
            << "\n"
            << "cy: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.cy
            << "\n"
            << "fx: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.fx
            << "\n"
            << "fy: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.fy
            << "\n"
            << "k1: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k1
            << "\n"
            << "k2: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k2
            << "\n"
            << "p1: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.p1
            << "\n"
            << "p2: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.p2
            << "\n"
            << "k3: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k3
            << "\n"
            << "k4: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k4
            << "\n"
            << "k5: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k5
            << "\n"
            << "k6: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k6;


        auto c = getStereoCalibration(Ice::Current());
        CCalibration* c_left = tools::convert(c.calibrationLeft);
        c_left->PrintCameraParameters();
        delete c_left;
        CCalibration* c_right = tools::convert(c.calibrationRight);
        c_right->PrintCameraParameters();
        delete c_right;

        transformation = k4a::transformation(k4aCalibration);

#ifdef INCLUDE_BODY_TRACKING
        if (bodyTrackingEnabled)
        {
            // Resolve environment variables, if necessary.
            armarx::ArmarXDataPath::ReplaceEnvVars(bodyTrackingModelFilename);

            const bool found = armarx::ArmarXDataPath::getAbsolutePath(bodyTrackingModelFilename,
                                                                       bodyTrackingModelFilename);
            ARMARX_CHECK(found) << "Body tracking DNN model could not be found/resolved at `"
                                << bodyTrackingModelFilename << "`.";

            ARMARX_INFO << "Using body tracking DNN model from directory `"
                        << bodyTrackingModelFilename << "`.";

            ARMARX_CHECK(std::filesystem::exists(bodyTrackingModelFilename))
                << "The path `" << bodyTrackingModelFilename << "` does not exist!";

            k4abt_tracker_configuration_t const bodyTrackingConfig{
                .sensor_orientation = K4ABT_SENSOR_ORIENTATION_DEFAULT,
                .processing_mode = K4ABT_TRACKER_PROCESSING_MODE_GPU_CUDA,
                .gpu_device_id = bodyTrackingGPUDeviceID,
                .model_path = bodyTrackingModelFilename.c_str()};

            bodyTracker = k4abt::tracker::create(k4aCalibration, bodyTrackingConfig);
            bodyTracker.set_temporal_smoothing(bodyTrackingTemporalSmoothingFactor);

            if (bodyTrackingRunAtStart and not bodyTrackingIsRunning)
            {
                bodyTrackingIsRunning = true;
                bodyTrackingPublishTask->start();
            }
        }
#endif

        ARMARX_TRACE;
        device.set_color_control(K4A_COLOR_CONTROL_BRIGHTNESS, K4A_COLOR_CONTROL_MODE_MANUAL, 128);

        device.start_cameras(&config);

        ARMARX_TRACE;

        if (startIMU)
        {
            device.start_imu();
            ARMARX_INFO << "IMU is active";

            ARMARX_TRACE;
        }

        setMetaInfo("serialNumber", new armarx::Variant(device.get_serialnum()));
        setMetaInfo("rgbVersion", new armarx::Variant(VersionToString(device.get_version().rgb)));
        setMetaInfo("depthVersion",
                    new armarx::Variant(VersionToString(device.get_version().depth)));
        setMetaInfo("depthSensorVersion",
                    new armarx::Variant(VersionToString(device.get_version().depth_sensor)));
        setMetaInfo("audioVersion",
                    new armarx::Variant(VersionToString(device.get_version().audio)));

        ARMARX_TRACE;

        // Color image calibration
        {
            // Load intrinsics from camera
            const k4a_calibration_camera_t calibration{k4aCalibration.color_camera_calibration};
            const k4a_calibration_intrinsic_parameters_t::_param param =
                calibration.intrinsics.parameters.param;

            // // Scale intrinsics according to image scale
            // param.fx *= image_scale_;
            // param.fy *= image_scale_;

            // Calculate distortion map
            cv::Mat1f camera_matrix(3, 3);
            camera_matrix << param.fx, 0, param.cx, 0, param.fy, param.cy, 0, 0, 1;
            cv::Mat1f new_camera_matrix(3, 3);
            new_camera_matrix << param.fx, 0, param.cx, 0, param.fx, param.cy, 0, 0, 1;
            cv::Mat1f distortion_coeff(1, 8);
            distortion_coeff << param.k1, param.k2, param.p1, param.p2, param.k3, param.k4,
                param.k5, param.k6;
            cv::Mat map1, map2, map3;
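            // Compute floating-point undistortion maps once, then convert them to the
            // fixed-point CV_16SC2 representation, which makes the per-frame cv::remap
            // call faster.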
            cv::initUndistortRectifyMap(
                camera_matrix,
                distortion_coeff,
                cv::Mat{},
                new_camera_matrix,
                cv::Size{calibration.resolution_width, calibration.resolution_height},
                CV_32FC1,
                map1,
                map2);
            cv::convertMaps(map1, map2, colorDistortionMap, map3, CV_16SC2, true);
        }
    }

    void
    AzureKinectPointCloudProvider::onStopCapture()
    {
        if (startIMU)
        {
            device.stop_imu();
        }
    }

    bool
    AzureKinectPointCloudProvider::doCapture()
    {
        using armarx::core::time::ScopedStopWatch;
        using armarx::core::time::StopWatch;

        ScopedStopWatch sw_total{createSwCallback("doCapture")};

        k4a::capture capture;
        const std::chrono::milliseconds TIMEOUT{1000};

        StopWatch sw_get_capture;
        bool status = false;
        try
        {
            ARMARX_DEBUG << "Try capture.";
            status = device.get_capture(&capture, TIMEOUT);
            ARMARX_DEBUG << "Got capture.";
        }
        catch (const std::exception&)
        {
            ARMARX_WARNING << "Failed to get capture from device (#" << ++mDiagnostics.num_crashes
                           << "). Restarting camera.";
            StopWatch sw;
            device.stop_cameras();
            device.start_cameras(&config);
            ARMARX_INFO << "Restarting took " << sw.stop() << ".";
            return false;
        }

        if (status)
        {
            createSwCallback("waiting for get_capture")(sw_get_capture.stop());

            {
                std::lock_guard g{metaInfoMtx};
                setMetaInfo("temperature", new armarx::Variant(capture.get_temperature_c()));
            }

            // see ROS: This will give INCORRECT timestamps until the first image.
            const bool mustInitializeTimestampOffset = [&]()
            {
                std::lock_guard g{deviceToRealtimeOffsetMtx};
                return device_to_realtime_offset_.count() == 0;
            }();
            if (mustInitializeTimestampOffset)
            {
                initializeTimestampOffset(capture.get_depth_image().get_device_timestamp());
            }

            // Next, we update the timestamp offset continuously.
            updateTimestampOffset(capture.get_ir_image().get_device_timestamp(),
                                  capture.get_ir_image().get_system_timestamp());

            const k4a::image DEPTH_IMAGE = capture.get_depth_image();

            // This function assumes that the image is made of depth pixels (i.e. uint16_t's),
            // which is only true for IR/depth images.
            const k4a_image_format_t IMAGE_FORMAT = DEPTH_IMAGE.get_format();
            if (IMAGE_FORMAT != K4A_IMAGE_FORMAT_DEPTH16 && IMAGE_FORMAT != K4A_IMAGE_FORMAT_IR16)
            {
                const char* format_str = ::k4aImageFormatToString(IMAGE_FORMAT);
                std::stringstream error_msg;
                error_msg << "Attempted to colorize a non-depth image with format: " << format_str;
                throw std::logic_error(error_msg.str());
            }

            // Provide data for pointcloud processing thread and signal to start processing.
            if (not framerate.pointCloud.skip())
            {
                ScopedStopWatch sw{createSwCallback("transform depth image to camera")};
                // Acquire lock and write data needed by pointcloud thread (i.e.,
                // alignedDepthImageScaled and depthImageReady).
                {
                    std::lock_guard lock{pointcloudProcMutex};
                    transformation.depth_image_to_color_camera(DEPTH_IMAGE, &alignedDepthImage);

                    // Signal that point cloud processing may proceed and reset processed flag.
                    depthImageReady = true;
                }

                ARMARX_DEBUG << "Notifying pointcloud thread.";
                pointcloudProcSignal.notify_one();
            }

#ifdef INCLUDE_BODY_TRACKING
            if (bodyTrackingIsRunning)
            {
                ARMARX_DEBUG << "Body tracking is running.";
                std::scoped_lock lock(bodyTrackingParameterMutex);

                if (not framerate.bodyTracking.skip())
                {
                    k4a::image ir_image = capture.get_ir_image();
                    std::uint8_t* ir_image_buffer = ir_image.get_buffer();

                    k4a::image depth_image = capture.get_depth_image();
                    std::uint8_t* depth_image_buffer = depth_image.get_buffer();

                    ARMARX_CHECK_EQUAL(ir_image.get_width_pixels(),
                                       depth_image.get_width_pixels());
                    ARMARX_CHECK_EQUAL(ir_image.get_height_pixels(),
                                       depth_image.get_height_pixels());

                    const int stride = ir_image.get_stride_bytes() / ir_image.get_width_pixels();
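                    // `stride` is the number of bytes per pixel (2 for IR16/DEPTH16).
                    // Pixels outside the configured mask are overwritten with 0xFFFF
                    // (both bytes set to 255) in the IR and depth images so that the
                    // body tracker effectively ignores them.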

                    for (int x = 0; x < ir_image.get_width_pixels(); x++)
                    {
                        for (int y = 0; y < ir_image.get_height_pixels(); y++)
                        {
                            const int i = (y * ir_image.get_width_pixels() * stride) + (x * stride);
                            const int z = (static_cast<int>(depth_image_buffer[i])) +
                                          (static_cast<int>(depth_image_buffer[i + 1]) << 8);

                            if ((bodyTrackingDepthMaskMinX > 0 and x < bodyTrackingDepthMaskMinX) or
                                (bodyTrackingDepthMaskMaxX > 0 and x > bodyTrackingDepthMaskMaxX) or
                                (bodyTrackingDepthMaskMaxZ > 0 and z > bodyTrackingDepthMaskMaxZ))
                            {
                                ir_image_buffer[i] = std::numeric_limits<std::uint8_t>::max();
                                ir_image_buffer[i + 1] = std::numeric_limits<std::uint8_t>::max();
                                depth_image_buffer[i] = std::numeric_limits<std::uint8_t>::max();
                                depth_image_buffer[i + 1] = std::numeric_limits<std::uint8_t>::max();
                            }
                        }
                    }

                    if (not bodyTracker.enqueue_capture(capture))
                    {
                        ARMARX_WARNING << "Adding capture to the tracker process queue timed out.";
                    }
                }
            }
#endif

            const k4a::image COLOR_IMAGE = capture.get_color_image();
            ARMARX_DEBUG << "Got COLOR image.";

            auto real_time = IceUtil::Time::now();
            auto monotonic_time = IceUtil::Time::now(IceUtil::Time::Monotonic);
            auto clock_diff = real_time - monotonic_time;

            auto image_monotonic_time =
                IceUtil::Time::microSeconds(std::chrono::duration_cast<std::chrono::microseconds>(
                                                DEPTH_IMAGE.get_system_timestamp())
                                                .count());
            long offset = long(getProperty<float>("CaptureTimeOffset").getValue() * 1000.0f);

            imagesTime = image_monotonic_time + clock_diff - IceUtil::Time::microSeconds(offset);

            {
                std::lock_guard g{metaInfoMtx};
                setMetaInfo("image age in [ms]",
                            new armarx::Variant{(real_time - imagesTime).toMilliSecondsDouble()});
            }

            {
                std::lock_guard g{debugObserverMtx};
                setDebugObserverDatafield("image age in [ms]",
                                          (real_time - imagesTime).toMilliSecondsDouble());
            }

            if (enableColorUndistortion)
            {
                // Only the color image needs to be rectified.
                cv::Mat tmp_rgb_image;

                if (COLOR_IMAGE.get_format() == K4A_IMAGE_FORMAT_COLOR_YUY2)
                {
                    ARMARX_DEBUG << "Converting YUY2 image.";
                    cv::Mat yuy2_image(COLOR_IMAGE.get_height_pixels(),
                                       COLOR_IMAGE.get_width_pixels(),
                                       CV_8UC2,
                                       const_cast<uint8_t*>(COLOR_IMAGE.get_buffer()));

                    cv::cvtColor(yuy2_image, tmp_rgb_image, cv::COLOR_YUV2RGB_YUY2);
                }
                else if (COLOR_IMAGE.get_format() == K4A_IMAGE_FORMAT_COLOR_BGRA32)
                {
                    cv::cvtColor(cv::Mat{cv::Size{COLOR_IMAGE.get_width_pixels(),
                                                  COLOR_IMAGE.get_height_pixels()},
                                         CV_8UC4,
                                         (void*)COLOR_IMAGE.get_buffer(),
                                         cv::Mat::AUTO_STEP},
                                 tmp_rgb_image,
                                 cv::COLOR_BGRA2RGB);
                }
                else
                {
                    throw std::runtime_error("Unsupported color format in k4a image");
                }

                // Note: cv::Mat expects (rows, cols), i.e. (height, width).
                cv::Mat cv_color_image_undistorted(
                    COLOR_IMAGE.get_height_pixels(), COLOR_IMAGE.get_width_pixels(), CV_8UC3);

                cv::remap(tmp_rgb_image,
                          cv_color_image_undistorted,
                          colorDistortionMap,
                          cv::Mat{},
                          cv::INTER_NEAREST,
                          cv::BORDER_CONSTANT);

                visionx::imrec::convert_rgb2cbi(cv_color_image_undistorted, *resultColorImage);
            }
            else
            {
                // Convert the k4a image to an IVT image.
                ScopedStopWatch sw{createSwCallback("convert k4a image to IVT")};
                ::k4aToIvtImage(COLOR_IMAGE, *resultColorImage);
            }

            // Prepare result depth image.
            {
                ScopedStopWatch sw{createSwCallback("prepare result depth image")};

                const int DW = alignedDepthImage.get_width_pixels();
                const int DH = alignedDepthImage.get_height_pixels();

                ARMARX_CHECK_EXPRESSION(resultDepthImage);
                ARMARX_CHECK_EQUAL(resultDepthImage->width, DW);
                ARMARX_CHECK_EQUAL(resultDepthImage->height, DH);

                auto result_depth_buffer = resultDepthImage->pixels;
                const auto* depth_buffer =
                    reinterpret_cast<const uint16_t*>(alignedDepthImage.get_buffer());

                int index = 0;
                int index_2 = 0;
                for (int y = 0; y < DH; ++y)
                {
                    for (int x = 0; x < DW; ++x)
                    {
                        uint16_t depth_value = depth_buffer[index_2];
                        // Encode the 16-bit depth value into the three RGB channels.
                        visionx::tools::depthValueToRGB(depth_value,
                                                        result_depth_buffer[index],
                                                        result_depth_buffer[index + 1],
                                                        result_depth_buffer[index + 2]);
                        index += 3;
                        index_2 += 1;
                    }
                }
            }

            // Broadcast RGB-D image.
            {
                ScopedStopWatch sw{createSwCallback("broadcast RGB-D image")};
                CByteImage* images[2] = {resultColorImage.get(), resultDepthImage.get()};
                provideImages(images, imagesTime);
            }

            // Wait until `alignedDepthImage` was processed and can be overridden again.
            {
                std::unique_lock signal_lock{pointcloudProcMutex};
                ARMARX_DEBUG << "Capturing thread waiting for signal...";
                pointcloudProcSignal.wait(signal_lock, [&] { return depthImageProcessed; });
                ARMARX_DEBUG << "Capturing thread received signal.";
            }
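
            // Handshake with the point cloud thread: doCapture() sets depthImageReady
            // and waits here until the point cloud thread has consumed the aligned
            // depth image and set depthImageProcessed again.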

            {
                std::lock_guard g{debugObserverMtx};
                sendDebugObserverBatch();
            }

            if (enableHeartbeat)
            {
                heartbeatPlugin->heartbeat();
            }

            // return true;
        }
        else
        {
            ARMARX_INFO << deactivateSpam(30) << "Did not get a frame within the timeout of "
                        << TIMEOUT;

            {
                std::lock_guard g{debugObserverMtx};
                sendDebugObserverBatch();
            }

            return false;
        }

        if (startIMU)
        {
            try
            {
                Eigen::Vector3f accMean = Eigen::Vector3f::Zero();
                std::size_t cnt = 0;

                k4a_imu_sample_t imu_sample;
                while (device.get_imu_sample(&imu_sample, std::chrono::milliseconds(0)))
                {
                    const Eigen::Vector3f acceleration{imu_sample.acc_sample.xyz.x,
                                                       imu_sample.acc_sample.xyz.y,
                                                       imu_sample.acc_sample.xyz.z};

                    accMean += acceleration;
                    cnt++;
                }

                // Guard against division by zero if no sample arrived in time.
                if (cnt > 0)
                {
                    accMean /= static_cast<float>(cnt);
                }

                ARMARX_INFO << "Got IMU sample";

                {
                    std::lock_guard g{debugObserverMtx};

                    setDebugObserverDatafield("acceleration.x", accMean.x());
                    setDebugObserverDatafield("acceleration.y", accMean.y());
                    setDebugObserverDatafield("acceleration.z", accMean.z());

                    sendDebugObserverBatch();
                }
            }
            catch (const std::exception&)
            {
                ARMARX_WARNING << "Failed to get imu samples from device (#"
                               << ++mDiagnostics.num_crashes << ").";
            }
        }

        return true;
    }

#ifdef INCLUDE_BODY_TRACKING
    void
    AzureKinectPointCloudProvider::runPublishBodyTrackingResults()
    {
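        // Pace the publishing loop at the body tracking framerate. NOTE: this
        // declaration is an assumption based on the metronome.waitForNextTick()
        // call below; the armarx::core::time::Metronome/Frequency API is assumed.
        armarx::core::time::Metronome metronome{armarx::core::time::Frequency::Hertz(
            static_cast<std::int64_t>(framerate.bodyTracking.value))};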
        while (bodyTrackingIsRunning)
        {

            // Body frames might just not be available.
            const k4abt::frame body_frame = [&]() -> k4abt::frame
            {
                try
                {
                    auto result = bodyTracker.pop_result();
                    return result;
                }
                catch (...)
                {
                    ARMARX_VERBOSE << deactivateSpam(1) << "Exception in body tracking publishing: "
                                   << armarx::GetHandledExceptionString();
                    return {};
                }
            }();

            if (body_frame != nullptr)
            {
                armarx::core::time::ScopedStopWatch sw_total{
                    createSwCallback("publish body tracking result")};
                // see https://github.com/microsoft/Azure_Kinect_ROS_Driver/blob/melodic/src/k4a_ros_device.cpp
                const armarx::DateTime timestamp =
                    timestampToArmarX(body_frame.get_device_timestamp());

                {
                    auto real_time = IceUtil::Time::now();
                    auto monotonic_time = IceUtil::Time::now(IceUtil::Time::Monotonic);
                    auto clock_diff = real_time - monotonic_time;

                    auto image_monotonic_time = IceUtil::Time::microSeconds(
                        std::chrono::duration_cast<std::chrono::microseconds>(
                            body_frame.get_system_timestamp())
                            .count());
                    // long offset = long(getProperty<float>("CaptureTimeOffset").getValue() * 1000.0f);

                    // IceUtil::Time imageTime = image_monotonic_time + clock_diff - IceUtil::Time::microSeconds(offset);
                    IceUtil::Time imageTime = image_monotonic_time + clock_diff;

                    {
                        std::lock_guard g{debugObserverMtx};
                        setDebugObserverDatafield("ros_vs_ice_timestamp [µs]",
                                                  imageTime.toMicroSeconds());
                    }
                }

                {
                    const armarx::Clock realtimeClock = armarx::Clock(armarx::ClockType::Realtime);
                    const armarx::Clock monotonicClock =
                        armarx::Clock(armarx::ClockType::Monotonic);

                    auto real_time = realtimeClock.now();
                    auto monotonic_time = monotonicClock.now();
                    auto clock_diff = real_time - monotonic_time;

                    auto image_monotonic_time = armarx::Duration::MicroSeconds(
                        std::chrono::duration_cast<std::chrono::microseconds>(
                            body_frame.get_system_timestamp())
                            .count());
                    // long offset = long(getProperty<float>("CaptureTimeOffset").getValue() * 1000.0f);

                    // IceUtil::Time imageTime = image_monotonic_time + clock_diff - IceUtil::Time::microSeconds(offset);
                    auto imageTime = image_monotonic_time + clock_diff;

                    armarx::DateTime imageTimestamp = armarx::DateTime(imageTime);

                    {
                        std::lock_guard g{debugObserverMtx};
                        setDebugObserverDatafield("ros_vs_armarx_timestamp [µs]",
                                                  imageTime.toMicroSeconds());
                    }
                }


                std::uint32_t num_bodies = body_frame.get_num_bodies();
                {
                    std::lock_guard g{debugObserverMtx};
                    setDebugObserverDatafield("n_bodies_detected", num_bodies);
                }

                std::vector<armarx::armem::human::HumanPose> humanPoses;
                humanPoses.reserve(num_bodies);

                for (std::uint32_t i = 0; i < num_bodies; i++)
                {
                    k4abt_body_t body = body_frame.get_body(i);
                    printBodyInformation(body);

                    armarx::armem::human::HumanPose humanPose;
                    humanPose.humanTrackingId = std::to_string(body.id);
                    humanPose.cameraFrameName = bodyCameraFrameName;

                    for (int i = 0; i < static_cast<int>(K4ABT_JOINT_COUNT); i++)
                    {
                        const auto joint =
                            static_cast<armarx::human::pose::model::k4a_bt_body_32::Joints>(i);
                        const auto name =
                            armarx::human::pose::model::k4a_bt_body_32::JointNames.to_name(joint);

                        k4a_float3_t position = body.skeleton.joints[i].position;
                        k4a_quaternion_t orientation = body.skeleton.joints[i].orientation;
                        k4abt_joint_confidence_level_t confidence_level =
                            body.skeleton.joints[i].confidence_level;

                        humanPose.keypoints[name] = armarx::armem::human::PoseKeypoint{
                            .label = name,
                            .confidence = static_cast<float>(static_cast<int>(confidence_level)),
                            .positionCamera = armarx::FramedPosition(
                                Eigen::Vector3f{position.v[0], position.v[1], position.v[2]},
                                bodyCameraFrameName,
                                robotName),
                            .orientationCamera =
                                armarx::FramedOrientation(Eigen::Quaternionf(orientation.wxyz.w,
                                                                             orientation.wxyz.x,
                                                                             orientation.wxyz.y,
                                                                             orientation.wxyz.z)
                                                              .toRotationMatrix(),
                                                          bodyCameraFrameName,
                                                          robotName)};
                    }

                    humanPoses.push_back(humanPose);
                }

                {
                    std::lock_guard g{debugObserverMtx};
                    setDebugObserverDatafield("bodyTrackingCaptureUntilPublish [ms]",
                                              (armarx::Clock::Now() - timestamp).toMilliSeconds());
                }


                {
                    ARMARX_TRACE;
                    ARMARX_INFO << deactivateSpam(1) << "committing human poses";
                    humanPoseWriter.commitHumanPosesInCameraFrame(humanPoses, getName(), timestamp);
                }


                // k4a::image body_index_map = body_frame.get_body_index_map();
                // if (body_index_map != nullptr)
                // {
                //     //print_body_index_map_middle_line(body_index_map);
                // }
                // else
                // {
                //     ARMARX_WARNING << "Error: Failed to generate bodyindex map!";
                // }
            }
            else
            {
                // It should never hit the timeout when K4A_WAIT_INFINITE is set.
                ARMARX_VERBOSE << "Error: Popping the body frame result timed out!";
            }

            metronome.waitForNextTick();
        }
    }
#endif

    void
    AzureKinectPointCloudProvider::runPointcloudPublishing()
    {
        ARMARX_DEBUG << "Started pointcloud processing task.";

        IceUtil::Time time;

        // Main pointcloud processing loop.
        while (not pointcloudTask->isStopped())
        {
            // Wait for data.
            std::unique_lock signal_lock{pointcloudProcMutex};
            ARMARX_DEBUG << "Pointcloud thread waiting for signal...";
            pointcloudProcSignal.wait(
                signal_lock, [&] { return pointcloudTask->isStopped() or depthImageReady; });
            ARMARX_DEBUG << "Pointcloud thread received signal.";

            // Assess timings, reset flags.
            const IceUtil::Time TIMESTAMP = imagesTime;
            depthImageReady = false;
            depthImageProcessed = false;

            // Transform depth image to pointcloud.
            {
                time = IceUtil::Time::now();
                transformation.depth_image_to_point_cloud(
                    alignedDepthImage, K4A_CALIBRATION_TYPE_COLOR, &xyzImage);
                ARMARX_DEBUG << "Transforming depth image to point cloud took "
                             << (IceUtil::Time::now() - time).toMilliSecondsDouble() << " ms";
            }

            // Construct PCL pointcloud.
            {
                time = IceUtil::Time::now();

                ARMARX_CHECK_EXPRESSION(pointcloud);

                pointcloud->width = static_cast<uint32_t>(xyzImage.get_width_pixels());
                pointcloud->height = static_cast<uint32_t>(xyzImage.get_height_pixels());

                ARMARX_TRACE;

                pointcloud->is_dense = false;
                pointcloud->points.resize(pointcloud->width * pointcloud->height);

                auto k4a_cloud_buffer = reinterpret_cast<const int16_t*>(xyzImage.get_buffer());

                ARMARX_CHECK_EXPRESSION(k4a_cloud_buffer);

                unsigned char* color_buffer = resultColorImage->pixels;
                while (color_buffer == nullptr)
                {
                    ARMARX_INFO << "color_buffer is null. This should never happen. There is "
                                   "probably a race condition somewhere that needs to be fixed. "
                                   "Temporarily we ignore this and continue.\n Timestamp: "
                                << TIMESTAMP.toMicroSeconds();
                    color_buffer = resultColorImage->pixels;
                }
                ARMARX_CHECK_NOT_NULL(color_buffer);

                ARMARX_CHECK_EQUAL(xyzImage.get_width_pixels(), resultColorImage->width);
                ARMARX_CHECK_EQUAL(xyzImage.get_height_pixels(), resultColorImage->height);
                ARMARX_CHECK_EQUAL(pointcloud->points.size(),
                                   pointcloud->width * pointcloud->height);

                ARMARX_TRACE;

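                // Both buffers hold three values per pixel (RGB as uint8, XYZ as
                // int16 in millimeters), so a single running index walks them in
                // lockstep. Invalid measurements (z == 0) and points beyond MaxDepth
                // are mapped to NaN.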
                size_t index = 0;
                float max_depth = getProperty<float>("MaxDepth").getValue();
                for (auto& p : pointcloud->points)
                {
                    p.r = color_buffer[index];
                    p.x = k4a_cloud_buffer[index];

                    index++;

                    p.g = color_buffer[index];
                    p.y = k4a_cloud_buffer[index];

                    index++;

                    p.b = color_buffer[index];
                    auto z = k4a_cloud_buffer[index];

                    index++;

                    if (z <= max_depth and z != 0)
                    {
                        p.z = z;
                    }
                    else
                    {
                        p.z = std::numeric_limits<float>::quiet_NaN();
                    }
                }

                ARMARX_DEBUG << "Constructing point cloud took "
                             << (IceUtil::Time::now() - time).toMilliSecondsDouble() << " ms";
            }

            // Notify capturing thread that data was processed and may be overridden.
            depthImageProcessed = true;
            signal_lock.unlock();
            ARMARX_DEBUG << "Notifying capturing thread...";
            pointcloudProcSignal.notify_all();

            ARMARX_TRACE;

            // Broadcast PCL pointcloud.
            {
                time = IceUtil::Time::now();
                pointcloud->header.stamp = static_cast<unsigned long>(TIMESTAMP.toMicroSeconds());
                providePointCloud(pointcloud);
                ARMARX_DEBUG << "Broadcasting pointcloud took "
                             << (IceUtil::Time::now() - time).toMilliSecondsDouble() << " ms";
            }
        }

        ARMARX_DEBUG << "Stopped pointcloud processing task.";
    }

    std::string
    AzureKinectPointCloudProvider::getDefaultName() const
    {
        return "AzureKinectPointCloudProvider";
    }

    void
    AzureKinectPointCloudProvider::onInitComponent()
    {
        ImageProvider::onInitComponent();
        PointCloudProvider::onInitComponent();
    }

    void
    AzureKinectPointCloudProvider::onConnectComponent()
    {
        ImageProvider::onConnectComponent();
        PointCloudProvider::onConnectComponent();
    }

    void
    AzureKinectPointCloudProvider::onDisconnectComponent()
    {
        captureEnabled = false;

        ImageProvider::onDisconnectComponent();
        PointCloudProvider::onDisconnectComponent();
    }

    void
    AzureKinectPointCloudProvider::onExitComponent()
    {
        ImageProvider::onExitComponent();
        PointCloudProvider::onExitComponent();
    }

    StereoCalibration
    AzureKinectPointCloudProvider::getStereoCalibration(const Ice::Current&)
    {
        using namespace Eigen;
        using Matrix3FRowMajor = Matrix<float, 3, 3, StorageOptions::RowMajor>;

        // TODO: use the externally used cameraMatrix and distCoeffs
        const auto convert_calibration =
            [](const k4a_calibration_camera_t& k4a_calib, float scale = 1.f)
        {
            MonocularCalibration monocular_calibration;

            const k4a_calibration_intrinsic_parameters_t& params = k4a_calib.intrinsics.parameters;
            monocular_calibration.cameraParam.principalPoint = {params.param.cx * scale,
                                                                params.param.cy * scale};
            monocular_calibration.cameraParam.focalLength = {params.param.fx * scale,
                                                             params.param.fy * scale};
            // TODO: Figure out conversions. IVT (Calibration.h) expects 4 parameters:
            // - The first radial lens distortion parameter.
            // - The second radial lens distortion parameter.
            // - The first tangential lens distortion parameter.
            // - The second tangential lens distortion parameter.
            // However, the Kinect offers k1-k6 radial distortion coefficients and the p1-p2
            // tangential distortion parameters, which means that k3-k6 are unused.
            // It is even unclear whether this is correct now, as previously it was a vector of all
            // 6 k-params, which resulted in failed assertions in TypeMapping.cpp in the function
            // CStereoCalibration* visionx::tools::convert(const visionx::StereoCalibration& stereoCalibration),
            // line 314 at the time of this commit.
            // See: https://microsoft.github.io/Azure-Kinect-Sensor-SDK/master/structk4a__calibration__intrinsic__parameters__t_1_1__param.html
            monocular_calibration.cameraParam.distortion = {
                params.param.k1,
                params.param.k2,
                params.param.p1,
                params.param.p2,
            };

            // TODO: This needs to be scaled! Or why shouldn't it?
            monocular_calibration.cameraParam.width =
                std::floor(float(k4a_calib.resolution_width) * scale);
            monocular_calibration.cameraParam.height =
                std::floor(float(k4a_calib.resolution_height) * scale);

            const Matrix3FRowMajor rotation =
                Map<const Matrix3FRowMajor>{k4a_calib.extrinsics.rotation};

            monocular_calibration.cameraParam.rotation = convertEigenMatToVisionX(rotation);
            monocular_calibration.cameraParam.translation = {k4a_calib.extrinsics.translation,
                                                             k4a_calib.extrinsics.translation + 3};

            return monocular_calibration;
        };

        StereoCalibration stereo_calibration;

        stereo_calibration.calibrationLeft =
            convert_calibration(k4aCalibration.color_camera_calibration);

        // If the camera images are rectified, the distortion params are 0.
        if (enableColorUndistortion)
        {
            auto& colorDistortionParams = stereo_calibration.calibrationLeft.cameraParam.distortion;
            std::fill(colorDistortionParams.begin(), colorDistortionParams.end(), 0);
        }

        // The depth image is rectified by default.
        {
            // The depth image has been warped to the color camera frame; see
            // depth_image_to_color_camera() above. Therefore, we use the calibration of the
            // color camera (focal length etc.) and set the distortion parameters to 0.
            stereo_calibration.calibrationRight =
                convert_calibration(k4aCalibration.color_camera_calibration);

            auto& depthDistortionParams =
                stereo_calibration.calibrationRight.cameraParam.distortion;
            std::fill(depthDistortionParams.begin(), depthDistortionParams.end(), 0);
        }

        stereo_calibration.rectificationHomographyLeft =
            convertEigenMatToVisionX(Matrix3f::Identity());
        stereo_calibration.rectificationHomographyRight =
            convertEigenMatToVisionX(Matrix3f::Identity());

        return stereo_calibration;
    }

    bool
    AzureKinectPointCloudProvider::getImagesAreUndistorted(const ::Ice::Current&)
    {
        return enableColorUndistortion;
    }

    std::string
    AzureKinectPointCloudProvider::getReferenceFrame(const Ice::Current&)
    {
        return getProperty<std::string>("frameName");
    }

    std::vector<imrec::ChannelPreferences>
    AzureKinectPointCloudProvider::getImageRecordingChannelPreferences(const Ice::Current&)
    {
        ARMARX_TRACE;

        imrec::ChannelPreferences rgb;
        rgb.requiresLossless = false;
        rgb.name = "rgb";

        imrec::ChannelPreferences depth;
        depth.requiresLossless = true;
        depth.name = "depth";

        return {rgb, depth};
    }

    void
    AzureKinectPointCloudProvider::enableHumanPoseEstimation(
        const armarx::EnableHumanPoseEstimationInput& input,
        const Ice::Current&)
    {
#ifndef INCLUDE_BODY_TRACKING
        {
            ARMARX_ERROR << "INCLUDE_BODY_TRACKING is not defined.";
            return;
        }
#endif

#ifdef INCLUDE_BODY_TRACKING
        if (bodyTrackingEnabled)
        {
            // should not require a mutex
            if (not bodyTrackingIsRunning and input.enable3d)
            {
                bodyTrackingIsRunning = true;
                bodyTrackingPublishTask->start();
            }
            else if (bodyTrackingIsRunning and not input.enable3d)
            {
                bodyTrackingIsRunning = false;
                bodyTrackingPublishTask->stop();
            }
        }
        else
        {
            ARMARX_ERROR << "Azure Kinect Body Tracking is not enabled";
        }
#endif
    }

    void
    AzureKinectPointCloudProvider::setMaxDepthBodyTracking(int maxDepthInMM, const Ice::Current&)
    {
        std::scoped_lock lock(bodyTrackingParameterMutex);
        bodyTrackingDepthMaskMaxZ = maxDepthInMM;
    }

    void
    AzureKinectPointCloudProvider::setWidthBodyTracking(int minXinPixel,
                                                        int maxXinPixel,
                                                        const Ice::Current&)
    {
        std::scoped_lock lock(bodyTrackingParameterMutex);
        bodyTrackingDepthMaskMinX = minXinPixel;
        bodyTrackingDepthMaskMaxX = maxXinPixel;
    }

    // The code below is taken from https://github.com/microsoft/Azure_Kinect_ROS_Driver
    // (MIT license).

    void
    AzureKinectPointCloudProvider::updateTimestampOffset(
        const std::chrono::microseconds& k4a_device_timestamp_us,
        const std::chrono::nanoseconds& k4a_system_timestamp_ns)
    {
        // The system timestamp is on the monotonic system clock.
        // Device time is on the AKDK hardware clock.
        // We want to continuously estimate the diff between realtime and the AKDK hardware clock
        // as a low-pass offset.
        // This consists of two parts: device to monotonic, and monotonic to realtime.

        // First figure out the realtime to monotonic offset. This will keep being updated.
        std::chrono::nanoseconds realtime_clock =
            std::chrono::system_clock::now().time_since_epoch();
        std::chrono::nanoseconds monotonic_clock =
            std::chrono::steady_clock::now().time_since_epoch();

        std::chrono::nanoseconds monotonic_to_realtime = realtime_clock - monotonic_clock;

        // Next figure out the other part (combined).
        std::chrono::nanoseconds device_to_realtime =
            k4a_system_timestamp_ns - k4a_device_timestamp_us + monotonic_to_realtime;
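
        // That is: device_to_realtime = (system_ts - device_ts) + (realtime - monotonic),
        // where (system_ts - device_ts) maps the device clock onto the monotonic clock
        // and (realtime - monotonic) maps the monotonic clock onto the realtime clock.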

        {
            std::lock_guard g{deviceToRealtimeOffsetMtx};

            {
                std::lock_guard g{debugObserverMtx};
                setDebugObserverDatafield("device_to_realtime_offset [ms]",
                                          device_to_realtime_offset_.count() /
                                              1'000'000.f); // [ns] -> [ms]
                setDebugObserverDatafield(
                    "clock_error",
                    std::abs<float>((device_to_realtime_offset_ - device_to_realtime).count()) /
                        1'000.f); // [ns] -> [µs]
            }

            const std::int64_t timeOffsetThreshold = 1e7; // 10 ms

            // If we're over a certain time off, just snap into place.
            if (device_to_realtime_offset_.count() == 0 ||
                std::abs((device_to_realtime_offset_ - device_to_realtime).count()) >
                    timeOffsetThreshold)
            {
                ARMARX_VERBOSE << deactivateSpam()
                               << "Initializing or re-initializing the device to realtime offset: "
                               << device_to_realtime.count() << " ns";
                device_to_realtime_offset_ = device_to_realtime;
            }
            else
            {
                // Low-pass filter!
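                // Exponential moving average: with smoothing factor alpha,
                //   offset_new = offset_old + alpha * (measurement - offset_old),
                // so each new measurement only nudges the estimate by 10 %.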
                constexpr double alpha = 0.10;

                const std::chrono::nanoseconds timeCorrection(static_cast<int64_t>(
                    std::floor(alpha * (device_to_realtime - device_to_realtime_offset_).count())));
                device_to_realtime_offset_ = device_to_realtime_offset_ + timeCorrection;

                {
                    std::lock_guard g{debugObserverMtx};
                    setDebugObserverDatafield("timeCorrection [µs]",
                                              timeCorrection.count() / 1'000); // [ns] -> [µs]
                }
            }
        }
    }

    armarx::DateTime
    AzureKinectPointCloudProvider::timestampToArmarX(
        const std::chrono::microseconds& k4a_timestamp_us)
    {
        std::lock_guard g{deviceToRealtimeOffsetMtx};

        // must be initialized beforehand
        ARMARX_CHECK(device_to_realtime_offset_.count() != 0);

        std::chrono::nanoseconds timestamp_in_realtime =
            k4a_timestamp_us + device_to_realtime_offset_;

        return armarx::Duration::MicroSeconds(timestamp_in_realtime.count() /
                                              1'000); // [ns] -> [µs]
    }
1492 
1493  void
1495  const std::chrono::microseconds& k4a_device_timestamp_us)
1496  {
1497  std::lock_guard g{deviceToRealtimeOffsetMtx};
1498 
1499  // We have no better guess than "now".
1500  std::chrono::nanoseconds realtime_clock =
1501  std::chrono::system_clock::now().time_since_epoch();
1502 
1503  device_to_realtime_offset_ = realtime_clock - k4a_device_timestamp_us;
1504 
1505  ARMARX_INFO << "Initializing the device to realtime offset based on wall clock: "
1506  << device_to_realtime_offset_.count() << " ns";
1507  }

    AzureKinectPointCloudProvider::AzureKinectPointCloudProvider()
    {
        addPlugin(heartbeatPlugin);
    }

    void
    AzureKinectPointCloudProvider::Framerate::Subordinate::update(int higherFramerate)
    {
        if (std::abs(std::fmod(higherFramerate, value)) > 1e-6)
        {
            std::stringstream ss;
            ss << "Invalid value (" << value << " fps) for framerate of '" << name << "'."
               << " Reason: The framerate has to be a divisor of the property framerate.images ("
               << higherFramerate << " fps).";
            throw armarx::LocalException() << ss.str();
        }
        skipFrames = std::round(higherFramerate / value) - 1;
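        // Example: images at 30 fps and a subordinate target of 15 fps give
        // skipFrames = 30 / 15 - 1 = 1, i.e. every second frame is published.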
        if (skipFrames != 0)
        {
            ARMARX_INFO_S << "Only publishing every " << (skipFrames + 1) << "'th " << name
                          << " result!";
        }
    }

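    // Returns true if the current frame should be dropped to reach this
    // subordinate's lower target framerate; every (skipFrames + 1)-th call
    // returns false, and that frame is the one that gets published.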
    bool
    AzureKinectPointCloudProvider::Framerate::Subordinate::skip()
    {
        bool skip = false;
        if (skipFrames > 0)
        {
            skip = skipFramesCount;
            skipFramesCount++;
            skipFramesCount %= (skipFrames + 1);
        }
        return skip;
    }

} // namespace visionx
armarx::plugins::HeartbeatComponentPlugin::heartbeat
void heartbeat()
Sends out a heartbeat using the default config.
Definition: HeartbeatComponentPlugin.cpp:80
visionx::AzureKinectPointCloudProviderPropertyDefinitions
Definition: AzureKinectPointCloudProvider.h:73
armarx::Variant
The Variant class is described here: Variants.
Definition: Variant.h:223
ARMARX_VERBOSE
#define ARMARX_VERBOSE
Definition: Logging.h:187
Eigen
Definition: Elements.h:32
visionx::PointCloudProvider::onConnectComponent
void onConnectComponent() override
Definition: PointCloudProvider.cpp:80
visionx::AzureKinectPointCloudProvider::GetDepthDimensions
static std::pair< int, int > GetDepthDimensions(const k4a_depth_mode_t depth_mode)
Returns the dimension of the depth images that will be produced for a certain resolution.
Definition: AzureKinectPointCloudProvider.h:223
armarx::plugins::HeartbeatComponentPlugin::signUp
void signUp(const std::string &channelName="", const std::vector< std::string > &aliases={}, const std::string &description="")
register component to heartbeat
Definition: HeartbeatComponentPlugin.cpp:14
armarx::human::pose::model::k4a_bt_body_32::Joints
Joints
Joints with index as defined in the body model.
Definition: k4a_bt_body_32.h:38
armarx::ManagedIceObject::setMetaInfo
void setMetaInfo(const std::string &id, const VariantBasePtr &value)
Allows to set meta information that can be queried live via Ice interface on the ArmarXManager.
Definition: ManagedIceObject.cpp:795
LocalException.h
visionx
ArmarX headers.
Definition: OpenPoseStressTest.h:38
visionx::AzureKinectPointCloudProvider::Framerate::Subordinate::update
void update(int higherFramerate)
Definition: AzureKinectPointCloudProvider.cpp:1515
AzureKinectPointCloudProvider.h
index
uint8_t index
Definition: EtherCATFrame.h:59
StopWatch.h
visionx::AzureKinectPointCloudProvider::doCapture
bool doCapture() override
Main capturing function.
Definition: AzureKinectPointCloudProvider.cpp:597
armarx::DebugObserverComponentPluginUser::setDebugObserverBatchModeEnabled
void setDebugObserverBatchModeEnabled(bool enable)
Definition: DebugObserverComponentPlugin.cpp:129
visionx::AzureKinectPointCloudProvider::getStereoCalibration
visionx::StereoCalibration getStereoCalibration(const Ice::Current &c) override
Definition: AzureKinectPointCloudProvider.cpp:1245
ARMARX_CHECK_NOT_NULL
#define ARMARX_CHECK_NOT_NULL(ptr)
This macro evaluates whether ptr is not null and if it turns out to be false it will throw an Express...
Definition: ExpressionException.h:206
visionx::CapturingPointCloudProvider::setPointCloudSyncMode
void setPointCloudSyncMode(ImageSyncMode pointCloudSyncMode)
Sets the point cloud synchronization mode.
Definition: CapturingPointCloudProvider.cpp:198
armarx::DebugObserverComponentPluginUser::setDebugObserverDatafield
void setDebugObserverDatafield(Ts &&... ts) const
Definition: DebugObserverComponentPlugin.h:86
visionx::AzureKinectPointCloudProvider::VersionToString
static std::string VersionToString(const k4a_version_t &version)
Creates a string from a k4a_version_t.
Definition: AzureKinectPointCloudProvider.h:249
armarx::ManagedIceObject::getArmarXManager
ArmarXManagerPtr getArmarXManager() const
Returns the ArmarX manager used to add and remove components.
Definition: ManagedIceObject.cpp:360
trace.h
armarx::armem::human::PoseKeypoint::label
std::string label
Definition: types.h:19
visionx::imrec::convert_rgb2cbi
void convert_rgb2cbi(const cv::Mat &in, CByteImage &out)
Converts an OpenCV RGB Mat to IVT's CByteImage.
Definition: helper.cpp:64
visionx::ImageProvider::onExitComponent
void onExitComponent() override
Definition: ImageProvider.cpp:161
c
constexpr T c
Definition: UnscentedKalmanFilterTest.cpp:46
armarx::ManagedIceObject::addPlugin
PluginT * addPlugin(const std::string prefix="", ParamsT &&... params)
Definition: ManagedIceObject.h:186
visionx::AzureKinectPointCloudProvider::initializeTimestampOffset
void initializeTimestampOffset(const std::chrono::microseconds &k4a_device_timestamp_us)
Definition: AzureKinectPointCloudProvider.cpp:1494
visionx::AzureKinectPointCloudProvider::GetColorDimensions
static std::pair< int, int > GetColorDimensions(const k4a_color_resolution_t resolution)
Returns the dimension of the color images that will be produced for a certain resolution.
Definition: AzureKinectPointCloudProvider.h:195
visionx::ImageProvider::setImageFormat
void setImageFormat(ImageDimension imageDimension, ImageType imageType, BayerPatternType bayerPatternType=visionx::eBayerPatternRg)
Sets the image basic format data.
Definition: ImageProvider.cpp:305
visionx::AzureKinectPointCloudProvider::getImageRecordingChannelPreferences
std::vector< imrec::ChannelPreferences > getImageRecordingChannelPreferences(const Ice::Current &) override
Definition: AzureKinectPointCloudProvider.cpp:1342
visionx::AzureKinectPointCloudProvider::updateTimestampOffset
void updateTimestampOffset(const std::chrono::microseconds &k4a_device_timestamp_us, const std::chrono::nanoseconds &k4a_system_timestamp_ns)
Definition: AzureKinectPointCloudProvider.cpp:1412
visionx::AzureKinectPointCloudProvider::createSwCallback
std::function< void(armarx::Duration)> createSwCallback(const std::string &description)
Definition: AzureKinectPointCloudProvider.cpp:150
visionx::ImageProvider::getImageFormat
ImageFormatInfo getImageFormat(const Ice::Current &c=Ice::emptyCurrent) override
Returns the entire image format info struct via Ice.
Definition: ImageProvider.cpp:76
armarx::RunningTask
Definition: ArmarXMultipleObjectsScheduler.h:36
armarx::core::time::Clock
Clock to get date/time from a specific clock type or wait for certain durations or until certain date...
Definition: Clock.h:25
k4a_bt_body_32.h
ARMARX_CHECK
#define ARMARX_CHECK(expression)
Shortcut for ARMARX_CHECK_EXPRESSION.
Definition: ExpressionException.h:82
visionx::tools::createByteImage
CByteImage * createByteImage(const ImageFormatInfo &imageFormat, const ImageType imageType)
Creates a ByteImage for the destination type specified in the given imageProviderInfo.
visionx::PointCloudProvider::onDisconnectComponent
void onDisconnectComponent() override
Hook for subclass.
Definition: PointCloudProvider.cpp:101
visionx::PointCloudProvider::onExitComponent
void onExitComponent() override
Definition: PointCloudProvider.cpp:112
armarx::TimeMode::SystemTime
@ SystemTime
visionx::ImageProvider::onDisconnectComponent
void onDisconnectComponent() override
Hook for subclass.
Definition: ImageProvider.cpp:149
visionx::tools::convert
CByteImage::ImageType convert(const ImageType visionxImageType)
Converts a VisionX image type into an image type of IVT's ByteImage.
Definition: TypeMapping.cpp:97
visionx::AzureKinectPointCloudProvider::setWidthBodyTracking
void setWidthBodyTracking(int minXinPixel, int maxXinPixel, const Ice::Current &=Ice::emptyCurrent) override
Definition: AzureKinectPointCloudProvider.cpp:1399
visionx::CapturingPointCloudProvider::capture
virtual void capture()
Definition: CapturingPointCloudProvider.cpp:154
visionx::AzureKinectPointCloudProvider::onStopCapture
void onStopCapture() override
This is called when the point cloud provider capturing has been stopped.
Definition: AzureKinectPointCloudProvider.cpp:588
armarx::armem::human::HumanPose::poseModelId
std::string poseModelId
Definition: types.h:32
armarx::GetHandledExceptionString
std::string GetHandledExceptionString()
Definition: Exception.cpp:165
ARMARX_TRACE
#define ARMARX_TRACE
Definition: trace.h:77
armarx::status
status
Definition: FiniteStateMachine.h:244
GfxTL::Identity
void Identity(MatrixXX< N, N, T > *a)
Definition: MatrixXX.h:570
visionx::CapturingPointCloudProvider::captureEnabled
std::atomic_bool captureEnabled
Indicates that capturing is enabled and running.
Definition: CapturingPointCloudProvider.h:230
armarx::ctrlutil::a
double a(double t, double a0, double j)
Definition: CtrlUtil.h:45
armarx::armem::human::PoseKeypoint
Definition: types.h:17
cxxopts::value
std::shared_ptr< Value > value()
Definition: cxxopts.hpp:855
visionx::imrec::convert
void convert(const CByteImage &in, cv::Mat &out)
Converts an IVT CByteImage to OpenCV's BGR Mat.
Definition: helper.cpp:40
types.h
armarx::abs
std::vector< T > abs(const std::vector< T > &v)
Definition: VectorHelpers.h:281
armarx::human::pose::model::k4a_bt_body_32::JointNames
const simox::meta::EnumNames< Joints > JointNames
Names of the joints as defined in the body model.
Definition: k4a_bt_body_32.h:77
visionx::AzureKinectPointCloudProvider::timestampToArmarX
armarx::DateTime timestampToArmarX(const std::chrono::microseconds &k4a_timestamp_us)
Definition: AzureKinectPointCloudProvider.cpp:1478
if
if(!yyvaluep)
Definition: Grammar.cpp:645
FramedPose.h
visionx::CapturingPointCloudProviderPropertyDefinitions
Definition: CapturingPointCloudProvider.h:39
visionx::PointCloudProvider::getPointCloudFormat
MetaPointCloudFormatPtr getPointCloudFormat(const Ice::Current &c=Ice::emptyCurrent) override
Returns the point cloud format info struct via Ice.
Definition: PointCloudProvider.cpp:50
visionx::AzureKinectPointCloudProvider::runPointcloudPublishing
void runPointcloudPublishing()
Definition: AzureKinectPointCloudProvider.cpp:1088
ARMARX_DEBUG
#define ARMARX_DEBUG
Definition: Logging.h:184
visionx::tools::depthValueToRGB
void depthValueToRGB(unsigned int depthInMM, unsigned char &r, unsigned char &g, unsigned char &b, bool noiseResistant=false)
Definition: ImageUtil.h:183
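A direct usage sketch with the signature above; the depth value is arbitrary:

    // Colorize a single depth measurement for visualization.
    unsigned char r, g, b;
    visionx::tools::depthValueToRGB(1250 /*mm*/, r, g, b, /*noiseResistant=*/true);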
visionx::AzureKinectPointCloudProvider::onExitCapturingPointCloudProvider
void onExitCapturingPointCloudProvider() override
This is called when Component::onExitComponent() is called.
Definition: AzureKinectPointCloudProvider.cpp:419
armarx::aron::input
ReaderT::InputType & input
Definition: rw.h:12
visionx::AzureKinectPointCloudProviderPropertyDefinitions::AzureKinectPointCloudProviderPropertyDefinitions
AzureKinectPointCloudProviderPropertyDefinitions(std::string prefix)
Definition: AzureKinectPointCloudProvider.cpp:168
armarx::armem::human::HumanPose::cameraFrameName
std::string cameraFrameName
Definition: types.h:38
armarx::armem::server::ltm::mongodb::constantes::TIMESTAMP
const std::string TIMESTAMP
Definition: operations.h:16
visionx::AzureKinectPointCloudProvider::setMaxDepthBodyTracking
void setMaxDepthBodyTracking(int maxDepthInMM, const Ice::Current &=Ice::emptyCurrent) override
Definition: AzureKinectPointCloudProvider.cpp:1392
visionx::AzureKinectPointCloudProvider::getReferenceFrame
std::string getReferenceFrame(const Ice::Current &c) override
Definition: AzureKinectPointCloudProvider.cpp:1336
visionx::PointCloudProvider::onInitComponent
void onInitComponent() override
Definition: PointCloudProvider.cpp:61
visionx::AzureKinectPointCloudProvider::onDisconnectComponent
void onDisconnectComponent() override
Hook for subclass.
Definition: AzureKinectPointCloudProvider.cpp:1229
timestamp
std::string timestamp()
Definition: CartographerAdapter.cpp:85
visionx::AzureKinectPointCloudProvider::AzureKinectPointCloudProvider
AzureKinectPointCloudProvider()
Definition: AzureKinectPointCloudProvider.cpp:1509
Metronome.h
max
T max(T t1, T t2)
Definition: gdiam.h:51
ARMARX_ERROR
#define ARMARX_ERROR
Definition: Logging.h:196
armarx::FramedOrientation
The FramedOrientation class.
Definition: FramedPose.h:215
visionx::AzureKinectPointCloudProvider::getImagesAreUndistorted
bool getImagesAreUndistorted(const ::Ice::Current &c) override
Definition: AzureKinectPointCloudProvider.cpp:1330
visionx::ImageProvider::provideImages
void provideImages(void **inputBuffers, const IceUtil::Time &imageTimestamp=IceUtil::Time())
Sends the raw images.
Definition: ImageProvider.cpp:351
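A sketch of how a provider subclass might publish a synchronized RGB/depth pair; the image members and the timestamp variable are assumptions:

    // Publish one RGB/depth frame pair from inside an ImageProvider subclass.
    void* buffers[2] = {rgbImage->pixels, depthImage->pixels};
    provideImages(buffers, IceUtil::Time::microSeconds(timestampUs));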
armarx::armem::human::HumanPose
Definition: types.h:30
visionx::AzureKinectPointCloudProvider::onDisconnectImageProvider
void onDisconnectImageProvider() override
Definition: AzureKinectPointCloudProvider.cpp:385
armarx::armem::Time
armarx::core::time::DateTime Time
Definition: forward_declarations.h:13
armarx::to_string
const std::string & to_string(const std::string &s)
Definition: StringHelpers.h:41
forward_declarations.h
visionx::AzureKinectPointCloudProvider::onInitCapturingPointCloudProvider
void onInitCapturingPointCloudProvider() override
This is called when Component::onInitComponent() is called.
Definition: AzureKinectPointCloudProvider.cpp:412
armarx::armem::human::HumanPose::humanTrackingId
std::optional< std::string > humanTrackingId
Definition: types.h:36
armarx::human::pose::model::k4a_bt_body_32::ModelId
const std::string ModelId
Definition: k4a_bt_body_32.h:33
visionx::AzureKinectPointCloudProvider::createPropertyDefinitions
armarx::PropertyDefinitionsPtr createPropertyDefinitions() override
Definition: AzureKinectPointCloudProvider.cpp:208
visionx::ImageProvider::setNumberImages
void setNumberImages(int numberImages)
Sets the number of images on each capture.
Definition: ImageProvider.cpp:344
armarx::TimeUtil::GetTime
static IceUtil::Time GetTime(TimeMode timeMode=TimeMode::VirtualTime)
Get the current time.
Definition: TimeUtil.cpp:42
armarx::core::time::DateTime::toMilliSecondsSinceEpoch
std::int64_t toMilliSecondsSinceEpoch() const
Definition: DateTime.cpp:93
armarx::core::time::DateTime
Represents a point in time.
Definition: DateTime.h:24
armarx::Component::getConfigIdentifier
std::string getConfigIdentifier()
Retrieve config identifier for this component as set in constructor.
Definition: Component.cpp:79
visionx::AzureKinectPointCloudProvider::onInitComponent
void onInitComponent() override
Pure virtual hook for the subclass.
Definition: AzureKinectPointCloudProvider.cpp:1215
armarx::TimeUtil::GetTimeSince
static IceUtil::Time GetTimeSince(IceUtil::Time referenceTime, TimeMode timeMode=TimeMode::VirtualTime)
Get the difference between the current time and a reference time.
Definition: TimeUtil.cpp:68
visionx::AzureKinectPointCloudProvider::enableHumanPoseEstimation
void enableHumanPoseEstimation(const armarx::EnableHumanPoseEstimationInput &input, const Ice::Current &=Ice::emptyCurrent) override
Definition: AzureKinectPointCloudProvider.cpp:1358
ARMARX_CHECK_EXPRESSION
#define ARMARX_CHECK_EXPRESSION(expression)
This macro evaluates the expression and, if it turns out to be false, it will throw an ExpressionException.
Definition: ExpressionException.h:73
visionx::PointCloudProvider::providePointCloud
void providePointCloud(PointCloudPtrT pointCloudPtr)
Offers the new point cloud.
Definition: PointCloudProvider.h:122
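A sketch from inside a provider subclass; the PCL point type and the timestamp variable are assumptions:

    // Fill and offer a new point cloud.
    pcl::PointCloud<pcl::PointXYZRGBA>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZRGBA>);
    cloud->header.stamp = static_cast<std::uint64_t>(timestampUs);  // PCL stamps are in µs
    // ... fill cloud->points from the depth image ...
    providePointCloud(cloud);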
visionx::ImageProvider::onConnectComponent
void onConnectComponent() override
Definition: ImageProvider.cpp:107
TimeUtil.h
ARMARX_INFO
#define ARMARX_INFO
Definition: Logging.h:181
armarx::core::time::ScopedStopWatch
Measures the time this stop watch spent inside the current scope.
Definition: ScopedStopWatch.h:31
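A sketch combining this with the createSwCallback() helper listed above; processFrame() stands in for arbitrary work:

    // Time a block and forward the elapsed Duration to a callback on scope exit.
    {
        armarx::core::time::ScopedStopWatch sw(createSwCallback("process frame"));
        processFrame();
    }  // the callback receives the measured Duration here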
HumanPoseWriter.h
armarx::Quaternion< float, 0 >
visionx::AzureKinectPointCloudProvider::onConnectImageProvider
void onConnectImageProvider() override
This is called when Component::onConnectComponent() is called.
Definition: AzureKinectPointCloudProvider.cpp:349
armarx::DebugObserverComponentPluginUser::sendDebugObserverBatch
void sendDebugObserverBatch()
Definition: DebugObserverComponentPlugin.cpp:135
IceUtil::Handle< class PropertyDefinitionContainer >
armarx::core::time::Metronome
Simple rate limiter for use in loops to maintain a certain frequency given a clock.
Definition: Metronome.h:34
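A sketch of the rate-limiting pattern, assuming the Metronome exposes a waitForNextTick() method (as in current ArmarX versions):

    // Hold a capture loop at 30 Hz.
    armarx::core::time::Metronome metronome(armarx::core::time::Frequency::Hertz(30));
    while (captureEnabled)
    {
        captureOnce();  // hypothetical work
        metronome.waitForNextTick();
    }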
visionx::AzureKinectPointCloudProvider::Framerate::Subordinate::skip
bool skip()
Definition: AzureKinectPointCloudProvider.cpp:1534
visionx::AzureKinectPointCloudProvider::getDefaultName
std::string getDefaultName() const override
Definition: AzureKinectPointCloudProvider.cpp:1209
visionx::ImageProvider::onInitComponent
void onInitComponent() override
Definition: ImageProvider.cpp:91
visionx::tools::convertEigenMatToVisionX
visionx::types::Mat convertEigenMatToVisionX(Eigen::MatrixXf m)
Definition: TypeMapping.cpp:686
armarx::ArmarXDataPath::ReplaceEnvVars
static bool ReplaceEnvVars(std::string &string)
ReplaceEnvVars replaces environment variables in a string with their values, if the environment variables exist.
Definition: ArmarXDataPath.cpp:480
armarx::core::time::Duration
Represents a duration.
Definition: Duration.h:16
ImageUtil.h
visionx::AzureKinectPointCloudProvider::onExitComponent
void onExitComponent() override
Hook for subclass.
Definition: AzureKinectPointCloudProvider.cpp:1238
armarx::core::time::Clock::Now
static DateTime Now()
Current time on the virtual clock.
Definition: Clock.cpp:93
armarx::Logging::deactivateSpam
SpamFilterDataPtr deactivateSpam(float deactivationDurationSec=10.0f, const std::string &identifier="", bool deactivate=true) const
Disables logging for the current line for the given number of seconds.
Definition: Logging.cpp:99
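A usage sketch inside a hot loop; the message is arbitrary:

    // Emit this log line at most once every 10 seconds.
    ARMARX_INFO << deactivateSpam(10.0) << "Still waiting for the first capture result.";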
Eigen::Matrix
Definition: EigenForwardDeclarations.h:27
armarx::ManagedIceObject::getName
std::string getName() const
Retrieve name of object.
Definition: ManagedIceObject.cpp:108
armarx::RunningTask::pointer_type
IceUtil::Handle< RunningTask< T > > pointer_type
Shared pointer type for convenience.
Definition: RunningTask.h:174
visionx::AzureKinectPointCloudProvider::onStartCapture
void onStartCapture(float frames_per_second) override
This is called when the point cloud provider capturing has been started.
Definition: AzureKinectPointCloudProvider.cpp:427
armarx::core::time::StopWatch
Measures the elapsed time between construction (or the last call to reset()) and stop().
Definition: StopWatch.h:41
TypeMapping.h
armarx::core::time::Frequency::Hertz
static Frequency Hertz(std::int64_t hertz)
Definition: Frequency.cpp:20
ARMARX_INFO_S
#define ARMARX_INFO_S
Definition: Logging.h:202
armarx::ArmarXDataPath::getAbsolutePath
static bool getAbsolutePath(const std::string &relativeFilename, std::string &storeAbsoluteFilename, const std::vector< std::string > &additionalSearchPaths={}, bool verbose=true)
Definition: ArmarXDataPath.cpp:109
armarx::armem::human::HumanPose::keypoints
KeyPointMap keypoints
Definition: types.h:35
Logging.h
armarx::VariantType::FramedPosition
const VariantTypeId FramedPosition
Definition: FramedPose.h:38
ARMARX_CHECK_EQUAL
#define ARMARX_CHECK_EQUAL(lhs, rhs)
This macro evaluates whether lhs is equal (==) to rhs and, if not, it will throw an ExpressionException.
Definition: ExpressionException.h:130
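A short sketch of the check macros in combination; the variables are assumptions:

    // Validate buffer and image dimensions before copying.
    ARMARX_CHECK(buffer != nullptr);
    ARMARX_CHECK_EQUAL(image.width, expectedWidth);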
ARMARX_WARNING
#define ARMARX_WARNING
Definition: Logging.h:193
ScopedStopWatch.h
helper.h
armarx::core::time::Duration::MicroSeconds
static Duration MicroSeconds(std::int64_t microSeconds)
Constructs a duration in microseconds.
Definition: Duration.cpp:24
armarx::PropertyDefinitionBase::eModifiable
@ eModifiable
Definition: PropertyDefinitionInterface.h:57
Variant.h
stopwatch::Clock
std::chrono::system_clock Clock
Definition: Stopwatch.h:10
visionx::AzureKinectPointCloudProvider::onInitImageProvider
void onInitImageProvider() override
This is called when Component::onInitComponent() is called.
Definition: AzureKinectPointCloudProvider.cpp:279
armarx
This file offers overloads of toIce() and fromIce() functions for STL container types.
Definition: ArmarXTimeserver.cpp:27
armarx::core::time::Clock::now
DateTime now() const
Current date/time of the clock.
Definition: Clock.cpp:22
armarx::core::time::Duration::MilliSeconds
static Duration MilliSeconds(std::int64_t milliSeconds)
Constructs a duration in milliseconds.
Definition: Duration.cpp:48
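A sketch tying the time types together, assuming the difference of two DateTime values yields a Duration:

    // Measure elapsed time and compare against a budget.
    const armarx::DateTime start = armarx::core::time::Clock::Now();
    doWork();  // hypothetical
    const armarx::Duration elapsed = armarx::core::time::Clock::Now() - start;
    const bool tooSlow = elapsed > armarx::Duration::MilliSeconds(100);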
visionx::AzureKinectPointCloudProvider::onConnectComponent
void onConnectComponent() override
Pure virtual hook for the subclass.
Definition: AzureKinectPointCloudProvider.cpp:1222