AzureKinectPointCloudProvider.cpp

#include <cstdio>
#include <filesystem>
#include <functional>
#include <mutex>
#include <utility>

#include <opencv2/core/hal/interface.h>
#include <opencv2/imgproc/imgproc.hpp>

#include <IceUtil/Time.h>

#include <Calibration/Calibration.h>
#include <Image/ImageProcessor.h>

#ifdef INCLUDE_BODY_TRACKING
#include <k4abt.hpp> // Azure Kinect Body Tracking SDK
#endif


namespace
{
    /**
     * @brief Converts a k4a color image to an IVT image.
     * @param color_image Reference to a k4a image containing the BGRA color data to convert.
     * @param result IVT image the converted image is stored in. Its size must match color_image.
     */
    [[maybe_unused]] void
    k4aToIvtImage(const k4a::image& color_image, ::CByteImage& result)
    {
        auto cw = static_cast<unsigned int>(color_image.get_width_pixels());
        auto ch = static_cast<unsigned int>(color_image.get_height_pixels());
        ARMARX_CHECK_EQUAL(static_cast<unsigned int>(result.width), cw);
        ARMARX_CHECK_EQUAL(static_cast<unsigned int>(result.height), ch);

        auto color_buffer = color_image.get_buffer();
        auto rgb_buffer_ivt = result.pixels;

        // Index in the IVT image. This value increases by 3 per pixel.
        int index_ivt = 0;

        // Index in the k4a image. This value increases by 4 per pixel.
        int index_k4a = 0;

        for (unsigned int y = 0; y < ch; ++y)
        {
            for (unsigned int x = 0; x < cw; ++x)
            {
                // Swap the channel order: k4a delivers BGRA, IVT expects RGB.
                rgb_buffer_ivt[index_ivt] = color_buffer[index_k4a + 2];
                rgb_buffer_ivt[index_ivt + 1] = color_buffer[index_k4a + 1];
                rgb_buffer_ivt[index_ivt + 2] = color_buffer[index_k4a + 0];
                index_ivt += 3;
                index_k4a += 4;
            }
        }
    }

#ifdef INCLUDE_BODY_TRACKING
    void
    printBodyInformation(k4abt_body_t body)
    {
        ARMARX_VERBOSE << "Body ID: " << body.id;
        for (int i = 0; i < static_cast<int>(K4ABT_JOINT_COUNT); i++)
        {
            const k4a_float3_t position = body.skeleton.joints[i].position;
            const k4a_quaternion_t orientation = body.skeleton.joints[i].orientation;
            const k4abt_joint_confidence_level_t confidence_level =
                body.skeleton.joints[i].confidence_level;

            ARMARX_VERBOSE << "Joint " << i << ": "
                           << "Position[mm] (" << position.v[0] << "," << position.v[1] << ","
                           << position.v[2] << "); "
                           << "Orientation (" << orientation.v[0] << "," << orientation.v[1] << ","
                           << orientation.v[2] << "," << orientation.v[3] << "); "
                           << "Confidence Level " << confidence_level;
        }
    }
#endif
} // namespace

namespace visionx
{

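    // Builds a callback that publishes a measured duration under the given description, both as
    // meta info (queryable live via Ice) and as a debug observer datafield. Used together with
    // the stop watches throughout this file.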
    std::function<void(armarx::Duration)>
    AzureKinectPointCloudProvider::createSwCallback(const std::string& description)
    {
        return [=](armarx::Duration duration)
        {
            std::string name = "duration " + description + " in [ms]";
            {
                std::lock_guard g{metaInfoMtx};
                setMetaInfo(name, new armarx::Variant{duration.toMilliSecondsDouble()});
            }

            {
                std::lock_guard g{debugObserverMtx};
                setDebugObserverDatafield(name, duration.toMilliSecondsDouble());
            }
        };
    }

    AzureKinectPointCloudProviderPropertyDefinitions::
        AzureKinectPointCloudProviderPropertyDefinitions(std::string prefix) :
        visionx::CapturingPointCloudProviderPropertyDefinitions(std::move(prefix))
    {
        defineOptionalProperty<k4a_color_resolution_t>(
            "ColorResolution", K4A_COLOR_RESOLUTION_720P, "Resolution of the RGB camera image.")
            .map("0", K4A_COLOR_RESOLUTION_OFF) /** Color camera will be turned off */
            .map("720", K4A_COLOR_RESOLUTION_720P) /** 1280x720 16:9 */
            .map("1080", K4A_COLOR_RESOLUTION_1080P) /** 1920x1080 16:9 */
            .map("1440", K4A_COLOR_RESOLUTION_1440P) /** 2560x1440 16:9 */
            .map("1536", K4A_COLOR_RESOLUTION_1536P) /** 2048x1536 4:3 */
            .map("2160", K4A_COLOR_RESOLUTION_2160P) /** 3840x2160 16:9 */
            .map("3072", K4A_COLOR_RESOLUTION_3072P); /** 4096x3072 4:3 */
        defineOptionalProperty<k4a_depth_mode_t>(
            "DepthMode", K4A_DEPTH_MODE_NFOV_UNBINNED, "Resolution/mode of the depth camera image.")
            .setCaseInsensitive(true)
            .map("OFF", K4A_DEPTH_MODE_OFF)
            .map("NFOV_2X2BINNED", K4A_DEPTH_MODE_NFOV_2X2BINNED)
            .map("NFOV_UNBINNED", K4A_DEPTH_MODE_NFOV_UNBINNED)
            .map("WFOV_2X2BINNED", K4A_DEPTH_MODE_WFOV_2X2BINNED)
            .map("WFOV_UNBINNED", K4A_DEPTH_MODE_WFOV_UNBINNED)
            .map("PASSIVE_IR", K4A_DEPTH_MODE_PASSIVE_IR);

        defineOptionalProperty<float>("MaxDepth",
                                      6000.0,
                                      "Max. allowed depth value in mm. Depth values above this "
                                      "threshold will be set to nan.");

        defineOptionalProperty<float>("CaptureTimeOffset",
                                      16.0f,
                                      "In Milliseconds. Time offset between capturing the image on "
                                      "the hardware and receiving the image in this process.",
                                      armarx::PropertyDefinitionBase::eModifiable);
    }
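
    // With these definitions, a config entry like "ColorResolution = 1080" resolves to
    // K4A_COLOR_RESOLUTION_1080P, and "DepthMode = NFOV_UNBINNED" (matched case-insensitively)
    // to K4A_DEPTH_MODE_NFOV_UNBINNED.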

    armarx::PropertyDefinitionsPtr
    AzureKinectPointCloudProvider::createPropertyDefinitions()
    {
        armarx::PropertyDefinitionsPtr defs =
            new AzureKinectPointCloudProviderPropertyDefinitions(getConfigIdentifier());

        defs->optional(
            enableColorUndistortion,
            "EnableColorUndistortion",
            "Undistort the color images using the full 8 radial and tangential distortion "
            "parameters provided by the Azure Kinect.\n"
            "This can help for processing tasks which cannot handle radial parameters k3-k6.\n"
            "Note that this drastically reduces the FPS (to something like 3).");
        defs->optional(externalCalibrationFilePath,
                       "ExternalCalibrationFilePath",
                       "Path to an optional external"
                       " calibration file, which has a"
                       " camera matrix and distortion"
                       " parameters.");
        defs->optional(mDeviceId, "device_id", "ID of the device.");

        defs->optional(robotName, "robotName");
        defs->optional(bodyCameraFrameName, "bodyCameraFrameName");

#ifdef INCLUDE_BODY_TRACKING
        defs->optional(bodyTrackingEnabled,
                       "bodyTrackingEnabled",
                       "Whether the Azure Kinect Body Tracking SDK should be enabled or not.");
        defs->optional(bodyTrackingRunAtStart,
                       "bodyTrackingRunAtStart",
                       "Whether the Azure Kinect Body Tracking SDK should directly run when the "
                       "component is started. "
                       "Otherwise, it has to be activated via the Ice interface.");
        defs->optional(bodyTrackingModelFilename,
                       "bodyTrackingModelPath",
                       "Path where the .onnx DNN files can be found.");
        defs->optional(bodyTrackingGPUDeviceID, "bodyTrackingGPUDeviceID", "GPU Device ID.");
        defs->optional(bodyTrackingTemporalSmoothingFactor,
                       "bodyTrackingTemporalSmoothingFactor",
                       "Temporal smoothing factor for Azure Kinect body tracking.");

        defs->optional(bodyTrackingDepthMaskMinX, "bodyTrackingDepthMaskMinX");
        defs->optional(bodyTrackingDepthMaskMaxX, "bodyTrackingDepthMaskMaxX");
        defs->optional(bodyTrackingDepthMaskMaxZ, "bodyTrackingDepthMaskMaxZ");

        humanPoseWriter.registerPropertyDefinitions(defs);
#endif

        defs->optional(enableHeartbeat, "enableHeartbeat");

        return defs;
    }

    void
    AzureKinectPointCloudProvider::onInitImageProvider()
    {
        config = K4A_DEVICE_CONFIG_INIT_DISABLE_ALL;
        auto fps = getProperty<float>("framerate").getValue();
        if (fps == 5.0f)
        {
            config.camera_fps = K4A_FRAMES_PER_SECOND_5;
        }
        else if (fps == 15.0f)
        {
            config.camera_fps = K4A_FRAMES_PER_SECOND_15;
        }
        else if (fps == 30.0f)
        {
            config.camera_fps = K4A_FRAMES_PER_SECOND_30;
        }
        else
        {
            throw armarx::LocalException("Invalid framerate: ")
                << fps << " - Only framerates 5, 15 and 30 are "
                << "supported by the Azure Kinect.";
        }

        config.depth_mode = getProperty<k4a_depth_mode_t>("DepthMode");
        config.color_format = K4A_IMAGE_FORMAT_COLOR_BGRA32;
        config.color_resolution = getProperty<k4a_color_resolution_t>("ColorResolution");

        // This means that we'll only get captures that have both color and depth images, so we
        // don't need to check if the capture contains a particular type of image.
        config.synchronized_images_only = true;

        // Set number of images per frame.
        setNumberImages(2);

        auto depth_dim = GetDepthDimensions(config.depth_mode);
        auto color_dim = GetColorDimensions(config.color_resolution);

        ARMARX_INFO << "Depth image size: " << depth_dim.first << "x" << depth_dim.second;
        ARMARX_INFO << "Color image size: " << color_dim.first << "x" << color_dim.second;

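        // Pre-allocate the depth image aligned to the color camera. K4A_IMAGE_FORMAT_DEPTH16
        // stores one uint16_t per pixel, so the row stride is width * 2 bytes.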
        alignedDepthImage =
            k4a::image::create(K4A_IMAGE_FORMAT_DEPTH16,
                               color_dim.first,
                               color_dim.second,
                               color_dim.first * 2 * static_cast<int32_t>(sizeof(uint8_t)));

        setImageFormat(visionx::ImageDimension(color_dim.first, color_dim.second),
                       visionx::eRgb,
                       visionx::eBayerPatternGr);

        resultDepthImage.reset(visionx::tools::createByteImage(getImageFormat(), visionx::eRgb));
        resultColorImage =
            std::make_unique<CByteImage>(color_dim.first, color_dim.second, CByteImage::eRGB24);

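        // The XYZ image filled by depth_image_to_point_cloud() in the point cloud thread holds
        // three int16_t coordinates (x, y, z in mm) per pixel, hence width * 3 * sizeof(int16_t).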
        xyzImage = k4a::image::create(K4A_IMAGE_FORMAT_CUSTOM,
                                      color_dim.first,
                                      color_dim.second,
                                      color_dim.first * 3 * static_cast<int32_t>(sizeof(int16_t)));

        pointcloud = std::make_unique<pcl::PointCloud<CloudPointType>>();
    }

    void
    AzureKinectPointCloudProvider::onConnectImageProvider()
    {
        ARMARX_DEBUG << "Connecting " << getName();

        setDebugObserverBatchModeEnabled(true);

        pointcloudTask = new armarx::RunningTask<AzureKinectPointCloudProvider>(
            this, &AzureKinectPointCloudProvider::runPointcloudPublishing);
        pointcloudTask->start();

#ifdef INCLUDE_BODY_TRACKING
        if (bodyTrackingEnabled)
        {
            humanPoseWriter.connect(memoryNameSystem());

            bodyTrackingPublishTask = new armarx::RunningTask<AzureKinectPointCloudProvider>(
                this, &AzureKinectPointCloudProvider::runPublishBodyTrackingResults);
        }
#endif

        if (enableHeartbeat)
        {
            ARMARX_CHECK_NOT_NULL(heartbeatPlugin);
            heartbeatPlugin->signUp(armarx::Duration::MilliSeconds(500),
                                    armarx::Duration::MilliSeconds(1000),
                                    {"Vision", "Camera"},
                                    "AzureKinectPointCloudProvider");
        }

        ARMARX_VERBOSE << "Connected " << getName();
    }

    void
    AzureKinectPointCloudProvider::onDisconnectImageProvider()
    {
        ARMARX_DEBUG << "Disconnecting " << getName();

        // Stop task
        {
            std::lock_guard<std::mutex> lock(pointcloudProcMutex);
            ARMARX_DEBUG << "Stopping pointcloud processing thread...";
            const bool WAIT_FOR_JOIN = false;
            pointcloudTask->stop(WAIT_FOR_JOIN);
            pointcloudProcSignal.notify_all();
            ARMARX_DEBUG << "Waiting for pointcloud processing thread to stop...";
            pointcloudTask->waitForStop();
            ARMARX_DEBUG << "Pointcloud processing thread stopped";
        }

#ifdef INCLUDE_BODY_TRACKING
        if (bodyTrackingIsRunning)
        {
            bodyTrackingPublishTask->stop();
        }
#endif

        ARMARX_DEBUG << "Disconnected " << getName();
    }

    void
    AzureKinectPointCloudProvider::onInitCapturingPointCloudProvider()
    {
        this->setPointCloudSyncMode(eCaptureSynchronization);
        ARMARX_TRACE;
    }

    void
    AzureKinectPointCloudProvider::onExitCapturingPointCloudProvider()
    {
        ARMARX_INFO << "Closing Azure Kinect device";
        device.close();
        ARMARX_INFO << "Closed Azure Kinect device";
    }

    void
    AzureKinectPointCloudProvider::onStartCapture(float frames_per_second)
    {
        ARMARX_TRACE;

        cloudFormat = getPointCloudFormat();

        // Check for devices.
        const uint32_t DEVICE_COUNT = k4a::device::get_installed_count();
        if (DEVICE_COUNT == 0)
        {
            getArmarXManager()->asyncShutdown();
            throw armarx::LocalException("No Azure Kinect devices detected!");
        }

        ARMARX_TRACE;

        device = k4a::device::open(static_cast<uint32_t>(mDeviceId));
        ARMARX_DEBUG << "Opened device id #" << mDeviceId << " with serial number "
                     << device.get_serialnum() << ".";

        ARMARX_TRACE;

        auto depthDim = GetDepthDimensions(config.depth_mode);
        auto colorDim = GetColorDimensions(config.color_resolution);
        (void)depthDim;
        (void)colorDim;

        k4aCalibration = device.get_calibration(config.depth_mode, config.color_resolution);
        ARMARX_INFO
            << "Color camera calibration:"
            << "\n"
            << "cx: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.cx
            << "\n"
            << "cy: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.cy
            << "\n"
            << "fx: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.fx
            << "\n"
            << "fy: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.fy
            << "\n"
            << "k1: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k1
            << "\n"
            << "k2: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k2
            << "\n"
            << "p1: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.p1
            << "\n"
            << "p2: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.p2
            << "\n"
            << "k3: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k3
            << "\n"
            << "k4: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k4
            << "\n"
            << "k5: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k5
            << "\n"
            << "k6: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k6;


        auto c = getStereoCalibration(Ice::Current());
        CCalibration* c_left = tools::convert(c.calibrationLeft);
        c_left->PrintCameraParameters();
        delete c_left;
        CCalibration* c_right = tools::convert(c.calibrationRight);
        c_right->PrintCameraParameters();
        delete c_right;

        transformation = k4a::transformation(k4aCalibration);

#ifdef INCLUDE_BODY_TRACKING
        if (bodyTrackingEnabled)
        {
            // Resolve environment variables, if any.
            armarx::ArmarXDataPath::ReplaceEnvVars(bodyTrackingModelFilename);

            const bool found = armarx::ArmarXDataPath::getAbsolutePath(bodyTrackingModelFilename,
                                                                       bodyTrackingModelFilename);
            ARMARX_CHECK(found) << "Body tracking DNN model could not be found/resolved at `"
                                << bodyTrackingModelFilename << "`.";

            ARMARX_INFO << "Using body tracking DNN model from directory `"
                        << bodyTrackingModelFilename << "`.";

            ARMARX_CHECK(std::filesystem::exists(bodyTrackingModelFilename))
                << "The path `" << bodyTrackingModelFilename << "` does not exist!";

            k4abt_tracker_configuration_t const bodyTrackingConfig{
                .sensor_orientation = K4ABT_SENSOR_ORIENTATION_DEFAULT,
                .processing_mode = K4ABT_TRACKER_PROCESSING_MODE_GPU_CUDA,
                .gpu_device_id = bodyTrackingGPUDeviceID,
                .model_path = bodyTrackingModelFilename.c_str()};

            bodyTracker = k4abt::tracker::create(k4aCalibration, bodyTrackingConfig);
            bodyTracker.set_temporal_smoothing(bodyTrackingTemporalSmoothingFactor);

            if (bodyTrackingRunAtStart and not bodyTrackingIsRunning)
            {
                bodyTrackingIsRunning = true;
                bodyTrackingPublishTask->start();
            }
        }
#endif

        ARMARX_TRACE;
        device.set_color_control(K4A_COLOR_CONTROL_BRIGHTNESS, K4A_COLOR_CONTROL_MODE_MANUAL, 128);

        device.start_cameras(&config);

        ARMARX_TRACE;

        setMetaInfo("serialNumber", new armarx::Variant(device.get_serialnum()));
        setMetaInfo("rgbVersion", new armarx::Variant(VersionToString(device.get_version().rgb)));
        setMetaInfo("depthVersion",
                    new armarx::Variant(VersionToString(device.get_version().depth)));
        setMetaInfo("depthSensorVersion",
                    new armarx::Variant(VersionToString(device.get_version().depth_sensor)));
        setMetaInfo("audioVersion",
                    new armarx::Variant(VersionToString(device.get_version().audio)));

        ARMARX_TRACE;


        // Color image calibration
        {
            // Load intrinsics from camera.
            const k4a_calibration_camera_t calibration{k4aCalibration.color_camera_calibration};
            const k4a_calibration_intrinsic_parameters_t::_param param =
                calibration.intrinsics.parameters.param;

            // // Scale intrinsics according to image scale
            // param.fx *= image_scale_;
            // param.fy *= image_scale_;

            // Calculate distortion map.
            cv::Mat1f camera_matrix(3, 3);
            camera_matrix << param.fx, 0, param.cx, 0, param.fy, param.cy, 0, 0, 1;
            cv::Mat1f new_camera_matrix(3, 3);
            new_camera_matrix << param.fx, 0, param.cx, 0, param.fx, param.cy, 0, 0, 1;
            cv::Mat1f distortion_coeff(1, 8);
            distortion_coeff << param.k1, param.k2, param.p1, param.p2, param.k3, param.k4,
                param.k5, param.k6;
            cv::Mat map1, map2, map3;
            cv::initUndistortRectifyMap(
                camera_matrix,
                distortion_coeff,
                cv::Mat{},
                new_camera_matrix,
                cv::Size{calibration.resolution_width, calibration.resolution_height},
                CV_32FC1,
                map1,
                map2);
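            // Convert the float maps to the fixed-point CV_16SC2 representation, which makes the
            // per-frame cv::remap() in doCapture() cheaper.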
            cv::convertMaps(map1, map2, colorDistortionMap, map3, CV_16SC2, true);
        }
    }

    void
    AzureKinectPointCloudProvider::onStopCapture()
    {
        // pass
    }

    bool
    AzureKinectPointCloudProvider::doCapture()
    {
        using armarx::core::time::ScopedStopWatch;
        using armarx::core::time::StopWatch;

        ScopedStopWatch sw_total{createSwCallback("doCapture")};

        k4a::capture capture;
        const std::chrono::milliseconds TIMEOUT{1000};

        StopWatch sw_get_capture;
        bool status = false;
        try
        {
            status = device.get_capture(&capture, TIMEOUT);
        }
        catch (const std::exception&)
        {
            ARMARX_WARNING << "Failed to get capture from device (#" << ++mDiagnostics.num_crashes
                           << "). Restarting camera.";
            StopWatch sw;
            device.stop_cameras();
            device.start_cameras(&config);
            ARMARX_INFO << "Restarting took " << sw.stop() << ".";
            return false;
        }

        if (status)
        {
            createSwCallback("waiting for get_capture")(sw_get_capture.stop());

            {
                std::lock_guard g{metaInfoMtx};
                setMetaInfo("temperature", new armarx::Variant(capture.get_temperature_c()));
            }

            // See ROS: This will give INCORRECT timestamps until the first image.
            const bool mustInitializeTimestampOffset = [&]()
            {
                std::lock_guard g{deviceToRealtimeOffsetMtx};
                return device_to_realtime_offset_.count() == 0;
            }();
            if (mustInitializeTimestampOffset)
            {
                initializeTimestampOffset(capture.get_depth_image().get_device_timestamp());
            }

            // Next, we update the timestamp offset continuously.
            updateTimestampOffset(capture.get_ir_image().get_device_timestamp(),
                                  capture.get_ir_image().get_system_timestamp());

            const k4a::image DEPTH_IMAGE = capture.get_depth_image();

            // This function assumes that the image is made of depth pixels (i.e. uint16_t's),
            // which is only true for IR/depth images.
            const k4a_image_format_t IMAGE_FORMAT = DEPTH_IMAGE.get_format();
            if (IMAGE_FORMAT != K4A_IMAGE_FORMAT_DEPTH16 && IMAGE_FORMAT != K4A_IMAGE_FORMAT_IR16)
            {
                throw std::logic_error("Attempted to colorize a non-depth image!");
            }

            // Provide data for pointcloud processing thread and signal to start processing.
            {
                ScopedStopWatch sw{createSwCallback("transform depth image to camera")};
                // Acquire lock and write data needed by pointcloud thread (i.e.,
                // alignedDepthImage and depthImageReady).
                {
                    std::lock_guard lock{pointcloudProcMutex};
                    transformation.depth_image_to_color_camera(DEPTH_IMAGE, &alignedDepthImage);

                    // Signal that point cloud processing may proceed and reset processed flag.
                    depthImageReady = true;
                }

                ARMARX_DEBUG << "Notifying pointcloud thread.";
                pointcloudProcSignal.notify_one();
            }

#ifdef INCLUDE_BODY_TRACKING
            if (bodyTrackingIsRunning)
            {
                std::scoped_lock lock(bodyTrackingParameterMutex);

                k4a::image ir_image = capture.get_ir_image();
                std::uint8_t* ir_image_buffer = ir_image.get_buffer();

                k4a::image depth_image = capture.get_depth_image();
                std::uint8_t* depth_image_buffer = depth_image.get_buffer();

                ARMARX_CHECK_EQUAL(ir_image.get_width_pixels(), depth_image.get_width_pixels());
                ARMARX_CHECK_EQUAL(ir_image.get_height_pixels(), depth_image.get_height_pixels());

                const int stride = ir_image.get_stride_bytes() / ir_image.get_width_pixels();

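                // Mask out configured regions before handing the capture to the body tracker:
                // IR and depth are 16-bit little-endian buffers, so the depth value is
                // reassembled from two bytes; masked pixels are saturated to 0xFFFF so the
                // tracker ignores them.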
                for (int x = 0; x < ir_image.get_width_pixels(); x++)
                {
                    for (int y = 0; y < ir_image.get_height_pixels(); y++)
                    {
                        const int i = (y * ir_image.get_width_pixels() * stride) + (x * stride);
                        const int z = (static_cast<int>(depth_image_buffer[i])) +
                                      (static_cast<int>(depth_image_buffer[i + 1]) << 8);

                        if ((bodyTrackingDepthMaskMinX > 0 and x < bodyTrackingDepthMaskMinX) or
                            (bodyTrackingDepthMaskMaxX > 0 and x > bodyTrackingDepthMaskMaxX) or
                            (bodyTrackingDepthMaskMaxZ > 0 and z > bodyTrackingDepthMaskMaxZ))
                        {
                            ir_image_buffer[i] = std::numeric_limits<std::uint8_t>::max();
                            ir_image_buffer[i + 1] = std::numeric_limits<std::uint8_t>::max();
                            depth_image_buffer[i] = std::numeric_limits<std::uint8_t>::max();
                            depth_image_buffer[i + 1] = std::numeric_limits<std::uint8_t>::max();
                        }
                    }
                }

                if (not bodyTracker.enqueue_capture(capture))
                {
                    ARMARX_WARNING << "Adding the capture to the tracker process queue timed out.";
                }
            }
#endif

            const k4a::image COLOR_IMAGE = capture.get_color_image();

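            // The device reports system timestamps on the monotonic clock; adding the measured
            // monotonic-to-realtime difference and subtracting the configured capture offset
            // estimates the wall-clock time at which the image was captured.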
            auto real_time = IceUtil::Time::now();
            auto monotonic_time = IceUtil::Time::now(IceUtil::Time::Monotonic);
            auto clock_diff = real_time - monotonic_time;

            auto image_monotonic_time =
                IceUtil::Time::microSeconds(std::chrono::duration_cast<std::chrono::microseconds>(
                                                DEPTH_IMAGE.get_system_timestamp())
                                                .count());
            long offset = long(getProperty<float>("CaptureTimeOffset").getValue() * 1000.0f);

            imagesTime = image_monotonic_time + clock_diff - IceUtil::Time::microSeconds(offset);

            {
                std::lock_guard g{metaInfoMtx};
                setMetaInfo("image age in [ms]",
                            new armarx::Variant{(real_time - imagesTime).toMilliSecondsDouble()});
            }

            {
                std::lock_guard g{debugObserverMtx};
                setDebugObserverDatafield("image age in [ms]",
                                          (real_time - imagesTime).toMilliSecondsDouble());
            }

            if (enableColorUndistortion)
            {
                // Only the color image needs to be rectified.
                cv::Mat temp_image;

                cv::cvtColor(cv::Mat{cv::Size{COLOR_IMAGE.get_width_pixels(),
                                              COLOR_IMAGE.get_height_pixels()},
                                     CV_8UC4,
                                     (void*)COLOR_IMAGE.get_buffer(),
                                     cv::Mat::AUTO_STEP},
                             temp_image,
                             cv::COLOR_RGBA2RGB);

                cv::Mat cv_color_image_undistorted(
                    COLOR_IMAGE.get_height_pixels(), COLOR_IMAGE.get_width_pixels(), CV_8UC3);

                cv::remap(temp_image,
                          cv_color_image_undistorted,
                          colorDistortionMap,
                          cv::Mat{},
                          cv::INTER_NEAREST,
                          cv::BORDER_CONSTANT);

                visionx::imrec::convert(cv_color_image_undistorted, *resultColorImage);
            }
            else
            {
                // Convert the k4a image to an IVT image.
                ScopedStopWatch sw{createSwCallback("convert k4a image to IVT")};
                ::k4aToIvtImage(COLOR_IMAGE, *resultColorImage);
            }


            // Prepare result depth image.
            {
                ScopedStopWatch sw{createSwCallback("prepare result depth image")};

                const int DW = alignedDepthImage.get_width_pixels();
                const int DH = alignedDepthImage.get_height_pixels();

                ARMARX_CHECK_EXPRESSION(resultDepthImage);
                ARMARX_CHECK_EQUAL(resultDepthImage->width, DW);
                ARMARX_CHECK_EQUAL(resultDepthImage->height, DH);

                auto result_depth_buffer = resultDepthImage->pixels;
                const auto* depth_buffer =
                    reinterpret_cast<const uint16_t*>(alignedDepthImage.get_buffer());

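                // Encode each 16-bit depth value into the three 8-bit channels of the RGB
                // result image so the depth map can be transported like a regular color image.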
                int index = 0;
                int index_2 = 0;
                for (int y = 0; y < DH; ++y)
                {
                    for (int x = 0; x < DW; ++x)
                    {
                        uint16_t depth_value = depth_buffer[index_2];
                        visionx::tools::depthValueToRGB(depth_value,
                                                        result_depth_buffer[index],
                                                        result_depth_buffer[index + 1],
                                                        result_depth_buffer[index + 2]);
                        index += 3;
                        index_2 += 1;
                    }
                }
            }

            // Broadcast RGB-D image.
            {
                ScopedStopWatch sw{createSwCallback("broadcast RGB-D image")};
                CByteImage* images[2] = {resultColorImage.get(), resultDepthImage.get()};
                provideImages(images, imagesTime);
            }

            // Wait until `alignedDepthImage` was processed and can be overwritten again.
            {
                std::unique_lock signal_lock{pointcloudProcMutex};
                ARMARX_DEBUG << "Capturing thread waiting for signal...";
                pointcloudProcSignal.wait(signal_lock, [&] { return depthImageProcessed; });
                ARMARX_DEBUG << "Capturing thread received signal.";
            }

            {
                std::lock_guard g{debugObserverMtx};
                sendDebugObserverBatch();
            }

            if (enableHeartbeat)
            {
                heartbeatPlugin->heartbeat();
            }

            return true;
        }
        else
        {
            ARMARX_INFO << deactivateSpam(30) << "Did not get frame until timeout of " << TIMEOUT;

            {
                std::lock_guard g{debugObserverMtx};
                sendDebugObserverBatch();
            }

            return false;
        }
    }

#ifdef INCLUDE_BODY_TRACKING
    void
    AzureKinectPointCloudProvider::runPublishBodyTrackingResults()
    {
        // Rate limiter for the publishing loop (the 30 Hz target is an assumed value here).
        armarx::core::time::Metronome metronome{armarx::core::time::Frequency::Hertz(30)};
        while (bodyTrackingIsRunning)
        {
            // Body frames might just not be available.
            const k4abt::frame body_frame = [&]() -> k4abt::frame
            {
                try
                {
                    auto result = bodyTracker.pop_result();
                    return result;
                }
                catch (...)
                {
                    ARMARX_VERBOSE << deactivateSpam(1) << "Exception in body tracking publishing: "
                                   << armarx::GetHandledExceptionString();
                    return {};
                }
            }();

            if (body_frame != nullptr)
            {
                armarx::core::time::ScopedStopWatch sw{
                    createSwCallback("publish body tracking result")};
                // See https://github.com/microsoft/Azure_Kinect_ROS_Driver/blob/melodic/src/k4a_ros_device.cpp
                const armarx::DateTime timestamp =
                    timestampToArmarX(body_frame.get_device_timestamp());

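                // The two blocks below perform the same monotonic-to-realtime conversion, once
                // with IceUtil and once with the ArmarX clock API, only to compare the resulting
                // timestamps on the debug observer.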
                {
                    auto real_time = IceUtil::Time::now();
                    auto monotonic_time = IceUtil::Time::now(IceUtil::Time::Monotonic);
                    auto clock_diff = real_time - monotonic_time;

                    auto image_monotonic_time = IceUtil::Time::microSeconds(
                        std::chrono::duration_cast<std::chrono::microseconds>(
                            body_frame.get_system_timestamp())
                            .count());
                    // long offset = long(getProperty<float>("CaptureTimeOffset").getValue() * 1000.0f);

                    // IceUtil::Time imageTime = image_monotonic_time + clock_diff - IceUtil::Time::microSeconds(offset);
                    IceUtil::Time imageTime = image_monotonic_time + clock_diff;

                    {
                        std::lock_guard g{debugObserverMtx};
                        setDebugObserverDatafield("ros_vs_ice_timestamp [µs]",
                                                  imageTime.toMicroSeconds());
                    }
                }

                {
                    const armarx::Clock realtimeClock = armarx::Clock(armarx::ClockType::Realtime);
                    const armarx::Clock monotonicClock =
                        armarx::Clock(armarx::ClockType::Monotonic);

                    auto real_time = realtimeClock.now();
                    auto monotonic_time = monotonicClock.now();
                    auto clock_diff = real_time - monotonic_time;

                    auto image_monotonic_time = armarx::Duration::MicroSeconds(
                        std::chrono::duration_cast<std::chrono::microseconds>(
                            body_frame.get_system_timestamp())
                            .count());
                    // long offset = long(getProperty<float>("CaptureTimeOffset").getValue() * 1000.0f);

                    // IceUtil::Time imageTime = image_monotonic_time + clock_diff - IceUtil::Time::microSeconds(offset);
                    auto imageTime = image_monotonic_time + clock_diff;

                    armarx::DateTime imageTimestamp = armarx::DateTime(imageTime);

                    {
                        std::lock_guard g{debugObserverMtx};
                        setDebugObserverDatafield("ros_vs_armarx_timestamp [µs]",
                                                  imageTime.toMicroSeconds());
                    }
                }


                std::uint32_t num_bodies = body_frame.get_num_bodies();
                {
                    std::lock_guard g{debugObserverMtx};
                    setDebugObserverDatafield("n_bodies_detected", num_bodies);
                }

                std::vector<armarx::armem::human::HumanPose> humanPoses;
                humanPoses.reserve(num_bodies);

                for (std::uint32_t i = 0; i < num_bodies; i++)
                {
                    k4abt_body_t body = body_frame.get_body(i);
                    printBodyInformation(body);

                    armarx::armem::human::HumanPose humanPose;
                    humanPose.humanTrackingId = std::to_string(body.id);
                    humanPose.cameraFrameName = bodyCameraFrameName;
                    humanPose.poseModelId = armarx::human::pose::model::k4a_bt_body_32::ModelId;

                    for (int i = 0; i < static_cast<int>(K4ABT_JOINT_COUNT); i++)
                    {
                        const auto joint =
                            static_cast<armarx::human::pose::model::k4a_bt_body_32::Joints>(i);
                        const auto name =
                            armarx::human::pose::model::k4a_bt_body_32::JointNames.to_name(joint);

                        k4a_float3_t position = body.skeleton.joints[i].position;
                        k4a_quaternion_t orientation = body.skeleton.joints[i].orientation;
                        k4abt_joint_confidence_level_t confidence_level =
                            body.skeleton.joints[i].confidence_level;

                        humanPose.keypoints[name] = armarx::armem::human::PoseKeypoint{
                            .label = name,
                            .confidence = static_cast<float>(static_cast<int>(confidence_level)),
                            .positionCamera = armarx::FramedPosition(
                                Eigen::Vector3f{position.v[0], position.v[1], position.v[2]},
                                bodyCameraFrameName,
                                robotName),
                            .orientationCamera = armarx::FramedOrientation(
                                Eigen::Quaternionf(orientation.wxyz.w,
                                                   orientation.wxyz.x,
                                                   orientation.wxyz.y,
                                                   orientation.wxyz.z)
                                    .toRotationMatrix(),
                                bodyCameraFrameName,
                                robotName)};
                    }

                    humanPoses.push_back(humanPose);
                }

                {
                    std::lock_guard g{debugObserverMtx};
                    setDebugObserverDatafield("bodyTrackingCaptureUntilPublish [ms]",
                                              (armarx::Clock::Now() - timestamp).toMilliSeconds());
                }

                humanPoseWriter.commitHumanPosesInCameraFrame(humanPoses, getName(), timestamp);


                // k4a::image body_index_map = body_frame.get_body_index_map();
                // if (body_index_map != nullptr)
                // {
                //     //print_body_index_map_middle_line(body_index_map);
                // }
                // else
                // {
                //     ARMARX_WARNING << "Error: Failed to generate bodyindex map!";
                // }
            }
            else
            {
                // It should never hit the timeout when K4A_WAIT_INFINITE is set.
                ARMARX_VERBOSE << "Error! Popping body frame result timed out!";
            }

            metronome.waitForNextTick();
        }
    }
#endif

    void
    AzureKinectPointCloudProvider::runPointcloudPublishing()
    {
        ARMARX_DEBUG << "Started pointcloud processing task.";

        IceUtil::Time time;

        // Main pointcloud processing loop.
        while (not pointcloudTask->isStopped())
        {
            // Wait for data.
            std::unique_lock signal_lock{pointcloudProcMutex};
            ARMARX_DEBUG << "Pointcloud thread waiting for signal...";
            pointcloudProcSignal.wait(
                signal_lock, [&] { return pointcloudTask->isStopped() or depthImageReady; });
            ARMARX_DEBUG << "Pointcloud thread received signal.";

            // Assess timings, reset flags.
            const IceUtil::Time TIMESTAMP = imagesTime;
            depthImageReady = false;
            depthImageProcessed = false;

            // Transform depth image to pointcloud.
            {
                time = armarx::TimeUtil::GetTime();
                transformation.depth_image_to_point_cloud(
                    alignedDepthImage, K4A_CALIBRATION_TYPE_COLOR, &xyzImage);
                ARMARX_DEBUG << "Transforming depth image to point cloud took "
                             << armarx::TimeUtil::GetTimeSince(time);
            }

            // Construct PCL pointcloud.
            {
                time = armarx::TimeUtil::GetTime();

                ARMARX_CHECK_EXPRESSION(pointcloud);

                pointcloud->width = static_cast<uint32_t>(xyzImage.get_width_pixels());
                pointcloud->height = static_cast<uint32_t>(xyzImage.get_height_pixels());

                ARMARX_TRACE;

                pointcloud->is_dense = false;
                pointcloud->points.resize(pointcloud->width * pointcloud->height);

                auto k4a_cloud_buffer = reinterpret_cast<const int16_t*>(xyzImage.get_buffer());

                ARMARX_CHECK_EXPRESSION(k4a_cloud_buffer);

                unsigned char* color_buffer = resultColorImage->pixels;
                while (color_buffer == nullptr)
                {
                    ARMARX_WARNING << "color_buffer is null. This should never happen. There is "
                                      "probably a race condition somewhere that needs to be fixed. "
                                      "Temporarily we ignore this and continue.\n Timestamp: "
                                   << TIMESTAMP.toMilliSeconds();
                    color_buffer = resultColorImage->pixels;
                }
                ARMARX_CHECK_NOT_NULL(color_buffer);

                ARMARX_CHECK_EQUAL(xyzImage.get_width_pixels(), resultColorImage->width);
                ARMARX_CHECK_EQUAL(xyzImage.get_height_pixels(), resultColorImage->height);
                ARMARX_CHECK_EQUAL(pointcloud->points.size(),
                                   pointcloud->width * pointcloud->height);

                ARMARX_TRACE;

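                // Walk the RGB24 color buffer and the XYZ buffer (three int16_t per pixel) in
                // lock step; both are aligned to the color camera, so one running index serves
                // both buffers.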
                size_t index = 0;
                float max_depth = getProperty<float>("MaxDepth").getValue();
                for (auto& p : pointcloud->points)
                {
                    p.r = color_buffer[index];
                    p.x = k4a_cloud_buffer[index];

                    index++;

                    p.g = color_buffer[index];
                    p.y = k4a_cloud_buffer[index];

                    index++;

                    p.b = color_buffer[index];
                    auto z = k4a_cloud_buffer[index];

                    index++;

                    if (z <= max_depth and z != 0)
                    {
                        p.z = z;
                    }
                    else
                    {
                        p.z = std::numeric_limits<float>::quiet_NaN();
                    }
                }

                ARMARX_DEBUG << "Constructing point cloud took "
                             << armarx::TimeUtil::GetTimeSince(time);
            }

            // Notify capturing thread that data was processed and may be overwritten.
            depthImageProcessed = true;
            signal_lock.unlock();
            ARMARX_DEBUG << "Notifying capturing thread...";
            pointcloudProcSignal.notify_all();

            ARMARX_TRACE;

            // Broadcast PCL pointcloud.
            {
                time = armarx::TimeUtil::GetTime();
                pointcloud->header.stamp = static_cast<unsigned long>(TIMESTAMP.toMicroSeconds());
                providePointCloud(pointcloud);
                ARMARX_DEBUG << "Broadcasting pointcloud took "
                             << armarx::TimeUtil::GetTimeSince(time);
            }
        }

        ARMARX_DEBUG << "Stopped pointcloud processing task.";
    }

    std::string
    AzureKinectPointCloudProvider::getDefaultName() const
    {
        return "AzureKinectPointCloudProvider";
    }

    void
    AzureKinectPointCloudProvider::onInitComponent()
    {
        ImageProvider::onInitComponent();
        PointCloudProvider::onInitComponent();
    }

    void
    AzureKinectPointCloudProvider::onConnectComponent()
    {
        ImageProvider::onConnectComponent();
        PointCloudProvider::onConnectComponent();
    }

    void
    AzureKinectPointCloudProvider::onDisconnectComponent()
    {
        captureEnabled = false;

        ImageProvider::onDisconnectComponent();
        PointCloudProvider::onDisconnectComponent();
    }

    void
    AzureKinectPointCloudProvider::onExitComponent()
    {
        ImageProvider::onExitComponent();
        PointCloudProvider::onExitComponent();
    }

    StereoCalibration
    AzureKinectPointCloudProvider::getStereoCalibration(const Ice::Current& c)
    {
        using namespace Eigen;
        using Matrix3FRowMajor = Matrix<float, 3, 3, StorageOptions::RowMajor>;
        using visionx::tools::convertEigenMatToVisionX;

        // TODO: use the externally used cameraMatrix and distCoeffs
        const auto convert_calibration =
            [](const k4a_calibration_camera_t& k4a_calib, float scale = 1.f)
        {
            MonocularCalibration monocular_calibration;

            const k4a_calibration_intrinsic_parameters_t& params = k4a_calib.intrinsics.parameters;
            monocular_calibration.cameraParam.principalPoint = {params.param.cx * scale,
                                                                params.param.cy * scale};
            monocular_calibration.cameraParam.focalLength = {params.param.fx * scale,
                                                             params.param.fy * scale};
            // TODO: Figure out conversions. IVT (Calibration.h) expects 4 parameters:
            // - The first radial lens distortion parameter.
            // - The second radial lens distortion parameter.
            // - The first tangential lens distortion parameter.
            // - The second tangential lens distortion parameter.
            // However, the Kinect offers k1-k6 radial distortion coefficients and the two
            // tangential distortion parameters p1-p2, which means that k3-k6 are unused.
            // It is even unclear whether this is correct now, as previously it was a vector of
            // all 6 k-params, which resulted in failed assertions in TypeMapping.cpp in the
            // function
            // CStereoCalibration* visionx::tools::convert(const visionx::StereoCalibration& stereoCalibration)
            // line 314 at the time of this commit.
            // See: https://microsoft.github.io/Azure-Kinect-Sensor-SDK/master/structk4a__calibration__intrinsic__parameters__t_1_1__param.html
            monocular_calibration.cameraParam.distortion = {
                params.param.k1,
                params.param.k2,
                params.param.p1,
                params.param.p2,
            };

            // TODO: Does this need to be scaled as well? And if not, why not?
            monocular_calibration.cameraParam.width =
                std::floor(float(k4a_calib.resolution_width) * scale);
            monocular_calibration.cameraParam.height =
                std::floor(float(k4a_calib.resolution_height) * scale);

            const Matrix3FRowMajor rotation =
                Map<const Matrix3FRowMajor>{k4a_calib.extrinsics.rotation};

            monocular_calibration.cameraParam.rotation = convertEigenMatToVisionX(rotation);
            monocular_calibration.cameraParam.translation = {k4a_calib.extrinsics.translation,
                                                             k4a_calib.extrinsics.translation + 3};

            return monocular_calibration;
        };

        StereoCalibration stereo_calibration;

        stereo_calibration.calibrationLeft =
            convert_calibration(k4aCalibration.color_camera_calibration);

        // If the camera images are rectified, the distortion parameters are 0.
        if (enableColorUndistortion)
        {
            auto& colorDistortionParams = stereo_calibration.calibrationLeft.cameraParam.distortion;
            std::fill(colorDistortionParams.begin(), colorDistortionParams.end(), 0);
        }

        // The depth image is rectified by default.
        {
            // The depth image has been warped to the color camera frame, see
            // depth_image_to_color_camera() above. Therefore, we use the calibration of the
            // color camera (focal length etc.) and set the distortion parameters to 0.
            stereo_calibration.calibrationRight =
                convert_calibration(k4aCalibration.color_camera_calibration);

            auto& depthDistortionParams =
                stereo_calibration.calibrationRight.cameraParam.distortion;
            std::fill(depthDistortionParams.begin(), depthDistortionParams.end(), 0);
        }

        stereo_calibration.rectificationHomographyLeft =
            convertEigenMatToVisionX(Matrix3f::Identity());
        stereo_calibration.rectificationHomographyRight =
            convertEigenMatToVisionX(Matrix3f::Identity());

        return stereo_calibration;
    }

    bool
    AzureKinectPointCloudProvider::getImagesAreUndistorted(const ::Ice::Current& c)
    {
        return enableColorUndistortion;
    }

    std::string
    AzureKinectPointCloudProvider::getReferenceFrame(const Ice::Current& c)
    {
        return getProperty<std::string>("frameName");
    }

    std::vector<imrec::ChannelPreferences>
    AzureKinectPointCloudProvider::getImageRecordingChannelPreferences(const Ice::Current&)
    {
        ARMARX_TRACE;

        imrec::ChannelPreferences rgb;
        rgb.requiresLossless = false;
        rgb.name = "rgb";

        imrec::ChannelPreferences depth;
        depth.requiresLossless = true;
        depth.name = "depth";

        return {rgb, depth};
    }

    void
    AzureKinectPointCloudProvider::enableHumanPoseEstimation(
        const armarx::EnableHumanPoseEstimationInput& input,
        const Ice::Current&)
    {
#ifndef INCLUDE_BODY_TRACKING
        {
            ARMARX_ERROR << "INCLUDE_BODY_TRACKING is not defined.";
            return;
        }
#endif

#ifdef INCLUDE_BODY_TRACKING
        if (bodyTrackingEnabled)
        {
            // Should not require a mutex.
            if (not bodyTrackingIsRunning and input.enable3d)
            {
                bodyTrackingIsRunning = true;
                bodyTrackingPublishTask->start();
            }
            else if (bodyTrackingIsRunning and not input.enable3d)
            {
                bodyTrackingIsRunning = false;
                bodyTrackingPublishTask->stop();
            }
        }
        else
        {
            ARMARX_ERROR << "Azure Kinect Body Tracking is not enabled.";
        }
#endif
    }

    void
    AzureKinectPointCloudProvider::setMaxDepthBodyTracking(int maxDepthInMM, const Ice::Current&)
    {
        std::scoped_lock lock(bodyTrackingParameterMutex);
        bodyTrackingDepthMaskMaxZ = maxDepthInMM;
    }

    void
    AzureKinectPointCloudProvider::setWidthBodyTracking(int minXinPixel,
                                                        int maxXinPixel,
                                                        const Ice::Current&)
    {
        std::scoped_lock lock(bodyTrackingParameterMutex);
        bodyTrackingDepthMaskMinX = minXinPixel;
        bodyTrackingDepthMaskMaxX = maxXinPixel;
    }

    // The code below is taken from https://github.com/microsoft/Azure_Kinect_ROS_Driver
    // -> MIT license.

    void
    AzureKinectPointCloudProvider::updateTimestampOffset(
        const std::chrono::microseconds& k4a_device_timestamp_us,
        const std::chrono::nanoseconds& k4a_system_timestamp_ns)
    {
        // The system timestamp is on the monotonic system clock.
        // The device timestamp is on the AKDK hardware clock.
        // We want to continuously estimate the difference between realtime and the AKDK hardware
        // clock as a low-pass filtered offset.
        // This consists of two parts: device to monotonic, and monotonic to realtime.

        // First figure out the realtime-to-monotonic offset. This will keep being updated.
        std::chrono::nanoseconds realtime_clock =
            std::chrono::system_clock::now().time_since_epoch();
        std::chrono::nanoseconds monotonic_clock =
            std::chrono::steady_clock::now().time_since_epoch();

        std::chrono::nanoseconds monotonic_to_realtime = realtime_clock - monotonic_clock;

        // Next figure out the other part (combined).
        std::chrono::nanoseconds device_to_realtime =
            k4a_system_timestamp_ns - k4a_device_timestamp_us + monotonic_to_realtime;

        {
            std::lock_guard g{deviceToRealtimeOffsetMtx};

            {
                std::lock_guard g{debugObserverMtx};
                setDebugObserverDatafield("device_to_realtime_offset [ms]",
                                          device_to_realtime_offset_.count() /
                                              1'000'000.f); // [ns] -> [ms]
                setDebugObserverDatafield(
                    "clock_error",
                    std::abs<float>((device_to_realtime_offset_ - device_to_realtime).count()) /
                        1'000.f); // [ns] -> [µs]
            }

            const std::int64_t timeOffsetThreshold = 1e7; // 10 ms

            // If we're off by more than the threshold, just snap into place.
            if (device_to_realtime_offset_.count() == 0 ||
                std::abs((device_to_realtime_offset_ - device_to_realtime).count()) >
                    timeOffsetThreshold)
            {
                ARMARX_VERBOSE << deactivateSpam()
                               << "Initializing or re-initializing the device to realtime offset: "
                               << device_to_realtime.count() << " ns";
                device_to_realtime_offset_ = device_to_realtime;
            }
            else
            {
                // Low-pass filter.
                constexpr double alpha = 0.10;

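                // Exponential moving average: offset += alpha * (measurement - offset), which
                // smooths jitter in the per-frame offset measurements.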
                const std::chrono::nanoseconds timeCorrection(static_cast<int64_t>(
                    std::floor(alpha * (device_to_realtime - device_to_realtime_offset_).count())));
                device_to_realtime_offset_ = device_to_realtime_offset_ + timeCorrection;

                {
                    std::lock_guard g{debugObserverMtx};
                    setDebugObserverDatafield("timeCorrection [µs]",
                                              timeCorrection.count() / 1'000); // [ns] -> [µs]
                }
            }
        }
    }

    armarx::DateTime
    AzureKinectPointCloudProvider::timestampToArmarX(
        const std::chrono::microseconds& k4a_timestamp_us)
    {
        std::lock_guard g{deviceToRealtimeOffsetMtx};

        // Must be initialized beforehand.
        ARMARX_CHECK(device_to_realtime_offset_.count() != 0);

        std::chrono::nanoseconds timestamp_in_realtime =
            k4a_timestamp_us + device_to_realtime_offset_;

        return armarx::Duration::MicroSeconds(timestamp_in_realtime.count() /
                                              1'000); // [ns] -> [µs]
    }

    void
    AzureKinectPointCloudProvider::initializeTimestampOffset(
        const std::chrono::microseconds& k4a_device_timestamp_us)
    {
        std::lock_guard g{deviceToRealtimeOffsetMtx};

        // We have no better guess than "now".
        std::chrono::nanoseconds realtime_clock =
            std::chrono::system_clock::now().time_since_epoch();

        device_to_realtime_offset_ = realtime_clock - k4a_device_timestamp_us;

        ARMARX_WARNING << "Initializing the device to realtime offset based on wall clock: "
                       << device_to_realtime_offset_.count() << " ns";
    }

    AzureKinectPointCloudProvider::AzureKinectPointCloudProvider()
    {
        addPlugin(heartbeatPlugin);
    }
} // namespace visionx