// AzureKinectPointCloudProvider.cpp
// (Doxygen-extracted listing; navigation text removed.)
2
3#include <cstdio>
4#include <filesystem>
5#include <functional>
6#include <mutex>
7#include <utility>
8
9#include <IceUtil/Time.h>
10
11#include <opencv2/core/hal/interface.h>
12#include <opencv2/imgproc/imgproc.hpp>
13
23
25
31
32#include <Calibration/Calibration.h>
33#include <Image/ImageProcessor.h>
34
35#ifdef INCLUDE_BODY_TRACKING
37#endif
38
39
40namespace
41{
42 /**
43 * @brief Converts a k4a image to an IVT image.
44 * @param color_image Reference to a k4a image containing the color data that should be converted.
45 * @param result IVT image the converted image should be stored in. The size of this image and the color_image need to match.
46 */
47 [[maybe_unused]] void
48 k4aToIvtImage(const k4a::image& color_image, ::CByteImage& result)
49 {
50 if (color_image.get_format() == K4A_IMAGE_FORMAT_COLOR_YUY2)
51 {
52 // Convert YUY2 to RGB using OpenCV
53 cv::Mat yuy2_image(color_image.get_height_pixels(),
54 color_image.get_width_pixels(),
55 CV_8UC2,
56 const_cast<uint8_t*>(color_image.get_buffer()));
57 cv::Mat rgb_image;
58 cv::cvtColor(yuy2_image, rgb_image, cv::COLOR_YUV2RGB_YUY2);
59
60 // Convert the OpenCV Mat to the IVT CByteImage format
61 visionx::imrec::convert(rgb_image, result);
62 }
63 else if (color_image.get_format() == K4A_IMAGE_FORMAT_COLOR_BGRA32)
64 {
65 auto cw = static_cast<unsigned int>(color_image.get_width_pixels());
66 auto ch = static_cast<unsigned int>(color_image.get_height_pixels());
67 ARMARX_CHECK_EQUAL(static_cast<unsigned int>(result.width), cw);
68 ARMARX_CHECK_EQUAL(static_cast<unsigned int>(result.height), ch);
69
70 auto color_buffer = color_image.get_buffer();
71 auto rgb_buffer_ivt = result.pixels;
72
73 // Index in the IVT image. This value will be increased by 3 per pixel.
74 int index_ivt = 0;
75
76 // Index in the k4a image. This value increases by 4 per pixel.
77 int index_k4a = 0;
78
79 for (unsigned int y = 0; y < ch; ++y)
80 {
81 for (unsigned int x = 0; x < cw; ++x)
82 {
83 rgb_buffer_ivt[index_ivt] = color_buffer[index_k4a + 2];
84 rgb_buffer_ivt[index_ivt + 1] = color_buffer[index_k4a + 1];
85 rgb_buffer_ivt[index_ivt + 2] = color_buffer[index_k4a + 0];
86 index_ivt += 3;
87 index_k4a += 4;
88 }
89 }
90 }
91 else
92 {
93 throw std::runtime_error("Unsupported color format in k4a image");
94 }
95 }
96
97
#ifdef INCLUDE_BODY_TRACKING
    /**
     * @brief Logs the ID and, for every joint, the position, orientation and
     *        confidence level of one tracked body at VERBOSE level.
     */
    void
    printBodyInformation(k4abt_body_t body)
    {
        ARMARX_VERBOSE << "Body ID: " << body.id;
        for (int joint_index = 0; joint_index < static_cast<int>(K4ABT_JOINT_COUNT); ++joint_index)
        {
            // Pull the per-joint data out of the skeleton once.
            const k4a_float3_t& position = body.skeleton.joints[joint_index].position;
            const k4a_quaternion_t& orientation = body.skeleton.joints[joint_index].orientation;
            const k4abt_joint_confidence_level_t confidence_level =
                body.skeleton.joints[joint_index].confidence_level;

            ARMARX_VERBOSE << "Joint" << joint_index << ": "
                           << "Position[mm] (" << position.v[0] << "," << position.v[1] << ","
                           << position.v[2] << "); "
                           << "Orientation (" << orientation.v[0] << "," << orientation.v[1] << ","
                           << orientation.v[2] << "," << orientation.v[3] << "); "
                           << "Confidence Level " << confidence_level;
        }
    }
#endif
119
120 const char*
121 k4aImageFormatToString(k4a_image_format_t format)
122 {
123 switch (format)
124 {
125 case K4A_IMAGE_FORMAT_COLOR_MJPG:
126 return "COLOR_MJPG";
127 case K4A_IMAGE_FORMAT_COLOR_NV12:
128 return "COLOR_NV12";
129 case K4A_IMAGE_FORMAT_COLOR_YUY2:
130 return "COLOR_YUY2";
131 case K4A_IMAGE_FORMAT_COLOR_BGRA32:
132 return "COLOR_BGRA32";
133 case K4A_IMAGE_FORMAT_DEPTH16:
134 return "DEPTH16";
135 case K4A_IMAGE_FORMAT_IR16:
136 return "IR16";
137 case K4A_IMAGE_FORMAT_CUSTOM:
138 return "CUSTOM";
139 default:
140 return "UNKNOWN";
141 }
142 }
143
144} // namespace
145
146namespace visionx
147{
148
    /**
     * Builds a callback that publishes a measured duration (in milliseconds)
     * both as component meta info and as a debug-observer datafield.
     *
     * NOTE(review): the line carrying this function's name and parameter list
     * is missing from this extraction -- presumably
     * AzureKinectPointCloudProvider::createSwCallback(const std::string& description);
     * confirm against the original source.
     */
    std::function<void(armarx::Duration)>
    {
        return [description, this](armarx::Duration duration)
        {
            // Key under which the duration is published,
            // e.g. "duration doCapture in [ms]".
            std::string name = "duration " + description + " in [ms]";
            {
                // Meta info and debug observer are guarded by separate mutexes.
                std::lock_guard g{metaInfoMtx};
                setMetaInfo(name, new armarx::Variant{duration.toMilliSecondsDouble()});
            }

            {
                std::lock_guard g{debugObserverMtx};
                setDebugObserverDatafield(name, duration.toMilliSecondsDouble());
            }
        };
    }
166
    // NOTE(review): this block is heavily garbled by the extraction: the
    // enclosing constructor/function signature and the call heads introducing
    // the "ColorResolution", "DepthMode", "MaxDepth" and "ColorFormat"
    // properties are missing. The surviving lines are kept verbatim below.
    {
        // "ColorResolution" property: resolution of the RGB camera stream.
            K4A_COLOR_RESOLUTION_720P,
            "Resolution of the RGB camera image.")
            .map("0", K4A_COLOR_RESOLUTION_OFF) /** Color camera will be turned off */
            .map("720", K4A_COLOR_RESOLUTION_720P) /** 1280x720 16:9 */
            .map("1080", K4A_COLOR_RESOLUTION_1080P) /** 1920x1080 16:9 */
            .map("1440", K4A_COLOR_RESOLUTION_1440P) /** 2560x1440 16:9 */
            .map("1536", K4A_COLOR_RESOLUTION_1536P) /** 2048x1536 4:3 */
            .map("2160", K4A_COLOR_RESOLUTION_2160P) /** 3840x2160 16:9 */
            .map("3072", K4A_COLOR_RESOLUTION_3072P); /** 4096x3072 4:3 */
        // "DepthMode" property: depth camera mode (matched case-insensitively).
            K4A_DEPTH_MODE_NFOV_UNBINNED,
            "Resolution/mode of the depth camera image.")
            .setCaseInsensitive(true)
            .map("OFF", K4A_DEPTH_MODE_OFF)
            .map("NFOV_2X2BINNED", K4A_DEPTH_MODE_NFOV_2X2BINNED)
            .map("NFOV_UNBINNED", K4A_DEPTH_MODE_NFOV_UNBINNED)
            .map("WFOV_2X2BINNED", K4A_DEPTH_MODE_WFOV_2X2BINNED)
            .map("WFOV_UNBINNED", K4A_DEPTH_MODE_WFOV_UNBINNED)
            .map("PASSIVE_IR", K4A_DEPTH_MODE_PASSIVE_IR);

        // "MaxDepth" property: values above this threshold become NaN.
            6000.0,
            "Max. allowed depth value in mm. Depth values above this "
            "threshold will be set to nan.");

        // Latency compensation between hardware capture and process receipt.
        defineOptionalProperty<float>("CaptureTimeOffset",
                                      16.0f,
                                      "In Milliseconds. Time offset between capturing the image on "
                                      "the hardware and receiving the image in this process.",

        // "ColorFormat" property: pixel format of the color stream.
            K4A_IMAGE_FORMAT_COLOR_BGRA32,
            "Color format of the RGB camera image.")
            .map("BGRA", K4A_IMAGE_FORMAT_COLOR_BGRA32)
            .map("YUY2", K4A_IMAGE_FORMAT_COLOR_YUY2);
    }
209
    // NOTE(review): the signature of this method (presumably
    // createPropertyDefinitions()) and the line constructing the property
    // definitions container `defs` are missing from this extraction.
    {

        // Optional color rectification using the full 8-parameter model.
        defs->optional(
            enableColorUndistortion,
            "EnableColorUndistortion",
            "Undistort the color images using the full 8 radial and tangential distortion "
            "parameters provided by the Azure Kinect.\n"
            "This can help for processing tasks which cannot handle radial parameters k3-k6.\n"
            "Note that this drastically reduces the FPS (to something like 3).");
        defs->optional(externalCalibrationFilePath,
                       "ExternalCalibrationFilePath",
                       "Path to an optional external"
                       " calibration file, which has a"
                       " camera matrix and distortion"
                       " parameters.");
        defs->optional(mDeviceId, "device_id", "ID of the device.");

        defs->optional(robotName, "robotName");
        defs->optional(bodyCameraFrameName, "bodyCameraFrameName");
        // Image framerate; point cloud and body tracking rates are separate.
        defs->optional(framerate.value,
                       "framerate.images",
                       "The framerate of RGB-D images [frames per second]."
                       "\nNote that the point cloud and body tracking frame rates are controlled by"
                       " the properties 'framerate' (point cloud) and 'framerate.bodyTracking'"
                       " (body tracking), respectively.")
            .setMin(5)
            .setMax(30);

#ifdef INCLUDE_BODY_TRACKING
        // Body tracking configuration (only compiled in with the k4abt SDK).
        defs->optional(bodyTrackingEnabled,
                       "bodyTrackingEnabled",
                       "Whether the Azure Kinect Body Tracking SDK should be enabled or not.");
        defs->optional(bodyTrackingRunAtStart,
                       "bodyTrackingRunAtStart",
                       "Whether the Azure Kinect Body Tracking SDK should directly run when the "
                       "component is startet."
                       "Otherwise it has to be activated by the ice interface.");
        defs->optional(bodyTrackingModelFilename,
                       "bodyTrackingModelPath",
                       "Path where the .onnx DNN files can be found");
        defs->optional(bodyTrackingGPUDeviceID, "bodyTrackingGPUDeviceID", "GPU Device ID.");
        defs->optional(bodyTrackingTemporalSmoothingFactor,
                       "bodyTrackingTemporalSmoothingFactor",
                       "Temporal smoothing factor for Azure Kinect body tracking.");
        defs->optional(useCPU, "useCPU", "Whether to use cpu");

        // Optional region-of-interest mask applied to the depth/IR images
        // before body tracking (values <= 0 disable the respective bound).
        defs->optional(bodyTrackingDepthMaskMinX, "bodyTrackingDepthMaskMinX");
        defs->optional(bodyTrackingDepthMaskMaxX, "bodyTrackingDepthMaskMaxX");
        defs->optional(bodyTrackingDepthMaskMaxZ, "bodyTrackingDepthMaskMaxZ");
        defs->optional(framerate.bodyTracking.value,
                       "framerate.bodyTracking",
                       "The framerate with with the body tracking is run [frames per second]."
                       "\nNote that the RGB-D image and point cloud frame rates are controlled by"
                       " the properties 'framerate.image' (RGB-D images) and 'framerate'"
                       " (point cloud), respectively.")
            .setMin(1.)
            .setMax(30.);

        defs->optional(startIMU, "startIMU");

        humanPoseWriter.registerPropertyDefinitions(defs);
#endif

        defs->optional(enableHeartbeat, "enableHeartbeat");

        return defs;
    }
281
    void
    // NOTE(review): the method's signature line is missing from this view
    // (extraction dropped lines). The body translates the configured
    // properties into a k4a_device_configuration_t and allocates the image
    // buffers used during capturing.
    {
        config = K4A_DEVICE_CONFIG_INIT_DISABLE_ALL;
        // Azure Kinect supports exactly 5, 15 and 30 FPS.
        if (framerate.value == 5)
        {
            config.camera_fps = K4A_FRAMES_PER_SECOND_5;
        }
        else if (framerate.value == 15)
        {
            config.camera_fps = K4A_FRAMES_PER_SECOND_15;
        }
        else if (framerate.value == 30)
        {
            config.camera_fps = K4A_FRAMES_PER_SECOND_30;
        }
        else
        {
            throw armarx::LocalException("Invalid image framerate (property 'framerate.images'): ")
                << framerate.value << ". Only framerates 5, 15 and 30 are "
                << "supported by Azure Kinect.";
        }

        // Derive the point cloud (and body tracking) rates from the image
        // rate via the framerate helper's update() mechanism.
        framerate.pointCloud.value =
            getProperty<float>("framerate"); // point cloud provider framerate
        framerate.pointCloud.update(framerate.value);

#ifdef INCLUDE_BODY_TRACKING
        framerate.bodyTracking.update(framerate.value);
#endif

        config.depth_mode = getProperty<k4a_depth_mode_t>("DepthMode");
        config.color_format = getProperty<k4a_image_format_t>("ColorFormat");
        config.color_resolution = getProperty<k4a_color_resolution_t>("ColorResolution");

        // This means that we'll only get captures that have both color and depth images, so we don't
        // need to check if the capture contains a particular type of image.
        config.synchronized_images_only = true;

        // Set number of images per frame.
        // NOTE(review): the statement following the comment above is missing
        // from this extraction.

        auto depth_dim = GetDepthDimensions(config.depth_mode);
        auto color_dim = GetColorDimensions(config.color_resolution);

        ARMARX_INFO << "Depth image size: " << depth_dim.first << "x" << depth_dim.second;
        ARMARX_INFO << "Color image size: " << color_dim.first << "x" << color_dim.second;

        // Depth is later aligned into the color frame, hence color
        // dimensions; DEPTH16 uses 2 bytes per pixel (stride = width * 2).
        alignedDepthImage =
            k4a::image::create(K4A_IMAGE_FORMAT_DEPTH16,
                               color_dim.first,
                               color_dim.second,
                               color_dim.first * 2 * static_cast<int32_t>(sizeof(uint8_t)));

        setImageFormat(visionx::ImageDimension(color_dim.first, color_dim.second),
                       visionx::eRgb,
                       visionx::eBayerPatternGr);

        resultDepthImage.reset(visionx::tools::createByteImage(getImageFormat(), visionx::eRgb));
        resultColorImage =
            std::make_unique<CByteImage>(color_dim.first, color_dim.second, CByteImage::eRGB24);

        // XYZ image: three signed 16-bit channels per pixel.
        xyzImage = k4a::image::create(K4A_IMAGE_FORMAT_CUSTOM,
                                      color_dim.first,
                                      color_dim.second,
                                      color_dim.first * 3 * static_cast<int32_t>(sizeof(int16_t)));

        pointcloud = std::make_unique<pcl::PointCloud<CloudPointType>>();
    }
351
    void
    // NOTE(review): the signature line is missing from this extraction
    // (presumably the component connect hook). Starts the point cloud worker
    // task, optionally connects body tracking to the human memory, and signs
    // up for the heartbeat.
    {
        ARMARX_DEBUG << "Connecting " << getName();

        // NOTE(review): the statement constructing `pointcloudTask` (a
        // RunningTask bound to the point cloud worker method) is only
        // partially present -- lines were dropped by the extraction.

        this,
        pointcloudTask->start();

#ifdef INCLUDE_BODY_TRACKING
        if (bodyTrackingEnabled)
        {
            ARMARX_INFO << "Connecting to human memory ...";
            humanPoseWriter.connect(memoryNameSystem());
            ARMARX_INFO << "Connected to human memory.";

            // Created here, but only started on demand (see run-at-start
            // handling in the capture-start hook).
            bodyTrackingPublishTask = new armarx::RunningTask<AzureKinectPointCloudProvider>(
                this,
                &AzureKinectPointCloudProvider::runPublishBodyTrackingResults);
        }
#endif

        if (enableHeartbeat)
        {
            ARMARX_CHECK_NOT_NULL(heartbeatPlugin);
            // NOTE(review): one argument line of signUp() is missing from
            // this extraction.
            heartbeatPlugin->signUp(armarx::Duration::MilliSeconds(500),
                                    {"Vision", "Camera"},
                                    "AzureKinectPointCloudProvider");
        }

        ARMARX_VERBOSE << "Connected " << getName();
    }
389
    void
    // NOTE(review): the signature line is missing from this extraction
    // (presumably the component disconnect hook). Stops the point cloud
    // worker and, if running, the body tracking publisher.
    {
        ARMARX_DEBUG << "Disconnecting " << getName();

        // Stop task
        {
            // Take the mutex, request the stop, then wake the worker so it
            // can observe the stop request; wait for it afterwards.
            std::lock_guard<std::mutex> lock(pointcloudProcMutex);
            ARMARX_DEBUG << "Stopping pointcloud processing thread...";
            const bool WAIT_FOR_JOIN = false;
            pointcloudTask->stop(WAIT_FOR_JOIN);
            pointcloudProcSignal.notify_all();
            ARMARX_DEBUG << "Waiting for pointcloud processing thread to stop...";
            pointcloudTask->waitForStop();
            ARMARX_DEBUG << "Pointcloud processing thread stopped";
        }

#ifdef INCLUDE_BODY_TRACKING
        if (bodyTrackingIsRunning)
        {
            bodyTrackingPublishTask->stop();
        }
#endif

        ARMARX_DEBUG << "Disconnected " << getName();
    }
416
417 void
423
    void
    // NOTE(review): the signature line is missing from this extraction (this
    // is the exit hook that shuts down the Azure Kinect device handle).
    {
        ARMARX_INFO << "Closing Azure Kinect device";
        device.close();
        ARMARX_INFO << "Closed Azure Kinect device";
    }
431
    void
    // NOTE(review): the signature line is missing from this extraction (this
    // is the capture-start hook). It opens the device, loads and logs the
    // calibration, optionally initializes body tracking, starts cameras and
    // IMU, publishes firmware info, and precomputes the color undistortion
    // map. Several interior lines were dropped; they are marked below.
    {

        cloudFormat = getPointCloudFormat();

        // Check for devices.
        const uint32_t DEVICE_COUNT = k4a::device::get_installed_count();
        if (DEVICE_COUNT == 0)
        {
            getArmarXManager()->asyncShutdown();
            throw armarx::LocalException("No Azure Kinect devices detected!");
        }

        device = k4a::device::open(static_cast<uint32_t>(mDeviceId));
        ARMARX_DEBUG << "Opened device id #" << mDeviceId << " with serial number "
                     << device.get_serialnum() << ".";

        auto depthDim = GetDepthDimensions(config.depth_mode);
        auto colorDim = GetColorDimensions(config.color_resolution);
        (void)depthDim;
        (void)colorDim;

        // Query the factory calibration and log the color intrinsics.
        k4aCalibration = device.get_calibration(config.depth_mode, config.color_resolution);
        // NOTE(review): the head of this log statement (e.g. ARMARX_INFO) is
        // missing from this extraction.
            << "Color camera calibration:"
            << "\n"
            << "cx: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.cx
            << "\n"
            << "cy: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.cy
            << "\n"
            << "fx: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.fx
            << "\n"
            << "fy: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.fy
            << "\n"
            << "k1: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k1
            << "\n"
            << "k2: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k2
            << "\n"
            << "p1: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.p1
            << "\n"
            << "p2: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.p2
            << "\n"
            << "k3: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k3
            << "\n"
            << "k4: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k4
            << "\n"
            << "k5: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k5
            << "\n"
            << "k6: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k6;


        // Print the stereo calibration as exposed via the Ice interface.
        auto c = getStereoCalibration(Ice::Current());
        CCalibration* c_left = tools::convert(c.calibrationLeft);
        c_left->PrintCameraParameters();
        delete c_left;
        CCalibration* c_right = tools::convert(c.calibrationRight);
        c_right->PrintCameraParameters();
        delete c_right;

        transformation = k4a::transformation(k4aCalibration);

#ifdef INCLUDE_BODY_TRACKING
        if (bodyTrackingEnabled)
        {
            // eventually, resolve environment variable
            armarx::ArmarXDataPath::ReplaceEnvVars(bodyTrackingModelFilename);

            const bool found = armarx::ArmarXDataPath::getAbsolutePath(bodyTrackingModelFilename,
                                                                       bodyTrackingModelFilename);
            ARMARX_CHECK(found) << "Body tracking DNN model could not be found/resolved at `"
                                << bodyTrackingModelFilename << "`.";

            ARMARX_INFO << "Using body tracking DNN model from directory `"
                        << bodyTrackingModelFilename << "`.";

            ARMARX_CHECK(std::filesystem::exists(bodyTrackingModelFilename))
                << "The path `" << bodyTrackingModelFilename << "` does not exist!";

            // CPU vs. CUDA processing is selected via the useCPU property.
            k4abt_tracker_configuration_t const bodyTrackingConfig{
                .sensor_orientation = K4ABT_SENSOR_ORIENTATION_DEFAULT,
                .processing_mode = useCPU ? K4ABT_TRACKER_PROCESSING_MODE_CPU
                                          : K4ABT_TRACKER_PROCESSING_MODE_GPU_CUDA,
                .gpu_device_id = bodyTrackingGPUDeviceID,
                .model_path = bodyTrackingModelFilename.c_str()};

            bodyTracker = k4abt::tracker::create(k4aCalibration, bodyTrackingConfig);
            bodyTracker.set_temporal_smoothing(bodyTrackingTemporalSmoothingFactor);

            if (bodyTrackingRunAtStart and not bodyTrackingIsRunning)
            {
                bodyTrackingIsRunning = true;
                bodyTrackingPublishTask->start();
            }
        }
#endif

        // NOTE(review): a line is missing here in this extraction.
        device.set_color_control(K4A_COLOR_CONTROL_BRIGHTNESS, K4A_COLOR_CONTROL_MODE_MANUAL, 128);

        device.start_cameras(&config);

        if (startIMU)
        {
            device.start_imu();
            ARMARX_INFO << "IMU is active" << std::endl;

        }

        // Publish serial number and firmware versions as meta info.
        setMetaInfo("serialNumber", new armarx::Variant(device.get_serialnum()));
        setMetaInfo("rgbVersion", new armarx::Variant(VersionToString(device.get_version().rgb)));
        setMetaInfo("depthVersion",
                    new armarx::Variant(VersionToString(device.get_version().depth)));
        setMetaInfo("depthSensorVersion",
                    new armarx::Variant(VersionToString(device.get_version().depth_sensor)));
        setMetaInfo("audioVersion",
                    new armarx::Variant(VersionToString(device.get_version().audio)));


        // Color image calibration
        {
            // Load intrinsics from camera
            const k4a_calibration_camera_t calibration{k4aCalibration.color_camera_calibration};
            const k4a_calibration_intrinsic_parameters_t::_param param =
                calibration.intrinsics.parameters.param;

            // // Scale intrinsics according to image scale
            // param.fx *= image_scale_;
            // param.fy *= image_scale_;

            // Calculate distortion map
            cv::Mat1f camera_matrix(3, 3);
            camera_matrix << param.fx, 0, param.cx, 0, param.fy, param.cy, 0, 0, 1;
            // NOTE(review): the new camera matrix reuses param.fx where
            // param.fy might be expected (row 2) -- confirm this is intended.
            cv::Mat1f new_camera_matrix(3, 3);
            new_camera_matrix << param.fx, 0, param.cx, 0, param.fx, param.cy, 0, 0, 1;
            cv::Mat1f distortion_coeff(1, 8);
            distortion_coeff << param.k1, param.k2, param.p1, param.p2, param.k3, param.k4,
                param.k5, param.k6;
            cv::Mat map1, map2, map3;
            cv::initUndistortRectifyMap(
                camera_matrix,
                distortion_coeff,
                cv::Mat{},
                new_camera_matrix,
                cv::Size{calibration.resolution_width, calibration.resolution_height},
                CV_32FC1,
                map1,
                map2);
            // Convert to the fixed-point map representation used by cv::remap.
            cv::convertMaps(map1, map2, colorDistortionMap, map3, CV_16SC2, true);
        }
    }
593
    void
    // NOTE(review): the signature line is missing from this extraction (this
    // is the capture-stop hook); it stops the IMU if it was started.
    {
        if (startIMU)
        {
            device.stop_imu();
        }
    }
602
    bool
    // NOTE(review): the signature line is missing from this extraction
    // (presumably the capture-loop body, returning false to request a retry).
    // It grabs one capture, hands the aligned depth image to the point cloud
    // thread, feeds body tracking, publishes the RGB-D pair, and optionally
    // averages pending IMU samples. Several interior lines were dropped;
    // they are marked below.
    {

        ScopedStopWatch sw_total{createSwCallback("doCapture")};

        k4a::capture capture;
        const std::chrono::milliseconds TIMEOUT{1000};

        StopWatch sw_get_capture;
        bool status = false;
        try
        {
            ARMARX_DEBUG << "Try capture.";
            status = device.get_capture(&capture, TIMEOUT);
            ARMARX_DEBUG << "Got capture.";
        }
        catch (const std::exception&)
        {
            // Device hiccup: restart the cameras and let the caller retry.
            ARMARX_WARNING << "Failed to get capture from device (#" << ++mDiagnostics.num_crashes
                           << "). Restarting camera.";
            StopWatch sw;
            device.stop_cameras();
            device.start_cameras(&config);
            ARMARX_INFO << "Restarting took " << sw.stop() << ".";
            return false;
        }

        if (status)
        {
            createSwCallback("waiting for get_capture")(sw_get_capture.stop());

            {
                std::lock_guard g{metaInfoMtx};
                setMetaInfo("temperature", new armarx::Variant(capture.get_temperature_c()));
            }

            // see ROS: This will give INCORRECT timestamps until the first image.
            const bool mustInitializeTimestampOffset = [&]()
            {
                std::lock_guard g{deviceToRealtimeOffsetMtx};
                return device_to_realtime_offset_.count() == 0;
            }();
            if (mustInitializeTimestampOffset)
            {
                initializeTimestampOffset(capture.get_depth_image().get_device_timestamp());
            }

            // next, we update the timestamp offset continuously
            updateTimestampOffset(capture.get_ir_image().get_device_timestamp(),
                                  capture.get_ir_image().get_system_timestamp());

            const k4a::image DEPTH_IMAGE = capture.get_depth_image();

            // This function assumes that the image is made of depth pixels (i.e. uint16_t's),
            // which is only true for IR/depth images.
            const k4a_image_format_t IMAGE_FORMAT = DEPTH_IMAGE.get_format();
            if (IMAGE_FORMAT != K4A_IMAGE_FORMAT_DEPTH16 && IMAGE_FORMAT != K4A_IMAGE_FORMAT_IR16)
            {
                const char* format_str = ::k4aImageFormatToString(IMAGE_FORMAT);
                std::stringstream error_msg;
                error_msg << "Attempted to colorize a non-depth image with format: " << format_str;
                throw std::logic_error(error_msg.str());
            }

            // Provide data for pointcloud processing thread and signal to start processing.
            if (not framerate.pointCloud.skip())
            {
                ScopedStopWatch sw{createSwCallback("transform depth image to camera")};
                // Acquire lock and write data needed by pointcloud thread (i.e.,
                // alignedDepthImageScaled and depthImageReady).
                {
                    std::lock_guard lock{pointcloudProcMutex};
                    transformation.depth_image_to_color_camera(DEPTH_IMAGE, &alignedDepthImage);

                    // Signal that point cloud processing may proceed and reset processed flag.
                    depthImageReady = true;
                }

                ARMARX_DEBUG << "Notifying pointcloud thread.";
                pointcloudProcSignal.notify_one();
            }

#ifdef INCLUDE_BODY_TRACKING
            if (bodyTrackingIsRunning)
            {
                ARMARX_DEBUG << "body Tracking Is Running.";
                std::scoped_lock lock(bodyTrackingParameterMutex);

                if (not framerate.bodyTracking.skip())
                {
                    k4a::image ir_image = capture.get_ir_image();
                    std::uint8_t* ir_image_buffer = ir_image.get_buffer();

                    k4a::image depth_image = capture.get_depth_image();
                    std::uint8_t* depth_image_buffer = depth_image.get_buffer();

                    ARMARX_CHECK_EQUAL(ir_image.get_width_pixels(), depth_image.get_width_pixels());
                    ARMARX_CHECK_EQUAL(ir_image.get_height_pixels(),
                                       depth_image.get_height_pixels());

                    const int stride = ir_image.get_stride_bytes() / ir_image.get_width_pixels();

                    // Mask out pixels outside the configured x/z bounds
                    // (<= 0 disables the respective bound) by saturating the
                    // 16-bit IR and depth values.
                    for (int x = 0; x < ir_image.get_width_pixels(); x++)
                    {
                        for (int y = 0; y < ir_image.get_height_pixels(); y++)
                        {
                            const int i = (y * ir_image.get_width_pixels() * stride) + (x * stride);
                            // Reassemble the little-endian 16-bit depth value.
                            const int z = (static_cast<int>(depth_image_buffer[i])) +
                                          (static_cast<int>(depth_image_buffer[i + 1]) << 8);

                            if ((bodyTrackingDepthMaskMinX > 0 and x < bodyTrackingDepthMaskMinX) or
                                (bodyTrackingDepthMaskMaxX > 0 and x > bodyTrackingDepthMaskMaxX) or
                                (bodyTrackingDepthMaskMaxZ > 0 and z > bodyTrackingDepthMaskMaxZ))
                            {
                                ir_image_buffer[i] = std::numeric_limits<std::uint8_t>::max();
                                ir_image_buffer[i + 1] = std::numeric_limits<std::uint8_t>::max();
                                depth_image_buffer[i] = std::numeric_limits<std::uint8_t>::max();
                                depth_image_buffer[i + 1] =
                                    std::numeric_limits<std::uint8_t>::max();
                            }
                        }
                    }

                    if (not bodyTracker.enqueue_capture(capture))
                    {
                        ARMARX_WARNING << "Add capture to tracker process queue timeout";
                    }
                }
            }
#endif

            const k4a::image COLOR_IMAGE = capture.get_color_image();
            ARMARX_DEBUG << "Got COLOR image.";

            // Convert the depth image's monotonic system timestamp into a
            // realtime stamp, compensating the configured capture latency.
            auto real_time = IceUtil::Time::now();
            auto monotonic_time = IceUtil::Time::now(IceUtil::Time::Monotonic);
            auto clock_diff = real_time - monotonic_time;

            auto image_monotonic_time =
                IceUtil::Time::microSeconds(std::chrono::duration_cast<std::chrono::microseconds>(
                                                DEPTH_IMAGE.get_system_timestamp())
                                                .count());
            long offset = long(getProperty<float>("CaptureTimeOffset").getValue() * 1000.0f);

            imagesTime = image_monotonic_time + clock_diff - IceUtil::Time::microSeconds(offset);

            {
                std::lock_guard g{metaInfoMtx};
                setMetaInfo("image age in [ms]",
                            new armarx::Variant{(real_time - imagesTime).toMilliSecondsDouble()});
            }

            {
                std::lock_guard g{debugObserverMtx};
                setDebugObserverDatafield("image age in [ms]",
                                          (real_time - imagesTime).toMilliSecondsDouble());
            }

            if (enableColorUndistortion)
            {
                // only the color image needs to be rectified
                cv::Mat tmp_rgb_image;

                if (COLOR_IMAGE.get_format() == K4A_IMAGE_FORMAT_COLOR_YUY2)
                {
                    ARMARX_DEBUG << "Converting YUY2 image.";
                    cv::Mat yuy2_image(COLOR_IMAGE.get_height_pixels(),
                                       COLOR_IMAGE.get_width_pixels(),
                                       CV_8UC2,
                                       const_cast<uint8_t*>(COLOR_IMAGE.get_buffer()));

                    cv::cvtColor(yuy2_image, tmp_rgb_image, cv::COLOR_YUV2RGB_YUY2);
                }
                else if (COLOR_IMAGE.get_format() == K4A_IMAGE_FORMAT_COLOR_BGRA32)
                {
                    cv::cvtColor(cv::Mat{cv::Size{COLOR_IMAGE.get_width_pixels(),
                                                  COLOR_IMAGE.get_height_pixels()},
                                         CV_8UC4,
                                         (void*)COLOR_IMAGE.get_buffer(),
                                         cv::Mat::AUTO_STEP},
                                 tmp_rgb_image,
                                 cv::COLOR_BGRA2RGB);
                }
                else
                {
                    throw std::runtime_error("Unsupported color format in k4a image");
                }

                // NOTE(review): width and height appear swapped here --
                // cv::Mat's constructor takes (rows, cols); confirm against
                // the original source.
                cv::Mat cv_color_image_undistorted(COLOR_IMAGE.get_width_pixels(),
                                                   COLOR_IMAGE.get_height_pixels(),
                                                   CV_8UC3);

                cv::remap(tmp_rgb_image,
                          cv_color_image_undistorted,
                          colorDistortionMap,
                          cv::Mat{},
                          cv::INTER_NEAREST,
                          cv::BORDER_CONSTANT);

                visionx::imrec::convert_rgb2cbi(cv_color_image_undistorted, *resultColorImage);
            }
            else
            {
                // Convert the k4a image to an IVT image.
                ScopedStopWatch sw{createSwCallback("convert k4a image to IVT")};
                ::k4aToIvtImage(COLOR_IMAGE, *resultColorImage);
            }


            // Prepare result depth image.
            {
                ScopedStopWatch sw{createSwCallback("prepare result depth image")};

                const int DW = alignedDepthImage.get_width_pixels();
                const int DH = alignedDepthImage.get_height_pixels();

                ARMARX_CHECK_EXPRESSION(resultDepthImage);
                ARMARX_CHECK_EQUAL(resultDepthImage->width, DW);
                ARMARX_CHECK_EQUAL(resultDepthImage->height, DH);

                auto result_depth_buffer = resultDepthImage->pixels;
                const auto* depth_buffer =
                    reinterpret_cast<const uint16_t*>(alignedDepthImage.get_buffer());

                // Encode each 16-bit depth value into the three RGB bytes of
                // the result image.
                int index = 0;
                int index_2 = 0;
                for (int y = 0; y < DH; ++y)
                {
                    for (int x = 0; x < DW; ++x)
                    {
                        uint16_t depth_value = depth_buffer[index_2];
                        // NOTE(review): the call head consuming depth_value
                        // and writing the three bytes below is missing from
                        // this extraction.
                            result_depth_buffer[index],
                            result_depth_buffer[index + 1],
                            result_depth_buffer[index + 2]);
                        index += 3;
                        index_2 += 1;
                    }
                }
            }

            // Broadcast RGB-D image.
            {
                ScopedStopWatch sw{createSwCallback("broadcast RGB-D image")};
                CByteImage* images[2] = {resultColorImage.get(), resultDepthImage.get()};
                provideImages(images, imagesTime);
            }

            // Wait until `alignedDepthImage` was processed and can be overridden again.
            {
                std::unique_lock signal_lock{pointcloudProcMutex};
                ARMARX_DEBUG << "Capturing thread waiting for signal...";
                pointcloudProcSignal.wait(signal_lock, [&] { return depthImageProcessed; });
                ARMARX_DEBUG << "Capturing thread received signal.";
            }

            {
                std::lock_guard g{debugObserverMtx};
                // NOTE(review): a statement is missing here in this extraction.
            }

            if (enableHeartbeat)
            {
                heartbeatPlugin->heartbeat();
            }

            // return true;
        }
        else
        {
            ARMARX_INFO << deactivateSpam(30) << "Did not get frame until timeout of " << TIMEOUT;

            {
                std::lock_guard g{debugObserverMtx};
                // NOTE(review): a statement is missing here in this extraction.
            }

            return false;
        }

        if (startIMU)
        {
            try
            {
                Eigen::Vector3f accMean = Eigen::Vector3f::Zero();
                std::size_t cnt = 0;

                k4a_imu_sample_t imu_sample;
                // Drain all queued IMU samples without blocking (0 ms
                // timeout) and average the accelerometer readings.
                while (device.get_imu_sample(&imu_sample, std::chrono::milliseconds(0)))
                {
                    const Eigen::Vector3f acceleration{imu_sample.acc_sample.xyz.x,
                                                       imu_sample.acc_sample.xyz.y,
                                                       imu_sample.acc_sample.xyz.z};

                    accMean += acceleration;
                    cnt++;
                }

                // NOTE(review): if no sample was queued, cnt is 0 and this
                // divides by zero (NaN components) -- confirm whether at
                // least one sample is guaranteed here.
                accMean /= static_cast<float>(cnt);

                ARMARX_INFO << "Got IMU sample";

                {
                    std::lock_guard g{debugObserverMtx};

                    setDebugObserverDatafield("acceleration.x", accMean.x());
                    setDebugObserverDatafield("acceleration.y", accMean.y());
                    setDebugObserverDatafield("acceleration.z", accMean.z());

                    // NOTE(review): a statement is missing here in this
                    // extraction.
                }
            }
            catch (const std::exception&)
            {
                ARMARX_WARNING << "Failed to get imu samples from device (#"
                               << ++mDiagnostics.num_crashes << ").";
            }
        }

        return true;
    }
927
#ifdef INCLUDE_BODY_TRACKING
    /**
     * Worker loop of the body tracking publishing task: pops body frames from
     * the tracker, converts them to armem HumanPose entries, and commits them
     * to the human memory at the configured body tracking rate.
     *
     * NOTE(review): several lines of this method are missing from this
     * extraction, including the opening brace region and the declaration of
     * `metronome` (used by waitForNextTick() at the bottom). Remaining code
     * is kept verbatim.
     */
    void
    AzureKinectPointCloudProvider::runPublishBodyTrackingResults()
        while (bodyTrackingIsRunning)
        {


            // body frames might just not be available.
            const k4abt::frame body_frame = [&]() -> k4abt::frame
            {
                try
                {
                    auto result = bodyTracker.pop_result();
                    return result;
                }
                catch (...)
                {
                    // NOTE(review): the rest of this log statement (and its
                    // terminating semicolon) is missing from this extraction.
                    ARMARX_VERBOSE << deactivateSpam(1) << "Exception in body tracking publishing: "
                    return {};
                }
            }();

            if (body_frame != nullptr)
            {
                armarx::core::time::ScopedStopWatch sw{
                    createSwCallback("publish body tracking result")};
                // see https://github.com/microsoft/Azure_Kinect_ROS_Driver/blob/melodic/src/k4a_ros_device.cpp
                const armarx::DateTime timestamp =
                    timestampToArmarX(body_frame.get_device_timestamp());

                // Diagnostic: compare the Ice-based realtime reconstruction of
                // the frame timestamp against the ROS-style offset approach.
                {
                    auto real_time = IceUtil::Time::now();
                    auto monotonic_time = IceUtil::Time::now(IceUtil::Time::Monotonic);
                    auto clock_diff = real_time - monotonic_time;

                    auto image_monotonic_time = IceUtil::Time::microSeconds(
                        std::chrono::duration_cast<std::chrono::microseconds>(
                            body_frame.get_system_timestamp())
                            .count());
                    // long offset = long(getProperty<float>("CaptureTimeOffset").getValue() * 1000.0f);

                    // IceUtil::Time imageTime = image_monotonic_time + clock_diff - IceUtil::Time::microSeconds(offset);
                    IceUtil::Time imageTime = image_monotonic_time + clock_diff;

                    {
                        std::lock_guard g{debugObserverMtx};
                        setDebugObserverDatafield("ros_vs_ice_timestamp [µs]",
                                                  imageTime.toMicroSeconds());
                    }
                }

                // Diagnostic: same comparison using armarx::Clock instead of
                // IceUtil.
                {
                    const armarx::Clock realtimeClock = armarx::Clock(armarx::ClockType::Realtime);
                    const armarx::Clock monotonicClock =
                        armarx::Clock(armarx::ClockType::Monotonic);

                    auto real_time = realtimeClock.now();
                    auto monotonic_time = monotonicClock.now();
                    auto clock_diff = real_time - monotonic_time;

                    auto image_monotonic_time = armarx::Duration::MicroSeconds(
                        std::chrono::duration_cast<std::chrono::microseconds>(
                            body_frame.get_system_timestamp())
                            .count());
                    // long offset = long(getProperty<float>("CaptureTimeOffset").getValue() * 1000.0f);

                    // IceUtil::Time imageTime = image_monotonic_time + clock_diff - IceUtil::Time::microSeconds(offset);
                    auto imageTime = image_monotonic_time + clock_diff;

                    // NOTE(review): imageTimestamp is constructed but not used
                    // in the visible code.
                    armarx::DateTime imageTimestamp = armarx::DateTime(imageTime);

                    {
                        std::lock_guard g{debugObserverMtx};
                        setDebugObserverDatafield("ros_vs_armarx_timestamp [µs]",
                                                  imageTime.toMicroSeconds());
                    }
                }


                std::uint32_t num_bodies = body_frame.get_num_bodies();
                {
                    std::lock_guard g{debugObserverMtx};
                    setDebugObserverDatafield("n_bodies_detected", num_bodies);
                }

                std::vector<armarx::armem::human::HumanPose> humanPoses;
                humanPoses.reserve(num_bodies);

                for (std::uint32_t i = 0; i < num_bodies; i++)
                {
                    k4abt_body_t body = body_frame.get_body(i);
                    printBodyInformation(body);

                    armarx::armem::human::HumanPose humanPose;
                    humanPose.humanTrackingId = std::to_string(body.id);
                    humanPose.cameraFrameName = bodyCameraFrameName;

                    // NOTE(review): this inner loop variable shadows the outer
                    // body index `i`.
                    for (int i = 0; i < static_cast<int>(K4ABT_JOINT_COUNT); i++)
                    {
                        // NOTE(review): the right-hand sides of `joint` and
                        // `name` are missing from this extraction (presumably
                        // the joint enum value and its string label).
                        const auto joint =
                        const auto name =

                        k4a_float3_t position = body.skeleton.joints[i].position;
                        k4a_quaternion_t orientation = body.skeleton.joints[i].orientation;
                        k4abt_joint_confidence_level_t confidence_level =
                            body.skeleton.joints[i].confidence_level;

                        // Store the joint as a keypoint in the camera frame.
                        humanPose.keypoints[name] = armarx::armem::human::PoseKeypoint{
                            .label = name,
                            .confidence = static_cast<float>(static_cast<int>(confidence_level)),
                            .positionCamera = armarx::FramedPosition(
                                Eigen::Vector3f{position.v[0], position.v[1], position.v[2]},
                                bodyCameraFrameName,
                                robotName),
                            .orientationCamera =
                                armarx::FramedOrientation(Eigen::Quaternionf(orientation.wxyz.w,
                                                                             orientation.wxyz.x,
                                                                             orientation.wxyz.y,
                                                                             orientation.wxyz.z)
                                                              .toRotationMatrix(),
                                                          bodyCameraFrameName,
                                                          robotName)};
                    }

                    humanPoses.push_back(humanPose);
                }

                {
                    std::lock_guard g{debugObserverMtx};
                    setDebugObserverDatafield("bodyTrackingCaptureUntilPublish [ms]",
                                              (armarx::Clock::Now() - timestamp).toMilliSeconds());
                }


                {
                    // NOTE(review): a line (presumably a ScopedStopWatch) is
                    // missing here in this extraction.
                    //ARMARX_INFO << deactivateSpam(1) << "committing human poses";
                    humanPoseWriter.commitHumanPosesInCameraFrame(humanPoses, getName(), timestamp);
                }


                // k4a::image body_index_map = body_frame.get_body_index_map();
                // if (body_index_map != nullptr)
                // {
                //     //print_body_index_map_middle_line(body_index_map);
                // }
                // else
                // {
                //     ARMARX_WARNING << "Error: Failed to generate bodyindex map!";
                // }
            }
            else
            {
                // It should never hit timeout when K4A_WAIT_INFINITE is set.
                ARMARX_VERBOSE << "Error! Pop body frame result time out!";
            }

            metronome.waitForNextTick();
        }
    }
#endif
1094
// NOTE(review): Doxygen source rendering — the extractor dropped hyperlinked
// lines (the function name on original line 1096, ScopedStopWatch declarations,
// and log-stream continuations). Code is kept verbatim; confirm against the
// original source before editing.
//
// Worker loop of the point-cloud processing task: blocks until the capture
// side signals a fresh aligned depth image, converts it to a colored PCL
// point cloud, publishes it, and hands the shared buffers back.
1095 void
1097 {
1098 ARMARX_DEBUG << "Started pointcloud processing task.";
1099
1100 IceUtil::Time time;
1101
1102 // Main pointcloud processing loop.
1103 while (not pointcloudTask->isStopped())
1104 {
1105 // Wait for data.
1106 std::unique_lock signal_lock{pointcloudProcMutex};
1107 ARMARX_DEBUG << "Pointcloud thread waiting for signal...";
1108 pointcloudProcSignal.wait(signal_lock,
1109 [&]
1110 { return pointcloudTask->isStopped() or depthImageReady; });
1111 ARMARX_DEBUG << "Pointcloud thread received signal.";
1112
// Snapshot the capture timestamp while holding the lock, then mark the
// depth buffer as consumed so the capture thread may refill it later.
1113 // Assess timings, reset flags.
1114 const IceUtil::Time TIMESTAMP = imagesTime;
1115 depthImageReady = false;
1116 depthImageProcessed = false;
1117
1118 // Transform depth image to pointcloud.
1119 {
// Uses the k4a transformation object; the depth image was previously
// aligned to the color camera, hence K4A_CALIBRATION_TYPE_COLOR.
1121 transformation.depth_image_to_point_cloud(alignedDepthImage,
1122 K4A_CALIBRATION_TYPE_COLOR,
1123 &xyzImage);
1124 ARMARX_DEBUG << "Transforming depth image to point cloud took "
1126 }
1127
1128 // Construct PCL pointcloud.
1129 {
1131
1132 ARMARX_CHECK_EXPRESSION(pointcloud);
1133
1134 pointcloud->width = static_cast<uint32_t>(xyzImage.get_width_pixels());
1135 pointcloud->height = static_cast<uint32_t>(xyzImage.get_height_pixels());
1136
1138
1139 pointcloud->is_dense = false;
1140 pointcloud->points.resize(pointcloud->width * pointcloud->height);
1141
// k4a XYZ buffer is int16 millimeters, 3 values per pixel, same pixel
// order as the color image below.
1142 auto k4a_cloud_buffer = reinterpret_cast<const int16_t*>(xyzImage.get_buffer());
1143
1144 ARMARX_CHECK_EXPRESSION(k4a_cloud_buffer);
1145
// NOTE(review): busy-wait on a shared pointer as a workaround for an
// acknowledged race (see log text) — no synchronization here; TODO fix.
1146 unsigned char* color_buffer = resultColorImage->pixels;
1147 while (color_buffer == nullptr)
1148 {
1149 ARMARX_INFO << "color_buffer is null. This should never happen. There is "
1150 "probably a race condition somewhere that needs to be fixed. "
1151 "Temporarily we ignore this and continue.\n Timestamp: "
1153 color_buffer = resultColorImage->pixels;
1154 }
1155 ARMARX_CHECK_NOT_NULL(color_buffer);
1156
1157 ARMARX_CHECK_EQUAL(xyzImage.get_width_pixels(), resultColorImage->width);
1158 ARMARX_CHECK_EQUAL(xyzImage.get_height_pixels(), resultColorImage->height);
1159 ARMARX_CHECK_EQUAL(pointcloud->points.size(),
1160 pointcloud->width * pointcloud->height);
1161
1163
// Interleaved copy: r/x, g/y, b/z share one running index into both the
// color buffer (3 bytes per pixel) and the XYZ buffer (3 int16 per pixel).
// Points with z == 0 (invalid) or beyond MaxDepth (presumably millimeters
// — TODO confirm unit against the property description) get z = NaN.
1164 size_t index = 0;
1165 float max_depth = getProperty<float>("MaxDepth").getValue();
1166 for (auto& p : pointcloud->points)
1167 {
1168 p.r = color_buffer[index];
1169 p.x = k4a_cloud_buffer[index];
1170
1171 index++;
1172
1173 p.g = color_buffer[index];
1174 p.y = k4a_cloud_buffer[index];
1175
1176 index++;
1177
1178 p.b = color_buffer[index];
1179 auto z = k4a_cloud_buffer[index];
1180
1181 index++;
1182
1183 if (z <= max_depth and z != 0)
1184 {
1185 p.z = z;
1186 }
1187 else
1188 {
1189 p.z = std::numeric_limits<float>::quiet_NaN();
1190 }
1191 }
1192
1193 ARMARX_DEBUG << "Constructing point cloud took "
1195 }
1196
1197 // Notify capturing thread that data was processed and may be overridden.
1198 depthImageProcessed = true;
1199 signal_lock.unlock();
1200 ARMARX_DEBUG << "Notifying capturing thread...";
1201 pointcloudProcSignal.notify_all();
1202
1204
1205 // Broadcast PCL pointcloud.
1206 {
// Stamp uses the capture time snapshotted above, in microseconds (PCL
// header convention).
1208 pointcloud->header.stamp = static_cast<unsigned long>(TIMESTAMP.toMicroSeconds());
1209 providePointCloud(pointcloud);
1210 ARMARX_DEBUG << "Broadcasting pointcloud took "
1212 }
1213 }
1214
1215 ARMARX_DEBUG << "Stopped pointcloud processing task.";
1216 }
1217
// Returns the default ArmarX component name for this provider.
// (Function-name line elided by the Doxygen extractor.)
1218 std::string
1220 {
1221 return "AzureKinectPointCloudProvider";
1222 }
1223
// NOTE(review): The following four definitions were collapsed by the Doxygen
// extractor — only their return types remain (original lines 1225-1252).
// Judging by the member index they are lifecycle hooks
// (onInit*/onConnect*/onDisconnect*/onExit* overrides) — confirm against the
// original source; do not edit from this rendering.
1224 void
1230
1231 void
1237
1238 void
1246
1247 void
1253
// Builds a VisionX StereoCalibration from the k4a device calibration.
// Both "left" (color) and "right" (depth-warped-to-color) calibrations are
// derived from the color camera intrinsics; rectification homographies are
// identity. (Function-name line elided by the Doxygen extractor.)
1254 StereoCalibration
1256 {
1257 using namespace Eigen;
1258 using Matrix3FRowMajor = Matrix<float, 3, 3, StorageOptions::RowMajor>;
1260
1261 // TODO: use the externally used cameraMatrix and distCoeffs
// Converts one k4a camera calibration to a VisionX MonocularCalibration,
// optionally scaling intrinsics/resolution by `scale`.
1262 const auto convert_calibration =
1263 [](const k4a_calibration_camera_t& k4a_calib, float scale = 1.f)
1264 {
1265 MonocularCalibration monocular_calibration;
1266
1267 const k4a_calibration_intrinsic_parameters_t& params = k4a_calib.intrinsics.parameters;
1268 monocular_calibration.cameraParam.principalPoint = {params.param.cx * scale,
1269 params.param.cy * scale};
1270 monocular_calibration.cameraParam.focalLength = {params.param.fx * scale,
1271 params.param.fy * scale};
1272 // TODO: Figure out conversions. IVT (Calibration.h) expects 4 parameters:
1273 // - The first radial lens distortion parameter.
1274 // - The second radial lens distortion parameter.
1275 // - The first tangential lens distortion parameter.
1276 // - The second tangential lens distortion parameter.
1277 // However, the Kinect offers k1-k6 radial distortion coefficients and 2 p1-p2
1278 // tangential distortion parameters, which means that k3-k6 are unused.
1279 // It is even unclear whether this is correct now, as previously it was a vector of all
1280 // 6 k-params, which resulted in failed assertions in TypeMapping.cpp in the function
1281 // CStereoCalibration* visionx::tools::convert(const visionx::StereoCalibration& stereoCalibration)
1282 // line 314 at time of this commit.
1283 // See: https://microsoft.github.io/Azure-Kinect-Sensor-SDK/master/structk4a__calibration__intrinsic__parameters__t_1_1__param.html
1284 monocular_calibration.cameraParam.distortion = {
1285 params.param.k1,
1286 params.param.k2,
1287 params.param.p1,
1288 params.param.p2,
1289 };
1290
1291 // TODO this needs to be scaled! Or why shouldn't it ?
1292 monocular_calibration.cameraParam.width =
1293 std::floor(float(k4a_calib.resolution_width) * scale);
1294 monocular_calibration.cameraParam.height =
1295 std::floor(float(k4a_calib.resolution_height) * scale);
1296
// Extrinsics: row-major 3x3 rotation + 3-vector translation from the SDK.
1297 const Matrix3FRowMajor rotation =
1298 Map<const Matrix3FRowMajor>{k4a_calib.extrinsics.rotation};
1299
1300 monocular_calibration.cameraParam.rotation = convertEigenMatToVisionX(rotation);
1301 monocular_calibration.cameraParam.translation = {k4a_calib.extrinsics.translation,
1302 k4a_calib.extrinsics.translation + 3};
1303
1304 return monocular_calibration;
1305 };
1306
1307 StereoCalibration stereo_calibration;
1308
1309 stereo_calibration.calibrationLeft =
1310 convert_calibration(k4aCalibration.color_camera_calibration);
1311
1312 // if the camera images are rectified, the distortion params are 0
1313 if (enableColorUndistortion)
1314 {
1315 auto& colorDistortionParams = stereo_calibration.calibrationLeft.cameraParam.distortion;
1316 std::fill(colorDistortionParams.begin(), colorDistortionParams.end(), 0);
1317 }
1318
1319 // the depth image is rectified by default
1320 {
1321 // The depth image has been warped to the color camera frame. See depth_image_to_color_camera() above.
1322 // Therefore we use the calibration of the color camera (focal length etc) and set the distortion parameters to 0.
1323 stereo_calibration.calibrationRight =
1324 convert_calibration(k4aCalibration.color_camera_calibration);
1325
1326 auto& depthDistortionParams =
1327 stereo_calibration.calibrationRight.cameraParam.distortion;
1328 std::fill(depthDistortionParams.begin(), depthDistortionParams.end(), 0);
1329 }
1330
// No stereo rectification needed: both calibrations share the color frame.
1331 stereo_calibration.rectificationHomographyLeft =
1332 convertEigenMatToVisionX(Matrix3f::Identity());
1333 stereo_calibration.rectificationHomographyRight =
1334 convertEigenMatToVisionX(Matrix3f::Identity());
1335
1336 return stereo_calibration;
1337 }
1338
// Reports whether the provided color images are undistorted (mirrors the
// enableColorUndistortion flag). (Name line elided by the extractor.)
1339 bool
1341 {
1342 return enableColorUndistortion;
1343 }
1344
// Returns the reference frame name configured via the "frameName" property.
// (Name line elided by the extractor.)
1345 std::string
1347 {
1348 return getProperty<std::string>("frameName");
1349 }
1350
// Recording preferences: lossy compression is acceptable for the RGB channel,
// but the depth channel must be recorded losslessly (depth values are data,
// not imagery). (Name line elided by the extractor.)
1351 std::vector<imrec::ChannelPreferences>
1353 {
1355
1356 imrec::ChannelPreferences rgb;
1357 rgb.requiresLossless = false;
1358 rgb.name = "rgb";
1359
1360 imrec::ChannelPreferences depth;
1361 depth.requiresLossless = true;
1362 depth.name = "depth";
1363
1364 return {rgb, depth};
1365 }
1366
// Ice endpoint: starts/stops the body-tracking publish task according to
// input.enable3d. Errors out if body tracking was not compiled in
// (INCLUDE_BODY_TRACKING) or not enabled at runtime.
// (Name line elided by the extractor.)
1367 void
1369 const armarx::EnableHumanPoseEstimationInput& input,
1370 const Ice::Current&)
1371 {
1372#ifndef INCLUDE_BODY_TRACKING
1373 {
1374 ARMARX_ERROR << "INCLUDE_BODY_TRACKING is not defined.";
1375 return;
1376 }
1377#endif
1378
1379#ifdef INCLUDE_BODY_TRACKING
1380 if (bodyTrackingEnabled)
1381 {
// Toggle only on state changes; flag is set before start / cleared before
// stop so the task sees a consistent value.
1382 // should not require a mutex
1383 if (not bodyTrackingIsRunning and input.enable3d)
1384 {
1385 bodyTrackingIsRunning = true;
1386 bodyTrackingPublishTask->start();
1387 }
1388 else if (bodyTrackingIsRunning and not input.enable3d)
1389 {
1390 bodyTrackingIsRunning = false;
1391 bodyTrackingPublishTask->stop();
1392 }
1393 }
1394 else
1395 {
1396 ARMARX_ERROR << "Azure Kinect Body Tracking is not enabled";
1397 }
1398#endif
1399 }
1400
1401 void
1402 AzureKinectPointCloudProvider::setMaxDepthBodyTracking(int maxDepthInMM, const Ice::Current&)
1403 {
1404 std::scoped_lock lock(bodyTrackingParameterMutex);
1405 bodyTrackingDepthMaskMaxZ = maxDepthInMM;
1406 }
1407
// Ice endpoint: restricts the horizontal pixel range [minX, maxX] of the depth
// mask used for body tracking. (The extractor elided the name/first-parameter
// line; per the member index the signature is
// setWidthBodyTracking(int minXinPixel, int maxXinPixel, const Ice::Current&).)
1408 void
1410 int maxXinPixel,
1411 const Ice::Current&)
1412 {
1413 std::scoped_lock lock(bodyTrackingParameterMutex);
1414 bodyTrackingDepthMaskMinX = minXinPixel;
1415 bodyTrackingDepthMaskMaxX = maxXinPixel;
1416 }
1417
1418 // the code below is taken from https://github.com/microsoft/Azure_Kinect_ROS_Driver
1419 // -> MIT license
1420
// Continuously estimates the offset between the Kinect hardware clock and the
// realtime clock as a low-pass filtered value (code adapted from the
// Azure_Kinect_ROS_Driver, MIT license — see note above).
// (The extractor elided the function-name line and two statement openers
// inside; code kept verbatim.)
1421 void
1423 const std::chrono::microseconds& k4a_device_timestamp_us,
1424 const std::chrono::nanoseconds& k4a_system_timestamp_ns)
1425 {
1426 // System timestamp is on monotonic system clock.
1427 // Device time is on AKDK hardware clock.
1428 // We want to continuously estimate diff between realtime and AKDK hardware clock as low-pass offset.
1429 // This consists of two parts: device to monotonic, and monotonic to realtime.
1430
1431 // First figure out realtime to monotonic offset. This will change to keep updating it.
1432 std::chrono::nanoseconds realtime_clock =
1433 std::chrono::system_clock::now().time_since_epoch();
1434 std::chrono::nanoseconds monotonic_clock =
1435 std::chrono::steady_clock::now().time_since_epoch();
1436
1437 std::chrono::nanoseconds monotonic_to_realtime = realtime_clock - monotonic_clock;
1438
1439 // Next figure out the other part (combined).
1440 std::chrono::nanoseconds device_to_realtime =
1441 k4a_system_timestamp_ns - k4a_device_timestamp_us + monotonic_to_realtime;
1442
1443 {
1444 std::lock_guard g{deviceToRealtimeOffsetMtx};
1445
1446 {
1447 std::lock_guard g{debugObserverMtx};
1448 setDebugObserverDatafield("device_to_realtime_offset [ms]",
1449 device_to_realtime_offset_.count() /
1450 1'000'000.f); // [ns] -> [ms]
1452 "clock_error",
1453 std::abs<float>((device_to_realtime_offset_ - device_to_realtime).count()) /
1454 1'000.f); // [ns] -> [µs]
1455 }
1456
1457 const std::int64_t timeOffsetThreshold = 1e7; // 10 ms
1458
1459 // If we're over a certain time off, just snap into place.
1460 if (device_to_realtime_offset_.count() == 0 ||
1461 std::abs((device_to_realtime_offset_ - device_to_realtime).count()) >
1462 timeOffsetThreshold)
1463 {
1465 << "Initializing or re-initializing the device to realtime offset: "
1466 << device_to_realtime.count() << " ns";
1467 device_to_realtime_offset_ = device_to_realtime;
1468 }
1469 else
1470 {
1471 // Low-pass filter!
// First-order IIR: move 10% of the remaining error per update.
1472 constexpr double alpha = 0.10;
1473
1474 const std::chrono::nanoseconds timeCorrection(static_cast<int64_t>(
1475 std::floor(alpha * (device_to_realtime - device_to_realtime_offset_).count())));
1476 device_to_realtime_offset_ = device_to_realtime_offset_ + timeCorrection;
1477
1478 {
1479 std::lock_guard g{debugObserverMtx};
1480 setDebugObserverDatafield("timeCorrection [µs]",
1481 timeCorrection.count() / 1'000); // [ns] -> [µs]
1482 }
1483 }
1484 }
1485 }
1486
// Converts a device timestamp into an ArmarX timestamp by applying the
// estimated device-to-realtime offset. Requires the offset to be initialized
// (asserted below). Per the member index this is
// armarx::DateTime timestampToArmarX(const std::chrono::microseconds&);
// the return-type/name lines were elided by the extractor.
1489 const std::chrono::microseconds& k4a_timestamp_us)
1490 {
1491 std::lock_guard g{deviceToRealtimeOffsetMtx};
1492
1493 // must be initialized beforehand
1494 ARMARX_CHECK(device_to_realtime_offset_.count() != 0);
1495
1496 std::chrono::nanoseconds timestamp_in_realtime =
1497 k4a_timestamp_us + device_to_realtime_offset_;
1498
1499 return armarx::Duration::MicroSeconds(timestamp_in_realtime.count() /
1500 1'000); // [ns] -> [µs]
1501 }
1502
// Bootstraps the device-to-realtime clock offset assuming the given device
// timestamp corresponds to "now" on the wall clock. Later refined by
// updateTimestampOffset(). (Name line elided by the extractor.)
1503 void
1505 const std::chrono::microseconds& k4a_device_timestamp_us)
1506 {
1507 std::lock_guard g{deviceToRealtimeOffsetMtx};
1508
1509 // We have no better guess than "now".
1510 std::chrono::nanoseconds realtime_clock =
1511 std::chrono::system_clock::now().time_since_epoch();
1512
1513 device_to_realtime_offset_ = realtime_clock - k4a_device_timestamp_us;
1514
1515 ARMARX_INFO << "Initializing the device to realtime offset based on wall clock: "
1516 << device_to_realtime_offset_.count() << " ns";
1517 }
1518
1523
// Validates that a requested sub-framerate divides the master image framerate
// evenly and derives the number of frames to skip between published results.
// (The extractor elided the name/parameter lines; from the body, the
// parameters are presumably: value [fps], name [channel label],
// higherFramerate [master fps], and an out-param skipFrames — TODO confirm.)
1524 void
1526 {
// Floating-point divisibility check with a small tolerance.
1527 if (std::abs(std::fmod(higherFramerate, value)) > 1e-6)
1528 {
1529 std::stringstream ss;
1530 ss << "Invalid value (" << value << " fps) for framerate of '" << name << "'."
1531 << " Reason: The framerate has to be a divider of the property framerate.image ("
1532 << higherFramerate << " fps).";
1533 throw armarx::LocalException() << ss.str();
1534 }
1535 skipFrames = std::round(higherFramerate / value) - 1;
1536 if (skipFrames != 0)
1537 {
1538 ARMARX_INFO_S << "Only publishing every " << (skipFrames + 1) << "'th " << name
1539 << " result!";
1540 }
1541 }
1542
// Frame-skip predicate: returns true when the current frame should be skipped
// to maintain the configured sub-framerate. (The extractor elided the
// name/parameter line and the lines — original 1549-1550 — that increment
// skipFramesCount and set `skip`; code kept verbatim.)
1543 bool
1545 {
1546 bool skip = false;
1547 if (skipFrames > 0)
1548 {
// Counter wraps every (skipFrames + 1) frames, so exactly one frame per
// window is published.
1551 skipFramesCount %= (skipFrames + 1);
1552 }
1553 return skip;
1554 }
1555
1556} // namespace visionx
std::string timestamp()
uint8_t index
constexpr T c
Eigen::Matrix< T, 3, 3 > Matrix
static bool getAbsolutePath(const std::string &relativeFilename, std::string &storeAbsoluteFilename, const std::vector< std::string > &additionalSearchPaths={}, bool verbose=true)
static bool ReplaceEnvVars(std::string &string)
ReplaceEnvVars replaces environment variables in a string with their values, if the env.
static DateTime Now()
Current time on the virtual clock.
Definition Clock.cpp:93
std::string getConfigIdentifier()
Retrieve config identifier for this component as set in constructor.
Definition Component.cpp:90
Property< PropertyType > getProperty(const std::string &name)
static Duration MicroSeconds(std::int64_t microSeconds)
Constructs a duration in microseconds.
Definition Duration.cpp:24
static Duration MilliSeconds(std::int64_t milliSeconds)
Constructs a duration in milliseconds.
Definition Duration.cpp:48
static Frequency Hertz(std::int64_t hertz)
Definition Frequency.cpp:20
SpamFilterDataPtr deactivateSpam(float deactivationDurationSec=10.0f, const std::string &identifier="", bool deactivate=true) const
disables the logging for the current line for the given amount of seconds.
Definition Logging.cpp:99
virtual void onExitComponent()
Hook for subclass.
virtual void onDisconnectComponent()
Hook for subclass.
PluginT * addPlugin(const std::string prefix="", ParamsT &&... params)
virtual void onConnectComponent()=0
Pure virtual hook for the subclass.
std::string getName() const
Retrieve name of object.
virtual void onInitComponent()=0
Pure virtual hook for the subclass.
void setMetaInfo(const std::string &id, const VariantBasePtr &value)
Allows to set meta information that can be queried live via Ice interface on the ArmarXManager.
ArmarXManagerPtr getArmarXManager() const
Returns the ArmarX manager used to add and remove components.
std::string prefix
Prefix of the properties such as namespace, domain, component name, etc.
PropertyDefinition< PropertyType > & defineOptionalProperty(const std::string &name, PropertyType defaultValue, const std::string &description="", PropertyDefinitionBase::PropertyConstness constness=PropertyDefinitionBase::eConstant)
IceUtil::Handle< RunningTask< T > > pointer_type
Shared pointer type for convenience.
static IceUtil::Time GetTime(TimeMode timeMode=TimeMode::VirtualTime)
Get the current time.
Definition TimeUtil.cpp:42
static IceUtil::Time GetTimeSince(IceUtil::Time referenceTime, TimeMode timeMode=TimeMode::VirtualTime)
Get the difference between the current time and a reference time.
Definition TimeUtil.cpp:68
The Variant class is described here: Variants.
Definition Variant.h:224
DateTime now() const
Current date/time of the clock.
Definition Clock.cpp:22
Represents a point in time.
Definition DateTime.h:25
std::int64_t toMilliSecondsSinceEpoch() const
Definition DateTime.cpp:93
Represents a duration.
Definition Duration.h:17
Simple rate limiter for use in loops to maintain a certain frequency given a clock.
Definition Metronome.h:57
Measures the time this stop watch was inside the current scope.
Measures the passed time between the construction or calling reset() and stop().
Definition StopWatch.h:42
void onInitComponent() override
Pure virtual hook for the subclass.
void setMaxDepthBodyTracking(int maxDepthInMM, const Ice::Current &=Ice::emptyCurrent) override
armarx::DateTime timestampToArmarX(const std::chrono::microseconds &k4a_timestamp_us)
bool doCapture() override
Main capturing function.
void enableHumanPoseEstimation(const armarx::EnableHumanPoseEstimationInput &input, const Ice::Current &=Ice::emptyCurrent) override
void onExitCapturingPointCloudProvider() override
This is called when the Component::onExitComponent() setup is called.
void onDisconnectComponent() override
Hook for subclass.
static std::pair< int, int > GetDepthDimensions(const k4a_depth_mode_t depth_mode)
Returns the dimension of the depth images that will be produced for a certain resolution.
void onStartCapture(float frames_per_second) override
This is called when the point cloud provider capturing has been started.
static std::pair< int, int > GetColorDimensions(const k4a_color_resolution_t resolution)
Returns the dimension of the color images that will be produced for a certain resolution.
armarx::PropertyDefinitionsPtr createPropertyDefinitions() override
void onConnectImageProvider() override
This is called when the Component::onConnectComponent() setup is called.
visionx::StereoCalibration getStereoCalibration(const Ice::Current &c) override
std::string getReferenceFrame(const Ice::Current &c) override
static std::string VersionToString(const k4a_version_t &version)
Creates a string from a k4a_version_t.
std::function< void(armarx::Duration)> createSwCallback(const std::string &description)
void onInitCapturingPointCloudProvider() override
This is called when the Component::onInitComponent() is called.
void onConnectComponent() override
Pure virtual hook for the subclass.
void onStopCapture() override
This is called when the point cloud provider capturing has been stopped.
bool getImagesAreUndistorted(const ::Ice::Current &c) override
std::vector< imrec::ChannelPreferences > getImageRecordingChannelPreferences(const Ice::Current &) override
void onInitImageProvider() override
This is called when the Component::onInitComponent() is called.
void setWidthBodyTracking(int minXinPixel, int maxXinPixel, const Ice::Current &=Ice::emptyCurrent) override
void initializeTimestampOffset(const std::chrono::microseconds &k4a_device_timestamp_us)
void updateTimestampOffset(const std::chrono::microseconds &k4a_device_timestamp_us, const std::chrono::nanoseconds &k4a_system_timestamp_ns)
std::atomic_bool captureEnabled
Indicates that capturing is enabled and running.
void setPointCloudSyncMode(ImageSyncMode pointCloudSyncMode)
Sets the point cloud synchronization mode.
void onInitComponent() override
void onDisconnectComponent() override
Hook for subclass.
ImageFormatInfo getImageFormat(const Ice::Current &c=Ice::emptyCurrent) override
Returns the entire image format info struct via Ice.
void setImageFormat(ImageDimension imageDimension, ImageType imageType, BayerPatternType bayerPatternType=visionx::eBayerPatternRg)
Sets the image basic format data.
void provideImages(void **inputBuffers, const IceUtil::Time &imageTimestamp=IceUtil::Time())
send images raw.
void onConnectComponent() override
void setNumberImages(int numberImages)
Sets the number of images on each capture.
void onExitComponent() override
MetaPointCloudFormatPtr getPointCloudFormat(const Ice::Current &c=Ice::emptyCurrent) override
Returns the point cloud format info struct via Ice.
void providePointCloud(PointCloudPtrT pointCloudPtr)
offer the new point cloud.
#define ARMARX_CHECK_EXPRESSION(expression)
This macro evaluates the expression and if it turns out to be false it will throw an ExpressionExcept...
#define ARMARX_CHECK(expression)
Shortcut for ARMARX_CHECK_EXPRESSION.
#define ARMARX_CHECK_NOT_NULL(ptr)
This macro evaluates whether ptr is not null and if it turns out to be false it will throw an Express...
#define ARMARX_CHECK_EQUAL(lhs, rhs)
This macro evaluates whether lhs is equal (==) rhs and if it turns out to be false it will throw an E...
#define ARMARX_INFO
The normal logging level.
Definition Logging.h:181
#define ARMARX_ERROR
The logging level for unexpected behaviour, that must be fixed.
Definition Logging.h:196
#define ARMARX_INFO_S
Definition Logging.h:202
#define ARMARX_DEBUG
The logging level for output that is only interesting while debugging.
Definition Logging.h:184
#define ARMARX_WARNING
The logging level for unexpected behaviour, but not a serious problem.
Definition Logging.h:193
#define ARMARX_VERBOSE
The logging level for verbose information.
Definition Logging.h:187
Quaternion< float, 0 > Quaternionf
const simox::meta::EnumNames< Joints > JointNames
Names of the joints as defined in the body model.
Joints
Joints with index as defined in the body model.
This file offers overloads of toIce() and fromIce() functions for STL container types.
std::string GetHandledExceptionString()
IceUtil::Handle< class PropertyDefinitionContainer > PropertyDefinitionsPtr
PropertyDefinitions smart pointer type.
void convert(const CByteImage &in, cv::Mat &out)
Converts an IVT CByteImage to OpenCV's BGR Mat.
Definition helper.cpp:40
void convert_rgb2cbi(const cv::Mat &in, CByteImage &out)
Converts an OpenCV RGB Mat to IVT's CByteImage.
Definition helper.cpp:64
void depthValueToRGB(unsigned int depthInMM, unsigned char &r, unsigned char &g, unsigned char &b, bool noiseResistant=false)
Definition ImageUtil.h:183
CByteImage * createByteImage(const ImageFormatInfo &imageFormat, const ImageType imageType)
Creates a ByteImage for the destination type specified in the given imageProviderInfo.
visionx::types::Mat convertEigenMatToVisionX(Eigen::MatrixXf m)
CByteImage::ImageType convert(const ImageType visionxImageType)
Converts a VisionX image type into an image type of IVT's ByteImage.
ArmarX headers.
std::optional< std::string > humanTrackingId
Definition types.h:47
#define ARMARX_TRACE
Definition trace.h:77