9 #include <opencv2/core/hal/interface.h>
10 #include <opencv2/imgproc/imgproc.hpp>
12 #include <IceUtil/Time.h>
32 #include <Calibration/Calibration.h>
33 #include <Image/ImageProcessor.h>
35 #ifdef INCLUDE_BODY_TRACKING
// Converts a k4a BGRA32 color image into an IVT CByteImage (RGB24).
// The per-pixel copy swaps channels: k4a buffer offset +2 -> R, +1 -> G,
// +0 -> B (i.e. BGRA source, RGB target; alpha is dropped).
// NOTE(review): partial extraction — the declarations and per-iteration
// updates of index_ivt / index_k4a are not visible in this chunk.
48 k4aToIvtImage(
const k4a::image& color_image, ::CByteImage& result)
// Cache dimensions and the raw source/target buffers.
50 auto cw =
static_cast<unsigned int>(color_image.get_width_pixels());
51 auto ch =
static_cast<unsigned int>(color_image.get_height_pixels());
56 auto color_buffer = color_image.get_buffer();
57 auto rgb_buffer_ivt = result.pixels;
// Row-major per-pixel copy with B<->R channel swap.
65 for (
unsigned int y = 0; y < ch; ++y)
67 for (
unsigned int x = 0; x < cw; ++x)
69 rgb_buffer_ivt[index_ivt] = color_buffer[index_k4a + 2];
70 rgb_buffer_ivt[index_ivt + 1] = color_buffer[index_k4a + 1];
71 rgb_buffer_ivt[index_ivt + 2] = color_buffer[index_k4a + 0];
79 #ifdef INCLUDE_BODY_TRACKING
// Logs position [mm], orientation (quaternion) and confidence level for every
// joint of one tracked body. NOTE(review): partial extraction — the logging
// stream this expression feeds (presumably an ARMARX_* log macro) is not
// visible here. The body is taken by value (k4abt_body_t copy).
81 printBodyInformation(k4abt_body_t body)
// Iterate all joints defined by the Body Tracking SDK.
84 for (
int i = 0; i < static_cast<int>(K4ABT_JOINT_COUNT); i++)
86 const k4a_float3_t position = body.skeleton.joints[i].position;
87 const k4a_quaternion_t orientation = body.skeleton.joints[i].orientation;
88 const k4abt_joint_confidence_level_t confidence_level =
89 body.skeleton.joints[i].confidence_level;
92 <<
"Position[mm] (" << position.v[0] <<
"," << position.v[1] <<
","
93 << position.v[2] <<
"); "
94 <<
"Orientation (" << orientation.v[0] <<
"," << orientation.v[1] <<
","
95 << orientation.v[2] <<
"," << orientation.v[3] <<
"); "
96 <<
"Confidence Level " << confidence_level;
// Builds a debug-observer datafield name ("duration <description> in [ms]")
// and updates meta info / debug observer state under the respective mutexes.
// NOTE(review): partial extraction — the enclosing function signature and the
// statements guarded by each lock are not visible; purpose inferred from the
// datafield name only. TODO confirm against the full source.
110 std::string name =
"duration " + description +
" in [ms]";
112 std::lock_guard g{metaInfoMtx};
117 std::lock_guard g{debugObserverMtx};
// Property definitions for the component (fragment, likely the property
// definition container ctor or createPropertyDefinitions()).
// Declares camera configuration, undistortion, calibration, body tracking and
// heartbeat properties. NOTE(review): partial extraction — some properties'
// default values and trailing arguments are not visible.

// RGB resolution: string value -> k4a_color_resolution_t enum mapping.
127 defineOptionalProperty<k4a_color_resolution_t>(
128 "ColorResolution", K4A_COLOR_RESOLUTION_720P,
"Resolution of the RGB camera image.")
129 .map(
"0", K4A_COLOR_RESOLUTION_OFF)
130 .map(
"720", K4A_COLOR_RESOLUTION_720P)
131 .map(
"1080", K4A_COLOR_RESOLUTION_1080P)
132 .map(
"1440", K4A_COLOR_RESOLUTION_1440P)
133 .map(
"1536", K4A_COLOR_RESOLUTION_1536P)
134 .map(
"2160", K4A_COLOR_RESOLUTION_2160P)
135 .map(
"3072", K4A_COLOR_RESOLUTION_3072P);
// Depth mode: case-insensitive string -> k4a_depth_mode_t enum mapping.
136 defineOptionalProperty<k4a_depth_mode_t>(
137 "DepthMode", K4A_DEPTH_MODE_NFOV_UNBINNED,
"Resolution/mode of the depth camera image.")
138 .setCaseInsensitive(
true)
139 .map(
"OFF", K4A_DEPTH_MODE_OFF)
140 .map(
"NFOV_2X2BINNED", K4A_DEPTH_MODE_NFOV_2X2BINNED)
141 .map(
"NFOV_UNBINNED", K4A_DEPTH_MODE_NFOV_UNBINNED)
142 .map(
"WFOV_2X2BINNED", K4A_DEPTH_MODE_WFOV_2X2BINNED)
143 .map(
"WFOV_UNBINNED", K4A_DEPTH_MODE_WFOV_UNBINNED)
144 .map(
"PASSIVE_IR", K4A_DEPTH_MODE_PASSIVE_IR);
// Depth clipping threshold used when building the point cloud (see capture loop).
146 defineOptionalProperty<float>(
"MaxDepth",
148 "Max. allowed depth value in mm. Depth values above this "
149 "threshold will be set to nan.");
// Latency compensation subtracted from image timestamps (see doCapture).
151 defineOptionalProperty<float>(
"CaptureTimeOffset",
153 "In Milliseconds. Time offset between capturing the image on "
154 "the hardware and receiving the image in this process.",
165 enableColorUndistortion,
166 "EnableColorUndistortion",
167 "Undistort the color images using the full 8 radial and tangential distortion "
168 "parameters provided by the Azure Kinect.\n"
169 "This can help for processing tasks which cannot handle radial parameters k3-k6.\n"
170 "Note that this drastically reduces the FPS (to something like 3).");
171 defs->optional(externalCalibrationFilePath,
172 "ExternalCalibrationFilePath",
173 "Path to an optional external"
174 " calibration file, which has a"
175 " camera matrix and distortion"
177 defs->optional(mDeviceId,
"device_id",
"ID of the device.");
179 defs->optional(robotName,
"robotName");
180 defs->optional(bodyCameraFrameName,
"bodyCameraFrameName");
// Body tracking properties are only available when built with the BT SDK.
182 #ifdef INCLUDE_BODY_TRACKING
183 defs->optional(bodyTrackingEnabled,
184 "bodyTrackingEnabled",
185 "Whether the Azure Kinect Body Tracking SDK should be enabled or not.");
186 defs->optional(bodyTrackingRunAtStart,
187 "bodyTrackingRunAtStart",
188 "Whether the Azure Kinect Body Tracking SDK should directly run when the "
189 "component is startet."
190 "Otherwise it has to be activated by the ice interface.");
191 defs->optional(bodyTrackingModelFilename,
192 "bodyTrackingModelPath",
193 "Path where the .onnx DNN files can be found");
194 defs->optional(bodyTrackingGPUDeviceID,
"bodyTrackingGPUDeviceID",
"GPU Device ID.");
195 defs->optional(bodyTrackingTemporalSmoothingFactor,
196 "bodyTrackingTemporalSmoothingFactor",
197 "Temporal smoothing factor for Azure Kinect body tracking.");
// Optional pixel/depth masks applied to the depth image before enqueueing
// captures into the body tracker (see doCapture).
199 defs->optional(bodyTrackingDepthMaskMinX,
"bodyTrackingDepthMaskMinX");
200 defs->optional(bodyTrackingDepthMaskMaxX,
"bodyTrackingDepthMaskMaxX");
201 defs->optional(bodyTrackingDepthMaskMaxZ,
"bodyTrackingDepthMaskMaxZ");
203 humanPoseWriter.registerPropertyDefinitions(defs);
206 defs->optional(enableHeartbeat,
"enableHeartbeat");
// Initialization fragment (likely onInitCapturer): builds the k4a device
// configuration from properties and allocates the working images/buffers.
// NOTE(review): partial extraction — depth_dim/color_dim computation, the
// targets of the k4a::image::create calls and several arguments are hidden.

// Start from the all-disabled template and enable only what we configure.
214 config = K4A_DEVICE_CONFIG_INIT_DISABLE_ALL;
215 auto fps = getProperty<float>(
"framerate").getValue();
// The Azure Kinect only supports exactly 5, 15 or 30 FPS.
218 config.camera_fps = K4A_FRAMES_PER_SECOND_5;
220 else if (fps == 15.0f)
222 config.camera_fps = K4A_FRAMES_PER_SECOND_15;
224 else if (fps == 30.0f)
226 config.camera_fps = K4A_FRAMES_PER_SECOND_30;
230 throw armarx::LocalException(
"Invalid framerate: ")
231 << fps <<
" - Only framerates 5, 15 and 30 are "
232 <<
"supported by Azure Kinect.";
235 config.depth_mode = getProperty<k4a_depth_mode_t>(
"DepthMode");
236 config.color_format = K4A_IMAGE_FORMAT_COLOR_BGRA32;
237 config.color_resolution = getProperty<k4a_color_resolution_t>(
"ColorResolution");
// Require depth and color in the same capture so they can be aligned.
241 config.synchronized_images_only =
true;
249 ARMARX_INFO <<
"Depth image size: " << depth_dim.first <<
"x" << depth_dim.second;
250 ARMARX_INFO <<
"Color image size: " << color_dim.first <<
"x" << color_dim.second;
// Depth image aligned to the color camera: 16-bit depth at color resolution
// (stride = width * 2 bytes).
253 k4a::image::create(K4A_IMAGE_FORMAT_DEPTH16,
256 color_dim.first * 2 *
static_cast<int32_t
>(
sizeof(uint8_t)));
258 setImageFormat(visionx::ImageDimension(color_dim.first, color_dim.second),
260 visionx::eBayerPatternGr);
264 std::make_unique<CByteImage>(color_dim.first, color_dim.second, CByteImage::eRGB24);
// XYZ image: 3 x int16 per pixel (k4a point cloud layout), color resolution.
266 xyzImage = k4a::image::create(K4A_IMAGE_FORMAT_CUSTOM,
269 color_dim.first * 3 *
static_cast<int32_t
>(
sizeof(int16_t)));
271 pointcloud = std::make_unique<pcl::PointCloud<CloudPointType>>();
// Connect-phase fragment: starts the point-cloud worker task, connects the
// human-pose memory writer and (presumably) creates the body-tracking publish
// task and a heartbeat with channels {"Vision", "Camera"}.
// NOTE(review): partial extraction — the task construction wrapping line 292
// and the heartbeat call wrapping lines 301-302 are not fully visible.
284 pointcloudTask->start();
286 #ifdef INCLUDE_BODY_TRACKING
287 if (bodyTrackingEnabled)
289 humanPoseWriter.connect(memoryNameSystem());
292 this, &AzureKinectPointCloudProvider::runPublishBodyTrackingResults);
301 {
"Vision",
"Camera"},
302 "AzureKinectPointCloudProvider");
// Shutdown fragment: stops the point-cloud worker without blocking while
// holding pointcloudProcMutex, wakes it via the condition variable so it can
// observe the stop flag, then waits for it to terminate. Also stops the
// body-tracking publish task if it is running.
315 std::lock_guard<std::mutex> lock(pointcloudProcMutex);
316 ARMARX_DEBUG <<
"Stopping pointcloud processing thread...";
// Non-blocking stop: the worker may be waiting on pointcloudProcSignal,
// so notify_all below is required to let it see the stop request.
317 const bool WAIT_FOR_JOIN =
false;
318 pointcloudTask->stop(WAIT_FOR_JOIN);
319 pointcloudProcSignal.notify_all();
320 ARMARX_DEBUG <<
"Waiting for pointcloud processing thread to stop...";
321 pointcloudTask->waitForStop();
325 #ifdef INCLUDE_BODY_TRACKING
326 if (bodyTrackingIsRunning)
328 bodyTrackingPublishTask->stop();
// Start-capture fragment: opens the configured device, fetches its factory
// calibration for the chosen depth mode / color resolution, logs the color
// intrinsics, and creates the depth<->color transformation handle.
358 const uint32_t DEVICE_COUNT = k4a::device::get_installed_count();
359 if (DEVICE_COUNT == 0)
362 throw armarx::LocalException(
"No Azure Kinect devices detected!");
367 device = k4a::device::open(
static_cast<uint32_t
>(mDeviceId));
368 ARMARX_DEBUG <<
"Opened device id #" << mDeviceId <<
" with serial number "
369 << device.get_serialnum() <<
".";
// Calibration depends on the active depth mode and color resolution.
378 k4aCalibration = device.get_calibration(config.depth_mode, config.color_resolution);
380 <<
"Color camera calibration:"
382 <<
"cx: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.cx
384 <<
"cy: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.cy
386 <<
"fx: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.fx
388 <<
"fy: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.fy
390 <<
"k1: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k1
392 <<
"k2: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k2
394 <<
"p1: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.p1
396 <<
"p2: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.p2
398 <<
"k3: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k3
400 <<
"k4: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k4
402 <<
"k5: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k5
404 <<
"k6: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k6;
// c_left/c_right are presumably the IVT stereo calibration halves built
// elsewhere in this function — construction not visible here.
409 c_left->PrintCameraParameters();
412 c_right->PrintCameraParameters();
// Transformation handle used for depth->color alignment and point clouds.
415 transformation = k4a::transformation(k4aCalibration);
// Body-tracker initialization fragment: resolves the DNN model file, creates
// the CUDA-backed tracker with the configured GPU device and smoothing
// factor, and optionally starts the publish task immediately.
417 #ifdef INCLUDE_BODY_TRACKING
418 if (bodyTrackingEnabled)
// Resolve the .onnx model path (resolution call itself is not visible).
424 bodyTrackingModelFilename);
425 ARMARX_CHECK(found) <<
"Body tracking DNN model could not be found/resolved at `"
426 << bodyTrackingModelFilename <<
"`.";
428 ARMARX_INFO <<
"Using body tracking DNN model from directory `"
429 << bodyTrackingModelFilename <<
"`.";
431 ARMARX_CHECK(std::filesystem::exists(bodyTrackingModelFilename))
432 <<
"The path `" << bodyTrackingModelFilename <<
"` does not exist!";
// Designated initializers: default sensor orientation, CUDA processing mode.
434 k4abt_tracker_configuration_t
const bodyTrackingConfig{
435 .sensor_orientation = K4ABT_SENSOR_ORIENTATION_DEFAULT,
436 .processing_mode = K4ABT_TRACKER_PROCESSING_MODE_GPU_CUDA,
437 .gpu_device_id = bodyTrackingGPUDeviceID,
438 .model_path = bodyTrackingModelFilename.c_str()};
440 bodyTracker = k4abt::tracker::create(k4aCalibration, bodyTrackingConfig);
441 bodyTracker.set_temporal_smoothing(bodyTrackingTemporalSmoothingFactor);
// Auto-start publishing if configured and not already running.
443 if (bodyTrackingRunAtStart and not bodyTrackingIsRunning)
445 bodyTrackingIsRunning =
true;
446 bodyTrackingPublishTask->start();
// Camera start + undistortion setup fragment: fixes brightness to a manual
// value, starts the cameras, then precomputes the color undistortion remap
// (full 8-parameter rational model k1..k6, p1, p2) into colorDistortionMap.
// NOTE(review): partial extraction — distortion_coeff's trailing k5/k6 line
// and most initUndistortRectifyMap arguments are not visible.
452 device.set_color_control(K4A_COLOR_CONTROL_BRIGHTNESS, K4A_COLOR_CONTROL_MODE_MANUAL, 128);
454 device.start_cameras(&config);
473 const k4a_calibration_camera_t calibration{k4aCalibration.color_camera_calibration};
474 const k4a_calibration_intrinsic_parameters_t::_param param =
475 calibration.intrinsics.parameters.param;
// 3x3 intrinsic matrix from the k4a color calibration. Note the "new" camera
// matrix uses fx for both focal lengths (square pixels in the output).
482 cv::Mat1f camera_matrix(3, 3);
483 camera_matrix << param.fx, 0, param.cx, 0, param.fy, param.cy, 0, 0, 1;
484 cv::Mat1f new_camera_matrix(3, 3);
485 new_camera_matrix << param.fx, 0, param.cx, 0, param.fx, param.cy, 0, 0, 1;
486 cv::Mat1f distortion_coeff(1, 8);
487 distortion_coeff << param.k1, param.k2, param.p1, param.p2, param.k3, param.k4,
489 cv::Mat map1, map2, map3;
490 cv::initUndistortRectifyMap(
495 cv::Size{calibration.resolution_width, calibration.resolution_height},
// CV_16SC2 fixed-point maps: faster cv::remap in the capture loop.
499 cv::convertMaps(map1, map2, colorDistortionMap, map3, CV_16SC2,
true);
// Capture-loop fragment: gets a capture with a 1 s timeout, restarts the
// cameras on failure, lazily initializes the device->realtime clock offset,
// then aligns the depth image to the color camera under pointcloudProcMutex
// and signals the point-cloud worker.
518 const std::chrono::milliseconds TIMEOUT{1000};
520 StopWatch sw_get_capture;
// Recovery path: count the failure and restart the cameras in place.
526 catch (
const std::exception&)
528 ARMARX_WARNING <<
"Failed to get capture from device (#" << ++mDiagnostics.num_crashes
529 <<
"). Restarting camera.";
531 device.stop_cameras();
532 device.start_cameras(&config);
533 ARMARX_INFO <<
"Restarting took " << sw.stop() <<
".";
542 std::lock_guard g{metaInfoMtx};
// Offset of 0 is used as the "not yet initialized" sentinel (see
// updateTimestampOffset / initializeTimestampOffset).
547 const bool mustInitializeTimestampOffset = [&]()
549 std::lock_guard g{deviceToRealtimeOffsetMtx};
550 return device_to_realtime_offset_.count() == 0;
552 if (mustInitializeTimestampOffset)
559 capture.get_ir_image().get_system_timestamp());
561 const k4a::image DEPTH_IMAGE =
capture.get_depth_image();
// Sanity check: downstream colorization only supports 16-bit depth/IR.
565 const k4a_image_format_t IMAGE_FORMAT = DEPTH_IMAGE.get_format();
566 if (IMAGE_FORMAT != K4A_IMAGE_FORMAT_DEPTH16 && IMAGE_FORMAT != K4A_IMAGE_FORMAT_IR16)
568 throw std::logic_error(
"Attempted to colorize a non-depth image!");
// alignedDepthImage is shared with the point-cloud thread -> lock required.
577 std::lock_guard lock{pointcloudProcMutex};
578 transformation.depth_image_to_color_camera(DEPTH_IMAGE, &alignedDepthImage);
581 depthImageReady =
true;
// Wake the point-cloud worker waiting in runPointcloudPublishing.
585 pointcloudProcSignal.notify_one();
// Body-tracking pre-processing fragment: optionally masks out depth pixels
// outside the configured x-range / beyond max z before handing the capture
// to the body tracker (masking action for matched pixels is not visible).
// Depth values are little-endian uint16 read byte-wise from the raw buffer.
588 #ifdef INCLUDE_BODY_TRACKING
589 if (bodyTrackingIsRunning)
// Mask parameters can be changed via Ice at runtime -> lock while reading.
591 std::scoped_lock lock(bodyTrackingParameterMutex);
593 k4a::image ir_image =
capture.get_ir_image();
594 std::uint8_t* ir_image_buffer = ir_image.get_buffer();
596 k4a::image depth_image =
capture.get_depth_image();
597 std::uint8_t* depth_image_buffer = depth_image.get_buffer();
// Bytes per pixel, derived from the IR image stride.
602 const int stride = ir_image.get_stride_bytes() / ir_image.get_width_pixels();
604 for (
int x = 0; x < ir_image.get_width_pixels(); x++)
606 for (
int y = 0; y < ir_image.get_height_pixels(); y++)
608 const int i = (y * ir_image.get_width_pixels() * stride) + (x * stride);
// Reassemble the 16-bit depth value from two bytes (low byte first).
609 const int z = (
static_cast<int>(depth_image_buffer[i])) +
610 (
static_cast<int>(depth_image_buffer[i + 1]) << 8);
// A mask bound of 0 (the default) disables that particular check.
612 if ((bodyTrackingDepthMaskMinX > 0 and x < bodyTrackingDepthMaskMinX) or
613 (bodyTrackingDepthMaskMaxX > 0 and x > bodyTrackingDepthMaskMaxX) or
614 (bodyTrackingDepthMaskMaxZ > 0 and z > bodyTrackingDepthMaskMaxZ))
// Non-blocking enqueue; failure handling is not visible in this chunk.
624 if (not bodyTracker.enqueue_capture(
capture))
// Color-image fragment: computes the image timestamp in realtime terms
// (monotonic system timestamp + realtime/monotonic clock difference, minus
// the configured CaptureTimeOffset), then either undistorts the color image
// via the precomputed remap or converts it directly with k4aToIvtImage.
631 const k4a::image COLOR_IMAGE =
capture.get_color_image();
// Difference between wall clock and monotonic clock, used to convert the
// monotonic image timestamp into realtime.
633 auto real_time = IceUtil::Time::now();
634 auto monotonic_time = IceUtil::Time::now(IceUtil::Time::Monotonic);
635 auto clock_diff = real_time - monotonic_time;
637 auto image_monotonic_time =
638 IceUtil::Time::microSeconds(std::chrono::duration_cast<std::chrono::microseconds>(
639 DEPTH_IMAGE.get_system_timestamp())
// CaptureTimeOffset is in ms; convert to µs for the subtraction below.
641 long offset = long(getProperty<float>(
"CaptureTimeOffset").getValue() * 1000.0f);
643 imagesTime = image_monotonic_time + clock_diff - IceUtil::Time::microSeconds(offset);
646 std::lock_guard g{metaInfoMtx};
652 std::lock_guard g{debugObserverMtx};
654 (real_time - imagesTime).toMilliSecondsDouble());
// Slow path (~3 FPS per the property docs): BGRA -> BGR conversion, then
// remap with the fixed-point undistortion maps from onStartCapture.
657 if (enableColorUndistortion)
662 cv::cvtColor(cv::Mat{cv::Size{COLOR_IMAGE.get_width_pixels(),
663 COLOR_IMAGE.get_height_pixels()},
665 (
void*)COLOR_IMAGE.get_buffer(),
670 cv::Mat cv_color_image_undistorted(
671 COLOR_IMAGE.get_width_pixels(), COLOR_IMAGE.get_height_pixels(), CV_8UC3);
673 cv::remap(temp_image,
674 cv_color_image_undistorted,
678 cv::BORDER_CONSTANT);
// Fast path: direct BGRA -> RGB24 copy into the IVT result image.
686 ::k4aToIvtImage(COLOR_IMAGE, *resultColorImage);
// Depth-visualization fragment: encodes each aligned 16-bit depth value into
// the three channels of an RGB CByteImage (encoding call around lines
// 712-715 is only partially visible), then provides color+depth images and
// waits until the point-cloud thread has consumed the aligned depth image.
694 const int DW = alignedDepthImage.get_width_pixels();
695 const int DH = alignedDepthImage.get_height_pixels();
701 auto result_depth_buffer = resultDepthImage->pixels;
// Aligned depth buffer is 16-bit pixels; reinterpret the raw byte buffer.
702 const auto* depth_buffer =
703 reinterpret_cast<const uint16_t*
>(alignedDepthImage.get_buffer());
707 for (
int y = 0; y < DH; ++y)
709 for (
int x = 0; x < DW; ++x)
711 uint16_t depth_value = depth_buffer[index_2];
713 result_depth_buffer[
index],
714 result_depth_buffer[
index + 1],
715 result_depth_buffer[
index + 2]);
// Provide both images to the framework (provide call itself not visible).
725 CByteImage* images[2] = {resultColorImage.get(), resultDepthImage.get()};
// Synchronize with the point-cloud worker: block until it signals that the
// shared alignedDepthImage has been processed.
731 std::unique_lock signal_lock{pointcloudProcMutex};
732 ARMARX_DEBUG <<
"Capturing thread waiting for signal...";
733 pointcloudProcSignal.wait(signal_lock, [&] {
return depthImageProcessed; });
738 std::lock_guard g{debugObserverMtx};
754 std::lock_guard g{debugObserverMtx};
// Worker loop: pops body-tracking results from the tracker, converts each
// detected body's joints into armem HumanPose objects and commits them to
// the working memory in the camera frame, paced by a metronome.
// NOTE(review): partial extraction — error handling for pop_result, the
// HumanPose field assignments around lines 873-890, and the timestamp
// construction are only partially visible.
762 #ifdef INCLUDE_BODY_TRACKING
764 AzureKinectPointCloudProvider::runPublishBodyTrackingResults()
767 while (bodyTrackingIsRunning)
771 const k4abt::frame body_frame = [&]() -> k4abt::frame
775 auto result = bodyTracker.pop_result();
786 if (body_frame !=
nullptr)
// First timestamp path: IceUtil-based realtime/monotonic difference.
795 auto real_time = IceUtil::Time::now();
796 auto monotonic_time = IceUtil::Time::now(IceUtil::Time::Monotonic);
797 auto clock_diff = real_time - monotonic_time;
799 auto image_monotonic_time = IceUtil::Time::microSeconds(
800 std::chrono::duration_cast<std::chrono::microseconds>(
801 body_frame.get_system_timestamp())
809 std::lock_guard g{debugObserverMtx};
811 imageTime.toMicroSeconds());
// Second timestamp path: same computation via realtimeClock/monotonicClock
// (appears to duplicate the block above — TODO confirm in full source).
820 auto real_time = realtimeClock.
now();
821 auto monotonic_time = monotonicClock.
now();
822 auto clock_diff = real_time - monotonic_time;
825 std::chrono::duration_cast<std::chrono::microseconds>(
826 body_frame.get_system_timestamp())
831 auto imageTime = image_monotonic_time + clock_diff;
836 std::lock_guard g{debugObserverMtx};
838 imageTime.toMicroSeconds());
843 std::uint32_t num_bodies = body_frame.get_num_bodies();
845 std::lock_guard g{debugObserverMtx};
849 std::vector<armarx::armem::human::HumanPose> humanPoses;
850 humanPoses.reserve(num_bodies);
852 for (std::uint32_t i = 0; i < num_bodies; i++)
854 k4abt_body_t body = body_frame.get_body(i);
855 printBodyInformation(body);
// Convert every joint: position [mm], orientation (quaternion), and the
// confidence level cast to float for the HumanPose keypoint.
862 for (
int i = 0; i < static_cast<int>(K4ABT_JOINT_COUNT); i++)
869 k4a_float3_t position = body.skeleton.joints[i].position;
870 k4a_quaternion_t orientation = body.skeleton.joints[i].orientation;
871 k4abt_joint_confidence_level_t confidence_level =
872 body.skeleton.joints[i].confidence_level;
876 .confidence =
static_cast<float>(
static_cast<int>(confidence_level)),
878 Eigen::Vector3f{position.v[0], position.v[1], position.v[2]},
891 humanPoses.push_back(humanPose);
895 std::lock_guard g{debugObserverMtx};
900 humanPoseWriter.commitHumanPosesInCameraFrame(humanPoses,
getName(), timestamp);
// Pace the loop to the configured publish frequency.
919 metronome.waitForNextTick();
// Point-cloud worker loop: waits until the capture thread signals a new
// aligned depth image (or a stop request), converts it to an XYZ image via
// the k4a transformation, fuses it with the latest color image into a PCL
// cloud (coloring each point from resultColorImage), marks the depth image
// as processed and publishes the cloud.
// NOTE(review): partial extraction — the index bookkeeping inside the
// per-point loop and the valid-depth assignment branch are not visible.
932 while (not pointcloudTask->isStopped())
935 std::unique_lock signal_lock{pointcloudProcMutex};
936 ARMARX_DEBUG <<
"Pointcloud thread waiting for signal...";
// Woken either by a fresh depth image or by the stop request from
// the shutdown path (which calls notify_all).
937 pointcloudProcSignal.wait(
938 signal_lock, [&] {
return pointcloudTask->isStopped() or depthImageReady; });
943 depthImageReady =
false;
944 depthImageProcessed =
false;
// Depth (already aligned to the color camera) -> XYZ point image.
949 transformation.depth_image_to_point_cloud(
950 alignedDepthImage, K4A_CALIBRATION_TYPE_COLOR, &xyzImage);
951 ARMARX_DEBUG <<
"Transforming depth image to point cloud took "
961 pointcloud->width =
static_cast<uint32_t
>(xyzImage.get_width_pixels());
962 pointcloud->height =
static_cast<uint32_t
>(xyzImage.get_height_pixels());
// Cloud may contain NaN z-values (see max-depth filtering below).
966 pointcloud->is_dense =
false;
967 pointcloud->points.resize(pointcloud->width * pointcloud->height);
// k4a XYZ image stores 3 x int16 (mm) per pixel.
969 auto k4a_cloud_buffer =
reinterpret_cast<const int16_t*
>(xyzImage.get_buffer());
973 unsigned char* color_buffer = resultColorImage->pixels;
// Defensive busy-wait documented as a workaround for a suspected race.
974 while (color_buffer ==
nullptr)
976 ARMARX_WARNING <<
"color_buffer is null. This should never happen. There is "
977 "probably a race condition somewhere that needs to be fixed. "
978 "Temporarily we ignore this and continue.\n Timestamp: "
980 color_buffer = resultColorImage->pixels;
987 pointcloud->width * pointcloud->height);
992 float max_depth = getProperty<float>(
"MaxDepth").getValue();
// Fuse color (RGB24 buffer) and geometry (int16 XYZ buffer) per point.
993 for (
auto& p : pointcloud->points)
995 p.r = color_buffer[
index];
996 p.x = k4a_cloud_buffer[
index];
1000 p.g = color_buffer[
index];
1001 p.y = k4a_cloud_buffer[
index];
1005 p.b = color_buffer[
index];
1006 auto z = k4a_cloud_buffer[
index];
// z == 0 marks invalid depth from the sensor; points beyond MaxDepth
// are invalidated as NaN (hence is_dense = false above).
1010 if (z <= max_depth and z != 0)
1016 p.z = std::numeric_limits<float>::quiet_NaN();
// Hand the depth image back to the capture thread before publishing.
1025 depthImageProcessed =
true;
1026 signal_lock.unlock();
1028 pointcloudProcSignal.notify_all();
1035 pointcloud->header.stamp =
static_cast<unsigned long>(
TIMESTAMP.toMicroSeconds());
// Default Ice component name (enclosing getDefaultName() signature not
// visible in this chunk).
1048 return "AzureKinectPointCloudProvider";
// Builds an IVT-style StereoCalibration from the k4a color calibration.
// Both "left" and "right" are derived from the same color camera; the right
// calibration gets its distortion zeroed, and the left is zeroed too when
// color undistortion is enabled (images are already rectified then).
// NOTE(review): partial extraction — the distortion list contents (line 1111
// onward) and the rectification homography values are not visible.
1084 using namespace Eigen;
// Converts one k4a camera calibration into a MonocularCalibration,
// optionally scaling intrinsics/resolution by 'scale'.
1089 const auto convert_calibration =
1090 [](
const k4a_calibration_camera_t& k4a_calib,
float scale = 1.f)
1092 MonocularCalibration monocular_calibration;
1094 const k4a_calibration_intrinsic_parameters_t& params = k4a_calib.intrinsics.parameters;
1095 monocular_calibration.cameraParam.principalPoint = {params.param.cx * scale,
1096 params.param.cy * scale};
1097 monocular_calibration.cameraParam.focalLength = {params.param.fx * scale,
1098 params.param.fy * scale};
1111 monocular_calibration.cameraParam.distortion = {
1119 monocular_calibration.cameraParam.width =
1120 std::floor(
float(k4a_calib.resolution_width) * scale);
1121 monocular_calibration.cameraParam.height =
1122 std::floor(
float(k4a_calib.resolution_height) * scale);
// Extrinsics: 3x3 row-major rotation and translation (3 floats) from k4a.
1124 const Matrix3FRowMajor rotation =
1125 Map<const Matrix3FRowMajor>{k4a_calib.extrinsics.rotation};
1128 monocular_calibration.cameraParam.translation = {k4a_calib.extrinsics.translation,
1129 k4a_calib.extrinsics.translation + 3};
1131 return monocular_calibration;
1134 StereoCalibration stereo_calibration;
1136 stereo_calibration.calibrationLeft =
1137 convert_calibration(k4aCalibration.color_camera_calibration);
// Undistorted output has no residual distortion to report.
1140 if (enableColorUndistortion)
1142 auto& colorDistortionParams = stereo_calibration.calibrationLeft.cameraParam.distortion;
1143 std::fill(colorDistortionParams.begin(), colorDistortionParams.end(), 0);
// "Right" camera: same color calibration with distortion cleared.
1150 stereo_calibration.calibrationRight =
1151 convert_calibration(k4aCalibration.color_camera_calibration);
1153 auto& depthDistortionParams =
1154 stereo_calibration.calibrationRight.cameraParam.distortion;
1155 std::fill(depthDistortionParams.begin(), depthDistortionParams.end(), 0);
1158 stereo_calibration.rectificationHomographyLeft =
1160 stereo_calibration.rectificationHomographyRight =
1163 return stereo_calibration;
// Getter fragments: whether provided images are undistorted, and the
// reference frame name read from the "frameName" property (enclosing
// signatures not visible in this chunk).
1169 return enableColorUndistortion;
1175 return getProperty<std::string>(
"frameName");
// Image-recording preferences: RGB may be compressed lossily, while depth
// must stay lossless because its pixel values encode metric depth.
// NOTE(review): the rgb channel's name assignment (around line 1185/1186) is
// not visible in this chunk.
1178 std::vector<imrec::ChannelPreferences>
1183 imrec::ChannelPreferences rgb;
1184 rgb.requiresLossless =
false;
1187 imrec::ChannelPreferences depth;
1188 depth.requiresLossless =
true;
1189 depth.name =
"depth";
1191 return {rgb, depth};
// Ice interface: toggles the body-tracking publish task according to the
// request's enable3d flag. Logs errors when body tracking was compiled out
// or disabled via properties. (Enclosing function signature begins before
// this fragment.)
const armarx::EnableHumanPoseEstimationInput&
input,
1197 const Ice::Current&)
// Compiled without the Body Tracking SDK: the request cannot be served.
1199 #ifndef INCLUDE_BODY_TRACKING
1201 ARMARX_ERROR <<
"INCLUDE_BODY_TRACKING is not defined.";
1206 #ifdef INCLUDE_BODY_TRACKING
1207 if (bodyTrackingEnabled)
// Start/stop the publish task only on actual state transitions.
1210 if (not bodyTrackingIsRunning and
input.enable3d)
1212 bodyTrackingIsRunning =
true;
1213 bodyTrackingPublishTask->start();
1215 else if (bodyTrackingIsRunning and not
input.enable3d)
1217 bodyTrackingIsRunning =
false;
1218 bodyTrackingPublishTask->stop();
1223 ARMARX_ERROR <<
"Azure Kinect Body Tracking is not enabled";
// Ice setters for the body-tracking depth mask parameters; both take
// bodyTrackingParameterMutex because the capture thread reads these values
// while masking depth pixels. (Enclosing signatures only partially visible.)
1231 std::scoped_lock lock(bodyTrackingParameterMutex);
1232 bodyTrackingDepthMaskMaxZ = maxDepthInMM;
1238 const Ice::Current&)
1240 std::scoped_lock lock(bodyTrackingParameterMutex);
1241 bodyTrackingDepthMaskMinX = minXinPixel;
1242 bodyTrackingDepthMaskMaxX = maxXinPixel;
// Maintains device_to_realtime_offset_: the offset that maps k4a device
// timestamps onto the realtime (wall) clock. Computes the current offset
// from the device/system timestamps plus the monotonic->realtime clock
// difference, then either snaps to it (first call, or drift > 10 ms) or
// smooths towards it with an exponential moving average (alpha = 0.10).
// (Enclosing function signature begins before this fragment.)
1250 const std::chrono::microseconds& k4a_device_timestamp_us,
1251 const std::chrono::nanoseconds& k4a_system_timestamp_ns)
// Sample both clocks back-to-back to estimate monotonic -> realtime.
1259 std::chrono::nanoseconds realtime_clock =
1260 std::chrono::system_clock::now().time_since_epoch();
1261 std::chrono::nanoseconds monotonic_clock =
1262 std::chrono::steady_clock::now().time_since_epoch();
1264 std::chrono::nanoseconds monotonic_to_realtime = realtime_clock - monotonic_clock;
// device->realtime = (system ts - device ts) + (monotonic -> realtime).
1267 std::chrono::nanoseconds device_to_realtime =
1268 k4a_system_timestamp_ns - k4a_device_timestamp_us + monotonic_to_realtime;
1271 std::lock_guard g{deviceToRealtimeOffsetMtx};
1274 std::lock_guard g{debugObserverMtx};
1276 device_to_realtime_offset_.count() /
1280 std::abs<float>((device_to_realtime_offset_ - device_to_realtime).count()) /
1281 1
'000.f); // [ns] -> [µs]
1284 const std::int64_t timeOffsetThreshold = 1e7; // 10 ms
1286 // If we're over
a certain time off, just snap into place.
1287 if (device_to_realtime_offset_.count() == 0 ||
1288 std::abs((device_to_realtime_offset_ - device_to_realtime).count()) >
1289 timeOffsetThreshold)
1291 ARMARX_VERBOSE << deactivateSpam()
1292 <<
"Initializing or re-initializing the device to realtime offset: "
1293 << device_to_realtime.count() <<
" ns";
1294 device_to_realtime_offset_ = device_to_realtime;
// Otherwise drift slowly: apply 10% of the observed error per update.
1299 constexpr double alpha = 0.10;
1301 const std::chrono::nanoseconds timeCorrection(static_cast<int64_t>(
1302 std::floor(alpha * (device_to_realtime - device_to_realtime_offset_).count())));
1303 device_to_realtime_offset_ = device_to_realtime_offset_ + timeCorrection;
1306 std::lock_guard g{debugObserverMtx};
1307 setDebugObserverDatafield(
"timeCorrection [µs]",
1308 timeCorrection.count() / 1
'000); // [ns] -> [µs]
// Converts a k4a device timestamp into an ArmarX Duration on the realtime
// clock by adding the maintained device->realtime offset. Requires that the
// offset has been initialized (count != 0) by updateTimestampOffset or
// initializeTimestampOffset beforehand.
1315 AzureKinectPointCloudProvider::timestampToArmarX(
1316 const std::chrono::microseconds& k4a_timestamp_us)
1318 std::lock_guard g{deviceToRealtimeOffsetMtx};
1320 // must be initialized beforehand
1321 ARMARX_CHECK(device_to_realtime_offset_.count() != 0);
1323 std::chrono::nanoseconds timestamp_in_realtime =
1324 k4a_timestamp_us + device_to_realtime_offset_;
// Result is in nanoseconds; divisor converting to µs is not visible here.
1326 return armarx::Duration::MicroSeconds(timestamp_in_realtime.count() /
// Fallback initialization of the device->realtime offset directly from the
// wall clock (less accurate than updateTimestampOffset, hence the warning).
// (Enclosing function signature begins before this fragment.)
const std::chrono::microseconds& k4a_device_timestamp_us)
1334 std::lock_guard g{deviceToRealtimeOffsetMtx};
// offset = now(wall) - device timestamp; assumes negligible transport delay.
1337 std::chrono::nanoseconds realtime_clock =
1338 std::chrono::system_clock::now().time_since_epoch();
1340 device_to_realtime_offset_ = realtime_clock - k4a_device_timestamp_us;
1342 ARMARX_WARNING <<
"Initializing the device to realtime offset based on wall clock: "
1343 << device_to_realtime_offset_.count() <<
" ns";