9#include <IceUtil/Time.h>
11#include <opencv2/core/hal/interface.h>
12#include <opencv2/imgproc/imgproc.hpp>
32#include <Calibration/Calibration.h>
33#include <Image/ImageProcessor.h>
35#ifdef INCLUDE_BODY_TRACKING
48 k4aToIvtImage(
const k4a::image& color_image, ::CByteImage& result)
50 if (color_image.get_format() == K4A_IMAGE_FORMAT_COLOR_YUY2)
53 cv::Mat yuy2_image(color_image.get_height_pixels(),
54 color_image.get_width_pixels(),
56 const_cast<uint8_t*
>(color_image.get_buffer()));
58 cv::cvtColor(yuy2_image, rgb_image, cv::COLOR_YUV2RGB_YUY2);
63 else if (color_image.get_format() == K4A_IMAGE_FORMAT_COLOR_BGRA32)
65 auto cw =
static_cast<unsigned int>(color_image.get_width_pixels());
66 auto ch =
static_cast<unsigned int>(color_image.get_height_pixels());
70 auto color_buffer = color_image.get_buffer();
71 auto rgb_buffer_ivt = result.pixels;
79 for (
unsigned int y = 0; y < ch; ++y)
81 for (
unsigned int x = 0;
x < cw; ++
x)
83 rgb_buffer_ivt[index_ivt] = color_buffer[index_k4a + 2];
84 rgb_buffer_ivt[index_ivt + 1] = color_buffer[index_k4a + 1];
85 rgb_buffer_ivt[index_ivt + 2] = color_buffer[index_k4a + 0];
93 throw std::runtime_error(
"Unsupported color format in k4a image");
98#ifdef INCLUDE_BODY_TRACKING
100 printBodyInformation(k4abt_body_t body)
103 for (
int i = 0; i < static_cast<int>(K4ABT_JOINT_COUNT); i++)
105 const k4a_float3_t position = body.skeleton.joints[i].position;
106 const k4a_quaternion_t orientation = body.skeleton.joints[i].orientation;
107 const k4abt_joint_confidence_level_t confidence_level =
108 body.skeleton.joints[i].confidence_level;
111 <<
"Position[mm] (" << position.v[0] <<
"," << position.v[1] <<
","
112 << position.v[2] <<
"); "
113 <<
"Orientation (" << orientation.v[0] <<
"," << orientation.v[1] <<
","
114 << orientation.v[2] <<
"," << orientation.v[3] <<
"); "
115 <<
"Confidence Level " << confidence_level;
121 k4aImageFormatToString(k4a_image_format_t format)
125 case K4A_IMAGE_FORMAT_COLOR_MJPG:
127 case K4A_IMAGE_FORMAT_COLOR_NV12:
129 case K4A_IMAGE_FORMAT_COLOR_YUY2:
131 case K4A_IMAGE_FORMAT_COLOR_BGRA32:
132 return "COLOR_BGRA32";
133 case K4A_IMAGE_FORMAT_DEPTH16:
135 case K4A_IMAGE_FORMAT_IR16:
137 case K4A_IMAGE_FORMAT_CUSTOM:
149 std::function<void(armarx::Duration)>
154 std::string name =
"duration " + description +
" in [ms]";
156 std::lock_guard g{metaInfoMtx};
161 std::lock_guard g{debugObserverMtx};
172 K4A_COLOR_RESOLUTION_720P,
173 "Resolution of the RGB camera image.")
174 .map(
"0", K4A_COLOR_RESOLUTION_OFF)
175 .map(
"720", K4A_COLOR_RESOLUTION_720P)
176 .map(
"1080", K4A_COLOR_RESOLUTION_1080P)
177 .map(
"1440", K4A_COLOR_RESOLUTION_1440P)
178 .map(
"1536", K4A_COLOR_RESOLUTION_1536P)
179 .map(
"2160", K4A_COLOR_RESOLUTION_2160P)
180 .map(
"3072", K4A_COLOR_RESOLUTION_3072P);
182 K4A_DEPTH_MODE_NFOV_UNBINNED,
183 "Resolution/mode of the depth camera image.")
184 .setCaseInsensitive(
true)
185 .map(
"OFF", K4A_DEPTH_MODE_OFF)
186 .map(
"NFOV_2X2BINNED", K4A_DEPTH_MODE_NFOV_2X2BINNED)
187 .map(
"NFOV_UNBINNED", K4A_DEPTH_MODE_NFOV_UNBINNED)
188 .map(
"WFOV_2X2BINNED", K4A_DEPTH_MODE_WFOV_2X2BINNED)
189 .map(
"WFOV_UNBINNED", K4A_DEPTH_MODE_WFOV_UNBINNED)
190 .map(
"PASSIVE_IR", K4A_DEPTH_MODE_PASSIVE_IR);
194 "Max. allowed depth value in mm. Depth values above this "
195 "threshold will be set to nan.");
199 "In Milliseconds. Time offset between capturing the image on "
200 "the hardware and receiving the image in this process.",
204 K4A_IMAGE_FORMAT_COLOR_BGRA32,
205 "Color format of the RGB camera image.")
206 .map(
"BGRA", K4A_IMAGE_FORMAT_COLOR_BGRA32)
207 .map(
"YUY2", K4A_IMAGE_FORMAT_COLOR_YUY2);
217 enableColorUndistortion,
218 "EnableColorUndistortion",
219 "Undistort the color images using the full 8 radial and tangential distortion "
220 "parameters provided by the Azure Kinect.\n"
221 "This can help for processing tasks which cannot handle radial parameters k3-k6.\n"
222 "Note that this drastically reduces the FPS (to something like 3).");
223 defs->optional(externalCalibrationFilePath,
224 "ExternalCalibrationFilePath",
225 "Path to an optional external"
226 " calibration file, which has a"
227 " camera matrix and distortion"
229 defs->optional(mDeviceId,
"device_id",
"ID of the device.");
231 defs->optional(robotName,
"robotName");
232 defs->optional(bodyCameraFrameName,
"bodyCameraFrameName");
233 defs->optional(framerate.value,
235 "The framerate of RGB-D images [frames per second]."
236 "\nNote that the point cloud and body tracking frame rates are controlled by"
237 " the properties 'framerate' (point cloud) and 'framerate.bodyTracking'"
238 " (body tracking), respectively.")
242#ifdef INCLUDE_BODY_TRACKING
243 defs->optional(bodyTrackingEnabled,
244 "bodyTrackingEnabled",
245 "Whether the Azure Kinect Body Tracking SDK should be enabled or not.");
246 defs->optional(bodyTrackingRunAtStart,
247 "bodyTrackingRunAtStart",
248 "Whether the Azure Kinect Body Tracking SDK should directly run when the "
249 "component is startet."
250 "Otherwise it has to be activated by the ice interface.");
251 defs->optional(bodyTrackingModelFilename,
252 "bodyTrackingModelPath",
253 "Path where the .onnx DNN files can be found");
254 defs->optional(bodyTrackingGPUDeviceID,
"bodyTrackingGPUDeviceID",
"GPU Device ID.");
255 defs->optional(bodyTrackingTemporalSmoothingFactor,
256 "bodyTrackingTemporalSmoothingFactor",
257 "Temporal smoothing factor for Azure Kinect body tracking.");
258 defs->optional(useCPU,
"useCPU",
"Whether to use cpu");
260 defs->optional(bodyTrackingDepthMaskMinX,
"bodyTrackingDepthMaskMinX");
261 defs->optional(bodyTrackingDepthMaskMaxX,
"bodyTrackingDepthMaskMaxX");
262 defs->optional(bodyTrackingDepthMaskMaxZ,
"bodyTrackingDepthMaskMaxZ");
263 defs->optional(framerate.bodyTracking.value,
264 "framerate.bodyTracking",
265 "The framerate with with the body tracking is run [frames per second]."
266 "\nNote that the RGB-D image and point cloud frame rates are controlled by"
267 " the properties 'framerate.image' (RGB-D images) and 'framerate'"
268 " (point cloud), respectively.")
272 defs->optional(startIMU,
"startIMU");
274 humanPoseWriter.registerPropertyDefinitions(defs);
277 defs->optional(enableHeartbeat,
"enableHeartbeat");
285 config = K4A_DEVICE_CONFIG_INIT_DISABLE_ALL;
286 if (framerate.value == 5)
288 config.camera_fps = K4A_FRAMES_PER_SECOND_5;
290 else if (framerate.value == 15)
292 config.camera_fps = K4A_FRAMES_PER_SECOND_15;
294 else if (framerate.value == 30)
296 config.camera_fps = K4A_FRAMES_PER_SECOND_30;
300 throw armarx::LocalException(
"Invalid image framerate (property 'framerate.images'): ")
301 << framerate.value <<
". Only framerates 5, 15 and 30 are "
302 <<
"supported by Azure Kinect.";
305 framerate.pointCloud.value =
307 framerate.pointCloud.update(framerate.value);
309#ifdef INCLUDE_BODY_TRACKING
310 framerate.bodyTracking.update(framerate.value);
319 config.synchronized_images_only =
true;
327 ARMARX_INFO <<
"Depth image size: " << depth_dim.first <<
"x" << depth_dim.second;
328 ARMARX_INFO <<
"Color image size: " << color_dim.first <<
"x" << color_dim.second;
331 k4a::image::create(K4A_IMAGE_FORMAT_DEPTH16,
334 color_dim.first * 2 *
static_cast<int32_t
>(
sizeof(uint8_t)));
336 setImageFormat(visionx::ImageDimension(color_dim.first, color_dim.second),
338 visionx::eBayerPatternGr);
342 std::make_unique<CByteImage>(color_dim.first, color_dim.second, CByteImage::eRGB24);
344 xyzImage = k4a::image::create(K4A_IMAGE_FORMAT_CUSTOM,
347 color_dim.first * 3 *
static_cast<int32_t
>(
sizeof(int16_t)));
349 pointcloud = std::make_unique<pcl::PointCloud<CloudPointType>>();
363 pointcloudTask->start();
365#ifdef INCLUDE_BODY_TRACKING
366 if (bodyTrackingEnabled)
369 humanPoseWriter.connect(memoryNameSystem());
374 &AzureKinectPointCloudProvider::runPublishBodyTrackingResults);
383 {
"Vision",
"Camera"},
384 "AzureKinectPointCloudProvider");
397 std::lock_guard<std::mutex> lock(pointcloudProcMutex);
398 ARMARX_DEBUG <<
"Stopping pointcloud processing thread...";
399 const bool WAIT_FOR_JOIN =
false;
400 pointcloudTask->stop(WAIT_FOR_JOIN);
401 pointcloudProcSignal.notify_all();
402 ARMARX_DEBUG <<
"Waiting for pointcloud processing thread to stop...";
403 pointcloudTask->waitForStop();
407#ifdef INCLUDE_BODY_TRACKING
408 if (bodyTrackingIsRunning)
410 bodyTrackingPublishTask->stop();
440 const uint32_t DEVICE_COUNT = k4a::device::get_installed_count();
441 if (DEVICE_COUNT == 0)
444 throw armarx::LocalException(
"No Azure Kinect devices detected!");
449 device = k4a::device::open(
static_cast<uint32_t
>(mDeviceId));
450 ARMARX_DEBUG <<
"Opened device id #" << mDeviceId <<
" with serial number "
451 << device.get_serialnum() <<
".";
460 k4aCalibration = device.get_calibration(config.depth_mode, config.color_resolution);
462 <<
"Color camera calibration:"
464 <<
"cx: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.cx
466 <<
"cy: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.cy
468 <<
"fx: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.fx
470 <<
"fy: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.fy
472 <<
"k1: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k1
474 <<
"k2: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k2
476 <<
"p1: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.p1
478 <<
"p2: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.p2
480 <<
"k3: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k3
482 <<
"k4: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k4
484 <<
"k5: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k5
486 <<
"k6: " << k4aCalibration.color_camera_calibration.intrinsics.parameters.param.k6;
491 c_left->PrintCameraParameters();
494 c_right->PrintCameraParameters();
497 transformation = k4a::transformation(k4aCalibration);
499#ifdef INCLUDE_BODY_TRACKING
500 if (bodyTrackingEnabled)
506 bodyTrackingModelFilename);
507 ARMARX_CHECK(found) <<
"Body tracking DNN model could not be found/resolved at `"
508 << bodyTrackingModelFilename <<
"`.";
510 ARMARX_INFO <<
"Using body tracking DNN model from directory `"
511 << bodyTrackingModelFilename <<
"`.";
513 ARMARX_CHECK(std::filesystem::exists(bodyTrackingModelFilename))
514 <<
"The path `" << bodyTrackingModelFilename <<
"` does not exist!";
516 k4abt_tracker_configuration_t
const bodyTrackingConfig{
517 .sensor_orientation = K4ABT_SENSOR_ORIENTATION_DEFAULT,
518 .processing_mode = useCPU ? K4ABT_TRACKER_PROCESSING_MODE_CPU
519 : K4ABT_TRACKER_PROCESSING_MODE_GPU_CUDA,
520 .gpu_device_id = bodyTrackingGPUDeviceID,
521 .model_path = bodyTrackingModelFilename.c_str()};
523 bodyTracker = k4abt::tracker::create(k4aCalibration, bodyTrackingConfig);
524 bodyTracker.set_temporal_smoothing(bodyTrackingTemporalSmoothingFactor);
526 if (bodyTrackingRunAtStart and not bodyTrackingIsRunning)
528 bodyTrackingIsRunning =
true;
529 bodyTrackingPublishTask->start();
535 device.set_color_control(K4A_COLOR_CONTROL_BRIGHTNESS, K4A_COLOR_CONTROL_MODE_MANUAL, 128);
537 device.start_cameras(&config);
564 const k4a_calibration_camera_t calibration{k4aCalibration.color_camera_calibration};
565 const k4a_calibration_intrinsic_parameters_t::_param param =
566 calibration.intrinsics.parameters.param;
573 cv::Mat1f camera_matrix(3, 3);
574 camera_matrix << param.fx, 0, param.cx, 0, param.fy, param.cy, 0, 0, 1;
575 cv::Mat1f new_camera_matrix(3, 3);
576 new_camera_matrix << param.fx, 0, param.cx, 0, param.fx, param.cy, 0, 0, 1;
577 cv::Mat1f distortion_coeff(1, 8);
578 distortion_coeff << param.k1, param.k2, param.p1, param.p2, param.k3, param.k4,
580 cv::Mat map1, map2, map3;
581 cv::initUndistortRectifyMap(
586 cv::Size{calibration.resolution_width, calibration.resolution_height},
590 cv::convertMaps(map1, map2, colorDistortionMap, map3, CV_16SC2,
true);
612 const std::chrono::milliseconds TIMEOUT{1000};
614 StopWatch sw_get_capture;
619 status = device.get_capture(&
capture, TIMEOUT);
622 catch (
const std::exception&)
624 ARMARX_WARNING <<
"Failed to get capture from device (#" << ++mDiagnostics.num_crashes
625 <<
"). Restarting camera.";
627 device.stop_cameras();
628 device.start_cameras(&config);
629 ARMARX_INFO <<
"Restarting took " << sw.stop() <<
".";
638 std::lock_guard g{metaInfoMtx};
643 const bool mustInitializeTimestampOffset = [&]()
645 std::lock_guard g{deviceToRealtimeOffsetMtx};
646 return device_to_realtime_offset_.count() == 0;
648 if (mustInitializeTimestampOffset)
655 capture.get_ir_image().get_system_timestamp());
657 const k4a::image DEPTH_IMAGE =
capture.get_depth_image();
661 const k4a_image_format_t IMAGE_FORMAT = DEPTH_IMAGE.get_format();
662 if (IMAGE_FORMAT != K4A_IMAGE_FORMAT_DEPTH16 && IMAGE_FORMAT != K4A_IMAGE_FORMAT_IR16)
664 const char* format_str = ::k4aImageFormatToString(IMAGE_FORMAT);
665 std::stringstream error_msg;
666 error_msg <<
"Attempted to colorize a non-depth image with format: " << format_str;
667 throw std::logic_error(error_msg.str());
671 if (not framerate.pointCloud.skip())
677 std::lock_guard lock{pointcloudProcMutex};
678 transformation.depth_image_to_color_camera(DEPTH_IMAGE, &alignedDepthImage);
681 depthImageReady =
true;
685 pointcloudProcSignal.notify_one();
688#ifdef INCLUDE_BODY_TRACKING
689 if (bodyTrackingIsRunning)
692 std::scoped_lock lock(bodyTrackingParameterMutex);
694 if (not framerate.bodyTracking.skip())
696 k4a::image ir_image =
capture.get_ir_image();
697 std::uint8_t* ir_image_buffer = ir_image.get_buffer();
699 k4a::image depth_image =
capture.get_depth_image();
700 std::uint8_t* depth_image_buffer = depth_image.get_buffer();
704 depth_image.get_height_pixels());
706 const int stride = ir_image.get_stride_bytes() / ir_image.get_width_pixels();
708 for (
int x = 0;
x < ir_image.get_width_pixels();
x++)
710 for (
int y = 0; y < ir_image.get_height_pixels(); y++)
712 const int i = (y * ir_image.get_width_pixels() * stride) + (
x * stride);
713 const int z = (
static_cast<int>(depth_image_buffer[i])) +
714 (
static_cast<int>(depth_image_buffer[i + 1]) << 8);
716 if ((bodyTrackingDepthMaskMinX > 0 and
x < bodyTrackingDepthMaskMinX) or
717 (bodyTrackingDepthMaskMaxX > 0 and
x > bodyTrackingDepthMaskMaxX) or
718 (bodyTrackingDepthMaskMaxZ > 0 and z > bodyTrackingDepthMaskMaxZ))
720 ir_image_buffer[i] = std::numeric_limits<std::uint8_t>::max();
721 ir_image_buffer[i + 1] = std::numeric_limits<std::uint8_t>::max();
722 depth_image_buffer[i] = std::numeric_limits<std::uint8_t>::max();
723 depth_image_buffer[i + 1] =
724 std::numeric_limits<std::uint8_t>::max();
729 if (not bodyTracker.enqueue_capture(
capture))
737 const k4a::image COLOR_IMAGE =
capture.get_color_image();
740 auto real_time = IceUtil::Time::now();
741 auto monotonic_time = IceUtil::Time::now(IceUtil::Time::Monotonic);
742 auto clock_diff = real_time - monotonic_time;
744 auto image_monotonic_time =
745 IceUtil::Time::microSeconds(std::chrono::duration_cast<std::chrono::microseconds>(
746 DEPTH_IMAGE.get_system_timestamp())
750 imagesTime = image_monotonic_time + clock_diff - IceUtil::Time::microSeconds(offset);
753 std::lock_guard g{metaInfoMtx};
759 std::lock_guard g{debugObserverMtx};
761 (real_time - imagesTime).toMilliSecondsDouble());
764 if (enableColorUndistortion)
767 cv::Mat tmp_rgb_image;
769 if (COLOR_IMAGE.get_format() == K4A_IMAGE_FORMAT_COLOR_YUY2)
772 cv::Mat yuy2_image(COLOR_IMAGE.get_height_pixels(),
773 COLOR_IMAGE.get_width_pixels(),
775 const_cast<uint8_t*
>(COLOR_IMAGE.get_buffer()));
777 cv::cvtColor(yuy2_image, tmp_rgb_image, cv::COLOR_YUV2RGB_YUY2);
779 else if (COLOR_IMAGE.get_format() == K4A_IMAGE_FORMAT_COLOR_BGRA32)
781 cv::cvtColor(cv::Mat{cv::Size{COLOR_IMAGE.get_width_pixels(),
782 COLOR_IMAGE.get_height_pixels()},
784 (
void*)COLOR_IMAGE.get_buffer(),
791 throw std::runtime_error(
"Unsupported color format in k4a image");
794 cv::Mat cv_color_image_undistorted(COLOR_IMAGE.get_width_pixels(),
795 COLOR_IMAGE.get_height_pixels(),
798 cv::remap(tmp_rgb_image,
799 cv_color_image_undistorted,
803 cv::BORDER_CONSTANT);
811 ::k4aToIvtImage(COLOR_IMAGE, *resultColorImage);
819 const int DW = alignedDepthImage.get_width_pixels();
820 const int DH = alignedDepthImage.get_height_pixels();
826 auto result_depth_buffer = resultDepthImage->pixels;
827 const auto* depth_buffer =
828 reinterpret_cast<const uint16_t*
>(alignedDepthImage.get_buffer());
832 for (
int y = 0; y < DH; ++y)
834 for (
int x = 0;
x < DW; ++
x)
836 uint16_t depth_value = depth_buffer[index_2];
838 result_depth_buffer[
index],
839 result_depth_buffer[
index + 1],
840 result_depth_buffer[
index + 2]);
850 CByteImage* images[2] = {resultColorImage.get(), resultDepthImage.get()};
856 std::unique_lock signal_lock{pointcloudProcMutex};
857 ARMARX_DEBUG <<
"Capturing thread waiting for signal...";
858 pointcloudProcSignal.wait(signal_lock, [&] {
return depthImageProcessed; });
863 std::lock_guard g{debugObserverMtx};
869 heartbeatPlugin->heartbeat();
879 std::lock_guard g{debugObserverMtx};
890 Eigen::Vector3f accMean = Eigen::Vector3f::Zero();
893 k4a_imu_sample_t imu_sample;
894 while (device.get_imu_sample(&imu_sample, std::chrono::milliseconds(0)))
896 const Eigen::Vector3f acceleration{imu_sample.acc_sample.xyz.x,
897 imu_sample.acc_sample.xyz.y,
898 imu_sample.acc_sample.xyz.z};
900 accMean += acceleration;
904 accMean /=
static_cast<float>(cnt);
909 std::lock_guard g{debugObserverMtx};
918 catch (
const std::exception&)
921 << ++mDiagnostics.num_crashes <<
").";
928#ifdef INCLUDE_BODY_TRACKING
930 AzureKinectPointCloudProvider::runPublishBodyTrackingResults()
933 while (bodyTrackingIsRunning)
937 const k4abt::frame body_frame = [&]() -> k4abt::frame
941 auto result = bodyTracker.pop_result();
952 if (body_frame !=
nullptr)
954 armarx::core::time::ScopedStopWatch sw{
961 auto real_time = IceUtil::Time::now();
962 auto monotonic_time = IceUtil::Time::now(IceUtil::Time::Monotonic);
963 auto clock_diff = real_time - monotonic_time;
965 auto image_monotonic_time = IceUtil::Time::microSeconds(
966 std::chrono::duration_cast<std::chrono::microseconds>(
967 body_frame.get_system_timestamp())
972 IceUtil::Time imageTime = image_monotonic_time + clock_diff;
975 std::lock_guard g{debugObserverMtx};
977 imageTime.toMicroSeconds());
982 const armarx::Clock realtimeClock = armarx::Clock(armarx::ClockType::Realtime);
983 const armarx::Clock monotonicClock =
984 armarx::Clock(armarx::ClockType::Monotonic);
986 auto real_time = realtimeClock.
now();
987 auto monotonic_time = monotonicClock.
now();
988 auto clock_diff = real_time - monotonic_time;
991 std::chrono::duration_cast<std::chrono::microseconds>(
992 body_frame.get_system_timestamp())
997 auto imageTime = image_monotonic_time + clock_diff;
999 armarx::DateTime imageTimestamp = armarx::DateTime(imageTime);
1002 std::lock_guard g{debugObserverMtx};
1004 imageTime.toMicroSeconds());
1009 std::uint32_t num_bodies = body_frame.get_num_bodies();
1011 std::lock_guard g{debugObserverMtx};
1015 std::vector<armarx::armem::human::HumanPose> humanPoses;
1016 humanPoses.reserve(num_bodies);
1018 for (std::uint32_t i = 0; i < num_bodies; i++)
1020 k4abt_body_t body = body_frame.get_body(i);
1021 printBodyInformation(body);
1023 armarx::armem::human::HumanPose humanPose;
1028 for (
int i = 0; i < static_cast<int>(K4ABT_JOINT_COUNT); i++)
1035 k4a_float3_t position = body.skeleton.joints[i].position;
1036 k4a_quaternion_t orientation = body.skeleton.joints[i].orientation;
1037 k4abt_joint_confidence_level_t confidence_level =
1038 body.skeleton.joints[i].confidence_level;
1040 humanPose.
keypoints[name] = armarx::armem::human::PoseKeypoint{
1042 .confidence =
static_cast<float>(
static_cast<int>(confidence_level)),
1043 .positionCamera = armarx::FramedPosition(
1044 Eigen::Vector3f{position.v[0], position.v[1], position.v[2]},
1045 bodyCameraFrameName,
1047 .orientationCamera =
1052 .toRotationMatrix(),
1053 bodyCameraFrameName,
1057 humanPoses.push_back(humanPose);
1061 std::lock_guard g{debugObserverMtx};
1070 humanPoseWriter.commitHumanPosesInCameraFrame(humanPoses,
getName(),
timestamp);
1090 metronome.waitForNextTick();
1103 while (not pointcloudTask->isStopped())
1106 std::unique_lock signal_lock{pointcloudProcMutex};
1107 ARMARX_DEBUG <<
"Pointcloud thread waiting for signal...";
1108 pointcloudProcSignal.wait(signal_lock,
1110 {
return pointcloudTask->isStopped() or depthImageReady; });
1114 const IceUtil::Time TIMESTAMP = imagesTime;
1115 depthImageReady =
false;
1116 depthImageProcessed =
false;
1121 transformation.depth_image_to_point_cloud(alignedDepthImage,
1122 K4A_CALIBRATION_TYPE_COLOR,
1124 ARMARX_DEBUG <<
"Transforming depth image to point cloud took "
1134 pointcloud->width =
static_cast<uint32_t
>(xyzImage.get_width_pixels());
1135 pointcloud->height =
static_cast<uint32_t
>(xyzImage.get_height_pixels());
1139 pointcloud->is_dense =
false;
1140 pointcloud->points.resize(pointcloud->width * pointcloud->height);
1142 auto k4a_cloud_buffer =
reinterpret_cast<const int16_t*
>(xyzImage.get_buffer());
1146 unsigned char* color_buffer = resultColorImage->pixels;
1147 while (color_buffer ==
nullptr)
1149 ARMARX_INFO <<
"color_buffer is null. This should never happen. There is "
1150 "probably a race condition somewhere that needs to be fixed. "
1151 "Temporarily we ignore this and continue.\n Timestamp: "
1153 color_buffer = resultColorImage->pixels;
1160 pointcloud->width * pointcloud->height);
1166 for (
auto& p : pointcloud->points)
1168 p.r = color_buffer[
index];
1169 p.x = k4a_cloud_buffer[
index];
1173 p.g = color_buffer[
index];
1174 p.y = k4a_cloud_buffer[
index];
1178 p.b = color_buffer[
index];
1179 auto z = k4a_cloud_buffer[
index];
1183 if (z <= max_depth and z != 0)
1189 p.z = std::numeric_limits<float>::quiet_NaN();
1198 depthImageProcessed =
true;
1199 signal_lock.unlock();
1201 pointcloudProcSignal.notify_all();
1208 pointcloud->header.stamp =
static_cast<unsigned long>(TIMESTAMP.toMicroSeconds());
1221 return "AzureKinectPointCloudProvider";
1257 using namespace Eigen;
1262 const auto convert_calibration =
1263 [](
const k4a_calibration_camera_t& k4a_calib,
float scale = 1.f)
1265 MonocularCalibration monocular_calibration;
1267 const k4a_calibration_intrinsic_parameters_t& params = k4a_calib.intrinsics.parameters;
1268 monocular_calibration.cameraParam.principalPoint = {params.param.cx * scale,
1269 params.param.cy * scale};
1270 monocular_calibration.cameraParam.focalLength = {params.param.fx * scale,
1271 params.param.fy * scale};
1284 monocular_calibration.cameraParam.distortion = {
1292 monocular_calibration.cameraParam.width =
1293 std::floor(
float(k4a_calib.resolution_width) * scale);
1294 monocular_calibration.cameraParam.height =
1295 std::floor(
float(k4a_calib.resolution_height) * scale);
1297 const Matrix3FRowMajor rotation =
1298 Map<const Matrix3FRowMajor>{k4a_calib.extrinsics.rotation};
1300 monocular_calibration.cameraParam.rotation = convertEigenMatToVisionX(rotation);
1301 monocular_calibration.cameraParam.translation = {k4a_calib.extrinsics.translation,
1302 k4a_calib.extrinsics.translation + 3};
1304 return monocular_calibration;
1307 StereoCalibration stereo_calibration;
1309 stereo_calibration.calibrationLeft =
1310 convert_calibration(k4aCalibration.color_camera_calibration);
1313 if (enableColorUndistortion)
1315 auto& colorDistortionParams = stereo_calibration.calibrationLeft.cameraParam.distortion;
1316 std::fill(colorDistortionParams.begin(), colorDistortionParams.end(), 0);
1323 stereo_calibration.calibrationRight =
1324 convert_calibration(k4aCalibration.color_camera_calibration);
1326 auto& depthDistortionParams =
1327 stereo_calibration.calibrationRight.cameraParam.distortion;
1328 std::fill(depthDistortionParams.begin(), depthDistortionParams.end(), 0);
1331 stereo_calibration.rectificationHomographyLeft =
1332 convertEigenMatToVisionX(Matrix3f::Identity());
1333 stereo_calibration.rectificationHomographyRight =
1334 convertEigenMatToVisionX(Matrix3f::Identity());
1336 return stereo_calibration;
1342 return enableColorUndistortion;
1351 std::vector<imrec::ChannelPreferences>
1356 imrec::ChannelPreferences rgb;
1357 rgb.requiresLossless =
false;
1360 imrec::ChannelPreferences depth;
1361 depth.requiresLossless =
true;
1362 depth.name =
"depth";
1364 return {rgb, depth};
1369 const armarx::EnableHumanPoseEstimationInput& input,
1370 const Ice::Current&)
1372#ifndef INCLUDE_BODY_TRACKING
1374 ARMARX_ERROR <<
"INCLUDE_BODY_TRACKING is not defined.";
1379#ifdef INCLUDE_BODY_TRACKING
1380 if (bodyTrackingEnabled)
1383 if (not bodyTrackingIsRunning and input.enable3d)
1385 bodyTrackingIsRunning =
true;
1386 bodyTrackingPublishTask->start();
1388 else if (bodyTrackingIsRunning and not input.enable3d)
1390 bodyTrackingIsRunning =
false;
1391 bodyTrackingPublishTask->stop();
1396 ARMARX_ERROR <<
"Azure Kinect Body Tracking is not enabled";
1404 std::scoped_lock lock(bodyTrackingParameterMutex);
1405 bodyTrackingDepthMaskMaxZ = maxDepthInMM;
1411 const Ice::Current&)
1413 std::scoped_lock lock(bodyTrackingParameterMutex);
1414 bodyTrackingDepthMaskMinX = minXinPixel;
1415 bodyTrackingDepthMaskMaxX = maxXinPixel;
1423 const std::chrono::microseconds& k4a_device_timestamp_us,
1424 const std::chrono::nanoseconds& k4a_system_timestamp_ns)
1432 std::chrono::nanoseconds realtime_clock =
1433 std::chrono::system_clock::now().time_since_epoch();
1434 std::chrono::nanoseconds monotonic_clock =
1435 std::chrono::steady_clock::now().time_since_epoch();
1437 std::chrono::nanoseconds monotonic_to_realtime = realtime_clock - monotonic_clock;
1440 std::chrono::nanoseconds device_to_realtime =
1441 k4a_system_timestamp_ns - k4a_device_timestamp_us + monotonic_to_realtime;
1444 std::lock_guard g{deviceToRealtimeOffsetMtx};
1447 std::lock_guard g{debugObserverMtx};
1449 device_to_realtime_offset_.count() /
1453 std::abs<float>((device_to_realtime_offset_ - device_to_realtime).count()) /
1457 const std::int64_t timeOffsetThreshold = 1e7;
1460 if (device_to_realtime_offset_.count() == 0 ||
1461 std::abs((device_to_realtime_offset_ - device_to_realtime).count()) >
1462 timeOffsetThreshold)
1465 <<
"Initializing or re-initializing the device to realtime offset: "
1466 << device_to_realtime.count() <<
" ns";
1467 device_to_realtime_offset_ = device_to_realtime;
1472 constexpr double alpha = 0.10;
1474 const std::chrono::nanoseconds timeCorrection(
static_cast<int64_t
>(
1475 std::floor(alpha * (device_to_realtime - device_to_realtime_offset_).count())));
1476 device_to_realtime_offset_ = device_to_realtime_offset_ + timeCorrection;
1479 std::lock_guard g{debugObserverMtx};
1481 timeCorrection.count() / 1'000);
1489 const std::chrono::microseconds& k4a_timestamp_us)
1491 std::lock_guard g{deviceToRealtimeOffsetMtx};
1496 std::chrono::nanoseconds timestamp_in_realtime =
1497 k4a_timestamp_us + device_to_realtime_offset_;
1505 const std::chrono::microseconds& k4a_device_timestamp_us)
1507 std::lock_guard g{deviceToRealtimeOffsetMtx};
1510 std::chrono::nanoseconds realtime_clock =
1511 std::chrono::system_clock::now().time_since_epoch();
1513 device_to_realtime_offset_ = realtime_clock - k4a_device_timestamp_us;
1515 ARMARX_INFO <<
"Initializing the device to realtime offset based on wall clock: "
1516 << device_to_realtime_offset_.count() <<
" ns";
1527 if (std::abs(std::fmod(higherFramerate,
value)) > 1e-6)
1529 std::stringstream ss;
1530 ss <<
"Invalid value (" <<
value <<
" fps) for framerate of '" <<
name <<
"'."
1531 <<
" Reason: The framerate has to be a divider of the property framerate.image ("
1532 << higherFramerate <<
" fps).";
1533 throw armarx::LocalException() << ss.str();
Eigen::Matrix< T, 3, 3 > Matrix
static bool getAbsolutePath(const std::string &relativeFilename, std::string &storeAbsoluteFilename, const std::vector< std::string > &additionalSearchPaths={}, bool verbose=true)
static bool ReplaceEnvVars(std::string &string)
ReplaceEnvVars replaces environment variables in a string with their values, if the env.
static DateTime Now()
Current time on the virtual clock.
std::string getConfigIdentifier()
Retrieve config identifier for this component as set in constructor.
Property< PropertyType > getProperty(const std::string &name)
void setDebugObserverDatafield(Ts &&... ts) const
void sendDebugObserverBatch()
void setDebugObserverBatchModeEnabled(bool enable)
static Duration MicroSeconds(std::int64_t microSeconds)
Constructs a duration in microseconds.
static Duration MilliSeconds(std::int64_t milliSeconds)
Constructs a duration in milliseconds.
static Frequency Hertz(std::int64_t hertz)
SpamFilterDataPtr deactivateSpam(float deactivationDurationSec=10.0f, const std::string &identifier="", bool deactivate=true) const
disables the logging for the current line for the given amount of seconds.
virtual void onExitComponent()
Hook for subclass.
virtual void onDisconnectComponent()
Hook for subclass.
PluginT * addPlugin(const std::string prefix="", ParamsT &&... params)
virtual void onConnectComponent()=0
Pure virtual hook for the subclass.
std::string getName() const
Retrieve name of object.
virtual void onInitComponent()=0
Pure virtual hook for the subclass.
void setMetaInfo(const std::string &id, const VariantBasePtr &value)
Allows to set meta information that can be queried live via Ice interface on the ArmarXManager.
ArmarXManagerPtr getArmarXManager() const
Returns the ArmarX manager used to add and remove components.
std::string prefix
Prefix of the properties such as namespace, domain, component name, etc.
PropertyDefinition< PropertyType > & defineOptionalProperty(const std::string &name, PropertyType defaultValue, const std::string &description="", PropertyDefinitionBase::PropertyConstness constness=PropertyDefinitionBase::eConstant)
IceUtil::Handle< RunningTask< T > > pointer_type
Shared pointer type for convenience.
static IceUtil::Time GetTime(TimeMode timeMode=TimeMode::VirtualTime)
Get the current time.
static IceUtil::Time GetTimeSince(IceUtil::Time referenceTime, TimeMode timeMode=TimeMode::VirtualTime)
Get the difference between the current time and a reference time.
The Variant class is described here: Variants.
DateTime now() const
Current date/time of the clock.
Represents a point in time.
std::int64_t toMilliSecondsSinceEpoch() const
Simple rate limiter for use in loops to maintain a certain frequency given a clock.
Measures the time this stop watch was inside the current scope.
Measures the passed time between the construction or calling reset() and stop().
AzureKinectPointCloudProviderPropertyDefinitions(std::string prefix)
void onInitComponent() override
Pure virtual hook for the subclass.
void setMaxDepthBodyTracking(int maxDepthInMM, const Ice::Current &=Ice::emptyCurrent) override
armarx::DateTime timestampToArmarX(const std::chrono::microseconds &k4a_timestamp_us)
bool doCapture() override
Main capturing function.
void enableHumanPoseEstimation(const armarx::EnableHumanPoseEstimationInput &input, const Ice::Current &=Ice::emptyCurrent) override
void onExitCapturingPointCloudProvider() override
This is called when the Component::onExitComponent() setup is called.
void onDisconnectComponent() override
Hook for subclass.
static std::pair< int, int > GetDepthDimensions(const k4a_depth_mode_t depth_mode)
Returns the dimension of the depth images that will be produced for a certain resolution.
void onStartCapture(float frames_per_second) override
This is called when the point cloud provider capturing has been started.
static std::pair< int, int > GetColorDimensions(const k4a_color_resolution_t resolution)
Returns the dimension of the color images that will be produced for a certain resolution.
armarx::PropertyDefinitionsPtr createPropertyDefinitions() override
void runPointcloudPublishing()
void onConnectImageProvider() override
This is called when the Component::onConnectComponent() setup is called.
visionx::StereoCalibration getStereoCalibration(const Ice::Current &c) override
std::string getReferenceFrame(const Ice::Current &c) override
static std::string VersionToString(const k4a_version_t &version)
Creates a string from a k4a_version_t.
std::function< void(armarx::Duration)> createSwCallback(const std::string &description)
void onInitCapturingPointCloudProvider() override
This is called when the Component::onInitComponent() is called.
void onConnectComponent() override
Pure virtual hook for the subclass.
void onStopCapture() override
This is called when the point cloud provider capturing has been stopped.
bool getImagesAreUndistorted(const ::Ice::Current &c) override
std::vector< imrec::ChannelPreferences > getImageRecordingChannelPreferences(const Ice::Current &) override
void onInitImageProvider() override
This is called when the Component::onInitComponent() is called.
void setWidthBodyTracking(int minXinPixel, int maxXinPixel, const Ice::Current &=Ice::emptyCurrent) override
void onExitComponent() override
Hook for subclass.
void initializeTimestampOffset(const std::chrono::microseconds &k4a_device_timestamp_us)
void onDisconnectImageProvider() override
AzureKinectPointCloudProvider()
std::string getDefaultName() const override
void updateTimestampOffset(const std::chrono::microseconds &k4a_device_timestamp_us, const std::chrono::nanoseconds &k4a_system_timestamp_ns)
CapturingPointCloudProviderPropertyDefinitions(std::string prefix)
std::atomic_bool captureEnabled
Indicates that capturing is enabled and running.
void setPointCloudSyncMode(ImageSyncMode pointCloudSyncMode)
Sets the point cloud synchronization mode.
void onInitComponent() override
void onDisconnectComponent() override
Hook for subclass.
ImageFormatInfo getImageFormat(const Ice::Current &c=Ice::emptyCurrent) override
Returns the entire image format info struct via Ice.
void setImageFormat(ImageDimension imageDimension, ImageType imageType, BayerPatternType bayerPatternType=visionx::eBayerPatternRg)
Sets the image basic format data.
void provideImages(void **inputBuffers, const IceUtil::Time &imageTimestamp=IceUtil::Time())
Sends the raw images.
void onConnectComponent() override
void setNumberImages(int numberImages)
Sets the number of images on each capture.
void onExitComponent() override
MetaPointCloudFormatPtr getPointCloudFormat(const Ice::Current &c=Ice::emptyCurrent) override
Returns the point cloud format info struct via Ice.
void providePointCloud(PointCloudPtrT pointCloudPtr)
Offers the new point cloud.
#define ARMARX_CHECK_EXPRESSION(expression)
This macro evaluates the expression and if it turns out to be false it will throw an ExpressionExcept...
#define ARMARX_CHECK(expression)
Shortcut for ARMARX_CHECK_EXPRESSION.
#define ARMARX_CHECK_NOT_NULL(ptr)
This macro evaluates whether ptr is not null and if it turns out to be false it will throw an Express...
#define ARMARX_CHECK_EQUAL(lhs, rhs)
This macro evaluates whether lhs is equal (==) rhs and if it turns out to be false it will throw an E...
#define ARMARX_INFO
The normal logging level.
#define ARMARX_ERROR
The logging level for unexpected behaviour, that must be fixed.
#define ARMARX_DEBUG
The logging level for output that is only interesting while debugging.
#define ARMARX_WARNING
The logging level for unexpected behaviour, but not a serious problem.
#define ARMARX_VERBOSE
The logging level for verbose information.
Quaternion< float, 0 > Quaternionf
const std::string ModelId
const simox::meta::EnumNames< Joints > JointNames
Names of the joints as defined in the body model.
Joints
Joints with index as defined in the body model.
This file offers overloads of toIce() and fromIce() functions for STL container types.
std::string GetHandledExceptionString()
IceUtil::Handle< class PropertyDefinitionContainer > PropertyDefinitionsPtr
PropertyDefinitions smart pointer type.
void convert(const CByteImage &in, cv::Mat &out)
Converts an IVT CByteImage to OpenCV's BGR Mat.
void convert_rgb2cbi(const cv::Mat &in, CByteImage &out)
Converts an OpenCV RGB Mat to IVT's CByteImage.
std::optional< std::string > humanTrackingId
std::string cameraFrameName
unsigned int skipFramesCount
void update(int higherFramerate)