#include <Eigen/Geometry>

#include <pcl/common/transforms.h>
#include <pcl/compression/organized_pointcloud_conversion.h>

#include <SimoxUtility/math/convert/deg_to_rad.h>
#include <VirtualRobot/Nodes/RobotNode.h>
#include <VirtualRobot/Visualization/CoinVisualization/CoinVisualizationFactory.h>

#include <Inventor/SoInteraction.h>
#include <Inventor/actions/SoGLRenderAction.h>
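// Optional properties of the simulated depth camera provider: image mode, sensor
// noise, near/far clipping, field of view, stereo baseline, image resolution, the
// robot and camera node to render from, and the DebugDrawer point cloud options.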
    visionx::CapturingPointCloudProviderPropertyDefinitions(prefix)
{
    defineOptionalProperty<bool>(
        "FloatImageMode",
        false,
        "Whether to provide a CFloatImage or the standard CByteImage");
    defineOptionalProperty<float>(
        "Noise",
        /* ... */,
        "Noise of the point cloud position results, given as the standard "
        "deviation of the normal distribution (in mm)");
    defineOptionalProperty<float>(
        "DistanceZNear",
        /* ... */,
        "Distance of the near clipping plane. (If set too small, "
        "the inside of the agent's model may be visible.)");
    defineOptionalProperty<float>(
        "DistanceZFar",
        /* ... */,
        "Distance of the far clipping plane. (DistanceZFar-DistanceZNear should be "
        "minimal, DistanceZFar > DistanceZNear.)");
    defineOptionalProperty<float>(
        "FieldOfView", 90,
        "Vertical field of view (FOV) in degrees.");
    defineOptionalProperty<float>(
        "BaseLine", 0.075,
        "The value returned from getBaseline(). It has no other effect.");
    defineOptionalProperty<float>(
        "NanValue",
        /* ... */,
        "Value of points that are farther away than DistanceZFar. "
        "Most cameras return NaN here.");
    defineOptionalProperty<visionx::ImageDimension>(
        "ImageSize",
        visionx::ImageDimension(640, 480),
        "Target resolution of the images. Captured images will be converted to this size.")
        .setCaseInsensitive(true)
        .map("200x200", visionx::ImageDimension(200, 200))
        .map("320x240", visionx::ImageDimension(320, 240))
        .map("640x480", visionx::ImageDimension(640, 480))
        .map("800x600", visionx::ImageDimension(800, 600))
        .map("768x576", visionx::ImageDimension(768, 576))
        .map("1024x768", visionx::ImageDimension(1024, 768))
        .map("1280x960", visionx::ImageDimension(1280, 960))
        .map("1600x1200", visionx::ImageDimension(1600, 1200))
        .map("1920x1080", visionx::ImageDimension(1920, 1080))
        .map("none", visionx::ImageDimension(0, 0));
    defineOptionalProperty<std::string>(
        "RobotName",
        "Armar3",
        "The robot's name.");
    defineOptionalProperty<std::string>(
        "RobotNodeCamera",
        "DepthCameraSim",
        "The coordinate system of the used camera");
    defineOptionalProperty<bool>(
        /* ... */,
        "Whether the point cloud is drawn to the given DebugDrawerTopic");
    defineOptionalProperty<std::string>(
        "DrawPointCloud_DebugDrawerTopic",
        "DebugDrawerUpdates",
        "Name of the DebugDrawerTopic");
    defineOptionalProperty<unsigned int>(
        "DrawPointCloud_DrawDelay",
        /* ... */,
        "The time between updates of the drawn point cloud (in ms)");
    defineOptionalProperty<float>(
        "DrawPointCloud_PointSize", 4,
        "The size of a point.");
    defineOptionalProperty<std::size_t>(
        "DrawPointCloud_PointSkip",
        /* ... */,
        "Only draw every n'th point in x and y direction (n=DrawPointCloud_PointSkip). "
        "Increase this whenever the Ice buffer size is too small to transmit the cloud.");
    defineOptionalProperty<bool>(
        "DrawPointCloud_ClipPoints",
        /* ... */,
        "Whether to clip the point cloud drawn to the given DebugDrawerTopic");
    defineOptionalProperty<float>(
        "DrawPointCloud_ClipXHi", 25000,
        "Skip points with x higher than this limit.");
    defineOptionalProperty<float>(
        "DrawPointCloud_ClipYHi", 25000,
        "Skip points with y higher than this limit.");
    defineOptionalProperty<float>(
        "DrawPointCloud_ClipZHi", 25000,
        "Skip points with z higher than this limit.");
    defineOptionalProperty<float>(
        "DrawPointCloud_ClipXLo", -25000,
        "Skip points with x lower than this limit.");
    defineOptionalProperty<float>(
        "DrawPointCloud_ClipYLo", -25000,
        "Skip points with y lower than this limit.");
    defineOptionalProperty<float>(
        "DrawPointCloud_ClipZLo", -25000,
        "Skip points with z lower than this limit.");
}
    baseline = getProperty<float>("BaseLine").getValue();
    robotName = getProperty<std::string>("RobotName").getValue();
    camNodeName = getProperty<std::string>("RobotNodeCamera").getValue();

    zNear = getProperty<float>("DistanceZNear").getValue();
    zFar = getProperty<float>("DistanceZFar").getValue();
    vertFov = simox::math::deg_to_rad(getProperty<float>("FieldOfView").getValue());
    nanValue = getProperty<float>("NanValue").getValue();

    lastUpdate = std::chrono::high_resolution_clock::now() - /* ... */;
    /* ... */ = getProperty<std::string>("DrawPointCloud_DebugDrawerTopic").getValue();
    pointSkip = getProperty<std::size_t>("DrawPointCloud_PointSkip").getValue();
    clipPointCloud = getProperty<bool>("DrawPointCloud_ClipPoints").getValue();
    noise = getProperty<float>("Noise").getValue();
    ImageProvider::onInitComponent();
    CapturingPointCloudProvider::onInitComponent();
    ARMARX_VERBOSE_S << "done DepthImageProviderDynamicSimulation::onInitComponent";
    ARMARX_VERBOSE_S << "DepthImageProviderDynamicSimulation::onConnectComponent";

    ImageProvider::onConnectComponent();
    CapturingPointCloudProvider::onConnectComponent();
    ARMARX_VERBOSE_S << "done DepthImageProviderDynamicSimulation::onConnectComponent";
    CapturingPointCloudProvider::onExitComponent();
    ImageProvider::onExitComponent();
    ARMARX_VERBOSE_S << "done DepthImageProviderDynamicSimulation::onExitComponent";
    ARMARX_INFO << "DepthImageProviderDynamicSimulation::onStartCapture";

    simVisu->synchronizeVisualizationData();

    auto l = simVisu->getScopedLock();
    /* ... */(
        VirtualRobot::CoinVisualizationFactory::createOffscreenRenderer(width, height));
    ARMARX_INFO << "done DepthImageProviderDynamicSimulation::onStartCapture";
    return "DynamicSimulationDepthImageProvider";
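// The simulated camera uses a simple pinhole model: for a vertical field of view
// fov_v (in radians, vertFov) and aspect ratio r = width / height, the horizontal
// field of view is approximated as fov_h = fov_v * r, and the focal length in
// pixels is f = width / (2 * tan(fov_h / 2)). Both the color and the depth
// intrinsics below use this focal length, a principal point at the image center,
// and zero distortion.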
visionx::StereoCalibration
/* ... */
{
    visionx::StereoCalibration stereoCalibration;

    float r = static_cast<float>(width) / static_cast<float>(height);

    visionx::CameraParameters RGBCameraIntrinsics;
    RGBCameraIntrinsics.distortion = {0, 0, 0, 0};
    RGBCameraIntrinsics.focalLength = {
        static_cast<float>(width) / (2 * std::tan((vertFov * r) / 2)),
        /* ... */};
    RGBCameraIntrinsics.height = height;
    RGBCameraIntrinsics.principalPoint = {width / 2.0f, height / 2.0f};
    RGBCameraIntrinsics.rotation = /* ... */;
    RGBCameraIntrinsics.translation = /* ... */;
    RGBCameraIntrinsics.width = width;

    visionx::CameraParameters DepthCameraIntrinsics;
    DepthCameraIntrinsics.distortion = {0, 0, 0, 0};
    DepthCameraIntrinsics.focalLength = {
        static_cast<float>(width) / (2 * std::tan((vertFov * r) / 2)),
        /* ... */};
    DepthCameraIntrinsics.height = height;
    DepthCameraIntrinsics.principalPoint = {width / 2.0f, height / 2.0f};
    DepthCameraIntrinsics.rotation = /* ... */;
    DepthCameraIntrinsics.translation = {0.075, 0, 0};
    DepthCameraIntrinsics.width = width;

    stereoCalibration.calibrationLeft.cameraParam = RGBCameraIntrinsics;
    stereoCalibration.calibrationRight.cameraParam = DepthCameraIntrinsics;
    stereoCalibration.rectificationHomographyLeft = /* ... */;
    stereoCalibration.rectificationHomographyRight = /* ... */;

    return stereoCalibration;
}
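// For illustration only (not code used by this component, and assuming the usual
// pinhole convention): a camera-frame point (X, Y, Z) with Z > 0 would project to
// pixel coordinates roughly as
//   u = f * X / Z + width / 2
//   v = f * Y / Z + height / 2
// where f is the focal length and the principal point is the image center, as set
// above, with zero distortion and no rectification.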
    return getProperty<std::string>("RobotNodeCamera");
    ARMARX_VERBOSE_S << "DepthImageProviderDynamicSimulation::onInitImageProvider";

    const auto imgSz = getProperty<visionx::ImageDimension>("ImageSize").getValue();
    rgbImage.reset(new CByteImage{
        static_cast<int>(width), static_cast<int>(height), CByteImage::eRGB24});
    dImage.reset(new CByteImage{
        static_cast<int>(width), static_cast<int>(height), CByteImage::eRGB24});

    // FloatImageMode: report a single float channel and allocate the float depth image.
    setImageFormat(visionx::ImageDimension(imgSz), visionx::eFloat1Channel);

    dFloatImage.reset(new CFloatImage{
        static_cast<int>(width), static_cast<int>(height), 1});
    pointcloud.reset(new pcl::PointCloud<PointT>{
        static_cast<unsigned int>(width), static_cast<unsigned int>(height)});

    SoInteraction::init();

    std::stringstream svName;
    svName << getName() << "_PhysicsWorldVisualization";
    simVisu = Component::create<ArmarXPhysicsWorldVisualization>(/* ... */);

    ARMARX_VERBOSE_S << "done DepthImageProviderDynamicSimulation::onInitImageProvider";
    ImageProvider::onDisconnectComponent();
    CapturingPointCloudProvider::onDisconnectComponent();
    bool succeeded = true;

    /* ... */
        : IceUtil::Time::now().toMicroSeconds();
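// The rendered cloud is rotated by 90 degrees about the Z axis before further
// processing; pcl::transformPointCloud applies the homogeneous 4x4 transform to
// every point, and the capture timestamp is stored in the transformed cloud's header.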
    rotate.block(0, 0, 3, 3) =
        Eigen::AngleAxis<float>(M_PI_2, Eigen::Vector3f::UnitZ()).toRotationMatrix();
    pcl::PointCloud<PointT>::Ptr cloudTransformedPtr(new pcl::PointCloud<PointT>());
    pcl::transformPointCloud(*pointcloud, *cloudTransformedPtr, rotate);
    cloudTransformedPtr->header.stamp = timestamp;
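// Debug visualization: every pointSkip-th point (in x and y) is transformed into
// the world frame and published as a DebugDrawer24BitColoredPointCloud. This is
// presumably gated by the draw flag and rate-limited via lastUpdate and
// DrawPointCloud_DrawDelay.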
    lastUpdate = std::chrono::high_resolution_clock::now();
    DebugDrawer24BitColoredPointCloud dbgPcl;
    dbgPcl.points.reserve(cloudTransformedPtr->size());

    const auto pointCloudBaseNode = /* ... */;
    if (!pointCloudBaseNode)
    {
        /* ... */
            << "\nthe point cloud will be attached to (0,0,0)";
    }

    const Eigen::Matrix4f transformCamToWorld =
        (pointCloudBaseNode ? pointCloudBaseNode->getGlobalPose() : /* ... */);

    for (std::size_t x = 0; x < static_cast<std::size_t>(width); x += pointSkip)
    {
        for (std::size_t y = 0; y < static_cast<std::size_t>(height); y += pointSkip)
        {
            const auto& pclPoint = cloudTransformedPtr->at(x, y);

            // Transform the point into the world frame as a homogeneous vector and
            // normalize by w (a no-op for a purely affine pose, kept as a safeguard).
            const Eigen::Vector4f pointFromPcl{pclPoint.x, pclPoint.y, pclPoint.z, 1};
            const Eigen::Vector4f pointInWorld = transformCamToWorld * pointFromPcl;
            const Eigen::Vector4f pointInWorldNormalizedW = pointInWorld / pointInWorld(3);

            DebugDrawer24BitColoredPointCloudElement dbgPoint;
            dbgPoint.x = pointInWorldNormalizedW(0);
            dbgPoint.y = pointInWorldNormalizedW(1);
            dbgPoint.z = pointInWorldNormalizedW(2);

            dbgPoint.color.r = pclPoint.r;
            dbgPoint.color.g = pclPoint.g;
            dbgPoint.color.b = pclPoint.b;

            dbgPcl.points.emplace_back(dbgPoint);
        }
    }

    /* ... */("PointClouds", getName() + "::PointCloud", dbgPcl);
    bool renderOK = false;

    auto l = simVisu->getScopedLock();
    auto start = IceUtil::Time::now();

    renderOK = VirtualRobot::CoinVisualizationFactory::renderOffscreenRgbDepthPointcloud(
        /* ... */);

    /* ... */
        << (IceUtil::Time::now() - start).toMilliSecondsDouble() << " ms";
    auto start = std::chrono::high_resolution_clock::now();

    int noiseOffset = noise > 0 ? rand() % randValuesSize : 0;
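// Presumably depending on FloatImageMode, the depth buffer is copied either into
// the single-channel float image (first loop below) or packed into the 24-bit byte
// image (second loop). noiseOffset picks a random starting index into the
// precomputed random-value buffer used later when assembling the point cloud.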
    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < width; x++)
        {
            float value = depthBufferEndOfRow[-x];

            dFloatImageRow[x] = value;
        }
        dFloatImageRow += width;
        depthBufferEndOfRow += width;
    }
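// In byte-image mode the depth is packed into the 24-bit RGB channels of dImage:
// the value is clamped to 0xffffff and (presumably) its three low-order bytes are
// copied into the pixel, while the color image is copied row-wise from the mirrored
// RGB buffer.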
    std::uint8_t* rgbImageRow = rgbImage->pixels;
    std::uint8_t* rgbBufferEndOfRow = rgbBuffer.data() + (width - 1) * 3;
    std::uint8_t* dImageRow = dImage->pixels;
    for (int y = 0; y < height; ++y)
    {
        for (int x = 0; x < width; ++x)
        {
            memcpy(static_cast<void*>(&(rgbImageRow[x * 3])),
                   static_cast<const void*>(&(rgbBufferEndOfRow[-x * 3])),
                   /* ... */);

            float valueF = depthBufferEndOfRow[-x];
            valueF = std::min(valueF, static_cast<float>(0xffffff));
            std::uint32_t value = static_cast<std::uint32_t>(valueF);
            memcpy(static_cast<void*>(&(dImageRow[x * 3])),
                   static_cast<const void*>(&value),
                   /* ... */);
        }
        rgbImageRow += width * 3;
        rgbBufferEndOfRow += width * 3;
        dImageRow += width * 3;
        depthBufferEndOfRow += width;
    }
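// Finally, the organized output cloud is filled: positions come from the rendered
// point cloud buffer (again mirrored within each row), per-coordinate noise is
// added from randValuesData (presumably pre-generated samples with the configured
// "Noise" standard deviation), and colors are taken from the RGB buffer.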
    std::uint8_t* rgbBufferEntry = rgbBuffer.data();

    for (int y = 0; y < height; ++y)
    {
        for (int x = 0; x < width; ++x)
        {
            PointT& point = outputEndOfRow[-x];
            Eigen::Vector3f pointCloudBufferPoint = inputRow[x];

            int noiseIndex = (y * width + x) + noiseOffset;
            const float noiseX =
                noise > 0.0f ? randValuesData[(noiseIndex) % randValuesSize] : 0.0f;
            const float noiseY =
                noise > 0.0f ? randValuesData[(noiseIndex + 1) % randValuesSize] : 0.0f;
            const float noiseZ =
                noise > 0.0f ? randValuesData[(noiseIndex + 2) % randValuesSize] : 0.0f;

            point.x = pointCloudBufferPoint[0] + noiseX;
            point.y = pointCloudBufferPoint[1] + noiseY;
            point.z = pointCloudBufferPoint[2] + noiseZ;
            point.r = rgbBufferEntry[0];
            point.g = rgbBufferEntry[1];
            point.b = rgbBufferEntry[2];
            /* ... */
        }
        /* ... */
        outputEndOfRow += width;
    }

    auto end = std::chrono::high_resolution_clock::now();
    std::int64_t timeDiff =
        std::chrono::duration_cast<std::chrono::microseconds>(end - start).count();
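// The per-frame copy time feeds running statistics (averageTime and
// averageSquaredTime, maintained elsewhere); the standard deviation below follows
// from stddev = sqrt(E[t^2] - E[t]^2).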
    double stddevTime = std::sqrt(averageSquaredTime - averageTime * averageTime);

    ARMARX_DEBUG << "Copying depth buffer data took: " << averageTime << "ms +- "
                 << stddevTime << "ms";
    simVisu->synchronizeVisualizationData();