#include <Eigen/Geometry>

#include <pcl/common/transforms.h>
#include <pcl/compression/organized_pointcloud_conversion.h>

#include <SimoxUtility/math/convert/deg_to_rad.h>

#include <VirtualRobot/Visualization/CoinVisualization/CoinVisualizationFactory.h>

#include <Inventor/SoInteraction.h>
#include <Inventor/actions/SoGLRenderAction.h>
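
// Property definitions for the simulated depth camera provider: camera model
// (image size, field of view, clipping planes, baseline), simulated sensor noise,
// and options for drawing the resulting point cloud to a DebugDrawerTopic.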
    visionx::CapturingPointCloudProviderPropertyDefinitions(prefix)
{
    defineOptionalProperty<bool>(
        "FloatImageMode",
        false,
        "Whether to provide a CFloatImage or the standard CByteImage");
    defineOptionalProperty<float>(
        "Noise", /* ... */,
        "Noise of the point cloud position results as standard "
        "deviation of the normal distribution (in mm)");
    defineOptionalProperty<float>(
        "DistanceZNear", /* ... */,
        "Distance of the near clipping plane. (If set too small, "
        "the inside of the agent's model may be visible.)");
    defineOptionalProperty<float>(
        "DistanceZFar", /* ... */,
        "Distance of the far clipping plane. (DistanceZFar - DistanceZNear should be "
        "minimal, DistanceZFar > DistanceZNear.)");
    defineOptionalProperty<float>(
        "FieldOfView", 90,
        "Vertical field of view (FOV) in degrees.");
    defineOptionalProperty<float>(
        "BaseLine", 0.075,
        "The value returned from getBaseline(). It has no other effect.");
    defineOptionalProperty<float>(
        "NanValue", /* ... */,
        "Value of points that are farther away than DistanceZFar. "
        "Most cameras return NaN here.");
    defineOptionalProperty<visionx::ImageDimension>(
        "ImageSize",
        visionx::ImageDimension(640, 480),
        "Target resolution of the images. Captured images will be converted to this size.")
        .setCaseInsensitive(true)
        .map("200x200", visionx::ImageDimension(200, 200))
        .map("320x240", visionx::ImageDimension(320, 240))
        .map("640x480", visionx::ImageDimension(640, 480))
        .map("800x600", visionx::ImageDimension(800, 600))
        .map("768x576", visionx::ImageDimension(768, 576))
        .map("1024x768", visionx::ImageDimension(1024, 768))
        .map("1280x960", visionx::ImageDimension(1280, 960))
        .map("1600x1200", visionx::ImageDimension(1600, 1200))
        .map("1920x1080", visionx::ImageDimension(1920, 1080))
        .map("none", visionx::ImageDimension(0, 0));

    defineOptionalProperty<std::string>(
        "RobotName",
        "Armar3",
        "The robot's name.");
    defineOptionalProperty<std::string>(
        "RobotNodeCamera",
        "DepthCameraSim",
        "The coordinate system of the used camera");
    defineOptionalProperty<bool>(
        "DrawPointCloud", /* ... */,
        "Whether the point cloud is drawn to the given DebugDrawerTopic");
    defineOptionalProperty<std::string>(
        "DrawPointCloud_DebugDrawerTopic",
        "DebugDrawerUpdates",
        "Name of the DebugDrawerTopic");
    defineOptionalProperty<unsigned int>(
        "DrawPointCloud_DrawDelay", /* ... */,
        "The time between updates of the drawn point cloud (in ms)");
    defineOptionalProperty<float>(
        "DrawPointCloud_PointSize", 4,
        "The size of a point.");
    defineOptionalProperty<std::size_t>(
        "DrawPointCloud_PointSkip", /* ... */,
        "Only draw every n-th point in x and y direction (n = DrawPointCloud_PointSkip). "
        "Increase this whenever the Ice buffer size is too small to transmit the cloud.");
    defineOptionalProperty<bool>(
        "DrawPointCloud_ClipPoints", /* ... */,
        "Whether to clip the point cloud drawn to the given DebugDrawerTopic");
    defineOptionalProperty<float>(
        "DrawPointCloud_ClipXHi", 25000,
        "Skip points with x higher than this limit.");
    defineOptionalProperty<float>(
        "DrawPointCloud_ClipYHi", 25000,
        "Skip points with y higher than this limit.");
    defineOptionalProperty<float>(
        "DrawPointCloud_ClipZHi", 25000,
        "Skip points with z higher than this limit.");
    defineOptionalProperty<float>(
        "DrawPointCloud_ClipXLo", -25000,
        "Skip points with x lower than this limit.");
    defineOptionalProperty<float>(
        "DrawPointCloud_ClipYLo", -25000,
        "Skip points with y lower than this limit.");
    defineOptionalProperty<float>(
        "DrawPointCloud_ClipZLo", -25000,
        "Skip points with z lower than this limit.");
}
    baseline = getProperty<float>("BaseLine").getValue();
    robotName = getProperty<std::string>("RobotName").getValue();
    camNodeName = getProperty<std::string>("RobotNodeCamera").getValue();

    zNear = getProperty<float>("DistanceZNear").getValue();
    zFar = getProperty<float>("DistanceZFar").getValue();
    vertFov = simox::math::deg_to_rad(getProperty<float>("FieldOfView").getValue());
    nanValue = getProperty<float>("NanValue").getValue();

    lastUpdate = std::chrono::high_resolution_clock::now() - /* ... */;
    /* ... */ = getProperty<std::string>("DrawPointCloud_DebugDrawerTopic").getValue();

    pointSkip = getProperty<std::size_t>("DrawPointCloud_PointSkip").getValue();

    clipPointCloud = getProperty<bool>("DrawPointCloud_ClipPoints").getValue();

    noise = getProperty<float>("Noise").getValue();

    ImageProvider::onInitComponent();
    CapturingPointCloudProvider::onInitComponent();
    ARMARX_VERBOSE_S << "done DepthImageProviderDynamicSimulation::onInitComponent";

    ARMARX_VERBOSE_S << "DepthImageProviderDynamicSimulation::onConnectComponent";

    ImageProvider::onConnectComponent();
    CapturingPointCloudProvider::onConnectComponent();
    ARMARX_VERBOSE_S << "done DepthImageProviderDynamicSimulation::onConnectComponent";

    CapturingPointCloudProvider::onExitComponent();
    ImageProvider::onExitComponent();
    ARMARX_VERBOSE_S << "done DepthImageProviderDynamicSimulation::onExitComponent";
    ARMARX_INFO << "DepthImageProviderDynamicSimulation::onStartCapture";

    simVisu->synchronizeVisualizationData();

    auto l = simVisu->getScopedLock();

    /* ... */
        VirtualRobot::CoinVisualizationFactory::createOffscreenRenderer(width, height));
    ARMARX_INFO << "done DepthImageProviderDynamicSimulation::onStartCapture";

    return "DynamicSimulationDepthImageProvider";
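
// getStereoCalibration: build pinhole intrinsics from the configured vertical FOV and
// image size. With aspect ratio r = width / height, the focal length in pixels is
// width / (2 * tan(vertFov * r / 2)); the principal point is the image center. The depth
// camera is translated along x by 0.075 (matching the BaseLine default) relative to the
// RGB camera.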
visionx::StereoCalibration
/* ... */
{
    visionx::StereoCalibration stereoCalibration;

    float r = static_cast<float>(width) / static_cast<float>(height);

    visionx::CameraParameters RGBCameraIntrinsics;
    RGBCameraIntrinsics.distortion = {0, 0, 0, 0};
    RGBCameraIntrinsics.focalLength = {
        static_cast<float>(width) / (2 * std::tan((vertFov * r) / 2)),
        /* ... */};
    RGBCameraIntrinsics.height = height;
    RGBCameraIntrinsics.principalPoint = {width / 2.0f, height / 2.0f};
    RGBCameraIntrinsics.rotation = /* ... */;
    RGBCameraIntrinsics.translation = /* ... */;
    RGBCameraIntrinsics.width = width;

    visionx::CameraParameters DepthCameraIntrinsics;
    DepthCameraIntrinsics.distortion = {0, 0, 0, 0};
    DepthCameraIntrinsics.focalLength = {
        static_cast<float>(width) / (2 * std::tan((vertFov * r) / 2)),
        /* ... */};
    DepthCameraIntrinsics.height = height;
    DepthCameraIntrinsics.principalPoint = {width / 2.0f, height / 2.0f};
    DepthCameraIntrinsics.rotation = /* ... */;
    DepthCameraIntrinsics.translation = {0.075, 0, 0};
    DepthCameraIntrinsics.width = width;

    stereoCalibration.calibrationLeft.cameraParam = RGBCameraIntrinsics;
    stereoCalibration.calibrationRight.cameraParam = DepthCameraIntrinsics;
    stereoCalibration.rectificationHomographyLeft = /* ... */;
    stereoCalibration.rectificationHomographyRight = /* ... */;

    return stereoCalibration;
}
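
// getReferenceFrame: the provided images and point cloud are expressed in the frame of
// the simulated camera robot node.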
    return getProperty<std::string>("RobotNodeCamera");
    ARMARX_VERBOSE_S << "DepthImageProviderDynamicSimulation::onInitImageProvider";

    const auto imgSz = getProperty<visionx::ImageDimension>("ImageSize").getValue();

    /* ... */
        static_cast<int>(width),
        static_cast<int>(height), CByteImage::eRGB24});
    dImage.reset(new CByteImage{
        static_cast<int>(width),
        static_cast<int>(height), CByteImage::eRGB24});

    setImageFormat(visionx::ImageDimension(imgSz), visionx::eFloat1Channel);

    /* ... */
        new CFloatImage{
            static_cast<int>(width),
            static_cast<int>(height), 1});

    pointcloud.reset(new pcl::PointCloud<PointT>{
        static_cast<unsigned int>(width),
        static_cast<unsigned int>(height)});

    SoInteraction::init();

    std::stringstream svName;
    svName << getName() << "_PhysicsWorldVisualization";
    simVisu = Component::create<ArmarXPhysicsWorldVisualization>(/* ... */);

    ARMARX_VERBOSE_S << "done DepthImageProviderDynamicSimulation::onInitImageProvider";

    ImageProvider::onDisconnectComponent();
    CapturingPointCloudProvider::onDisconnectComponent();
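
    // Capture: timestamp the frame, rotate the rendered cloud by 90 degrees about the
    // z-axis (pcl::transformPointCloud), and optionally publish it to the DebugDrawerTopic
    // before filling the output buffers.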
    bool succeeded = true;

    /* ... */
        : IceUtil::Time::now().toMicroSeconds();

    rotate.block(0, 0, 3, 3) =
        Eigen::AngleAxis<float>(M_PI_2, Eigen::Vector3f::UnitZ()).toRotationMatrix();
    pcl::PointCloud<PointT>::Ptr cloudTransformedPtr(new pcl::PointCloud<PointT>());
    pcl::transformPointCloud(*pointcloud, *cloudTransformedPtr, rotate);
    cloudTransformedPtr->header.stamp = timestamp;
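
    // Debug drawing: transform every pointSkip-th point into the world frame (relative to
    // pointCloudBaseNode if available, otherwise the origin) and publish the result as a
    // DebugDrawer24BitColoredPointCloud.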
    lastUpdate = std::chrono::high_resolution_clock::now();
    DebugDrawer24BitColoredPointCloud dbgPcl;
    dbgPcl.points.reserve(cloudTransformedPtr->size());

    const auto pointCloudBaseNode = /* ... */;
    if (!pointCloudBaseNode)
    {
        /* ... */
            << "\nthe point cloud will be attached to (0,0,0)";
    }

    /* ... */
        (pointCloudBaseNode ? pointCloudBaseNode->getGlobalPose()
                            : /* ... */);

    for (std::size_t x = 0; x < static_cast<std::size_t>(width); x += pointSkip)
    {
        for (std::size_t y = 0; y < static_cast<std::size_t>(height); y += pointSkip)
        {
            const auto& pclPoint = cloudTransformedPtr->at(x, y);

            const Eigen::Vector4f pointFromPcl{pclPoint.x, pclPoint.y, pclPoint.z, 1};
            const Eigen::Vector4f pointInWorld = transformCamToWorld * pointFromPcl;
            const Eigen::Vector4f pointInWorldNormalizedW =
                pointInWorld / pointInWorld(3);

            DebugDrawer24BitColoredPointCloudElement dbgPoint;
            dbgPoint.x = pointInWorldNormalizedW(0);
            dbgPoint.y = pointInWorldNormalizedW(1);
            dbgPoint.z = pointInWorldNormalizedW(2);

            dbgPoint.color.r = pclPoint.r;
            dbgPoint.color.g = pclPoint.g;
            dbgPoint.color.b = pclPoint.b;

            dbgPcl.points.emplace_back(dbgPoint);
        }
    }

    /* ... */
        "PointClouds", getName() + "::PointCloud", dbgPcl);
    bool renderOK = false;

    auto l = simVisu->getScopedLock();
    auto start = IceUtil::Time::now();

    renderOK = VirtualRobot::CoinVisualizationFactory::renderOffscreenRgbDepthPointcloud(
        /* ... */);

    /* ... */
        << (IceUtil::Time::now() - start).toMilliSecondsDouble() << " ms";
    auto start = std::chrono::high_resolution_clock::now();

    int noiseOffset = noise > 0 ? rand() % randValuesSize : 0;

    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < width; x++)
        {
            float value = depthBufferEndOfRow[-x];
            dFloatImageRow[x] = value;
        }
        dFloatImageRow += width;
        depthBufferEndOfRow += width;
    }
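
    // Byte-image mode: copy the mirrored RGB row into rgbImage and store the depth value,
    // clamped to 0xffffff, byte-wise in the eRGB24 depth image (dImage).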
    std::uint8_t* rgbImageRow = rgbImage->pixels;
    std::uint8_t* rgbBufferEndOfRow = rgbBuffer.data() + (width - 1) * 3;
    std::uint8_t* dImageRow = dImage->pixels;
    for (int y = 0; y < height; ++y)
    {
        for (int x = 0; x < width; ++x)
        {
            memcpy(static_cast<void*>(&(rgbImageRow[x * 3])),
                   static_cast<const void*>(&(rgbBufferEndOfRow[-x * 3])),
                   /* ... */);

            float valueF = depthBufferEndOfRow[-x];
            valueF = std::min(valueF, static_cast<float>(0xffffff));
            std::uint32_t value = static_cast<std::uint32_t>(valueF);
            memcpy(static_cast<void*>(&(dImageRow[x * 3])),
                   static_cast<const void*>(&value),
                   /* ... */);
        }
        rgbImageRow += width * 3;
        rgbBufferEndOfRow += width * 3;
        dImageRow += width * 3;
        depthBufferEndOfRow += width;
    }
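
    // Fill the organized output point cloud: positions are taken mirrored per row from the
    // rendered point cloud buffer, optionally perturbed with noise from the precomputed
    // randValuesData table (normally distributed, per the Noise property), and colored
    // from the RGB buffer.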
    std::uint8_t* rgbBufferEntry = rgbBuffer.data();

    for (int y = 0; y < height; ++y)
    {
        for (int x = 0; x < width; ++x)
        {
            PointT& point = outputEndOfRow[-x];
            Eigen::Vector3f pointCloudBufferPoint = inputRow[x];

            int noiseIndex = (y * width + x) + noiseOffset;
            const float noiseX =
                noise > 0.0f ? randValuesData[(noiseIndex) % randValuesSize] : 0.0f;
            const float noiseY =
                noise > 0.0f ? randValuesData[(noiseIndex + 1) % randValuesSize] : 0.0f;
            const float noiseZ =
                noise > 0.0f ? randValuesData[(noiseIndex + 2) % randValuesSize] : 0.0f;

            point.x = pointCloudBufferPoint[0] + noiseX;
            point.y = pointCloudBufferPoint[1] + noiseY;
            point.z = pointCloudBufferPoint[2] + noiseZ;
            point.r = rgbBufferEntry[0];
            point.g = rgbBufferEntry[1];
            point.b = rgbBufferEntry[2];
            /* ... */
        }
        /* ... */
        outputEndOfRow += width;
    }
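
    // Log how long the buffer conversion took (average and standard deviation over the
    // accumulated timings).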
    auto end = std::chrono::high_resolution_clock::now();
    std::int64_t timeDiff =
        std::chrono::duration_cast<std::chrono::microseconds>(end - start).count();

    double stddevTime = std::sqrt(averageSquaredTime - averageTime * averageTime);

    ARMARX_DEBUG << "Copying depth buffer data took: " << averageTime << "ms +- "
                 << stddevTime << "ms";

    simVisu->synchronizeVisualizationData();