DepthImageProviderDynamicSimulation.cpp
Go to the documentation of this file.
1/*
2 * This file is part of ArmarX.
3 *
4 * Copyright (C) 2013-2016, High Performance Humanoid Technologies (H2T), Karlsruhe Institute of Technology (KIT), all rights reserved.
5 *
6 * ArmarX is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License version 2 as
8 * published by the Free Software Foundation.
9 *
10 * ArmarX is distributed in the hope that it will be useful, but
11 * WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
14 *
15 * You should have received a copy of the GNU General Public License
16 * along with this program. If not, see <http://www.gnu.org/licenses/>.
17 *
18 * @package
19 * @author Raphael Grimm
20 * @date 2015
21 * @copyright http://www.gnu.org/licenses/gpl-2.0.txt
22 * GNU General Public License
23 */
24
26
27#include <Eigen/Geometry>
28
29#include <pcl/common/transforms.h>
30#include <pcl/compression/organized_pointcloud_conversion.h>
31
32#include <SimoxUtility/math/convert/deg_to_rad.h>
33#include <VirtualRobot/Nodes/RobotNode.h>
34#include <VirtualRobot/Visualization/CoinVisualization/CoinVisualizationFactory.h>
35
37
40
41#include <Inventor/SoInteraction.h>
42#include <Inventor/actions/SoGLRenderAction.h>
43
44namespace armarx
45{
46
// Builds the component's property definitions (camera simulation, noise,
// clipping planes, image size, robot/camera names, and debug-drawer options).
// NOTE(review): this listing is a Doxygen text export — the function signature
// line and several `defineOptionalProperty<...>(` call openings (e.g. before
// "FloatImageMode", "Noise", "DistanceZFar", "FieldOfView") are missing from
// the extraction; recover them from the original source before editing.
50 {
52 "FloatImageMode", false, "Whether to provide a CFloatImage or the standard CByteImage");
54 0.0f,
55 "Noise of the point cloud position results as standard "
56 "deviation of the normal distribution (in mm)")
57 .setMin(0);
58 defineOptionalProperty<float>("DistanceZNear",
59 20.0f,
60 "Distance of the near clipping plain. (If set too small "
61 "the agent's model's inside may be visible")
62 .setMin(1e-8);
64 "DistanceZFar",
65 5000.0f,
66 "Distance of the far clipping plain. (DistanceZFar-DistanceZNear should be "
67 "minimal, DistanceZFar > DistanceZNear)")
68 .setMin(1e-8);
70 "FieldOfView", 90, "Vertical field of view (FOV) in degrees.");
72 "BaseLine", 0.075, "The value returned from getBaseline(). It has no other effect.");
74 std::nan(""),
75 "Value of points that are farther away than DistanceZFar. "
76 "Most cameras return NaN here.");
77
// Image resolution is restricted to a fixed set of named dimensions.
79 "ImageSize",
80 visionx::ImageDimension(640, 480),
81 "Target resolution of the images. Captured images will be converted to this size.")
82 .setCaseInsensitive(true)
83 .map("200x200", visionx::ImageDimension(200, 200))
84 .map("320x240", visionx::ImageDimension(320, 240))
85 .map("640x480", visionx::ImageDimension(640, 480))
86 .map("800x600", visionx::ImageDimension(800, 600))
87 .map("768x576", visionx::ImageDimension(768, 576))
88 .map("1024x768", visionx::ImageDimension(1024, 768))
89 .map("1280x960", visionx::ImageDimension(1280, 960))
90 .map("1600x1200", visionx::ImageDimension(1600, 1200))
91 .map("1920x1080", visionx::ImageDimension(1920, 1080))
92 .map("none", visionx::ImageDimension(0, 0));
93 defineOptionalProperty<std::string>("RobotName", "Armar3", "The robot's name.");
95 "RobotNodeCamera", "DepthCameraSim", "The coordinate system of the used camera");
96
97 //used to draw the cloud to the simulator
99 "DrawPointCloud",
100 false,
101 "Whether the point cloud is drawn to the given DebugDrawerTopic");
102 defineOptionalProperty<std::string>("DrawPointCloud_DebugDrawerTopic",
103 "DebugDrawerUpdates",
104 "Name of the DebugDrawerTopic");
106 "DrawPointCloud_DrawDelay",
107 1000,
108 "The time between updates of the drawn point cloud (in ms)");
109 defineOptionalProperty<float>("DrawPointCloud_PointSize", 4, "The size of a point.");
111 "DrawPointCloud_PointSkip",
112 3,
113 "Only draw every n'th point in x and y direction (n=DrawPointCloud_PointSkip). "
114 "Increase this whenever the ice buffer size is to small to transmitt the cloud "
115 "size. (>0)");
116
// Axis-aligned clipping box applied only to the *drawn* debug point cloud,
// not to the provided sensor data (see the capture routine).
118 "DrawPointCloud_ClipPoints",
119 true,
120 "Whether to clip the point cloud drawn to the given DebugDrawerTopic");
121
123 "DrawPointCloud_ClipXHi", 25000, "Skip points with x higher than this limit.");
125 "DrawPointCloud_ClipYHi", 25000, "Skip points with y higher than this limit.");
127 "DrawPointCloud_ClipZHi", 25000, "Skip points with z higher than this limit.");
128
130 "DrawPointCloud_ClipXLo", -25000, "Skip points with x lower than this limit.");
132 "DrawPointCloud_ClipYLo", -25000, "Skip points with y lower than this limit.");
134 "DrawPointCloud_ClipZLo", -25000, "Skip points with z lower than this limit.");
135 }
136
// onInitComponent (named by the log messages below; the signature line is
// missing from this Doxygen text export). Reads all configured properties into
// members and seeds the noise table, then forwards to the base-class hooks.
137 void
139 {
141 ARMARX_VERBOSE_S << "DepthImageProviderDynamicSimulation::onInitComponent";
142 baseline = getProperty<float>("BaseLine").getValue();
143 robotName = getProperty<std::string>("RobotName").getValue();
144 camNodeName = getProperty<std::string>("RobotNodeCamera").getValue();
145
146 zNear = getProperty<float>("DistanceZNear").getValue();
147 zFar = getProperty<float>("DistanceZFar").getValue();
// FieldOfView is configured in degrees; stored internally in radians.
148 vertFov = simox::math::deg_to_rad(getProperty<float>("FieldOfView").getValue());
149 nanValue = getProperty<float>("NanValue").getValue();
150
151 floatImageMode = getProperty<bool>("FloatImageMode").getValue();
152
153 drawPointCloud = getProperty<bool>("DrawPointCloud").getValue();
154 if (drawPointCloud)
155 {
// Backdate lastUpdate so the first debug-draw happens immediately.
// NOTE(review): pointCloudDrawDelay is read here but only assigned from the
// property a few lines below (so its prior/default value is used), and it is
// wrapped in std::chrono::seconds although the property is documented in ms —
// verify against the original source.
156 lastUpdate = std::chrono::high_resolution_clock::now() -
157 std::chrono::seconds{pointCloudDrawDelay};
// NOTE(review): the assignment target for this expression (doc line 158) is
// missing from the extraction — presumably the debug-drawer topic name member.
159 getProperty<std::string>("DrawPointCloud_DebugDrawerTopic").getValue();
161 pointCloudDrawDelay = getProperty<unsigned int>("DrawPointCloud_DrawDelay").getValue();
162 pointCloudPointSize = getProperty<float>("DrawPointCloud_PointSize").getValue();
163
164 pointSkip = getProperty<std::size_t>("DrawPointCloud_PointSkip").getValue();
165
166 clipDrawnCloudXLo = getProperty<float>("DrawPointCloud_ClipXLo").getValue();
167 clipDrawnCloudYLo = getProperty<float>("DrawPointCloud_ClipYLo").getValue();
168 clipDrawnCloudZLo = getProperty<float>("DrawPointCloud_ClipZLo").getValue();
169
170 clipDrawnCloudXHi = getProperty<float>("DrawPointCloud_ClipXHi").getValue();
171 clipDrawnCloudYHi = getProperty<float>("DrawPointCloud_ClipYHi").getValue();
172 clipDrawnCloudZHi = getProperty<float>("DrawPointCloud_ClipZHi").getValue();
173
174 clipPointCloud = getProperty<bool>("DrawPointCloud_ClipPoints").getValue();
175 }
176 noise = getProperty<float>("Noise").getValue();
177 if (noise > 0)
178 {
// Pre-sample 1000 normally-distributed values once; capture reuses this
// table with a random offset instead of drawing per point.
179 distribution = std::normal_distribution<double>(0, noise);
180 randValues.resize(1000, 0);
181 for (float& val : randValues)
182 {
183 val = distribution(generator);
184 }
185 }
186
187 ImageProvider::onInitComponent();
188 CapturingPointCloudProvider::onInitComponent();
189 ARMARX_VERBOSE_S << "done DepthImageProviderDynamicSimulation::onInitComponent";
190 }
191
// onConnectComponent (named by the log messages below; signature line missing
// from this export). Forwards to the base-class connect hooks.
192 void
194 {
196 ARMARX_VERBOSE_S << "DepthImageProviderDynamicSimulation::onConnectComponent";
197
198 if (drawPointCloud)
199 {
// NOTE(review): the body of this branch (doc line 200) is missing from the
// extraction — presumably it fetches the debug-drawer topic proxy.
201 }
202
203 ImageProvider::onConnectComponent();
204 CapturingPointCloudProvider::onConnectComponent();
205 ARMARX_VERBOSE_S << "done DepthImageProviderDynamicSimulation::onConnectComponent";
206 }
207
// onExitComponent (named by the log messages below; signature line missing
// from this export). Tears down in reverse order of initialization.
208 void
210 {
211 ARMARX_VERBOSE_S << "DepthImageProviderDynamicSimulation::onExitComponent";
212 CapturingPointCloudProvider::onExitComponent();
213 ImageProvider::onExitComponent();
214 ARMARX_VERBOSE_S << "done DepthImageProviderDynamicSimulation::onExitComponent";
215 }
216
// onStartCapture (named by the log messages below; signature line missing from
// this export). Blocks until the simulation visualization and the configured
// robot are available, then creates the Coin3D offscreen renderer.
217 void
219 {
221 ARMARX_INFO << "DepthImageProviderDynamicSimulation::onStartCapture";
222 // ensure that all data is loaded
// Busy-wait (100 ms polling) until simVisu exists and knows the robot.
223 while (!simVisu || !simVisu->getRobot(robotName))
224 {
225 if (simVisu)
226 {
228 simVisu->synchronizeVisualizationData();
229 }
230 usleep(100000);
231 ARMARX_INFO << deactivateSpam(3) << "Waiting for visu";
232 }
233 syncVisu();
234 ARMARX_INFO << "Got visu";
235
// Hold both the capture mutex and the visualization's own lock while
// (re)creating the renderer.
237 std::unique_lock lock(captureMutex);
238 auto l = simVisu->getScopedLock();
239
240 // some checks...
242 offscreenRenderer.reset(
243 VirtualRobot::CoinVisualizationFactory::createOffscreenRenderer(width, height));
244 ARMARX_INFO << "done DepthImageProviderDynamicSimulation::onStartCapture";
245 }
246
247 void
251
255
// getDefaultName (signature line missing from this export): default component
// name used when none is configured.
256 std::string
258 {
259 return "DynamicSimulationDepthImageProvider";
260 }
261
// Boolean query that unconditionally returns true. NOTE(review): the signature
// line (doc line 263) is missing from this export — per the Doxygen index this
// is presumably hasSharedMemorySupport() or getImagesAreUndistorted(); confirm
// against the original source.
262 bool
264 {
265 return true;
266 }
267
// getStereoCalibration (signature line missing from this export): builds an
// ideal pinhole calibration for the simulated RGB-D camera pair — zero
// distortion, identity rotation, principal point at the image center, and
// focal lengths derived from the vertical FOV and the aspect ratio.
268 visionx::StereoCalibration
270 {
271 visionx::StereoCalibration stereoCalibration;
272
// Aspect ratio; used to scale the vertical FOV into a horizontal one.
273 float r = static_cast<float>(width) / static_cast<float>(height);
274
275 visionx::CameraParameters RGBCameraIntrinsics;
276 RGBCameraIntrinsics.distortion = {0, 0, 0, 0};
// f = (size/2) / tan(fov/2) for each axis; horizontal FOV approximated as
// vertFov * aspect ratio.
277 RGBCameraIntrinsics.focalLength = {
278 static_cast<float>(width) / (2 * std::tan((vertFov * r) / 2)),
279 static_cast<float>(height) / (2 * std::tan(vertFov / 2))};
280 RGBCameraIntrinsics.height = height;
281 RGBCameraIntrinsics.principalPoint = {width / 2.0f, height / 2.0f};
282 RGBCameraIntrinsics.rotation =
283 visionx::tools::convertEigenMatToVisionX(Eigen::Matrix3f::Identity());
284 RGBCameraIntrinsics.translation =
285 visionx::tools::convertEigenVecToVisionX(Eigen::Vector3f::Zero());
286 RGBCameraIntrinsics.width = width;
287
288 visionx::CameraParameters DepthCameraIntrinsics;
289 DepthCameraIntrinsics.distortion = {0, 0, 0, 0};
290 DepthCameraIntrinsics.focalLength = {
291 static_cast<float>(width) / (2 * std::tan((vertFov * r) / 2)),
292 static_cast<float>(height) / (2 * std::tan(vertFov / 2))};
293 DepthCameraIntrinsics.height = height;
294 DepthCameraIntrinsics.principalPoint = {width / 2.0f, height / 2.0f};
295 DepthCameraIntrinsics.rotation =
296 visionx::tools::convertEigenMatToVisionX(Eigen::Matrix3f::Identity());
// NOTE(review): the baseline is hard-coded to 0.075 here even though a
// "BaseLine" property (member `baseline`) exists — verify this is intended.
297 DepthCameraIntrinsics.translation = {0.075, 0, 0};
298 DepthCameraIntrinsics.width = width;
299
300
301 stereoCalibration.calibrationLeft = visionx::tools::createDefaultMonocularCalibration();
302 stereoCalibration.calibrationRight = visionx::tools::createDefaultMonocularCalibration();
303 stereoCalibration.calibrationLeft.cameraParam = RGBCameraIntrinsics;
304 stereoCalibration.calibrationRight.cameraParam = DepthCameraIntrinsics;
// Zero homographies signal "no rectification" to consumers.
305 stereoCalibration.rectificationHomographyLeft =
306 visionx::tools::convertEigenMatToVisionX(Eigen::Matrix3f::Zero());
307 stereoCalibration.rectificationHomographyRight =
308 visionx::tools::convertEigenMatToVisionX(Eigen::Matrix3f::Zero());
309
310
311 return stereoCalibration;
312 }
313
// Boolean query that unconditionally returns true — the simulated camera
// applies no distortion. NOTE(review): the signature line (doc line 315) is
// missing from this export; presumably getImagesAreUndistorted() — confirm.
314 bool
316 {
317 return true;
318 }
319
// getReferenceFrame (signature line missing from this export): the reference
// frame is the configured camera robot node's name.
320 std::string
322 {
323 return getProperty<std::string>("RobotNodeCamera");
324 }
325
326 void
330
// onExitImageProvider (per the Doxygen index; signature line missing from this
// export). Releases images, the simulator visualization object, and the
// offscreen renderer, then shuts down Coin3D.
331 void
333 {
335 rgbImage.reset();
336 dImage.reset();
337 dFloatImage.reset();
// Null the raw provide-buffers so no dangling pointers survive the resets.
338 imgPtr[0] = nullptr;
339 imgPtr[1] = nullptr;
340 imgPtrFlt[0] = nullptr;
341
342 if (simVisu)
343 {
344 simVisu->releaseResources();
// Blocking removal: waits until the visualization object is gone.
345 getArmarXManager()->removeObjectBlocking(simVisu);
346 }
347 simVisu = nullptr;
348 cameraNode = nullptr;
349 visualization = nullptr;
350
351 offscreenRenderer.reset();
352
// Matches the SoDB::init() in onInitImageProvider.
353 SoDB::finish();
354 }
355
// onInitImageProvider (named by the log messages below; signature line missing
// from this export). Allocates image/cloud buffers for the configured
// resolution, initializes Coin3D, and registers the simulator-visualization
// helper object with the ArmarX manager.
356 void
358 {
360 ARMARX_VERBOSE_S << "DepthImageProviderDynamicSimulation::onInitImageProvider";
361
362 const auto imgSz = getProperty<visionx::ImageDimension>("ImageSize").getValue();
363 height = imgSz.height;
364 width = imgSz.width;
365
366 if (!floatImageMode)
367 {
// Byte mode: two RGB24 images — color and depth (depth packed into 3 bytes).
368 setImageFormat(visionx::ImageDimension(imgSz), visionx::eRgb);
370
371 rgbImage.reset(new CByteImage{
372 static_cast<int>(width), static_cast<int>(height), CByteImage::eRGB24});
373 dImage.reset(new CByteImage{
374 static_cast<int>(width), static_cast<int>(height), CByteImage::eRGB24});
375 imgPtr[0] = rgbImage.get();
376 imgPtr[1] = dImage.get();
377 }
378 else
379 {
// Float mode: a single 1-channel float depth image.
380 setImageFormat(visionx::ImageDimension(imgSz), visionx::eFloat1Channel);
382
383 dFloatImage.reset(
384 new CFloatImage{static_cast<int>(width), static_cast<int>(height), 1});
385 imgPtrFlt[0] = dFloatImage.get();
386 }
387
388
// Organized point cloud with the same width/height as the images.
389 pointcloud.reset(new pcl::PointCloud<PointT>{static_cast<unsigned int>(width),
390 static_cast<unsigned int>(height)});
391
392 rgbBuffer.resize(width * height);
393 depthBuffer.resize(width * height);
394 pointCloudBuffer.resize(width * height);
395
396 // init SoDB / Coin3D
397 SoDB::init();
398
399 VirtualRobot::init(getName());
400
401 // needed for SoSelection
402 SoInteraction::init();
403
// NOTE(review): the call that creates simVisu (doc line 406) is missing from
// this extraction; only its argument list survives below.
404 std::stringstream svName;
405 svName << getName() << "_PhysicsWorldVisualization";
407 getIceProperties(), getName() + "_PhysicsWorldVisualization");
408 getArmarXManager()->addObject(simVisu);
409 ARMARX_VERBOSE_S << "done DepthImageProviderDynamicSimulation::onInitImageProvider";
410
// Reset render-time statistics (see the render routine).
412 sumRenderTimes = 0;
414 }
415
// onDisconnectComponent (per the Doxygen index; signature line missing from
// this export). Forwards to the base-class disconnect hooks.
416 void
418 {
420 ImageProvider::onDisconnectComponent();
421 CapturingPointCloudProvider::onDisconnectComponent();
422 }
423
424 void
428
434
// Capture routine (signature line missing from this export; presumably the
// CapturingPointCloudProvider capture override — confirm against the original
// source). Syncs with the simulator, renders, publishes the transformed point
// cloud and images, and optionally pushes a decimated/clipped debug cloud to
// the debug drawer.
435 bool
437 {
439 bool succeeded = true;
440 Ice::Long timestamp;
441 try
442 {
// Prefer the simulator's synchronized timestamp when a time server exists.
443 timestamp = TimeUtil::HasTimeServer() ? simVisu->getSyncTimestamp()
444 : IceUtil::Time::now().toMicroSeconds();
445 syncVisu();
// NOTE(review): the condition of this early-return (doc line 446) is missing
// from the extraction.
447 {
448 return false;
449 }
450 succeeded = render();
451 }
452 catch (...)
453 {
455 succeeded = false;
456 }
457
458 if (succeeded)
459 {
460 Eigen::Matrix4f rotate = Eigen::Matrix4f::Identity();
461 // rotate to conform to ArmarX standards
// 90° rotation about Z applied to the whole cloud before providing it.
462 rotate.block(0, 0, 3, 3) =
463 Eigen::AngleAxis<float>(M_PI_2, Eigen::Vector3f::UnitZ()).toRotationMatrix();
464 pcl::PointCloud<PointT>::Ptr cloudTransformedPtr(new pcl::PointCloud<PointT>());
465 pcl::transformPointCloud(*pointcloud, *cloudTransformedPtr, rotate);
466 cloudTransformedPtr->header.stamp = timestamp;
467 providePointCloud(cloudTransformedPtr);
468
// NOTE(review): the provideImages(...) calls for both branches (doc lines
// 469/472/476) are missing from the extraction.
470 if (!floatImageMode)
471 {
473 }
474 else
475 {
477 }
478
// Debug drawing is rate-limited by DrawPointCloud_DrawDelay.
479 if (drawPointCloud && (std::chrono::high_resolution_clock::now() - lastUpdate >
480 std::chrono::milliseconds{pointCloudDrawDelay}))
481 {
482 lastUpdate = std::chrono::high_resolution_clock::now();
483 DebugDrawer24BitColoredPointCloud dbgPcl;
484 dbgPcl.points.reserve(cloudTransformedPtr->size());
485
486 dbgPcl.pointSize = pointCloudPointSize;
487
// Attach the drawn cloud to the camera node; fall back to the origin if the
// node does not exist.
488 const auto pointCloudBaseNode =
489 simVisu->getRobot(robotName)->getRobotNode(camNodeName);
490
491 if (!pointCloudBaseNode)
492 {
493 ARMARX_ERROR_S << deactivateSpam() << robotName << " has no node "
494 << camNodeName
495 << "\nthe point cloud will be attached to (0,0,0)";
496 }
497
// NOTE(review): this local `rotate` shadows the identically-named matrix
// above; here it is left as identity (the rotation line is commented out).
498 Eigen::Matrix4f rotate = Eigen::Matrix4f::Identity();
499 // rotate.block(0, 0, 3, 3) = Eigen::AngleAxis<float>(M_PI_2, Eigen::Vector3f::UnitZ()). toRotationMatrix();
500 Eigen::Matrix4f transformCamToWorld =
501 (pointCloudBaseNode ? pointCloudBaseNode->getGlobalPose()
502 : Eigen::Matrix4f::Identity()) *
503 rotate;
504
505
506 assert(static_cast<std::size_t>(width) == pointcloud->width);
507 assert(static_cast<std::size_t>(height) == pointcloud->height);
// Decimate by pointSkip in both axes to keep the Ice message small.
508 for (std::size_t x = 0; x < static_cast<std::size_t>(width); x += pointSkip)
509 {
510 for (std::size_t y = 0; y < static_cast<std::size_t>(height); y += pointSkip)
511 {
512 const auto& pclPoint = cloudTransformedPtr->at(x, y);
513
514 //transform to point in world coord
515 const Eigen::Vector4f pointFromPcl{pclPoint.x, pclPoint.y, pclPoint.z, 1};
516
517 const Eigen::Vector4f pointInWorld = transformCamToWorld * pointFromPcl;
518 const Eigen::Vector4f pointInWorldNormalizedW =
519 pointInWorld / pointInWorld(3);
520
521 DebugDrawer24BitColoredPointCloudElement dbgPoint;
522
523 dbgPoint.x = pointInWorldNormalizedW(0);
524 dbgPoint.y = pointInWorldNormalizedW(1);
525 dbgPoint.z = pointInWorldNormalizedW(2);
526
527 //clip to space
528 if (clipPointCloud &&
529 (dbgPoint.x < clipDrawnCloudXLo || dbgPoint.x > clipDrawnCloudXHi ||
530 dbgPoint.y < clipDrawnCloudYLo || dbgPoint.y > clipDrawnCloudYHi ||
531 dbgPoint.z < clipDrawnCloudZLo || dbgPoint.z > clipDrawnCloudZHi))
532 {
533 continue;
534 }
535
536 dbgPoint.color.r = pclPoint.r;
537 dbgPoint.color.g = pclPoint.g;
538 dbgPoint.color.b = pclPoint.b;
539
540 dbgPcl.points.emplace_back(dbgPoint);
541 }
542 }
543
544 debugDrawer->set24BitColoredPointCloudVisu(
545 "PointClouds", getName() + "::PointCloud", dbgPcl);
546 // debugDrawer->setPoseVisu("PointClouds", getName() + "::PointCloudBaseNode", new Pose { (pointCloudBaseNode ? pointCloudBaseNode->getGlobalPose() : Eigen::Matrix4f::Identity()) });
547 }
548 }
549 return succeeded;
550 }
551
// render (called from the capture routine above; signature line missing from
// this export). Renders RGB + depth + point cloud offscreen via Coin3D, then
// copies the (horizontally mirrored) buffers into the provider images and the
// organized point cloud, optionally adding table-based Gaussian noise.
551 bool
553 {
555 //render images
556 if (!cameraNode)
557 {
558 ARMARX_ERROR << deactivateSpam() << "No camera node:" << camNodeName;
559 return false;
560 }
561
562 bool renderOK = false;
563 {
// Render under the visualization's own lock.
564 auto l = simVisu->getScopedLock();
565 auto start = IceUtil::Time::now();
566
// NOTE(review): two arguments of this call (doc lines 571-572 and 578/580,
// presumably camera pose/scene and the depth/point-cloud buffers) are missing
// from the extraction.
568 renderOK = VirtualRobot::CoinVisualizationFactory::renderOffscreenRgbDepthPointcloud(
569 offscreenRenderer.get(),
572 width,
573 height,
574 true,
575 rgbBuffer,
576 true,
578 true,
580 zNear,
581 zFar,
582 vertFov,
583 nanValue);
584
585 ARMARX_DEBUG << deactivateSpam(1) << "Offscreen rendering took: "
586 << (IceUtil::Time::now() - start).toMilliSecondsDouble() << " ms";
587 }
588 if (renderOK)
589 {
591 //get images + cloud
592 std::unique_lock lock(captureMutex);
593
594 auto start = std::chrono::high_resolution_clock::now();
595
// Random offset into the pre-sampled noise table (see onInitComponent).
596 auto randValuesSize = randValues.size();
597 int noiseOffset = noise > 0 ? rand() % randValuesSize : 0;
598
// The rendered buffers are mirrored in x: rows are read back-to-front via
// an end-of-row pointer indexed with [-x].
599 float* depthBufferEndOfRow = depthBuffer.data() + width - 1;
600 if (floatImageMode)
601 {
602 float* dFloatImageRow = dFloatImage->pixels;
603 for (int y = 0; y < height; y++)
604 {
605 for (int x = 0; x < width; x++)
606 {
// Clamp depth to [0, 2^24-1] (the range representable in 3 bytes).
607 float value = depthBufferEndOfRow[-x];
608 value = std::max(value, 0.0f);
609 value = std::min(value, static_cast<float>(0xffffff));
610 dFloatImageRow[x] = value;
611 }
612 dFloatImageRow += width;
613 depthBufferEndOfRow += width;
614 }
615 }
616 else
617 {
// Byte mode: mirror-copy RGB and pack the clamped depth value into the
// low 3 bytes of each depth-image pixel.
619 std::uint8_t* rgbImageRow = rgbImage->pixels;
620 std::uint8_t* rgbBufferEndOfRow = rgbBuffer.data() + (width - 1) * 3;
621 std::uint8_t* dImageRow = dImage->pixels;
622 for (int y = 0; y < height; ++y)
623 {
624 for (int x = 0; x < width; ++x)
625 {
626 memcpy(static_cast<void*>(&(rgbImageRow[x * 3])), //dest
627 static_cast<const void*>(&(rgbBufferEndOfRow[-x * 3])), //src
628 3 //count
629 );
630 float valueF = depthBufferEndOfRow[-x];
631 valueF = std::max(0.0f, valueF);
632 valueF = std::min(valueF, static_cast<float>(0xffffff));
633 std::uint32_t value = static_cast<std::uint32_t>(valueF);
// NOTE(review): copying 3 bytes of a uint32_t assumes little-endian layout.
634 memcpy(static_cast<void*>(&(dImageRow[x * 3])), //dest
635 static_cast<const void*>(&value), //src
636 3 //count
637 );
638 }
639 rgbImageRow += width * 3;
640 rgbBufferEndOfRow += width * 3;
641 dImageRow += width * 3;
642 depthBufferEndOfRow += width;
643 }
644 }
645
// Build the organized point cloud (also mirrored in x), adding per-axis
// noise from consecutive entries of the pre-sampled table.
647 float* randValuesData = randValues.data();
648 std::uint8_t* rgbBufferEntry = rgbBuffer.data();
649 Eigen::Vector3f* inputRow = pointCloudBuffer.data();
650 PointT* outputEndOfRow = pointcloud->points.data() + (width - 1);
651 for (int y = 0; y < height; ++y)
652 {
654 for (int x = 0; x < width; ++x)
655 {
656 PointT& point = outputEndOfRow[-x];
657 Eigen::Vector3f pointCloudBufferPoint = inputRow[x];
658
659 int noiseIndex = (y * width + x) + noiseOffset;
660 float noiseX =
661 noise > 0.0f ? randValuesData[(noiseIndex) % randValuesSize] : 0.0f;
662 float noiseY =
663 noise > 0.0f ? randValuesData[(noiseIndex + 1) % randValuesSize] : 0.0f;
664 float noiseZ =
665 noise > 0.0f ? randValuesData[(noiseIndex + 2) % randValuesSize] : 0.0f;
666
667 point.x = pointCloudBufferPoint[0] + noiseX;
668 point.y = pointCloudBufferPoint[1] + noiseY;
669 point.z = pointCloudBufferPoint[2] + noiseZ;
670 point.r = rgbBufferEntry[0];
671 point.g = rgbBufferEntry[1];
672 point.b = rgbBufferEntry[2];
673
674 rgbBufferEntry += 3;
675 }
676 inputRow += width;
677 outputEndOfRow += width;
678 }
679
// Running mean/stddev of the copy time, reported every 256 frames.
680 auto end = std::chrono::high_resolution_clock::now();
681 std::int64_t timeDiff =
682 std::chrono::duration_cast<std::chrono::microseconds>(end - start).count();
683 sumRenderTimes += timeDiff;
684 sumRenderTimesSquared += timeDiff * timeDiff;
685 renderTimesCount += 1;
686
687 if (renderTimesCount > 256)
688 {
690 double averageTime = sumRenderTimes * 1.0e-3 / renderTimesCount;
691 double averageSquaredTime = sumRenderTimesSquared * 1.0e-6 / renderTimesCount;
692 double stddevTime = std::sqrt(averageSquaredTime - averageTime * averageTime);
693
694 ARMARX_DEBUG << "Copying depth buffer data took: " << averageTime << "ms +- "
695 << stddevTime << "ms";
696
// NOTE(review): the resets of sumRenderTimesSquared and renderTimesCount
// (doc lines 698/700-701) are missing from the extraction.
698 sumRenderTimes = 0;
701 }
702 }
703 return renderOK;
704 }
706
// syncVisu (per the call sites above; signature line missing from this
// export). Pulls the latest simulator state and refreshes the cached camera
// node and visualization scene, warning when either is unavailable.
707 void
709 {
711 if (simVisu)
712 {
714 simVisu->synchronizeVisualizationData();
715 if (simVisu->getRobot(robotName))
716 {
718 cameraNode = simVisu->getRobot(robotName)->getRobotNode(camNodeName);
719 }
720 visualization = simVisu->getVisualization();
721 if (!cameraNode)
722 {
723 ARMARX_WARNING << "The camera node is not in the scene";
724 }
725 if (!visualization)
726 {
727 ARMARX_WARNING << "No physics visualization scene";
728 }
729 }
730 }
731
732} // namespace armarx
std::string timestamp()
SpamFilterDataPtr deactivateSpam(SpamFilterDataPtr const &spamFilter, float deactivationDurationSec, const std::string &identifier, bool deactivate)
Definition Logging.cpp:75
constexpr T c
static TPtr create(Ice::PropertiesPtr properties=Ice::createProperties(), const std::string &configName="", const std::string &configDomain="ArmarX")
Factory method for a component.
Definition Component.h:116
std::string getConfigIdentifier()
Retrieve config identifier for this component as set in constructor.
Definition Component.cpp:90
Property< PropertyType > getProperty(const std::string &name)
std::string getReferenceFrame(const Ice::Current &c=Ice::emptyCurrent) override
bool hasSharedMemorySupport(const Ice::Current &c=Ice::emptyCurrent) override
void onInitComponent() override
Pure virtual hook for the subclass.
void onExitCapturingPointCloudProvider() override
This is called when the Component::onExitComponent() setup is called.
visionx::StereoCalibration getStereoCalibration(const Ice::Current &c=Ice::emptyCurrent) override
std::chrono::high_resolution_clock::time_point lastUpdate
void onStartCapture(float frameRate) override
This is called when the point cloud provider capturing has been started.
void onInitCapturingPointCloudProvider() override
This is called when the Component::onInitComponent() is called.
bool getImagesAreUndistorted(const Ice::Current &c=Ice::emptyCurrent) override
void onConnectComponent() override
Pure virtual hook for the subclass.
void onStopCapture() override
This is called when the point cloud provider capturing has been stopped.
armarx::PropertyDefinitionsPtr createPropertyDefinitions() override
void onInitImageProvider() override
This is called when the Component::onInitComponent() is called.
void onExitImageProvider() override
This is called when the Component::onExitComponent() setup is called.
std::string getDefaultName() const override
Retrieve default name of component.
void offeringTopic(const std::string &name)
Registers a topic for retrieval after initialization.
TopicProxyType getTopic(const std::string &name)
Returns a proxy of the specified topic.
std::string getName() const
Retrieve name of object.
ArmarXManagerPtr getArmarXManager() const
Returns the ArmarX manager used to add and remove components.
std::string prefix
Prefix of the properties such as namespace, domain, component name, etc.
PropertyDefinition< PropertyType > & defineOptionalProperty(const std::string &name, PropertyType defaultValue, const std::string &description="", PropertyDefinitionBase::PropertyConstness constness=PropertyDefinitionBase::eConstant)
Ice::PropertiesPtr getIceProperties() const
Returns the set of Ice properties.
static bool HasTimeServer()
check if we have been initialized with a Timeserver
Definition TimeUtil.cpp:124
void updateTimestamp(Ice::Long timestamp, bool threadSafe=true)
Updates the timestamp of the currently captured image.
void start(const Ice::Current &c) override
void setImageFormat(ImageDimension imageDimension, ImageType imageType, BayerPatternType bayerPatternType=visionx::eBayerPatternRg)
Sets the image basic format data.
void provideImages(void **inputBuffers, const IceUtil::Time &imageTimestamp=IceUtil::Time())
send images raw.
void setNumberImages(int numberImages)
Sets the number of images on each capture.
void providePointCloud(PointCloudPtrT pointCloudPtr)
offer the new point cloud.
#define ARMARX_INFO
The normal logging level.
Definition Logging.h:181
#define ARMARX_ERROR_S
The logging level for unexpected behaviour, that must be fixed.
Definition Logging.h:216
#define ARMARX_ERROR
The logging level for unexpected behaviour, that must be fixed.
Definition Logging.h:196
#define ARMARX_VERBOSE_S
Definition Logging.h:207
#define ARMARX_DEBUG
The logging level for output that is only interesting while debugging.
Definition Logging.h:184
#define ARMARX_WARNING
The logging level for unexpected behaviour, but not a serious problem.
Definition Logging.h:193
This file offers overloads of toIce() and fromIce() functions for STL container types.
void handleExceptions()
IceUtil::Handle< class PropertyDefinitionContainer > PropertyDefinitionsPtr
PropertyDefinitions smart pointer type.
visionx::types::Vec convertEigenVecToVisionX(Eigen::VectorXf v)
visionx::types::Mat convertEigenMatToVisionX(Eigen::MatrixXf m)
MonocularCalibration createDefaultMonocularCalibration()
Creates a MonocularCalibration with all parameters set to a neutral value.
ArmarX headers.
#define ARMARX_TRACE
Definition trace.h:77