ValveAttention.cpp
Go to the documentation of this file.
1 /*
2  * This file is part of ArmarX.
3  *
4  * ArmarX is free software; you can redistribute it and/or modify
5  * it under the terms of the GNU General Public License version 2 as
6  * published by the Free Software Foundation.
7  *
8  * ArmarX is distributed in the hope that it will be useful, but
9  * WITHOUT ANY WARRANTY; without even the implied warranty of
10  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11  * GNU General Public License for more details.
12  *
13  * You should have received a copy of the GNU General Public License
14  * along with this program. If not, see <http://www.gnu.org/licenses/>.
15  *
16  * @package VisionX::ArmarXObjects::ValveAttention
17  * @author Markus Grotz ( markus dot grotz at kit dot edu )
18  * @date 2016
19  * @copyright http://www.gnu.org/licenses/gpl-2.0.txt
20  * GNU General Public License
21  */
22 
23 #include "ValveAttention.h"
24 
25 // NOTE(review): the three project includes below appear in this file's generated
26 // cross-reference index (RemoteRobot.h, CMakePackageFinder.h, ArmarXDataPath.h)
27 // but their #include lines were lost in extraction. Paths follow the usual
28 // ArmarX source layout — TODO confirm against the repository.
29 #include <ArmarXCore/core/system/ArmarXDataPath.h>
30 #include <ArmarXCore/core/system/cmake/CMakePackageFinder.h>
31 #include <RobotAPI/libraries/core/remoterobot/RemoteRobot.h>
32 
33 #include <Image/ImageProcessor.h>
34 #include <pcl/common/point_tests.h>
35 namespace armarx
36 {
37 
38 
39 
41  {
44  }
45 
46 
47 
49  {
50  usingProxy(getProperty<std::string>("ViewSelectionName").getValue());
51  usingProxy(getProperty<std::string>("RobotStateComponentName").getValue());
52 
53  providerName = getProperty<std::string>("providerName").getValue();
54  usingImageProvider(providerName);
55  usingPointCloudProvider(providerName);
56 
57  std::string graphFileName = "RobotAPI/ViewSelection/graph40k.gra";
58 
59  armarx::CMakePackageFinder finder("RobotAPI");
61 
62  if (ArmarXDataPath::getAbsolutePath(graphFileName, graphFileName))
63  {
64  saliencyEgosphereGraph = new CIntensityGraph(graphFileName);
65  ARMARX_VERBOSE << "Created egosphere graph with " << saliencyEgosphereGraph->getNodes()->size() << "nodes";
66  graphLookupTable = new CGraphPyramidLookupTable(9, 18);
67  graphLookupTable->buildLookupTable(saliencyEgosphereGraph);
68  }
69  else
70  {
71  ARMARX_ERROR << "Could not find required graph file";
73  }
74 
75  headFrameName = getProperty<std::string>("HeadFrameName").getValue();
76  hog = new HoG();
77  hog->loadTrainingData(getProperty<std::string>("TrainingData").getValue());
78  hog->setParameters(true, true);
79  }
80 
82  {
83 
84  visionx::ImageType imageDisplayType = visionx::tools::typeNameToImageType("rgb");
85 
86  visionx::ImageProviderInfo imageProviderInfo = getImageProvider(providerName, imageDisplayType);
87 
88  numImages = imageProviderInfo.numberImages;
89 
90  images = new CByteImage*[numImages];
91  for (int i = 0 ; i < numImages; i++)
92  {
93  images[i] = visionx::tools::createByteImage(imageProviderInfo);
94  }
95 
96  numResultImages = 3;
97  result = new CByteImage*[numResultImages];
98  result[0] = images[0];
99  result[1] = visionx::tools::createByteImage(imageProviderInfo);
100  result[2] = visionx::tools::createByteImage(imageProviderInfo);
101 
102 
103  visionx::ImageDimension dimension(images[0]->width, images[0]->height);
104  enableResultImages(numResultImages, dimension, visionx::tools::typeNameToImageType("rgb"));
105 
106  robotStateComponent = getProxy<RobotStateComponentInterfacePrx>(getProperty<std::string>("RobotStateComponentName").getValue());
107 
109 
110  viewSelection = getProxy<ViewSelectionInterfacePrx>(getProperty<std::string>("ViewSelectionName").getValue());
111 
112  }
113 
115  {
116 
117  }
118 
120  {
121  for (int i = 0; i < numImages; i++)
122  {
123  delete images[i];
124  }
125  delete [] images;
126 
127  for (int i = 0; i < numResultImages; i++)
128  {
129  delete result[i];
130  }
131  delete [] result;
132 
133  delete graphLookupTable;
134  delete saliencyEgosphereGraph;
135  delete hog;
136  }
137 
139  {
140  ARMARX_IMPORTANT << "process";
141 
142  std::lock_guard<std::mutex> lock(mutex);
143 
144  pcl::PointCloud<pcl::PointXYZRGBA>::Ptr currentPointCloud(new pcl::PointCloud<pcl::PointXYZRGBA>());
145 
146 
147 
148  ARMARX_VERBOSE << "waiting for images";
149  if (!waitForImages(providerName))
150  {
151  ARMARX_WARNING << "Timeout while waiting for images";
152  return;
153  }
154 
155  ARMARX_VERBOSE << "waiting for pointcloud";
156  if (!waitForPointClouds(providerName))
157  {
158  ARMARX_WARNING << "Timeout while waiting for pointcloud";
159  return;
160  }
161 
162  ARMARX_VERBOSE << "getting images";
163  if (getImages(images) != numImages)
164  {
165  ARMARX_WARNING << "Unable to transfer or read images";
166  return;
167  }
168 
169  ARMARX_VERBOSE << "getting pointcloud";
170  if (!getPointClouds(currentPointCloud))
171  {
172  ARMARX_WARNING << "Unable to transfer or read point cloud";
173  return;
174  }
175 
176 
177  RemoteRobot::synchronizeLocalClone(robot, robotStateComponent);
178 
179  saliencyEgosphereGraph->set(0.0);
180 
181  const int scaleFactor = 4; //images[0]->width / 255;
182 
183  int width = images[0]->width / scaleFactor;
184  int height = images[0]->height / scaleFactor;
185  CByteImage* image = new CByteImage(width, height, CByteImage::eRGB24);
186  ::ImageProcessor::Resize(images[0], image);
187 
188  ARMARX_VERBOSE << "downscaling image to: " << width << "x" << height;
189 
190  //CByteImage* saliencyImage = result[0]; //new CByteImage(width, height, CByteImage::eRGB24);
191  CFloatImage* saliencyImage = new CFloatImage(width, height, 1);
192 
193 
194  ARMARX_VERBOSE << "computing saliency map";
195  IceUtil::Time startTime = TimeUtil::GetTime();
196 
197  hog->findSalientRegions(image, saliencyImage);
198  //*************************test for Cluster**************************/
199  std::vector<cv::Point2f> points;
200  cv::Mat outputLable;
201  cv::Mat centers;
202  std::map<int, int> saliencyHist;
203  float priorityProcent = 0.9;
204  int clusterNum = 3;
205  int intensityThreshold = 255;
206  int saliencySum = 0;
207  int saliencyImageWidth = saliencyImage->width;
208 
209  for (int i = 0; i < saliencyImage->height; i++)
210  {
211  for (int j = 0; j < saliencyImageWidth; j++)
212  {
213  saliencyHist[(int)(saliencyImage->pixels[i * saliencyImageWidth + j] * 255.0)]++;
214  }
215  }
216 
217  // determine 90% number of non-zero salient values
218  int usefulSampleAmount = priorityProcent * (saliencyImage->width * saliencyImage->height - saliencyHist[0]);
219  for (int i = 1; i <= 255; i++)
220  {
221  if (!saliencyHist.count(i))
222  {
223  continue;
224  }
225  saliencySum += saliencyHist[i];
226  if (saliencySum > usefulSampleAmount)
227  {
228  intensityThreshold = i;
229  break;
230  }
231  }
232 
233  for (int i = 0; i < saliencyImage->height; i++)
234  {
235  for (int j = 0; j < saliencyImageWidth; j++)
236  {
237  if (saliencyImage->pixels[i * saliencyImageWidth + j] * 255.0 > intensityThreshold)
238  {
239  points.push_back(cv::Point2f(j, i));
240  }
241  }
242  }
243 
244  cv::Mat samples(points.size(), 2, CV_32F);
245  for (size_t i = 0; i < points.size(); i++)
246  {
247  samples.at<float>(i, 0) = points[i].x;
248  samples.at<float>(i, 1) = points[i].y;
249  }
250 
251  if (points.size())
252  {
253  cv::kmeans(samples, clusterNum, outputLable, cv::TermCriteria(cv::TermCriteria::COUNT + cv::TermCriteria::EPS, 20, 0.1), 10, cv::KMEANS_PP_CENTERS, centers);
254  }
255 
256  // calculate the number of elements in each cluster and get first two biggest cluster
257  std::map<int, int> numberOfEachCluster;
258  for (int i = 0; i < samples.rows; i++)
259  {
260  numberOfEachCluster[outputLable.at<int>(i, 0)]++;
261  }
262  size_t maxCluster = 0;
263  int secondCluster;
264  for (size_t i = 0; i < numberOfEachCluster.size(); i++)
265  {
266  if (numberOfEachCluster[i] > numberOfEachCluster[maxCluster])
267  {
268  maxCluster = i;
269  }
270  }
271  if (maxCluster != 0)
272  {
273  secondCluster = 0;
274  }
275  else
276  {
277  secondCluster = 1;
278  }
279  for (size_t i = 0; i < numberOfEachCluster.size(); i++)
280  {
281  if ((i != maxCluster) && numberOfEachCluster[i] > numberOfEachCluster[secondCluster])
282  {
283  secondCluster = i;
284  }
285  }
286 
287  //*************************test for Cluster END**************************/
288 
289  ARMARX_VERBOSE << "done computing saliency map. took " << (TimeUtil::GetTime() - startTime).toMilliSeconds() << "ms";
290 
291  delete image;
292 
293  #pragma omp parallel for
294  for (int i = 0; i < images[0]->height; i++)
295  {
296  for (int j = 0; j < images[0]->width; j++)
297  {
298  int idx = i * images[0]->width + j;
299 
300  float saliency = saliencyImage->pixels[(i / scaleFactor * width) + (j / scaleFactor)];
301 
302  result[1]->pixels[3 * idx + 0] = 255 * saliency;
303  result[1]->pixels[3 * idx + 1] = 255 * saliency;
304  result[1]->pixels[3 * idx + 2] = 255 * saliency;
305 
306  result[2]->pixels[3 * idx + 0] = (0.2 + 0.8 * saliency) * result[0]->pixels[3 * idx + 0];
307  result[2]->pixels[3 * idx + 1] = (0.2 + 0.8 * saliency) * result[0]->pixels[3 * idx + 1];
308  result[2]->pixels[3 * idx + 2] = (0.2 + 0.8 * saliency) * result[0]->pixels[3 * idx + 2];
309 
310  }
311  }
312 
313 
314 
315  /*********transform cluster daten (the points near cluster centers) from pointcloud to sphere **********/
316  /*
317  for (int i = 0; i < samples.rows; i++)
318  {
319  int downScaleIdx = ((int)samples.at<float>(i, 1) * width) + ((int)samples.at<float>(i, 0));
320  int idx = (int)samples.at<float>(i, 1) * scaleFactor * images[0]->width + (int)samples.at<float>(i, 0) * scaleFactor;
321  float saliency = saliencyImage->pixels[downScaleIdx];
322  int clusterLabel = outputLable.at<int>(i, 0);
323  float distanceToClusterCenter = sqrt(pow((centers.at<float>(clusterLabel, 0) - samples.at<float>(i, 0)), 2) + pow((centers.at<float>(clusterLabel, 1) - samples.at<float>(i, 1)), 2));
324 
325  if (saliency > 0 && (distanceToClusterCenter < (width / 5)))
326  {
327  if (idx >= (int)currentPointCloud->points.size())
328  {
329  continue;
330  }
331 
332  Eigen::Vector3f vec = currentPointCloud->points[idx].getVector3fMap();
333 
334  if (!pcl::isFinite(currentPointCloud->points[idx]))
335  {
336  continue;
337  }
338  TSphereCoord positionInSphereCoordinates;
339  //armarx::GlobalFrame
340  FramedPositionPtr currentViewTarget = new FramedPosition(vec, "DepthCamera", robotStateComponent->getSynchronizedRobot()->getName());
341  currentViewTarget->changeFrame(robot, headFrameName);
342 
343  MathTools::convert(currentViewTarget->toEigen(), positionInSphereCoordinates);
344  int closestNodeIndex = graphLookupTable->getClosestNode(positionInSphereCoordinates);
345  CIntensityNode* currentNode = ((CIntensityNode*)saliencyEgosphereGraph->getNodes()->at(closestNodeIndex));
346 
347  float currentSaliency = currentNode->getIntensity();
348 
349  // give priority to the cluster which has the most elements, because this cluster always near valve
350  if (clusterLabel == maxCluster)
351  {
352  //consider about history? & predict --> consider about observation & predict
353  currentSaliency = std::min(1.0f, currentSaliency + saliency * (float)1.3);
354 
355  //consider only about predict
356  //currentSaliency = std::min(1.0f, saliency * (float)1.3);
357  //std::cout << "currentSaliency!! " << currentSaliency << std::endl;
358  }
359  else
360  {
361  if (outputLable.at<int>(i, 0) == secondCluster)
362  {
363  currentSaliency = std::min(1.0f, currentSaliency + saliency * (float)1.02);
364  //currentSaliency = std::min(1.0f, saliency * (float)1.02);
365  }
366  }
367  currentNode->setIntensity(currentSaliency);
368  }
369  }
370 
371  */
372  /*********transform cluster daten from pointcloud to sphere ***** END*****/
373 
374  /*********transform cluster centers from pointcloud to sphere ***** *****/
375  for (int i = 0; i < centers.rows; i++)
376  {
377  int downScaleIdx = ((int)centers.at<float>(i, 1) * width) + ((int)centers.at<float>(i, 0));
378  int idx = (int)centers.at<float>(i, 1) * scaleFactor * images[0]->width + (int)centers.at<float>(i, 0) * scaleFactor;
379  //attention: center's saliency may shouldn't directly get from saliency map, because it's saliency can be low
380  float saliency = saliencyImage->pixels[downScaleIdx];
381 
382  if (idx >= (int)currentPointCloud->points.size())
383  {
384  continue;
385  }
386 
387  Eigen::Vector3f vec = currentPointCloud->points[idx].getVector3fMap();
388 
389  if (!pcl::isFinite(currentPointCloud->points[idx]))
390  {
391  continue;
392  }
393  TSphereCoord positionInSphereCoordinates;
394  //armarx::GlobalFrame
395  FramedPositionPtr currentViewTarget = new FramedPosition(vec, "DepthCamera", robotStateComponent->getSynchronizedRobot()->getName());
396  currentViewTarget->changeFrame(robot, headFrameName);
397 
398  MathTools::convert(currentViewTarget->toEigen(), positionInSphereCoordinates);
399  int closestNodeIndex = graphLookupTable->getClosestNode(positionInSphereCoordinates);
400 
401  float halfCameraOpeningAngle = 12.0 * M_PI / 180.0;
402  float modifiedHalfCameraOpeningAngle = halfCameraOpeningAngle;
403  float distance = currentViewTarget->toEigen().norm();
404 
405 
406  if (distance > 6000)
407  {
408  ARMARX_WARNING << "position is too far away." << distance;
409  return;
410  }
411 
412  const float distanceThreshold = 1500;
413  if (distance < distanceThreshold)
414  {
415  modifiedHalfCameraOpeningAngle = (distance - 0.1f * distanceThreshold) / (0.9f * distanceThreshold) * halfCameraOpeningAngle;
416  modifiedHalfCameraOpeningAngle = std::max(0.0f, modifiedHalfCameraOpeningAngle);
417  }
418 
419 
420  std::vector<bool> visitedNodes(saliencyEgosphereGraph->getNodes()->size(), false);
421  addSaliencyRecursive(closestNodeIndex, visitedNodes, saliency, positionInSphereCoordinates, modifiedHalfCameraOpeningAngle);
422 
423  }
424  /*********transform cluster centers from pointcloud to sphere ***** END*****/
425 
426  //*************************test for Cluster**************************/
427  int index;
428  for (std::size_t row = 0; centers.rows > 0 && row < static_cast<std::size_t>(centers.rows); row++)
429  {
430  for (std::size_t i = 0; i < 8; i++)
431  {
432  for (std::size_t j = 0; j < 8; j++)
433  {
434  index = 3 * (((int)centers.at<float>(row, 1) * scaleFactor + i) * images[0]->width + ((int)centers.at<float>(row, 0) * scaleFactor + j));
435  result[2]->pixels[index] = 255;
436  result[2]->pixels[index + 1] = 255 * (row == maxCluster);
437  result[2]->pixels[index + 2] = 255 * (row == static_cast<std::size_t>(secondCluster));
438  }
439  }
440 
441  }
442 
443  //*************************test for Cluster END**************************/
444 
445  ARMARX_VERBOSE << "mapping took !!!!! " << (TimeUtil::GetTime() - startTime).toMilliSeconds() << "ms";
446 
447  provideResultImages(result);
448 
449  SaliencyMapBasePtr primitiveSaliency = new SaliencyMapBase();
450  primitiveSaliency->name = "ValveAttention";
451  saliencyEgosphereGraph->graphToVec(primitiveSaliency->map);
452  viewSelection->updateSaliencyMap(primitiveSaliency);
453 
454  ARMARX_IMPORTANT << "process finished";
455  }
456 
457 
458  void ValveAttention::addSaliencyRecursive(const int currentNodeIndex, std::vector<bool>& visitedNodes, const float saliency, const TSphereCoord objectSphereCoord, const float maxDistanceOnArc)
459  {
460 
461  // distance on arc between object projection center and node,
462  // normalized by the maximal viewing angle of the camera (=> in [0,1])
463  float normalizedDistance = MathTools::getDistanceOnArc(objectSphereCoord, saliencyEgosphereGraph->getNodes()->at(currentNodeIndex)->getPosition()) / maxDistanceOnArc;
464 
465  // increase value of node
466  float newValue = ((CIntensityNode*)saliencyEgosphereGraph->getNodes()->at(currentNodeIndex))->getIntensity()
467  + (1.0f - 0.5f * normalizedDistance * normalizedDistance) * saliency;
468 
469  if (newValue > 1.0)
470  {
471  ARMARX_INFO << "saliency is greater than 1.0: " << newValue;
472  newValue = std::min(1.0f, newValue);
473  }
474 
475  ((CIntensityNode*)saliencyEgosphereGraph->getNodes()->at(currentNodeIndex))->setIntensity(newValue);
476 
477  // mark node as visited for this object
478  visitedNodes.at(currentNodeIndex) = true;
479 
480  // recurse on neighbours if they were not yet visited and close enough to the object projection center
481  int neighbourIndex;
482 
483  for (size_t i = 0; i < saliencyEgosphereGraph->getNodeAdjacency(currentNodeIndex)->size(); i++)
484  {
485  neighbourIndex = saliencyEgosphereGraph->getNodeAdjacency(currentNodeIndex)->at(i);
486 
487  if (!visitedNodes.at(neighbourIndex))
488  {
489  if (MathTools::getDistanceOnArc(objectSphereCoord, saliencyEgosphereGraph->getNodes()->at(neighbourIndex)->getPosition()) <= maxDistanceOnArc)
490  {
491  addSaliencyRecursive(neighbourIndex, visitedNodes, saliency, objectSphereCoord, maxDistanceOnArc);
492  }
493  else
494  {
495  visitedNodes.at(neighbourIndex) = true;
496  }
497  }
498  }
499  }
500 }
RemoteRobot.h
ARMARX_VERBOSE
#define ARMARX_VERBOSE
Definition: Logging.h:180
armarx::eFull
@ eFull
Definition: MorphingItem.h:35
CIntensityNode
Definition: IntensityGraph.h:25
ARMARX_IMPORTANT
#define ARMARX_IMPORTANT
Definition: Logging.h:183
visionx::ImageProviderInfo::numberImages
int numberImages
Number of images.
Definition: ImageProcessor.h:506
index
uint8_t index
Definition: EtherCATFrame.h:59
armarx::RemoteRobot::synchronizeLocalClone
static bool synchronizeLocalClone(VirtualRobot::RobotPtr robot, RobotStateComponentInterfacePrx robotStatePrx)
Definition: RemoteRobot.cpp:448
armarx::RemoteRobot::createLocalCloneFromFile
static VirtualRobot::RobotPtr createLocalCloneFromFile(RobotStateComponentInterfacePrx robotStatePrx, VirtualRobot::RobotIO::RobotDescription loadMode=VirtualRobot::RobotIO::eFull)
This is a convenience function for createLocalClone, which automatically gets the filename from the R...
Definition: RemoteRobot.cpp:441
armarx::ValveAttention::onConnectPointCloudAndImageProcessor
void onConnectPointCloudAndImageProcessor() override
Implement this method in your PointCloudAndImageProcessor in order execute parts when the component i...
Definition: ValveAttention.cpp:81
HoG
Definition: hog.h:48
armarx::CMakePackageFinder
The CMakePackageFinder class provides an interface to the CMake Package finder capabilities.
Definition: CMakePackageFinder.h:53
visionx::tools::createByteImage
CByteImage * createByteImage(const ImageFormatInfo &imageFormat, const ImageType imageType)
Creates a ByteImage for the destination type specified in the given imageProviderInfo.
visionx::ImageProviderInfo
Definition: ImageProcessor.h:466
IceInternal::Handle< FramedPosition >
CGraphPyramidLookupTable
Definition: GraphPyramidLookupTable.h:22
CIntensityGraph
Definition: IntensityGraph.h:51
M_PI
#define M_PI
Definition: MathTools.h:17
armarx::ValveAttention::onInitPointCloudAndImageProcessor
void onInitPointCloudAndImageProcessor() override
Setup the vision component.
Definition: ValveAttention.cpp:48
armarx::CMakePackageFinder::getDataDir
std::string getDataDir() const
Definition: CMakePackageFinder.h:176
EPS
#define EPS
Definition: gdiam.cpp:2021
max
T max(T t1, T t2)
Definition: gdiam.h:48
ARMARX_ERROR
#define ARMARX_ERROR
Definition: Logging.h:189
armarx::ValveAttention::onDisconnectPointCloudAndImageProcessor
void onDisconnectPointCloudAndImageProcessor() override
Implement this method in the PointCloudAndImageProcessor in order to execute parts when the component...
Definition: ValveAttention.cpp:114
armarx::armem::Time
armarx::core::time::DateTime Time
Definition: forward_declarations.h:13
CSphericalGraph::getNodeAdjacency
std::vector< int > * getNodeAdjacency(int nIndex)
Definition: SphericalGraph.cpp:177
armarx::TimeUtil::GetTime
static IceUtil::Time GetTime(TimeMode timeMode=TimeMode::VirtualTime)
Get the current time.
Definition: TimeUtil.cpp:42
armarx::Component::getConfigIdentifier
std::string getConfigIdentifier()
Retrieve config identifier for this component as set in constructor.
Definition: Component.cpp:74
CMakePackageFinder.h
armarx::ValveAttention::process
void process() override
Process the vision component.
Definition: ValveAttention.cpp:138
ARMARX_INFO
#define ARMARX_INFO
Definition: Logging.h:174
armarx::ArmarXDataPath::addDataPaths
static void addDataPaths(const std::string &dataPathList)
Definition: ArmarXDataPath.cpp:559
IceUtil::Handle< class PropertyDefinitionContainer >
visionx::tools::typeNameToImageType
ImageType typeNameToImageType(const std::string &imageTypeName)
Converts an image type name as string into an ImageType integer.
Definition: TypeMapping.cpp:42
CSphericalGraph::getNodes
TNodeList * getNodes()
Definition: SphericalGraph.cpp:162
distance
double distance(const Point &a, const Point &b)
Definition: point.hpp:88
armarx::ArmarXDataPath::getAbsolutePath
static bool getAbsolutePath(const std::string &relativeFilename, std::string &storeAbsoluteFilename, const std::vector< std::string > &additionalSearchPaths={}, bool verbose=true)
Definition: ArmarXDataPath.cpp:111
armarx::ValveAttention::createPropertyDefinitions
armarx::PropertyDefinitionsPtr createPropertyDefinitions() override
Definition: ValveAttention.cpp:40
armarx::VariantType::FramedPosition
const VariantTypeId FramedPosition
Definition: FramedPose.h:39
min
T min(T t1, T t2)
Definition: gdiam.h:42
armarx::handleExceptions
void handleExceptions()
Definition: Exception.cpp:141
ARMARX_WARNING
#define ARMARX_WARNING
Definition: Logging.h:186
armarx::PropertyDefinitionsPtr
IceUtil::Handle< class PropertyDefinitionContainer > PropertyDefinitionsPtr
PropertyDefinitions smart pointer type.
Definition: forward_declarations.h:34
ValveAttention.h
MathTools::getDistanceOnArc
static float getDistanceOnArc(TSphereCoord p1, TSphereCoord p2)
Definition: MathTools.cpp:277
ArmarXDataPath.h
TSphereCoord
Definition: Structs.h:22
MathTools::convert
static void convert(TSphereCoord in, Eigen::Vector3d &out)
Definition: MathTools.cpp:401
armarx
This file offers overloads of toIce() and fromIce() functions for STL container types.
Definition: ArmarXTimeserver.cpp:28
armarx::ValveAttentionPropertyDefinitions
Definition: ValveAttention.h:61
armarx::ValveAttention::onExitPointCloudAndImageProcessor
void onExitPointCloudAndImageProcessor() override
Exit the ImapeProcessor component.
Definition: ValveAttention.cpp:119