RGBDHandLocalizer.cpp
Go to the documentation of this file.
1/*
2 * This file is part of ArmarX.
3 *
4 * Copyright (C) 2011-2016, High Performance Humanoid Technologies (H2T), Karlsruhe Institute of Technology (KIT), all rights reserved.
5 *
6 * ArmarX is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License version 2 as
8 * published by the Free Software Foundation.
9 *
10 * ArmarX is distributed in the hope that it will be useful, but
11 * WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
14 *
15 * You should have received a copy of the GNU General Public License
16 * along with this program. If not, see <http://www.gnu.org/licenses/>.
17 *
18 * @package VisionX::ArmarXObjects::RGBDHandLocalizer
19 * @author Markus Grotz ( markus dot grotz at kit dot edu )
20 * @date 2015
21 * @copyright http://www.gnu.org/licenses/gpl-2.0.txt
22 * GNU General Public License
23 */
24
25#include "RGBDHandLocalizer.h"
26
27namespace visionx
28{
29
36
37 void
39 {
40 usingProxy(getProperty<std::string>("RobotStateComponentProxyName").getValue());
41
42 providerName = getProperty<std::string>("providerName").getValue();
43
44
45 sensorFrameName = getProperty<std::string>("SensorFrameName").getValue();
46 markerFrameName = getProperty<std::string>("MarkerName").getValue();
47 handFrameName = getProperty<std::string>("HandNameInRobotModel").getValue();
48
49
50 usingImageProvider(providerName);
51 usingPointCloudProvider(providerName);
52 offeringTopic(getProperty<std::string>("DebugDrawerTopicName").getValue());
53 }
54
55 void
57 {
58 ImageType imageDisplayType = visionx::tools::typeNameToImageType("rgb");
59
60 ImageProviderInfo imageProviderInfo = getImageProvider(providerName, imageDisplayType);
61
62 this->numImages = imageProviderInfo.numberImages;
63
64 if (numImages < 1 || numImages > 2)
65 {
66 ARMARX_FATAL << "invalid number of images. aborting";
67 return;
68 }
69
70 images = new CByteImage*[numImages];
71
72 for (int i = 0; i < numImages; i++)
73 {
74 images[i] = visionx::tools::createByteImage(imageProviderInfo);
75 }
76
77 result = new CByteImage*[numImages];
78
80 getProperty<std::string>("RobotStateComponentProxyName").getValue());
81
82 ImageDimension dimension(images[0]->width, images[0]->height);
83 enableResultImages(numImages, dimension, visionx::tools::typeNameToImageType("rgb"));
86 this->debugDrawerTopic = getTopic<armarx::DebugDrawerInterfacePrx>(
87 getProperty<std::string>("DebugDrawerTopicName").getValue());
88 }
89
90 void
92 {
93 this->debugDrawerTopic->clearLayer("RGBDHandLocalizer");
94 }
95
96 void
98 {
99 for (int i = 0; i < numImages; i++)
100 {
101 delete images[i];
102 }
103 delete[] images;
104
105 for (int i = 0; i < numImages; i++)
106 {
107 delete result[i];
108 }
109 delete[] result;
110 }
111
112 void
114 {
115
116 if (!waitForImages(2000))
117 {
118 ARMARX_WARNING << "Timeout or error while waiting for image data";
119 return;
120 }
121 if (getImages(images) != numImages)
122 {
123 ARMARX_WARNING << "Unable to transfer or read images";
124 return;
125 }
126 if (!waitForPointClouds(2000))
127 {
128 ARMARX_WARNING << "Can't read Pointcloud.";
129 return;
130 }
131
132 pcl::PointCloud<pcl::PointXYZRGBA>::Ptr currentPointCloud(
133 new pcl::PointCloud<pcl::PointXYZRGBA>());
134 pcl::PointCloud<pcl::PointXYZRGBA>::Ptr filteredPointCloud(
135 new pcl::PointCloud<pcl::PointXYZRGBA>());
136 armarx::SharedRobotInterfacePrx currentRobot = this->robotStatePrx->getSynchronizedRobot();
137 std::vector<Eigen::Vector3f> medianCoordinates;
138
139
140 getPointClouds(currentPointCloud);
141
142 if (!currentPointCloud->isOrganized())
143 {
144 ARMARX_WARNING << "Pointcloud is not organized.";
145 return;
146 }
147
148
149 IplImage* iplImage = IplImageAdaptor::Adapt(images[0]);
150 cv::Mat RGBImage = cv::cvarrToMat(iplImage);
151
152 cv::Mat HSVImage;
153 cv::cvtColor(RGBImage, HSVImage, cv::COLOR_RGB2HSV);
154
155 cv::Mat mask;
156 // todo move to onInit..-method
157 cv::Scalar minColor(getProperty<int>("hueMin").getValue(),
158 getProperty<int>("satMin").getValue(),
159 getProperty<int>("valMin").getValue());
160
161 cv::Scalar maxColor(getProperty<int>("hueMax").getValue(),
162 getProperty<int>("satMax").getValue(),
163 getProperty<int>("valMax").getValue());
164 cv::inRange(HSVImage, minColor, maxColor, mask);
165
166 //opening
167 cv::erode(mask, mask, getStructuringElement(cv::MORPH_ELLIPSE, cv::Size(6, 6)));
168 cv::dilate(mask, mask, getStructuringElement(cv::MORPH_ELLIPSE, cv::Size(6, 6)));
169 //closing
170 cv::dilate(mask, mask, getStructuringElement(cv::MORPH_ELLIPSE, cv::Size(3, 3)));
171 cv::erode(mask, mask, getStructuringElement(cv::MORPH_ELLIPSE, cv::Size(3, 3)));
172
173
174 //ARMARX_INFO << "Hand name in robot model:" << handFrameName;
175
176
177 armarx::FramedPositionPtr kinematicMarkerPosition = new armarx::FramedPosition(
178 Eigen::Vector3f(0, 0, 0), markerFrameName, currentRobot->getName());
180 armarx::FramedPositionPtr::dynamicCast(kinematicMarkerPosition->clone());
181 copy->changeFrame(currentRobot, sensorFrameName);
182 Eigen::Vector3f guessMM = copy->toEigen();
183 this->cropFilter(currentPointCloud,
184 filteredPointCloud,
185 getProperty<float>("uncertaintyMM").getValue(),
186 guessMM);
187
188 provideResultPointClouds(filteredPointCloud, "Filtered_Cloud");
189
190
191 this->calcPositions(
192 mask, filteredPointCloud, medianCoordinates); //Here the calculation happens
193
194 armarx::FramedPositionPtr kinematicHandPosition = new armarx::FramedPosition(
195 Eigen::Vector3f(0, 0, 0), handFrameName, currentRobot->getName());
196 kinematicHandPosition->changeFrame(currentRobot, markerFrameName);
198 kinematicHandPosition->toEigen(), markerFrameName, currentRobot->getName());
199
200 VirtualRobot::RobotPtr sharedRobot(new armarx::RemoteRobot(currentRobot));
201 offsetToHand->changeFrame(sharedRobot, sensorFrameName);
202
203 //Choose the best blob
204 if (medianCoordinates.size() > 0)
205 {
206 float distance = std::numeric_limits<float>::max();
208 for (Eigen::Vector3f currentMedian : medianCoordinates)
209 {
210 armarx::FramedPosition currentPos(
211 currentMedian, sensorFrameName, currentRobot->getName());
212
213 float currentDist = (kinematicMarkerPosition->toEigen() - currentMedian).norm();
214 if (currentDist < distance)
215 {
216 min = currentPos;
217 distance = currentDist;
218 }
219 }
220
221
223 Eigen::Vector3f handPositionEigen = markerPosition->toEigen() + offsetToHand->toEigen();
224
225
226 if (getProperty<bool>("PrimitiveCalibrationOnStart").getValue())
227 {
228 int numCalibrations = 20;
229 if (calibrationCounter < numCalibrations)
230 {
232 armarx::FramedPositionPtr::dynamicCast(kinematicHandPosition->clone());
233 kinCopy->changeFrame(currentRobot, sensorFrameName);
234 Eigen::Vector3f diff = kinCopy->toEigen() - handPositionEigen;
235 this->calibrationSum = this->calibrationSum + diff;
236 calibrationCounter++;
237 }
238 else
239 {
240 handPositionEigen = handPositionEigen + (calibrationSum / numCalibrations);
241 }
242 }
243
244
246 handPositionEigen, sensorFrameName, currentRobot->getName());
247
248
249 armarx::FramedPosePtr handRootPose = armarx::FramedPosePtr::dynamicCast(
250 currentRobot->getRobotNode(handFrameName)->getPoseInRootFrame());
251
252
253 armarx::FramedOrientationPtr handOrientation = handRootPose->getOrientation();
254 handOrientation->changeFrame(currentRobot, sensorFrameName);
255
256 std::scoped_lock lock(positionLock);
257
258 this->realHandPose = new armarx::FramedPose(handOrientation->toEigen(),
259 realHandPosition->toEigen(),
260 sensorFrameName,
261 currentRobot->getName());
262
263 this->timestamp = armarx::TimestampVariant::nowPtr();
264
265
266 this->debugDrawerTopic->setPoseVisu("RGBDHandLocalizer",
267 "Real_Hand_Position",
268 this->realHandPose->toGlobal(currentRobot));
269 this->debugDrawerTopic->setPoseVisu("RGBDHandLocalizer",
270 "Kinematic_Hand_Position",
271 handRootPose->toGlobal(currentRobot));
272 this->debugDrawerTopic->setPoseVisu(
273 "RGBDHandLocalizer",
274 "DepthCamera",
275 currentRobot->getRobotNode(sensorFrameName)->getGlobalPose());
276 this->debugDrawerTopic->setSphereVisu(
277 "RGBDHandLocalizer",
278 "MarkerPos_Sensor",
279 new armarx::Vector3(markerPosition->toGlobalEigen(currentRobot)),
280 armarx::DrawColor{1, 1, 0, 0.5},
281 20.0f);
282 this->debugDrawerTopic->setSphereVisu(
283 "RGBDHandLocalizer",
284 "MarkerPos_Kinematic",
285 new armarx::Vector3(kinematicMarkerPosition->toGlobalEigen(currentRobot)),
286 armarx::DrawColor{1, 0, 1, 0.5},
287 20.0f);
288 }
289 else
290 {
291 ARMARX_WARNING << "No blob found";
292 }
293
294
295 //Provide the segmented Image as Result
296
297
298 IplImage* img = cvCreateImage(cvSize(640, 480), IPL_DEPTH_8U, 3);
299 cv::Mat outMat = cv::cvarrToMat(img);
300 cv::cvtColor(mask, outMat, cv::COLOR_GRAY2RGB);
301
302 result[1] = images[0];
303 result[0] = IplImageAdaptor::Adapt(img);
304 provideResultImages(result);
305 cvReleaseImage(&img);
306 outMat.release();
307 //ARMARX_VERBOSE << "process took: " << (IceUtil::Time::now() - start).toMilliSecondsDouble() << " milliseconds.";
308 }
309
    void
    RGBDHandLocalizer::calcPositions(cv::Mat& binaryImage,
                                     pcl::PointCloud<pcl::PointXYZRGBA>::Ptr pointcloud,
                                     std::vector<Eigen::Vector3f>& medianCoordinates)
    {
        // Computes one candidate 3D marker position per connected blob in the
        // binary segmentation mask: the finite 3D points under each blob's
        // pixels are averaged, then the mean is shifted by "MarkerRadiusMM"
        // along its direction from the sensor origin — presumably to move from
        // the visible marker surface toward the marker center (TODO confirm).
        // All valid points are also collected into a debug cloud published as
        // "MarkerCloud".
        medianCoordinates.clear();
        BlobLabeler labeler;
        cv::Mat labeledImage;
        std::vector<std::vector<cv::Point>> blobList;
        labeler.labelBlobs(binaryImage, labeledImage, blobList);

        pcl::PointCloud<pcl::PointXYZRGBA>::Ptr outputCloud(
            new pcl::PointCloud<pcl::PointXYZRGBA>());

        for (std::vector<cv::Point> blob : blobList)
        {
            Eigen::Vector3f sum = Eigen::Vector3f::Zero();

            int numberOfPixels = 0;
            for (cv::Point picturePoint : blob)
            {
                // at(x, y) requires an organized cloud; process() checks this
                // before calling. NaN points (no depth) are skipped.
                pcl::PointXYZRGBA point3D = pointcloud->at(picturePoint.x, picturePoint.y);
                bool point3Dvalid = std::isfinite(point3D.x) && std::isfinite(point3D.y) &&
                                    std::isfinite(point3D.z);

                if (point3Dvalid)
                {
                    outputCloud->push_back(point3D);

                    Eigen::Vector3f vector;
                    vector << point3D.x, point3D.y, point3D.z;
                    sum = sum + vector;
                    numberOfPixels++;
                }
            }

            // Blobs without any valid depth measurement yield no candidate.
            if (numberOfPixels != 0)
            {
                Eigen::Vector3f median = sum / numberOfPixels;
                float markerRadius = getProperty<float>("MarkerRadiusMM").getValue();
                median = median + markerRadius * median.normalized();
                medianCoordinates.push_back(median);
            }
        }
        labeledImage.release();
        // NOTE(review): one original source line was lost here during the
        // documentation export — check the repository history.
        provideResultPointClouds(outputCloud, "MarkerCloud");
    }
358
359 void
360 RGBDHandLocalizer::cropFilter(pcl::PointCloud<pcl::PointXYZRGBA>::Ptr input,
361 pcl::PointCloud<pcl::PointXYZRGBA>::Ptr output,
362 float uncertaintyMM,
363 Eigen::Vector3f guessMM)
364 {
365
366 //Guesses come from the armarx Framework and so are measured in mm.
367
368
369 //Crop the pointcloud
370 this->cropper.setInputCloud(input);
371 this->cropper.setKeepOrganized(true);
372 this->cropper.setMax(Eigen::Vector4f(uncertaintyMM, uncertaintyMM, uncertaintyMM, 1));
373 this->cropper.setMin(Eigen::Vector4f(-uncertaintyMM, -uncertaintyMM, -uncertaintyMM, 1));
374 this->cropper.setTranslation(guessMM);
375 this->cropper.setRotation(Eigen::Vector3f::Zero());
376 this->cropper.filter(*output);
377 }
378
379 memoryx::ObjectLocalizationResultList
380 RGBDHandLocalizer::localizeObjectClasses(const memoryx::ObjectClassNameList& classNames,
381 const Ice::Current&)
382 {
383
384 memoryx::ObjectLocalizationResultList resultList;
385
386
387 if (classNames.size() == 1)
388 {
389 if (classNames.at(0) == getProperty<std::string>("HandNameInMemory").getValue())
390 {
391 if (this->realHandPose)
392 {
393
394 std::scoped_lock lock(positionLock);
395
396 memoryx::ObjectLocalizationResult localizationResult;
397
398 localizationResult.objectClassName =
399 getProperty<std::string>("HandNameInMemory").getValue();
400 localizationResult.timeStamp = this->timestamp;
401 localizationResult.orientation = this->realHandPose->getOrientation();
402 localizationResult.position = this->realHandPose->getPosition();
403 localizationResult.recognitionCertainty = 1.0;
404 Eigen::Vector3f mean = Eigen::Vector3f::Zero();
405 Eigen::Matrix3f covar = Eigen::Matrix3f::Identity() * 10000;
406 memoryx::MultivariateNormalDistributionBasePtr dummy =
408 localizationResult.positionNoise = dummy;
409
410
411 resultList.push_back(localizationResult);
412 ARMARX_INFO << "Hand found at: " << localizationResult.position->output();
413 }
414 else
415 {
416 ARMARX_WARNING << "Hand not found";
417 }
418 }
419 else
420 {
421 ARMARX_WARNING << "Wrong classname requested";
422 }
423 }
424 else
425 {
426 ARMARX_WARNING << "More than one classname requested";
427 }
428
429 return resultList;
430 }
431} // namespace visionx
void labelBlobs(cv::Mat &binaryImage, cv::Mat &outputImage, std::vector< std::vector< cv::Point > > &blobList)
std::string getConfigIdentifier()
Retrieve config identifier for this component as set in constructor.
Definition Component.cpp:90
Property< PropertyType > getProperty(const std::string &name)
FramedDirection is a 3 dimensional direction vector with a reference frame.
Definition FramedPose.h:87
The FramedPose class.
Definition FramedPose.h:281
The FramedPosition class.
Definition FramedPose.h:158
void offeringTopic(const std::string &name)
Registers a topic for retrival after initialization.
TopicProxyType getTopic(const std::string &name)
Returns a proxy of the specified topic.
bool usingProxy(const std::string &name, const std::string &endpoints="")
Registers a proxy for retrieval after initialization and adds it to the dependency list.
Ice::ObjectPrx getProxy(long timeoutMs=0, bool waitForScheduler=true) const
Returns the proxy of this object (optionally it waits for the proxy)
Mimics the behaviour of the VirtualRobot::Robot class while redirecting everything to an Ice proxy.
static TimestampVariantPtr nowPtr()
The Vector3 class.
Definition Pose.h:113
The MultivariateNormalDistribution class.
void enableResultImages(int numberImages, ImageDimension imageDimension, ImageType imageType, const std::string &name="")
Enables visualization.
void usingImageProvider(std::string name)
Registers a delayed topic subscription and a delayed provider proxy retrieval which all will be avail...
bool waitForImages(int milliseconds=1000)
Wait for new images.
ImageProviderInfo getImageProvider(std::string name, ImageType destinationImageType=eRgb, bool waitForProxy=false)
Select an ImageProvider.
int getImages(CByteImage **ppImages)
Poll images from provider.
void provideResultImages(CByteImage **images, armarx::MetaInfoSizeBasePtr info=nullptr)
sends result images for visualization
int numberImages
Number of images.
void enableResultPointClouds(std::string resultProviderName="")
Enables visualization.
int getPointClouds(const PointCloudPtrT &pointCloudPtr)
Poll PointClouds from provider.
void usingPointCloudProvider(std::string providerName)
Registers a delayed topic subscription and a delayed provider proxy retrieval which will be available...
bool waitForPointClouds(int milliseconds=1000)
Wait for new PointClouds.
void provideResultPointClouds(const PointCloudPtrT &pointClouds, std::string providerName="")
sends result PointClouds for visualization
void onDisconnectPointCloudAndImageProcessor() override
Implement this method in the PointCloudAndImageProcessor in order to execute parts when the component...
void onConnectPointCloudAndImageProcessor() override
Implement this method in your PointCloudAndImageProcessor in order execute parts when the component i...
armarx::PropertyDefinitionsPtr createPropertyDefinitions() override
void process() override
Process the vision component.
void onInitPointCloudAndImageProcessor() override
Setup the vision component.
memoryx::ObjectLocalizationResultList localizeObjectClasses(const memoryx::ObjectClassNameList &, const Ice::Current &) override
void onExitPointCloudAndImageProcessor() override
Exit the ImageProcessor component.
T min(T t1, T t2)
Definition gdiam.h:44
#define ARMARX_INFO
The normal logging level.
Definition Logging.h:181
#define ARMARX_FATAL
The logging level for unexpected behaviour, that will lead to a seriously malfunctioning program and ...
Definition Logging.h:199
#define ARMARX_WARNING
The logging level for unexpected behaviour, but not a serious problem.
Definition Logging.h:193
std::shared_ptr< class Robot > RobotPtr
Definition Bus.h:19
IceInternal::Handle< FramedDirection > FramedDirectionPtr
Definition FramedPose.h:84
IceUtil::Handle< class PropertyDefinitionContainer > PropertyDefinitionsPtr
PropertyDefinitions smart pointer type.
IceInternal::Handle< FramedPosition > FramedPositionPtr
Definition FramedPose.h:149
::IceInternal::ProxyHandle<::IceProxy::armarx::SharedRobotInterface > SharedRobotInterfacePrx
Definition FramedPose.h:59
IceInternal::Handle< FramedOrientation > FramedOrientationPtr
Definition FramedPose.h:207
IceInternal::Handle< FramedPose > FramedPosePtr
Definition FramedPose.h:272
bool isfinite(const std::vector< T, Ts... > &v)
Definition algorithm.h:366
ImageType typeNameToImageType(const std::string &imageTypeName)
Converts an image type name as string into an ImageType integer.
std::tuple< uint8_t, uint8_t, uint8_t > colorizeSegment(PointCloudPtrT &segment)
CByteImage * createByteImage(const ImageFormatInfo &imageFormat, const ImageType imageType)
Creates a ByteImage for the destination type specified in the given imageProviderInfo.
ArmarX headers.
double distance(const Point &a, const Point &b)
Definition point.hpp:95