HypothesisValidationRGBD.cpp
Go to the documentation of this file.
1 /*
2  * This file is part of ArmarX.
3  *
4  * Copyright (C) 2011-2016, High Performance Humanoid Technologies (H2T), Karlsruhe Institute of Technology (KIT), all rights reserved.
5  *
6  * ArmarX is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License version 2 as
8  * published by the Free Software Foundation.
9  *
10  * ArmarX is distributed in the hope that it will be useful, but
11  * WITHOUT ANY WARRANTY; without even the implied warranty of
12  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13  * GNU General Public License for more details.
14  *
15  * You should have received a copy of the GNU General Public License
16  * along with this program. If not, see <http://www.gnu.org/licenses/>.
17  *
18  * @package
19  * @author
20  * @date
21  * @copyright http://www.gnu.org/licenses/gpl-2.0.txt
22  * GNU General Public License
23  */
25 #include "OLPTools.h"
26 #include "PointCloudRegistration.h"
27 #include "ColorICP.h"
28 #include "ObjectRecognition.h"
29 
30 
31 // IVT
32 #include <Image/ByteImage.h>
33 #include <Calibration/Calibration.h>
34 #include <Math/Math3d.h>
35 
36 
37 #include <cstdlib>
38 
39 
41 
42 
43 
44 
45 
46 void CHypothesisValidationRGBD::ValidateInitialHypotheses(const std::vector<CHypothesisPoint*>& aOldDepthMapPoints, const std::vector<CHypothesisPoint*>& aNewDepthMapPoints, const CByteImage* pForegroundImage,
47  const CByteImage* pHSVImage, const CCalibration* calibration, const Vec3d upwardsVector, CObjectHypothesisArray* pObjectHypotheses, CObjectHypothesisArray* pConfirmedHypotheses)
48 {
49  // calculate foreground points
50  std::vector<CHypothesisPoint*> aForegroundPoints;
51  COLPTools::FilterForegroundPoints(aNewDepthMapPoints, pForegroundImage, calibration, aForegroundPoints);
52  ARMARX_VERBOSE_S << aForegroundPoints.size() << " foreground points";
53 
54 
55  // if something changed in the image, determine transformations and validate hypotheses
56  if (aForegroundPoints.size() > 0)
57  {
58  // create hypotheses from points that changed
59  GenerateAndAddForegroundHypotheses(aOldDepthMapPoints, pForegroundImage, calibration, pObjectHypotheses);
60 
61  // set up the point cloud registration
62  CPointCloudRegistration* pPointCloudRegistration = new CPointCloudRegistration();
63  pPointCloudRegistration->SetScenePointcloud(aNewDepthMapPoints);
64 
65  // validate all hypotheses
66  for (int i = 0; i < pObjectHypotheses->GetSize(); i++)
67  {
68  // debug only for paper!
69  //if ((*pObjectHypotheses)[i]->nHypothesisNumber > 40) continue;
70 
71  // check if the hypothesis is of type RGBD
72  if (((*pObjectHypotheses)[i]->eType != CObjectHypothesis::eRGBD) && ((*pObjectHypotheses)[i]->eType != CObjectHypothesis::eSingleColored) && false)
73  {
74  ARMARX_VERBOSE_S << "ValidateInitialHypothesis: Wrong hypothesis type (not RGBD or SingleColored). Not checking it for motion.";
75  }
76  else
77  {
78  // check hypothesis only if it lies in a region that changed
79  float fForegroundRatio;
80  int nForegroundPoints;
81  COLPTools::CalculateForegroundRatioOfHypothesis((*pObjectHypotheses)[i], pForegroundImage, calibration, fForegroundRatio, nForegroundPoints);
82 
83  if (fForegroundRatio > 0.3f) // || 100*nForegroundPoints>OLP_IMG_WIDTH*OLP_IMG_HEIGHT)
84  {
85  ARMARX_VERBOSE_S << "Estimating transformation of hypothesis " << (*pObjectHypotheses)[i]->nHypothesisNumber << " (" << 100 * fForegroundRatio << " % change, " << (*pObjectHypotheses)[i]->aNewPoints.size() << " points).";
86 
87  CObjectHypothesis* pConfirmedHypothesis;
88  bool bValidated = ValidateHypothesis(pPointCloudRegistration, aNewDepthMapPoints, aForegroundPoints, pForegroundImage, pHSVImage, calibration, upwardsVector, (*pObjectHypotheses)[i], pConfirmedHypothesis, OLP_EFFORT_POINTCLOUD_MATCHING - 1);
89  //bool bValidated = ValidateHypothesis(pPointCloudRegistration, aNewDepthMapPoints, aForegroundPoints, pForegroundImage, pHSVImage, calibration, upwardsVector, (*pObjectHypotheses)[i], pConfirmedHypothesis, OLP_EFFORT_POINTCLOUD_MATCHING);
90 
91  if (bValidated)
92  {
93  pConfirmedHypotheses->AddElement(pConfirmedHypothesis);
94  }
95  }
96  else
97  {
98  ARMARX_VERBOSE_S << "The image region containing hypothesis " << i << " did not change (" << 100 * fForegroundRatio << " %).";
99  }
100  }
101  }
102 
103  delete pPointCloudRegistration;
104  }
105  else
106  {
107  ARMARX_VERBOSE_S << "No change in image detected!";
108  }
109 }
110 
111 
112 
113 
114 
115 
116 
117 void CHypothesisValidationRGBD::RevalidateHypotheses(const std::vector<CHypothesisPoint*>& aNewDepthMapPoints, const CByteImage* pForegroundImage,
118  const CByteImage* pHSVImage, const CCalibration* calibration, const Vec3d upwardsVector, CObjectHypothesisArray* pObjectHypotheses)
119 {
120  // calculate foreground points
121  std::vector<CHypothesisPoint*> aForegroundPoints;
122  COLPTools::FilterForegroundPoints(aNewDepthMapPoints, pForegroundImage, calibration, aForegroundPoints);
123  ARMARX_VERBOSE_S << aForegroundPoints.size() << " foreground points";
124 
125 
126  // if something changed in the image, determine transformations and revalidate hypotheses
127  if (aForegroundPoints.size() > 0)
128  {
129  // set up the point cloud registration
130  CPointCloudRegistration* pPointCloudRegistration = new CPointCloudRegistration();
131  pPointCloudRegistration->SetScenePointcloud(aNewDepthMapPoints);
132 
133  // validate all hypotheses
134  for (int i = 0; i < pObjectHypotheses->GetSize(); i++)
135  {
136  // check hypothesis, no matter if it lies in a region that changed
137  CObjectHypothesis* pObjectHypothesis = (*pObjectHypotheses)[i];
138  ARMARX_VERBOSE_S << "Estimating transformation of hypothesis " << pObjectHypothesis->nHypothesisNumber
139  << " (" << (*pObjectHypotheses)[i]->aConfirmedPoints.size() << " + " << (*pObjectHypotheses)[i]->aNewPoints.size() << " points).";
140  CObjectHypothesis* pConfirmedHypothesis;
141  bool bValidated = ValidateHypothesis(pPointCloudRegistration, aNewDepthMapPoints, aForegroundPoints, pForegroundImage, pHSVImage, calibration, upwardsVector, pObjectHypothesis, pConfirmedHypothesis);
142 
143  if (bValidated)
144  {
145  (*pObjectHypotheses)[i] = pConfirmedHypothesis;
146  delete pObjectHypothesis;
147  }
148  else
149  {
150  // just keep the hypothesis
151  }
152  }
153 
154  delete pPointCloudRegistration;
155  }
156  else
157  {
158  ARMARX_VERBOSE_S << "No change in image detected!";
159  }
160 }
161 
162 
163 
164 
165 
166 
167 void CHypothesisValidationRGBD::GenerateAndAddForegroundHypotheses(const std::vector<CHypothesisPoint*>& aOldDepthMapPoints, const CByteImage* pForegroundImage,
168  const CCalibration* calibration, CObjectHypothesisArray* pObjectHypotheses)
169 {
170  std::vector<CHypothesisPoint*> aForegroundPointsFromOldPoints;
171  COLPTools::FilterForegroundPoints(aOldDepthMapPoints, pForegroundImage, calibration, aForegroundPointsFromOldPoints);
172  ARMARX_VERBOSE_S << aForegroundPointsFromOldPoints.size() << " points for foreground hypotheses";
173 
174  // cluster the points and create foreground hypotheses
175  std::vector<std::vector<CHypothesisPoint*> > aForegroundPointsFromOldPointsClusters;
176  COLPTools::ClusterXMeans(aForegroundPointsFromOldPoints, 2, 5, OLP_CLUSTERING_FACTOR_FOREGROUND_HYPOTHESES, aForegroundPointsFromOldPointsClusters);
177  ARMARX_VERBOSE_S << aForegroundPointsFromOldPointsClusters.size() << " foreground hypotheses";
178 
179  for (size_t i = 0; i < aForegroundPointsFromOldPointsClusters.size(); i++)
180  {
181  COLPTools::RemoveOutliers(aForegroundPointsFromOldPointsClusters.at(i), 1.7f);
182 
183  if (aForegroundPointsFromOldPointsClusters.at(i).size() > OLP_MIN_NUM_FEATURES)
184  {
185  CObjectHypothesis* pForegroundHypo = new CObjectHypothesis();
186  Vec3d vCenter = {0, 0, 0};
187 
188  for (size_t j = 0; j < aForegroundPointsFromOldPointsClusters.at(i).size(); j++)
189  {
190  pForegroundHypo->aNewPoints.push_back(aForegroundPointsFromOldPointsClusters.at(i).at(j)->GetCopy());
191  Math3d::AddToVec(vCenter, aForegroundPointsFromOldPointsClusters.at(i).at(j)->vPosition);
192  }
193 
194  Math3d::MulVecScalar(vCenter, 1.0 / aForegroundPointsFromOldPointsClusters.at(i).size(), pForegroundHypo->vCenter);
195  pForegroundHypo->nHypothesisNumber = pObjectHypotheses->GetSize();
196  pForegroundHypo->eType = CObjectHypothesis::eRGBD;
197  pObjectHypotheses->AddElement(pForegroundHypo);
198  }
199  }
200 }
201 
202 
203 
204 
205 
/**
 * Checks whether pObjectHypothesis has moved between the previous and the current frame
 * and, if the motion is significant, builds a transformed, re-confirmed copy of it.
 *
 * @param pPointCloudRegistration registration object; its scene point cloud must already
 *                                be set to the current frame's points
 * @param aNewDepthMapPoints      all 3D points of the current frame
 * @param aForegroundPoints       subset of the current points lying in the changed region
 * @param pForegroundImage        binary mask of the image region that changed
 * @param pHSVImage               current HSV image, used for color-based location candidates
 * @param calibration             camera calibration for 3D<->2D projections
 * @param upwardsVector           up direction forwarded to the transformation estimation
 * @param pObjectHypothesis       hypothesis to validate; its vLastTranslation, mLastRotation
 *                                and aHypothesisHasMoved history are updated as a side effect
 * @param pConfirmedHypothesis    out: newly allocated transformed hypothesis; only set when
 *                                true is returned (caller takes ownership)
 * @param nEffortLevel            effort level forwarded to the point cloud registration
 * @return true iff significant motion was detected and enough points were re-confirmed
 */
bool CHypothesisValidationRGBD::ValidateHypothesis(CPointCloudRegistration* pPointCloudRegistration, const std::vector<CHypothesisPoint*>& aNewDepthMapPoints, const std::vector<CHypothesisPoint*>& aForegroundPoints, const CByteImage* pForegroundImage,
        const CByteImage* pHSVImage, const CCalibration* calibration, const Vec3d upwardsVector, CObjectHypothesis* pObjectHypothesis, CObjectHypothesis*& pConfirmedHypothesis,
        const int nEffortLevel)
{
    Mat3d mRot = Math3d::unit_mat;
    Vec3d vTrans = Math3d::zero_vec;
    float errorAtOriginalPosition = FLT_MAX;

    // quick rejection: if the hypothesis still matches well at its old position, it did not move
    if (pPointCloudRegistration->CheckObjectMatchAtOriginalPosition(pObjectHypothesis, errorAtOriginalPosition, nEffortLevel))
    {
        ARMARX_VERBOSE_S << "CHypothesisValidationRGBD: Initial check at old object position: the transformation shows no motion.";
        pObjectHypothesis->aHypothesisHasMoved.push_back(false);
        return false;
    }

    // get possible object locations based on color region similarity
    std::vector<Vec3d> aPossibleLocations;
    CObjectRecognition::FindPossibleObjectLocations(pObjectHypothesis, pHSVImage, aNewDepthMapPoints, calibration, aPossibleLocations, 5 + 3 * OLP_EFFORT_POINTCLOUD_MATCHING);

    // estimate the transformation of the hypothesis; matches worse than twice the error
    // at the original position are rejected inside the registration
    std::vector<float> aPointMatchDistances;
    std::vector<CColorICP::CPointXYZRGBI> aNearestNeighbors;
    pPointCloudRegistration->EstimateTransformation(pObjectHypothesis, mRot, vTrans, upwardsVector, nEffortLevel, &aPossibleLocations, &aNearestNeighbors, &aPointMatchDistances, 2 * errorAtOriginalPosition);
    Math3d::SetVec(pObjectHypothesis->vLastTranslation, vTrans);
    Math3d::SetMat(pObjectHypothesis->mLastRotation, mRot);

    // get the rotation angle (in degrees)
    float fRotationAngle;
    Vec3d vTemp;
    Math3d::GetAxisAndAngle(mRot, vTemp, fRotationAngle);
    fRotationAngle *= 180.0f / M_PI;

    // get the translation of the object center
    Math3d::MulMatVec(mRot, pObjectHypothesis->vCenter, vTrans, vTemp);
    float fDistance = Math3d::Distance(vTemp, pObjectHypothesis->vCenter);

    // accept only transformations with significant motion
    float fMotionMeasure = 2 * fRotationAngle + fDistance; // 0.5 deg rotation <=> 1mm translation
    ARMARX_VERBOSE_S << "Motion measure: " << fMotionMeasure << " (" << fDistance << "mm, " << fRotationAngle << "deg)";

    if ((fMotionMeasure > OLP_MINIMAL_MOTION_MEASURE) && (2 * fDistance > OLP_MINIMAL_MOTION_MEASURE))
    {
        // debug counters for the log output below
        int nNoForeground = 0;
        int nLargeDistance = 0;


        // create transformed hypothesis
        CObjectHypothesis* pTransformedHypo = new CObjectHypothesis();
        pTransformedHypo->nHypothesisNumber = pObjectHypothesis->nHypothesisNumber;
        pTransformedHypo->eType = pObjectHypothesis->eType;
        Math3d::SetVec(pTransformedHypo->vLastTranslation, vTrans);
        Math3d::SetMat(pTransformedHypo->mLastRotation, mRot);
        CHypothesisPoint* pPoint;


        // check for each candidate point if it is in the area of the image that changed;
        // and if yes, check if it concurred with the transformation.
        // NOTE(review): the indexing into aPointMatchDistances assumes the registration
        // processed the points in the order confirmed, new, doubtable — verify against
        // CPointCloudRegistration::EstimateTransformation.
        for (size_t j = 0; j < pObjectHypothesis->aNewPoints.size(); j++)
        {
            pPoint = pObjectHypothesis->aNewPoints.at(j)->GetCopy();

            // check if old and transformed position of the point are in the image region that changed
            bool bIsInForeground = COLPTools::PointIsInForeground(pPoint, pForegroundImage, calibration);
            Math3d::MulMatVec(mRot, pPoint->vPosition, vTrans, pPoint->vPosition);
            bIsInForeground &= COLPTools::PointIsInForeground(pPoint, pForegroundImage, calibration);

            if (bIsInForeground)
            {
                // check if the point matches well after the transformation
                if (aPointMatchDistances.at(pObjectHypothesis->aConfirmedPoints.size() + j) < OLP_TOLERANCE_CONCURRENT_MOTION)
                {
                    // candidate moved consistently with the object -> promote to confirmed
                    pTransformedHypo->aConfirmedPoints.push_back(pPoint);
                }
                else
                {
                    delete pPoint;
                    nLargeDistance++;
                }
            }
            else
            {
                delete pPoint;
                nNoForeground++;
            }
        }


        // check the confirmed points
        for (size_t j = 0; j < pObjectHypothesis->aConfirmedPoints.size(); j++)
        {
            pPoint = pObjectHypothesis->aConfirmedPoints.at(j)->GetCopy();

            // check if old and transformed position of the point are in the image region that changed
            bool bIsInForeground = COLPTools::PointIsInForeground(pPoint, pForegroundImage, calibration);
            Math3d::MulMatVec(mRot, pPoint->vPosition, vTrans, pPoint->vPosition);
            bIsInForeground &= COLPTools::PointIsInForeground(pPoint, pForegroundImage, calibration);

            if (bIsInForeground)
            {
                // check if the point matches well after the transformation
                if (aPointMatchDistances.at(j) < OLP_TOLERANCE_CONCURRENT_MOTION)
                {
                    pTransformedHypo->aConfirmedPoints.push_back(pPoint);
                }
                else
                {
                    // bad match: interpolate position/color with the nearest scene neighbor
                    // and demote the point to a candidate again
                    CColorICP::CPointXYZRGBI pNeighbor = aNearestNeighbors.at(j);
                    pPoint->vPosition.x = 0.5f * (pPoint->vPosition.x + pNeighbor.x);
                    pPoint->vPosition.y = 0.5f * (pPoint->vPosition.y + pNeighbor.y);
                    pPoint->vPosition.z = 0.5f * (pPoint->vPosition.z + pNeighbor.z);
                    pPoint->fColorR = 0.5f * (pPoint->fColorR + pNeighbor.r);
                    pPoint->fColorG = 0.5f * (pPoint->fColorG + pNeighbor.g);
                    pPoint->fColorB = 0.5f * (pPoint->fColorB + pNeighbor.b);
                    pPoint->fIntensity = 0.5f * (pPoint->fIntensity + pNeighbor.i);

                    pTransformedHypo->aNewPoints.push_back(pPoint);
                    nLargeDistance++;
                }
            }
            else
            {
                if (aPointMatchDistances.at(j) < OLP_TOLERANCE_CONCURRENT_MOTION)
                {
                    // matched well but outside the changed region: keep as doubtable
                    pTransformedHypo->aDoubtablePoints.push_back(pPoint);
                    nNoForeground++;
                }
                else
                {
                    delete pPoint;
                    nNoForeground++;
                }
            }
        }

        // check the doubtable points
        for (size_t j = 0; j < pObjectHypothesis->aDoubtablePoints.size(); j++)
        {
            pPoint = pObjectHypothesis->aDoubtablePoints.at(j)->GetCopy();

            // check if old and transformed position of the point are in the image region that changed
            bool bIsInForeground = COLPTools::PointIsInForeground(pPoint, pForegroundImage, calibration);
            Math3d::MulMatVec(mRot, pPoint->vPosition, vTrans, pPoint->vPosition);
            bIsInForeground &= COLPTools::PointIsInForeground(pPoint, pForegroundImage, calibration);

            if (bIsInForeground)
            {
                // check if the point matches well after the transformation
                if (aPointMatchDistances.at(pObjectHypothesis->aConfirmedPoints.size() + pObjectHypothesis->aNewPoints.size() + j) < OLP_TOLERANCE_CONCURRENT_MOTION)
                {
                    pTransformedHypo->aConfirmedPoints.push_back(pPoint);
                }
                else
                {
                    delete pPoint;
                    nLargeDistance++;
                }
            }
            else
            {
                delete pPoint;
                nNoForeground++;
            }
        }

        ARMARX_VERBOSE_S << "Discarded features: no foreground " << nNoForeground << ", bad match " << nLargeDistance;


        // if the hypothesis still looks good after the transformation, clean up and add new candidate points

        if (pTransformedHypo->aConfirmedPoints.size() > OLP_MIN_NUM_FEATURES)
        {
            // fuse very close points (closer than 0.4 depth-map pixels in the image)

            int nTooClose = 0;
            std::vector<Vec2d> aPointPositions2D;
            aPointPositions2D.resize(pTransformedHypo->aConfirmedPoints.size());

            // project all confirmed points into the image once
            for (size_t i = 0; i < pTransformedHypo->aConfirmedPoints.size(); i++)
            {
                calibration->WorldToImageCoordinates(pTransformedHypo->aConfirmedPoints.at(i)->vPosition, aPointPositions2D.at(i), false);
            }

            // NOTE(review): aPointPositions2D is never shrunk, so after the first fusion
            // 'aPointPositions2D.size() - 1' no longer corresponds to the last confirmed
            // point — the 2D position copied into slot j looks stale. Also, the element
            // swapped into slot j is not re-examined against slot i, and vPos2D is not
            // updated to the fused point's position. Verify whether this is intended.
            for (size_t i = 0; i < pTransformedHypo->aConfirmedPoints.size(); i++)
            {
                const Vec2d vPos2D = aPointPositions2D.at(i);

                for (size_t j = i + 1; j < pTransformedHypo->aConfirmedPoints.size(); j++)
                {
                    if (Math2d::Distance(vPos2D, aPointPositions2D.at(j)) < 0.4f * OLP_DEPTH_MAP_PIXEL_DISTANCE)
                    {
                        // replace point i by the fusion of i and j, then swap-erase point j
                        CHypothesisPoint* fusedPoint = FuseTwoPoints(pTransformedHypo->aConfirmedPoints.at(i), pTransformedHypo->aConfirmedPoints.at(j));
                        delete pTransformedHypo->aConfirmedPoints.at(i);
                        pTransformedHypo->aConfirmedPoints.at(i) = fusedPoint;
                        delete pTransformedHypo->aConfirmedPoints.at(j);
                        pTransformedHypo->aConfirmedPoints.at(j) = pTransformedHypo->aConfirmedPoints.at(pTransformedHypo->aConfirmedPoints.size() - 1);
                        pTransformedHypo->aConfirmedPoints.pop_back();
                        aPointPositions2D.at(j) = aPointPositions2D.at(aPointPositions2D.size() - 1);
                        nTooClose++;
                    }
                }
            }

            ARMARX_VERBOSE_S << "Discarded " << nTooClose << " points because they were too close to another one";


            // add new candidate points

            std::vector<CHypothesisPoint*> aNewCandidatePoints;
            std::vector<Vec2d> aForegroundPoints2D, aConfirmedPoints2D, aNewAndDoubtablePoints2D;
            std::vector<Vec3d> aForegroundPoints3D, aConfirmedPoints3D;
            // project the three point sets into the image in parallel (independent outputs)
            #pragma omp sections
            {
                #pragma omp section
                {
                    for (size_t i = 0; i < aForegroundPoints.size(); i++)
                    {
                        Vec3d point3D = aForegroundPoints.at(i)->vPosition;
                        aForegroundPoints3D.push_back(point3D);
                        Vec2d point2D;
                        calibration->WorldToImageCoordinates(point3D, point2D, false);
                        aForegroundPoints2D.push_back(point2D);
                    }
                }

                #pragma omp section
                {
                    for (size_t i = 0; i < pTransformedHypo->aConfirmedPoints.size(); i++)
                    {
                        Vec3d point3D = pTransformedHypo->aConfirmedPoints.at(i)->vPosition;
                        aConfirmedPoints3D.push_back(point3D);
                        Vec2d point2D;
                        calibration->WorldToImageCoordinates(point3D, point2D, false);
                        aConfirmedPoints2D.push_back(point2D);
                    }
                }

                #pragma omp section
                {
                    for (size_t i = 0; i < pTransformedHypo->aDoubtablePoints.size(); i++)
                    {
                        Vec2d point2D;
                        calibration->WorldToImageCoordinates(pTransformedHypo->aDoubtablePoints.at(i)->vPosition, point2D, false);
                        aNewAndDoubtablePoints2D.push_back(point2D);
                    }

                    for (size_t i = 0; i < pTransformedHypo->aNewPoints.size(); i++)
                    {
                        Vec2d point2D;
                        calibration->WorldToImageCoordinates(pTransformedHypo->aNewPoints.at(i)->vPosition, point2D, false);
                        aNewAndDoubtablePoints2D.push_back(point2D);
                    }
                }
            }


            // inverse rotation, to map current-frame points back to their old position
            Mat3d mRotInv;
            Math3d::Transpose(mRot, mRotInv);

            const float fMinDistanceLimit2D = 0.9f * OLP_DEPTH_MAP_PIXEL_DISTANCE;

            // NOTE(review): an unsigned (size_t) loop variable in 'omp parallel for'
            // requires OpenMP >= 3.0 — confirm the build's OpenMP version.
            #pragma omp parallel for
            for (size_t j = 0; j < aForegroundPoints.size(); j++)
            {
                // check if the old position of the point was in foreground
                Vec3d vOldPointPosition;
                Math3d::SubtractVecVec(aForegroundPoints.at(j)->vPosition, vTrans, vTemp);
                Math3d::MulMatVec(mRotInv, vTemp, vOldPointPosition);

                if (COLPTools::PointIsInForeground(vOldPointPosition, pForegroundImage, calibration))
                {
                    // add only if it is close to the object, but not too close to the points already in the hypothesis
                    Vec2d vPos2D = aForegroundPoints2D.at(j);
                    Vec3d vPos3D = aForegroundPoints3D.at(j);
                    float fDist2D, fDist3D;
                    float fMinDist2D = FLT_MAX;
                    float fMinDist3D = FLT_MAX;

                    for (size_t k = 0; k < aConfirmedPoints2D.size(); k++)
                    {
                        fDist2D = Math2d::Distance(vPos2D, aConfirmedPoints2D.at(k));

                        if (fDist2D < fMinDist2D)
                        {
                            fMinDist2D = fDist2D;
                        }

                        fDist3D = Math3d::Distance(vPos3D, aConfirmedPoints3D.at(k));

                        if (fDist3D < fMinDist3D)
                        {
                            fMinDist3D = fDist3D;
                        }
                    }

                    if ((fMinDist2D < OLP_MAX_DISTANCE_FOR_ADDING_FOREGROUND_CANDIDATE_2D) && (fMinDist3D < OLP_MAX_DISTANCE_FOR_ADDING_FOREGROUND_CANDIDATE_3D) && (fMinDist2D > fMinDistanceLimit2D))
                    {
                        // also keep a minimum image distance to the new/doubtable points
                        for (size_t k = 0; k < aNewAndDoubtablePoints2D.size(); k++)
                        {
                            fDist2D = Math2d::Distance(vPos2D, aNewAndDoubtablePoints2D.at(k));

                            if (fDist2D < fMinDist2D)
                            {
                                fMinDist2D = fDist2D;
                            }
                        }

                        if (fMinDist2D > fMinDistanceLimit2D)
                        {
                            // aNewCandidatePoints is shared between threads -> serialize the push_back
                            #pragma omp critical
                            {
                                aNewCandidatePoints.push_back(aForegroundPoints.at(j)->GetCopy());
                            }
                        }
                    }
                }
            }

            // take over all collected candidates (an additional 2D spacing filter
            // existed here historically; it is disabled)
            for (size_t i = 0; i < aNewCandidatePoints.size(); i++)
            {
                pTransformedHypo->aNewPoints.push_back(aNewCandidatePoints.at(i));
            }




            // copy visible points
            pTransformedHypo->aVisibleConfirmedPoints.resize(pTransformedHypo->aConfirmedPoints.size());

            for (size_t i = 0; i < pTransformedHypo->aConfirmedPoints.size(); i++)
            {
                pTransformedHypo->aVisibleConfirmedPoints.at(i) = pTransformedHypo->aConfirmedPoints.at(i)->GetCopy();
            }



#ifdef OLP_ADD_POINTS_FROM_SEGMENTED_REGION
            // extend the confirmed points by all scene points inside the object's segmentation mask.
            // NOTE(review): pSegmentationMask is allocated by CreateObjectSegmentationMask and
            // apparently never deleted here — possible memory leak, verify ownership.
            CByteImage* pSegmentationMask = NULL;
            COLPTools::CreateObjectSegmentationMask(pTransformedHypo, calibration, pSegmentationMask);
            COLPTools::FilterForegroundPoints(aNewDepthMapPoints, pSegmentationMask, calibration, pTransformedHypo->aConfirmedPoints);

            for (size_t i = pTransformedHypo->aVisibleConfirmedPoints.size(); i < pTransformedHypo->aConfirmedPoints.size(); i++)
            {
                pTransformedHypo->aVisibleConfirmedPoints.push_back(pTransformedHypo->aConfirmedPoints.at(i)->GetCopy());
            }
#endif



            // update the position of the center of the hypothesis (mean of the visible points)
            Vec3d vNewCenter = {0, 0, 0};

            for (size_t i = 0; i < pTransformedHypo->aVisibleConfirmedPoints.size(); i++)
            {
                vNewCenter.x += pTransformedHypo->aVisibleConfirmedPoints.at(i)->vPosition.x;
                vNewCenter.y += pTransformedHypo->aVisibleConfirmedPoints.at(i)->vPosition.y;
                vNewCenter.z += pTransformedHypo->aVisibleConfirmedPoints.at(i)->vPosition.z;
            }

            if (pTransformedHypo->aVisibleConfirmedPoints.size() > 0)
            {
                pTransformedHypo->vCenter.x = vNewCenter.x / (float)pTransformedHypo->aVisibleConfirmedPoints.size();
                pTransformedHypo->vCenter.y = vNewCenter.y / (float)pTransformedHypo->aVisibleConfirmedPoints.size();
                pTransformedHypo->vCenter.z = vNewCenter.z / (float)pTransformedHypo->aVisibleConfirmedPoints.size();
            }

            // update the maximal extent (largest center-to-point distance)
            pTransformedHypo->fMaxExtent = 0;
            float fDist;

            for (size_t i = 0; i < pTransformedHypo->aVisibleConfirmedPoints.size(); i++)
            {
                fDist = Math3d::Distance(pTransformedHypo->vCenter, pTransformedHypo->aVisibleConfirmedPoints.at(i)->vPosition);

                if (fDist > pTransformedHypo->fMaxExtent)
                {
                    pTransformedHypo->fMaxExtent = fDist;
                }
            }


            // return validated hypothesis (caller takes ownership)
            pConfirmedHypothesis = pTransformedHypo;
            pConfirmedHypothesis->aHypothesisHasMoved.push_back(true);
            pObjectHypothesis->aHypothesisHasMoved.push_back(true);
            return true;
        }
        else
        {
            ARMARX_VERBOSE_S << "CHypothesisValidationRGBD: not enough points left after transformation";
            delete pTransformedHypo;
            pObjectHypothesis->aHypothesisHasMoved.push_back(false);
            return false;
        }
    }
    else
    {
        ARMARX_VERBOSE_S << "CHypothesisValidationRGBD: the transformation shows no motion.";
        pObjectHypothesis->aHypothesisHasMoved.push_back(false);
        return false;
    }
}
625 
626 
627 
629 {
630  CHypothesisPoint* result = p1->GetCopy();
631  result->vPosition.x = 0.5f * (p1->vPosition.x + p2->vPosition.x);
632  result->vPosition.y = 0.5f * (p1->vPosition.y + p2->vPosition.y);
633  result->vPosition.z = 0.5f * (p1->vPosition.z + p2->vPosition.z);
634 
635  result->vOldPosition.x = 0.5f * (p1->vOldPosition.x + p2->vOldPosition.x);
636  result->vOldPosition.y = 0.5f * (p1->vOldPosition.y + p2->vOldPosition.y);
637  result->vOldPosition.z = 0.5f * (p1->vOldPosition.z + p2->vOldPosition.z);
638 
639  result->fColorR = 0.5f * (p1->fColorR + p2->fColorR);
640  result->fColorG = 0.5f * (p1->fColorG + p2->fColorG);
641  result->fColorB = 0.5f * (p1->fColorB + p2->fColorB);
642  result->fIntensity = 0.5f * (p1->fIntensity + p2->fIntensity);
643 
645 
646  return result;
647 }
648 
PointCloudRegistration.h
CPointCloudRegistration::EstimateTransformation
float EstimateTransformation(const std::vector< CHypothesisPoint * > &aObjectPoints, const Vec3d center, Mat3d &mRotation, Vec3d &vTranslation, const Vec3d upwardsVector, const int nEffortLevel=OLP_EFFORT_POINTCLOUD_MATCHING, const std::vector< Vec3d > *pPossibleLocationOffsets=NULL, std::vector< CColorICP::CPointXYZRGBI > *pNearestNeighbors=NULL, std::vector< float > *pPointMatchDistances=NULL, const std::vector< CHypothesisPoint * > *pAdditionalPoints=NULL, const float maxAcceptedDistance=FLT_MAX)
Definition: PointCloudRegistration.cpp:91
CHypothesisValidationRGBD::ValidateInitialHypotheses
static void ValidateInitialHypotheses(const std::vector< CHypothesisPoint * > &aOldDepthMapPoints, const std::vector< CHypothesisPoint * > &aNewDepthMapPoints, const CByteImage *pForegroundImage, const CByteImage *pHSVImage, const CCalibration *calibration, const Vec3d upwardsVector, CObjectHypothesisArray *pObjectHypotheses, CObjectHypothesisArray *pConfirmedHypotheses)
Definition: HypothesisValidationRGBD.cpp:46
COLPTools::CreateObjectSegmentationMask
void CreateObjectSegmentationMask(const CObjectHypothesis *pHypothesis, const CCalibration *calibration, CByteImage *&pForegroundImage)
Definition: OLPTools.cpp:1249
CHypothesisPoint::vOldPosition
Vec3d vOldPosition
Definition: ObjectHypothesis.h:239
OLP_DEPTH_MAP_PIXEL_DISTANCE
#define OLP_DEPTH_MAP_PIXEL_DISTANCE
Definition: ObjectLearningByPushingDefinitions.h:256
CHypothesisPoint::fIntensity
float fIntensity
Definition: ObjectHypothesis.h:241
OLP_MAX_DISTANCE_FOR_ADDING_FOREGROUND_CANDIDATE_3D
#define OLP_MAX_DISTANCE_FOR_ADDING_FOREGROUND_CANDIDATE_3D
Definition: ObjectLearningByPushingDefinitions.h:230
ColorICP.h
OLP_MAX_DISTANCE_FOR_ADDING_FOREGROUND_CANDIDATE_2D
#define OLP_MAX_DISTANCE_FOR_ADDING_FOREGROUND_CANDIDATE_2D
Definition: ObjectLearningByPushingDefinitions.h:228
GfxTL::Vec2d
VectorXD< 2, double > Vec2d
Definition: VectorXD.h:694
CHypothesisValidationRGBD::RevalidateHypotheses
static void RevalidateHypotheses(const std::vector< CHypothesisPoint * > &aNewDepthMapPoints, const CByteImage *pForegroundImage, const CByteImage *pHSVImage, const CCalibration *calibration, const Vec3d upwardsVector, CObjectHypothesisArray *pObjectHypotheses)
Definition: HypothesisValidationRGBD.cpp:117
CHypothesisValidationRGBD::ValidateHypothesis
static bool ValidateHypothesis(CPointCloudRegistration *pPointCloudRegistration, const std::vector< CHypothesisPoint * > &aNewDepthMapPoints, const std::vector< CHypothesisPoint * > &aForegroundPoints, const CByteImage *pForegroundImage, const CByteImage *pHSVImage, const CCalibration *calibration, const Vec3d upwardsVector, CObjectHypothesis *pObjectHypothesis, CObjectHypothesis *&pConfirmedHypothesis, const int nEffortLevel=OLP_EFFORT_POINTCLOUD_MATCHING)
Definition: HypothesisValidationRGBD.cpp:206
CObjectRecognition::FindPossibleObjectLocations
static void FindPossibleObjectLocations(const CObjectHypothesis *pHypothesis, const CByteImage *pHSVImage, const std::vector< CHypothesisPoint * > &aScenePoints, const CCalibration *calibration, std::vector< Vec3d > &aPossibleLocations, const int desiredNumberOfLocations=0)
Definition: ObjectRecognition.cpp:223
CColorICP::CPointXYZRGBI::x
float x
Definition: ColorICP.h:42
CColorICP::CPointXYZRGBI::y
float y
Definition: ColorICP.h:42
CObjectHypothesis::eType
eObjectType eType
Definition: ObjectHypothesis.h:289
CHypothesisPoint::fColorG
float fColorG
Definition: ObjectHypothesis.h:241
COLPTools::CalculateForegroundRatioOfHypothesis
void CalculateForegroundRatioOfHypothesis(const CObjectHypothesis *pHypothesis, const CByteImage *pForegroundImage, const CCalibration *calibration, float &fForegroundRatio, int &nNumForegroundPixels)
Definition: OLPTools.cpp:336
CColorICP::CPointXYZRGBI::g
float g
Definition: ColorICP.h:42
HypothesisValidationRGBD.h
CHypothesisPoint::vPosition
Vec3d vPosition
Definition: ObjectHypothesis.h:238
COLPTools::RemoveOutliers
void RemoveOutliers(std::vector< Vec3d > &aPoints, const float fStdDevFactor, std::vector< int > *pOldIndices)
Definition: OLPTools.cpp:759
CObjectHypothesis::eRGBD
@ eRGBD
Definition: ObjectHypothesis.h:285
CColorICP::CPointXYZRGBI::i
float i
Definition: ColorICP.h:42
COLPTools::PointIsInForeground
bool PointIsInForeground(const Vec3d vPoint, const CByteImage *pForegroundImage, const CCalibration *calibration)
Definition: OLPTools.cpp:310
CColorICP::CPointXYZRGBI::r
float r
Definition: ColorICP.h:42
OLPTools.h
CHypothesisPoint
Definition: ObjectHypothesis.h:171
OLP_MIN_NUM_FEATURES
#define OLP_MIN_NUM_FEATURES
Definition: ObjectLearningByPushingDefinitions.h:109
CObjectHypothesis::vLastTranslation
Vec3d vLastTranslation
Definition: ObjectHypothesis.h:318
CHypothesisPoint::GetCopy
CHypothesisPoint * GetCopy()
Definition: ObjectHypothesis.h:200
M_PI
#define M_PI
Definition: MathTools.h:17
ObjectRecognition.h
OLP_TOLERANCE_CONCURRENT_MOTION
#define OLP_TOLERANCE_CONCURRENT_MOTION
Definition: ObjectLearningByPushingDefinitions.h:224
OLP_EFFORT_POINTCLOUD_MATCHING
#define OLP_EFFORT_POINTCLOUD_MATCHING
Definition: ObjectLearningByPushingDefinitions.h:261
GfxTL::Vec3d
VectorXD< 3, double > Vec3d
Definition: VectorXD.h:695
CObjectHypothesis::vCenter
Vec3d vCenter
Definition: ObjectHypothesis.h:308
CPointCloudRegistration
Definition: PointCloudRegistration.h:34
CObjectHypothesis::nHypothesisNumber
int nHypothesisNumber
Definition: ObjectHypothesis.h:287
CColorICP::CPointXYZRGBI
Definition: ColorICP.h:40
CObjectHypothesis::aVisibleConfirmedPoints
std::vector< CHypothesisPoint * > aVisibleConfirmedPoints
Definition: ObjectHypothesis.h:294
CColorICP::CPointXYZRGBI::b
float b
Definition: ColorICP.h:42
CObjectHypothesis::eSingleColored
@ eSingleColored
Definition: ObjectHypothesis.h:285
CHypothesisPoint::fColorR
float fColorR
Definition: ObjectHypothesis.h:241
COLPTools::ClusterXMeans
void ClusterXMeans(const std::vector< CHypothesisPoint * > &aPoints, const int nMinNumClusters, const int nMaxNumClusters, const float fBICFactor, std::vector< std::vector< CHypothesisPoint * > > &aaPointClusters)
Definition: OLPTools.cpp:76
CObjectHypothesis
Definition: ObjectHypothesis.h:249
CObjectHypothesis::fMaxExtent
float fMaxExtent
Definition: ObjectHypothesis.h:315
OLP_CLUSTERING_FACTOR_FOREGROUND_HYPOTHESES
#define OLP_CLUSTERING_FACTOR_FOREGROUND_HYPOTHESES
Definition: ObjectLearningByPushingDefinitions.h:264
CObjectHypothesisArray
CDynamicArrayTemplate< CObjectHypothesis * > CObjectHypothesisArray
Definition: ObjectHypothesis.h:359
CObjectHypothesis::mLastRotation
Mat3d mLastRotation
Definition: ObjectHypothesis.h:317
float
#define float
Definition: 16_Level.h:22
CObjectHypothesis::aNewPoints
std::vector< CHypothesisPoint * > aNewPoints
Definition: ObjectHypothesis.h:291
CHypothesisValidationRGBD::GenerateAndAddForegroundHypotheses
static void GenerateAndAddForegroundHypotheses(const std::vector< CHypothesisPoint * > &aOldDepthMapPoints, const CByteImage *pForegroundImage, const CCalibration *calibration, CObjectHypothesisArray *pObjectHypotheses)
Definition: HypothesisValidationRGBD.cpp:167
CObjectHypothesis::aDoubtablePoints
std::vector< CHypothesisPoint * > aDoubtablePoints
Definition: ObjectHypothesis.h:293
CPointCloudRegistration::CheckObjectMatchAtOriginalPosition
bool CheckObjectMatchAtOriginalPosition(const CObjectHypothesis *pHypothesis, float &distance, const int nEffortLevel=OLP_EFFORT_POINTCLOUD_MATCHING)
Definition: PointCloudRegistration.cpp:460
CColorICP::CPointXYZRGBI::z
float z
Definition: ColorICP.h:42
CObjectHypothesis::aConfirmedPoints
std::vector< CHypothesisPoint * > aConfirmedPoints
Definition: ObjectHypothesis.h:292
ARMARX_VERBOSE_S
#define ARMARX_VERBOSE_S
Definition: Logging.h:200
COLPTools::FilterForegroundPoints
void FilterForegroundPoints(const std::vector< CHypothesisPoint * > &aAllPoints, const CByteImage *pForegroundImage, const CCalibration *calibration, std::vector< CHypothesisPoint * > &aForegroundPoints)
Definition: OLPTools.cpp:289
Logging.h
OLP_MINIMAL_MOTION_MEASURE
#define OLP_MINIMAL_MOTION_MEASURE
Definition: ObjectLearningByPushingDefinitions.h:226
CHypothesisPoint::fColorB
float fColorB
Definition: ObjectHypothesis.h:241
CHypothesisPoint::fMembershipProbability
float fMembershipProbability
Definition: ObjectHypothesis.h:242
CPointCloudRegistration::SetScenePointcloud
void SetScenePointcloud(const std::vector< CHypothesisPoint * > &aScenePoints)
Definition: PointCloudRegistration.cpp:69
CObjectHypothesis::aHypothesisHasMoved
std::vector< bool > aHypothesisHasMoved
Definition: ObjectHypothesis.h:297
CHypothesisValidationRGBD::FuseTwoPoints
static CHypothesisPoint * FuseTwoPoints(CHypothesisPoint *p1, CHypothesisPoint *p2)
Definition: HypothesisValidationRGBD.cpp:628