37 #include <Calibration/Calibration.h>
40 #include <TexturedRecognition/TexturedRecognition.h>
41 #include <TexturedRecognition/TexturedObjectDatabase.h>
42 #include <TexturedRecognition/TexturedFeatureSet.h>
// Creates and configures the CTexturedRecognition engine from the component's
// properties, then registers each known object class's feature file in the
// recognizer's object database.
// NOTE(review): this excerpt elides several original lines (braces, the loop
// over object-class entities, and the success branch of AddClass).
50 bool TexturedObjectRecognition::initRecognizer()
// Keypoint detection threshold handed to the SIFT feature extractor.
52 float SIFTThreshold = getProperty<float>(
"SIFTThreshold").getValue();
// Recognizer works on full-resolution camera images.
54 texturedRecognition =
new CTexturedRecognition(getImageFormat().dimension.width, getImageFormat().dimension.height, SIFTThreshold);
55 texturedRecognition->SetVerbose(
true);
// Stereo mode on: 3D object poses are computed from the image pair.
56 texturedRecognition->SetStereo(
true);
57 texturedRecognition->SetQualityThreshold(getProperty<float>(
"QualityThreshold").getValue());
// A recognition is accepted only with >= nMinValidFeatures matched features
// and a reprojection error below MaxError.
58 texturedRecognition->SetRecognitionThresholds(getProperty<int>(
"nMinValidFeatures").getValue(), getProperty<float>(
"MaxError").getValue());
// Axis-aligned box of plausible result positions; localizations outside
// [MinPoint, MaxPoint] are discarded later in localizeObjectClasses().
61 Eigen::Vector3f minPoint = getProperty<Eigen::Vector3f>(
"MinPoint").getValue();
62 Eigen::Vector3f maxPoint = getProperty<Eigen::Vector3f>(
"MaxPoint").getValue();
64 Math3d::SetVec(validResultBoundingBoxMin, minPoint(0), minPoint(1), minPoint(2));
65 Math3d::SetVec(validResultBoundingBoxMax, maxPoint(0), maxPoint(1), maxPoint(2));
// Stereo correlation (matching) parameters for depth computation.
68 correlationWindowSize = getProperty<int>(
"StereoCorrelationWindowSize").getValue();
69 correlationThreshold = getProperty<float>(
"StereoCorrelationThreshold").getValue();
71 texturedRecognition->GetObjectDatabase()->InitCameraParameters(getStereoCalibration(),
true);
// The z-extent of the valid-result box (minPoint(2)..maxPoint(2)) also bounds
// the depth range used by the stereo correlation.
72 texturedRecognition->GetObjectDatabase()->SetCorrelationParameters(correlationWindowSize, minPoint(2), maxPoint(2), correlationThreshold);
// Per object class (loop elided in this excerpt): fetch the feature file and
// register it under the class name.
80 std::string fileName = recognitionWrapper->getFeatureFile();
84 std::string className = objectClassEntity->getName();
86 if (texturedRecognition->GetObjectDatabase()->AddClass(className, fileName))
// Classes without a descriptor/feature file cannot be recognized; warn only.
93 ARMARX_WARNING_S <<
"No descriptor file defined for object " << objectClassEntity->getName();
// Runs textured-object recognition on the current stereo image pair and
// converts every valid, queried, in-bounds detection into an
// ObjectLocalizationResult.
// @param objectClassNames classes to localize (single-object fast path if one)
// @param cameraImages     stereo input images
// @param imageMetaInfo    provides the capture timestamp (timeProvided, in us)
// @param resultImages     optional visualization output (used only if enabled)
// @return list of accepted localizations
// NOTE(review): several original lines are elided in this excerpt (braces,
// the mapValues declaration, result-field assignments, posUncertainty setup).
99 memoryx::ObjectLocalizationResultList TexturedObjectRecognition::localizeObjectClasses(
const std::vector<std::string>& objectClassNames, CByteImage** cameraImages, armarx::MetaInfoSizeBasePtr imageMetaInfo, CByteImage** resultImages)
// Space-separated list of the requested class names (built for reporting;
// consumer not visible in this excerpt).
101 std::string allObjectNames;
103 for (
size_t i = 0; i < objectClassNames.size(); i++)
105 allObjectNames.append(objectClassNames.at(i));
106 allObjectNames.append(
" ");
// Only draw into the result images when visualization is enabled.
112 CByteImage** tmpResultImages = getResultImagesEnabled() ? resultImages : NULL;
// Fast path: restrict the recognizer to the single requested class.
114 if (objectClassNames.size() == 1)
116 texturedRecognition->DoRecognitionSingleObject(cameraImages, tmpResultImages, objectClassNames.at(0).c_str(),
true, 50, getImagesAreUndistorted());
// Otherwise run recognition over the whole database.
121 texturedRecognition->DoRecognition(cameraImages, tmpResultImages,
true, 50, getImagesAreUndistorted());
124 Object3DList objectList = texturedRecognition->GetObject3DList();
126 ARMARX_DEBUG_S <<
"Localized " << objectList.size() <<
" objects";
127 const auto agentName = getProperty<std::string>(
"AgentName").getValue();
129 memoryx::ObjectLocalizationResultList resultList;
131 for (Object3DList::iterator iter = objectList.begin() ; iter < objectList.end() ; iter++)
// The recognizer may also report classes that were not asked for; keep only
// detections whose class is in the query list.
134 bool queriedClass = (std::find(objectClassNames.begin(), objectClassNames.end(), iter->sName) != objectClassNames.end());
136 if (iter->localizationValid && queriedClass)
// Detected 3D position (recognizer frame, mm — presumably; verify upstream).
138 float x = iter->pose.translation.x;
139 float y = iter->pose.translation.y;
140 float z = iter->pose.translation.z;
// Publish the raw detection on the "ObjectRecognition" debug channel.
143 mapValues[
"x"] =
new Variant(x);
144 mapValues[
"y"] =
new Variant(y);
145 mapValues[
"z"] =
new Variant(z);
146 mapValues[
"name"] =
new Variant(iter->sName);
// seq is a running counter distinguishing successive localizations.
147 mapValues[
"sequence"] =
new Variant(seq++);
// timeProvided is in microseconds; convert to seconds for the observer.
148 mapValues[
"timestamp"] =
new Variant(imageMetaInfo->timeProvided / 1000.0 / 1000.0);
149 debugObserver->setDebugChannel(
"ObjectRecognition", mapValues);
// Plausibility gate: accept only positions strictly inside the configured
// valid-result bounding box (see initRecognizer()).
153 if (x > validResultBoundingBoxMin.x && y > validResultBoundingBoxMin.y && z > validResultBoundingBoxMin.z &&
154 x < validResultBoundingBoxMax.x && y < validResultBoundingBoxMax.y && z < validResultBoundingBoxMax.z)
157 memoryx::ObjectLocalizationResult result;
160 Eigen::Vector3f position(x, y, z);
// Row-major fill of the 3x3 rotation from the recognizer's Mat3d entries.
162 orientation << iter->pose.rotation.r1, iter->pose.rotation.r2, iter->pose.rotation.r3,
163 iter->pose.rotation.r4, iter->pose.rotation.r5, iter->pose.rotation.r6,
164 iter->pose.rotation.r7, iter->pose.rotation.r8, iter->pose.rotation.r9;
// Map the heuristic certainty from [0,1] into [0.5,1] so an accepted
// detection is never reported as less likely than chance.
170 result.recognitionCertainty = 0.5f + 0.5f * calculateRecognitionCertainty(iter->sName, *iter);
// Inflate the position covariance for low-certainty detections
// (scale ~ certainty^-4).
174 Eigen::MatrixXf cov = posUncertainty->toEigenCovariance();
175 cov *= 4.0f / pow(result.recognitionCertainty, 4);
178 result.objectClassName = iter->sName;
181 resultList.push_back(result);
// Pose expressed in referenceFrameName, attached to the configured agent.
183 FramedPose objectPose(orientation, position, referenceFrameName, agentName);
188 ARMARX_DEBUG_S <<
"Refused unrealistic localization at position: " << x <<
" " << y <<
" " << z;
// Heuristic certainty in [0,1] for one recognition result, combining three
// ratings: the localization quality (entry.quality), the absolute number of
// matched features (entry.quality2), and the matched-feature count relative
// to the size of the class's feature set. The three ratings are fused with a
// naive-Bayes-style odds formula.
// NOTE(review): this excerpt elides original lines, presumably including
// `objectIndex = i; break;` inside the search loop and the handling of
// `objectIndex == -1` — confirm against the full file.
198 float TexturedObjectRecognition::calculateRecognitionCertainty(
const std::string& objectClassName,
const Object3DEntry& entry)
// Locate the class's feature set in the database by name.
206 int objectIndex = -1;
207 int n = texturedRecognition->GetObjectDatabase()->GetSize();
209 for (
int i = 0; i < n; i++)
211 if (objectClassName.compare(texturedRecognition->GetObjectDatabase()->GetFeatureSet(i)->GetName()) == 0)
218 if (objectIndex == -1)
// Total number of features stored for this object class.
223 int numberObjectFeatures = texturedRecognition->GetObjectDatabase()->GetFeatureSet(objectIndex)->GetSize();
// Rating 1: Gaussian falloff in entry.quality above 0.7; quality <= 0.7 maps
// to the maximum rating.
226 float temp = (entry.quality > 0.7f) ? entry.quality - 0.7f : 0;
227 float ratingPixelError = expf(-(4.0f / 9.0f) * temp * temp);
// Squash into [0.15, 0.85] so no single rating can dominate the fusion.
228 ratingPixelError = 0.15f + 0.7f * ratingPixelError;
// Rating 2: sigmoid in the absolute matched-feature count (entry.quality2),
// centered at 20 features.
231 temp = 0.1f * (entry.quality2 - 20.0f);
232 float ratingMatchedFeaturesAbsolute = 0.5f + 0.5f * (temp / sqrtf(1.0f + temp * temp));
233 ratingMatchedFeaturesAbsolute = 0.15f + 0.7f * ratingMatchedFeaturesAbsolute;
// Rating 3: matched features relative to the model size; the model size is
// capped at 300 so very feature-rich models are not penalized.
236 if (numberObjectFeatures > 300)
238 numberObjectFeatures = 300;
241 temp = entry.quality2;
// Clamp the match count to the (capped) model size.
243 if (temp > (
float)numberObjectFeatures)
245 temp = (
float)numberObjectFeatures;
// Matching >= 15% of the model's features yields the maximum rating.
248 float ratingMatchedFeaturesRelative = temp / (0.15f * (
float)numberObjectFeatures);
249 ratingMatchedFeaturesRelative = (ratingMatchedFeaturesRelative > 1.0f) ? 1.0f : ratingMatchedFeaturesRelative;
250 ratingMatchedFeaturesRelative = 0.15f + 0.7f * ratingMatchedFeaturesRelative;
// Fuse the ratings as independent evidence with a uniform prior:
// p = prod(r_i) / (prod(r_i) + prod(1 - r_i)).
253 float recognitionCertainty = (ratingPixelError * ratingMatchedFeaturesAbsolute * ratingMatchedFeaturesRelative)
254 / ((ratingPixelError * ratingMatchedFeaturesAbsolute * ratingMatchedFeaturesRelative)
255 + ((1 - ratingPixelError) * (1 - ratingMatchedFeaturesAbsolute) * (1 - ratingMatchedFeaturesRelative)));
257 return recognitionCertainty;