#include <Image/ByteImage.h>
#include <Image/ImageProcessor.h>
#include <Image/PrimitivesDrawer.h>
#include <Helpers/helpers.h>
#include <Math/FloatMatrix.h>

#include <gui/GLContext.h>

#include <SimoxUtility/algorithm/string/string_tools.h>

#include <boost/scope_exit.hpp>

#pragma GCC diagnostic ignored "-Wmissing-field-initializers"
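
// SegmentableTemplateRecognition: localizes objects by color segmentation and template matching.
// Pipeline: (1) segment both camera images by the object's HSV color and extract blob regions,
// (2) match blobs between the left and right image via epipolar geometry and triangulate them,
// (3) match each blob against the precomputed view templates of the object class,
// (4) verify by rendering the 3D model at the estimated pose and comparing it with the
//     observation; accepted results are reported with a recognition certainty.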
struct HSVSegmentationParams
{
    int hueValue = 0;
    int hueTolerance = 0;
    int saturationMin = 0;
    int saturationMax = 0;
    int valueMin = 0;
    int valueMax = 0;
    float minRatio = 0.0f;
};
HSVSegmentationParams getSegmentationParamsFromColor(CColorParameterSet const& colorParameters, ObjectColor color)
{
    HSVSegmentationParams result = {};

    int const* colorArray = colorParameters.GetColorParameters(color);

    result.hueValue = colorArray[0];
    result.hueTolerance = colorArray[1];
    result.saturationMin = colorArray[2];
    result.saturationMax = colorArray[3];
    result.valueMin = colorArray[4];
    result.valueMax = colorArray[5];

    return result;
}
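
// Illustrative use (mirrors the eYellow3 handling in localizeObjectClasses below): segment red
// and yellow, but only keep regions in which at least 20 % of the pixels are actually yellow.
//
//     std::vector<HSVSegmentationParams> params;
//     params.push_back(getSegmentationParamsFromColor(colorParameters, eRed));
//     params.push_back(getSegmentationParamsFromColor(colorParameters, eYellow));
//     params.back().minRatio = 0.2f;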

// Segments an image by HSV color ranges and extracts connected blob regions.
struct BlobDetector
{
    BlobDetector(int width, int height)
        : segmented(width, height, CByteImage::eGrayScale)
        , temp(width, height, CByteImage::eGrayScale)
        , hsv(width, height, CByteImage::eRGB24)
    {
    }

    BlobDetector(BlobDetector const&) = delete;
    BlobDetector& operator=(BlobDetector const&) = delete;
    Object2DList findColoredRegions(CByteImage* inputImage, int minPixelsPerRegion,
                                    std::vector<HSVSegmentationParams>& segmentationParams,
                                    int dilationCount = 2)
    {
        ::ImageProcessor::CalculateHSVImage(inputImage, &hsv);

        ::ImageProcessor::Zero(&segmented);
        for (HSVSegmentationParams const& segParam : segmentationParams)
        {
            ::ImageProcessor::FilterHSV(&hsv, &temp,
                                        segParam.hueValue, segParam.hueTolerance,
                                        segParam.saturationMin, segParam.saturationMax,
                                        segParam.valueMin, segParam.valueMax);
            ::ImageProcessor::Or(&segmented, &temp, &segmented);
        }
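
        // Smooth the combined mask: with the default dilationCount of 2 this performs one
        // opening (erode, dilate) and one closing (dilate, erode) pass; for dilationCount == 0
        // a fixed dilate-dilate-erode sequence is applied instead.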
        for (int i = 0; i < dilationCount / 2; ++i)
        {
            ::ImageProcessor::Erode(&segmented, &temp);
            ::ImageProcessor::Dilate(&temp, &segmented);
        }
        for (int i = 0; i < dilationCount / 2; ++i)
        {
            ::ImageProcessor::Dilate(&segmented, &temp);
            ::ImageProcessor::Erode(&temp, &segmented);
        }
        if (dilationCount == 0)
        {
            ::ImageProcessor::Dilate(&segmented, &temp);
            ::ImageProcessor::Dilate(&temp, &segmented);
            ::ImageProcessor::Erode(&segmented, &temp);
            ::ImageProcessor::CopyImage(&temp, &segmented);
        }

        ::ImageProcessor::FindRegions(&segmented, regions, minPixelsPerRegion);
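
        // Turn the regions into 2D object candidates. Segmentation entries with minRatio > 0
        // additionally require that a sufficient fraction of the region's pixels matches their
        // HSV range; otherwise the region is discarded.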
        Object2DList objects;
        objects.reserve(regions.size());

        for (MyRegion const& region : regions)
        {
            Object2DEntry object;
            object.region = region;

            bool discard = false;
            for (HSVSegmentationParams& seg : segmentationParams)
            {
                if (seg.minRatio > 0.0f)
                {
                    int min_hue = (int) seg.hueValue - (int) seg.hueTolerance;
                    int max_hue = (int) seg.hueValue + (int) seg.hueTolerance;

                    // Count the region's pixels whose HSV values fall into this range.
                    int count = 0;
                    for (int y = region.min_y; y < region.max_y; ++y)
                    {
                        for (int x = region.min_x; x < region.max_x; ++x)
                        {
                            int pixelIndex = y * hsv.width + x;
                            int h = hsv.pixels[pixelIndex * 3 + 0];
                            int s = hsv.pixels[pixelIndex * 3 + 1];
                            int v = hsv.pixels[pixelIndex * 3 + 2];
                            if (seg.saturationMin <= s && s <= seg.saturationMax &&
                                seg.valueMin <= v && v <= seg.valueMax)
                            {
                                if (max_hue >= min_hue)
                                {
                                    if (min_hue <= h && h <= max_hue)
                                    {
                                        ++count;
                                    }
                                }
                                else
                                {
                                    // The hue interval wraps around; accept both ends.
                                    if (h <= max_hue || h >= min_hue)
                                    {
                                        ++count;
                                    }
                                }
                            }
                        }
                    }

                    float ratio = float(count) / region.nPixels;
                    if (ratio < seg.minRatio)
                    {
                        ARMARX_VERBOSE << "Discarding region because ratio too small: " << ratio;
                        discard = true;
                        break;
                    }
                }
            }
            if (discard)
            {
                continue;
            }

            object.type = eCompactObject;
            object.sName = "CompactObject";
            object.id = objectId++;

            objects.push_back(object);
        }

        return objects;
    }
    CByteImage segmented;
    CByteImage temp;
    CByteImage hsv;
    RegionList regions;
    int objectId = 0;
};
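
// Scales the given region of the input image into the square output image (side length
// outputImage->width), preserving the aspect ratio and padding the remainder with zeros.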
void CropImageToTemplate(CByteImage* inputImage, MyRegion const& region, CByteImage* outputImage)
{
    int width = region.max_x - region.min_x + 1;
    int height = region.max_y - region.min_y + 1;
    int k = outputImage->width;

    if (width >= height)
    {
        int new_height = int((k * height) / float(width) + 0.5f);
        CByteImage temp_image(k, new_height, CByteImage::eGrayScale);

        ::ImageProcessor::Resize(inputImage, &temp_image, &region);

        const int nPixels = k * new_height;
        unsigned char* output = outputImage->pixels;

        memcpy(output, temp_image.pixels, nPixels);

        // Pad the remaining rows with zeros.
        const int nTotalPixels = k * k;
        for (int i = nPixels; i < nTotalPixels; i++)
        {
            output[i] = 0;
        }
    }
    else
    {
        int new_width = int((k * width) / float(height) + 0.5f);
        CByteImage temp_image(new_width, k, CByteImage::eGrayScale);

        ::ImageProcessor::Resize(inputImage, &temp_image, &region);

        const unsigned char* input = temp_image.pixels;
        unsigned char* output = outputImage->pixels;

        for (int i = 0, offset = 0, offset2 = 0; i < k; i++)
        {
            int j;
            for (j = 0; j < new_width; j++, offset++, offset2++)
            {
                output[offset] = input[offset2];
            }
            // Pad the remaining columns of this row with zeros.
            for (j = new_width; j < k; j++, offset++)
            {
                output[offset] = 0;
            }
        }
    }
}
SegmentableTemplateRecognition::SegmentableTemplateRecognition()
{
}

SegmentableTemplateRecognition::~SegmentableTemplateRecognition()
{
}
void SegmentableTemplateRecognition::onInitObjectLocalizerProcessor()
{
    offeringTopic(getProperty<std::string>("DebugObserverName").getValue());

    paramMinPixelsPerRegion = getProperty<int>("MinPixelsPerRegion").getValue();
}

void SegmentableTemplateRecognition::onExitObjectLocalizerProcessor()
{
}
// Recursively collects all file paths below the given directory (used to enumerate the
// template files in the template path).
static std::vector<std::string> listFiles(const std::string& path)
{
    std::vector<std::string> result;

    namespace fs = std::filesystem;

    fs::path apk_path(path);
    fs::recursive_directory_iterator end;

    for (fs::recursive_directory_iterator i(apk_path); i != end; ++i)
    {
        const fs::path cp = (*i);
        result.push_back(cp.string());
    }

    return result;
}
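
// Reads the component properties, loads the template database (one binary template file per
// object, plus a .mat model and a .color file), loads the color parameter set, and sets up
// the offscreen OpenGL visualizer and the stereo object finder.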
bool SegmentableTemplateRecognition::initRecognizer()
{
    Eigen::Vector3f minPoint = getProperty<Eigen::Vector3f>("MinPoint").getValue();
    Eigen::Vector3f maxPoint = getProperty<Eigen::Vector3f>("MaxPoint").getValue();

    Math3d::SetVec(validResultBoundingBoxMin, minPoint(0), minPoint(1), minPoint(2));
    Math3d::SetVec(validResultBoundingBoxMax, maxPoint(0), maxPoint(1), maxPoint(2));

    maxEpipolarDistance = getProperty<float>("MaxEpipolarDistance").getValue();
    std::string colorParameterFilename = getProperty<std::string>("ColorParameterFile").getValue();

    paramTemplateMatchThreshold = getProperty<float>("TemplateMatchThreshold");
    paramSizeRatioThreshold = getProperty<float>("SizeRatioThreshold");
    paramCorrelationThreshold = getProperty<float>("CorrelationThreshold");
    paramSingleInstance = getProperty<bool>("SingleInstance");
    if (!armarx::ArmarXDataPath::getAbsolutePath(colorParameterFilename, colorParameterFilename))
    {
        ARMARX_ERROR << "Could not find color parameter file in ArmarXDataPath: " << colorParameterFilename;
        return false;
    }

    templatePath = getProperty<std::string>("TemplatePath").getValue();
    if (!armarx::ArmarXDataPath::getAbsolutePath(templatePath, templatePath))
    {
        ARMARX_ERROR << "Could not find template path in ArmarXDataPath: " << templatePath;
        return false;
    }
    std::vector<std::string> templateFiles = listFiles(templatePath);
    for (std::string const& templateFile : templateFiles)
    {
        ARMARX_INFO << "Loading template: " << templateFile;

        FILE* file = fopen(templateFile.c_str(), "rb");
        BOOST_SCOPE_EXIT((file))
        {
            fclose(file);
        }
        BOOST_SCOPE_EXIT_END;

        fseek(file, 0L, SEEK_END);
        long fileSize = ftell(file);
        fseek(file, 0L, SEEK_SET);
        void* memory = operator new(fileSize);

        if (fileSize < (long) sizeof(SegmentableTemplateHeader))
        {
            ARMARX_WARNING << "File is too short for template header: " << templateFile
                           << " (expected at least: " << sizeof(SegmentableTemplateHeader)
                           << ", but got size: " << fileSize << ")";
            continue;
        }

        long read = (long) fread(memory, 1, fileSize, file);
        if (read != fileSize)
        {
            ARMARX_WARNING << "Could not read template file completely (expected size: "
                           << fileSize << ", but read: " << read << ")";
            continue;
        }
        // 'entry' is the database record for this object (template data, 3D model, color).
        std::string objectName = entry.data->name;

        std::string modelFilename = templatePath + "/" + objectName + ".mat";
        entry.model.reset(new CFloatMatrix());
        if (!entry.model->LoadFromFile(modelFilename.c_str()))
        {
            ARMARX_WARNING << "Could not load model file: " << modelFilename;
            continue;
        }

        std::string colorString;

        std::string colorFilename = templatePath + "/" + objectName + ".color";
        std::ifstream input(colorFilename.c_str());
        if (!(input >> colorString))
        {
            ARMARX_WARNING << "Could not read color from file: " << colorFilename;
            continue;
        }

        ObjectColor color = CColorParameterSet::Translate(colorString.c_str());
        entry.color = color;

        ARMARX_INFO << "Inserting template into database: " << objectName
                    << ", template count: " << entry.data->templateCount
                    << ", color: " << colorString << " (" << entry.color << ")";
        database.emplace(objectName, std::move(entry));
    }
    if (!colorParameters.LoadFromFile(colorParameterFilename.c_str()))
    {
        throw armarx::LocalException("Could not read color parameter file.");
    }
    // Create an offscreen OpenGL context and visualizer; they are used during localization to
    // render the object models at estimated poses for verification.
    ImageFormatInfo imageFormat = getImageFormat();
    contextGL.reset(new CGLContext());
    contextGL->CreateContext(imageFormat.dimension.width, imageFormat.dimension.height);
    contextGL->MakeCurrent();

    m_pOpenGLVisualizer.reset(new COpenGLVisualizer());
    m_pOpenGLVisualizer->InitByCalibration(getStereoCalibration()->GetRightCalibration());

    m_pObjectFinderStereo.reset(new CObjectFinderStereo());
    m_pObjectFinderStereo->SetColorParameterSet(&colorParameters);
    m_pObjectFinderStereo->Init(getStereoCalibration());

    ARMARX_INFO << "SegmentableTemplateRecognition is initialized";

    return true;
}
bool SegmentableTemplateRecognition::addObjectClass(const memoryx::EntityPtr& objectClassEntity, const memoryx::GridFileManagerPtr& fileManager)
{
    std::string entityName = objectClassEntity->getName();

    auto iter = database.find(entityName);
    return iter != database.end();
}
memoryx::ObjectLocalizationResultList SegmentableTemplateRecognition::localizeObjectClasses(
    const std::vector<std::string>& objectClassNames, CByteImage** cameraImages,
    armarx::MetaInfoSizeBasePtr imageMetaInfo, CByteImage** resultImages)
{
    if (objectClassNames.size() < 1)
    {
        ARMARX_WARNING << "objectClassNames.size() = " << objectClassNames.size()
                       << ", something is wrong here!";
        memoryx::ObjectLocalizationResultList resultList;
        return resultList;
    }
    contextGL->MakeCurrent();

    Object3DList objectList;

    for (std::string const& className : objectClassNames)
    {
        Object3DList classObjectList;

        int dilationCount = 2;

        auto classEntryIter = database.find(className);
        if (classEntryIter == database.end())
        {
            ARMARX_WARNING << "Could not find database entry for: " << className;
            continue;
        }
        auto const& classEntry = classEntryIter->second;
        ObjectColor color = classEntry.color;

        std::vector<HSVSegmentationParams> hsvSegmentationParams;

        // For eYellow3, segment both red and yellow, and require at least 20 % of the region's
        // pixels to actually be yellow.
        if (color == eYellow3)
        {
            hsvSegmentationParams.push_back(getSegmentationParamsFromColor(colorParameters, eRed));
            hsvSegmentationParams.push_back(getSegmentationParamsFromColor(colorParameters, eYellow));
            hsvSegmentationParams.back().minRatio = 0.2f;
        }
        else
        {
            HSVSegmentationParams hsvSegmentationParam = getSegmentationParamsFromColor(colorParameters, color);
            hsvSegmentationParams.push_back(hsvSegmentationParam);
        }
        CByteImage* leftInput = cameraImages[0];
        CByteImage* rightInput = cameraImages[1];
        int width = leftInput->width;
        int height = leftInput->height;

        CByteImage grayImageBig(width, height, CByteImage::eGrayScale);
        ::ImageProcessor::ConvertImage(leftInput, &grayImageBig, true);

        BlobDetector leftBlobDetector(width, height);
        BlobDetector rightBlobDetector(width, height);

        Object2DList leftObjects = leftBlobDetector.findColoredRegions(leftInput, paramMinPixelsPerRegion, hsvSegmentationParams, dilationCount);
        Object2DList rightObjects = rightBlobDetector.findColoredRegions(rightInput, paramMinPixelsPerRegion, hsvSegmentationParams, dilationCount);

        for (Object2DEntry const& entry : leftObjects)
        {
            ::PrimitivesDrawer::DrawRegion(resultImages[0], entry.region, 255, 0, 0, 2);
        }
        Object3DList blobList;

        float maxEpipolarDistance = this->maxEpipolarDistance;
        CStereoCalibration* stereoCalibration = getStereoCalibration();
        bool inputImagesAreRectified = false;
        bool useDistortionParameters = !getImagesAreUndistorted();
        float minZDistance = 500;
        float maxZDistance = 3000;
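
        // Stereo matching: a left/right blob pair is accepted if the blobs have similar pixel
        // counts and aspect ratios, lie close enough to the epipolar line, and triangulate to a
        // depth between minZDistance and maxZDistance; the pair with the smallest epipolar
        // distance is kept.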
        for (Object2DEntry& leftObject : leftObjects)
        {
            Object2DEntry const* bestMatch = nullptr;
            float bestDiff = maxEpipolarDistance;

            for (Object2DEntry& rightObject : rightObjects)
            {
                float pixelRatio = leftObject.region.nPixels < rightObject.region.nPixels
                                       ? (float) leftObject.region.nPixels / rightObject.region.nPixels
                                       : (float) rightObject.region.nPixels / leftObject.region.nPixels;
                float aspectRatioRatio = leftObject.region.ratio < rightObject.region.ratio
                                             ? (float) leftObject.region.ratio / rightObject.region.ratio
                                             : (float) rightObject.region.ratio / leftObject.region.ratio;

                Vec2d leftCentroid = leftObject.region.centroid;
                Vec2d rightCentroid = rightObject.region.centroid;

                float yDiff;
                if (inputImagesAreRectified)
                {
                    yDiff = fabsf(leftCentroid.y - rightCentroid.y);
                }
                else
                {
                    yDiff = stereoCalibration->CalculateEpipolarLineInLeftImageDistance(leftCentroid, rightCentroid);
                }

                Vec3d position;
                stereoCalibration->Calculate3DPoint(leftCentroid, rightCentroid, position,
                                                    inputImagesAreRectified, useDistortionParameters);

                if (pixelRatio > 0.5f && aspectRatioRatio > 0.5f && yDiff <= maxEpipolarDistance &&
                    position.z >= minZDistance && position.z <= maxZDistance && yDiff <= bestDiff)
                {
                    bestDiff = yDiff;
                    bestMatch = &rightObject;
                }
            }

            if (bestMatch)
            {
                Object2DEntry const& rightObject = *bestMatch;

                Object3DEntry entry;
                entry.region_left = leftObject.region;
                entry.region_right = rightObject.region;
                entry.region_id_left = leftObject.id;
                entry.region_id_right = rightObject.id;

                stereoCalibration->Calculate3DPoint(entry.region_left.centroid, entry.region_right.centroid,
                                                    entry.pose.translation, inputImagesAreRectified, useDistortionParameters);

                entry.world_point = entry.pose.translation;

                blobList.push_back(entry);
            }
        }
        // Overlay the segmentation mask on the left result image.
        CByteImage* segmented = &leftBlobDetector.segmented;
        CByteImage* resultImage = resultImages[0];

        const unsigned char* input = segmented->pixels;
        unsigned char* output = resultImage->pixels;
        int nPixels = segmented->width * segmented->height;

        int r, g, b;
        hsv2rgb(0, 255, 100, r, g, b);

        for (int i = 0; i < nPixels; i++)
        {
            if (input[i])
            {
                const int offset = 3 * i;
                output[offset + 0] = r;
                output[offset + 1] = g;
                output[offset + 2] = b;
            }
        }
        CByteImage segmentedObject(width, height, CByteImage::eGrayScale);
        ::ImageProcessor::ConvertImage(leftInput, &segmentedObject);
        ::ImageProcessor::And(segmented, &segmentedObject, &segmentedObject);
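
        // Template matching: each blob is cropped from the segmented gray image, rescaled to a
        // SegmentableBitmapWidth x SegmentableBitmapWidth binary mask, and compared against the
        // bit-packed view templates of the object class; the template with the most agreeing
        // pixels wins.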
        for (Object3DEntry& blob : blobList)
        {
            const MyRegion& region = blob.region_left;

            int length = SegmentableBitmapWidth;
            CByteImage objectGray(length, length, CByteImage::eGrayScale);

            CropImageToTemplate(&segmentedObject, region, &objectGray);

            ::ImageProcessor::ThresholdBinarize(&objectGray, &objectGray, 1);

            CByteImage* input = &objectGray;
            SegmentableTemplate const* bestTemplate = nullptr;  // assumed type name of the per-view template entries
            int bestOverlap = 0;

            for (std::uint32_t i = 0; i < templateHeader.templateCount; ++i)
            {
                int overlap = 0;
                int stride = input->width;

                for (int y = 0; y < (int) SegmentableBitmapWidth; ++y)
                {
                    // Each template row stores its pixels as bits in SegmentableBitmapWidth / 32
                    // consecutive 32-bit words (two words per row here).
                    std::uint32_t const* templateRow = template_.bitmap + y * 2;
                    unsigned char const* inputRow = input->pixels + y * stride;
                    for (int x = 0; x < (int)(SegmentableBitmapWidth / 32); ++x)
                    {
                        std::uint32_t compressed = templateRow[x];

                        for (int bit = 0; bit < 32; ++bit)
                        {
                            bool inputPixel = inputRow[x * 32 + bit] > 0;
                            bool templatePixel = (compressed & (1u << bit)) != 0;
                            if ((inputPixel && templatePixel) || (!inputPixel && !templatePixel))
                            {
                                ++overlap;
                            }
                        }
                    }
                }

                if (overlap > bestOverlap)
                {
                    bestOverlap = overlap;
                    bestTemplate = &template_;
                }
            }
690 if (!bestTemplate || overlapRatio < paramTemplateMatchThreshold)
692 ARMARX_VERBOSE <<
"Could not find a matching template (overlap ratio: " << overlapRatio <<
")";
698 Transformation3d& poseData = blob.pose;
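
            // Pose estimate: apply the rotation convention of the matched template (rotations
            // about X, Z, then Y) and correct it by the rotation that maps the canonical view
            // axis (0, 0, 1) onto the viewing direction towards the blob.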
            Mat3d temp;
            Math3d::SetRotationMatX(poseData.rotation, convention.x);
            Math3d::SetRotationMatZ(temp, convention.z);
            Math3d::MulMatMat(poseData.rotation, temp, poseData.rotation);
            Math3d::SetRotationMatY(temp, convention.y);
            Math3d::MulMatMat(poseData.rotation, temp, poseData.rotation);

            Vec3d position = blob.pose.translation;
            Mat3d orientation = blob.pose.rotation;
            Mat3d& resultOrientation = blob.pose.rotation;

            Vec3d u0 = { 0, 0, 1 };
            Vec3d u = position;
            Math3d::NormalizeVec(u);

            Mat3d correctiveRotation;
            Vec3d axis;
            Math3d::CrossProduct(u0, u, axis);
            float angle = Math3d::Angle(u0, u, axis);
            Math3d::SetRotationMatAxis(correctiveRotation, axis, angle);

            Math3d::MulMatMat(correctiveRotation, orientation, resultOrientation);

            orientation = resultOrientation;
            float resultCorrelation = 0.0f;
            float resultSizeRatio = 0.0f;
            CByteImage* initialMask = &objectGray;
            int nSize = region.nPixels;
            int nSimulatedSize = 0;

            Vec3d initialPosition = position;
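
            // Verification by simulation: render the model at the estimated pose into both
            // camera views with the offscreen OpenGL visualizer and re-detect it there with
            // the stereo object finder.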
            CStereoCalibration* stereoCalibration = getStereoCalibration();
            const int width = stereoCalibration->width;
            const int height = stereoCalibration->height;

            Transformation3d pose;
            Math3d::SetVec(pose.translation, position);
            Math3d::SetMat(pose.rotation, orientation);

            CByteImage image_left(width, height, CByteImage::eGrayScale);
            CByteImage image_right(width, height, CByteImage::eGrayScale);
            CByteImage* ppImages[2] = { &image_left, &image_right };
            m_pOpenGLVisualizer->Clear();
            m_pOpenGLVisualizer->ActivateShading(false);
            m_pOpenGLVisualizer->SetProjectionMatrix(stereoCalibration->GetLeftCalibration());

            CFloatMatrix* model = database[className].model.get();
            m_pOpenGLVisualizer->DrawObject(model, pose);
            m_pOpenGLVisualizer->GetImage(&image_left);
            ::ImageProcessor::FlipY(&image_left, &image_left);

            m_pOpenGLVisualizer->Clear();
            m_pOpenGLVisualizer->SetProjectionMatrix(stereoCalibration->GetRightCalibration());
            m_pOpenGLVisualizer->DrawObject(model, pose);
            m_pOpenGLVisualizer->GetImage(&image_right);
            ::ImageProcessor::FlipY(&image_right, &image_right);

            // Find the rendered object in the simulated stereo pair.
            m_pObjectFinderStereo->ClearObjectList();
            m_pObjectFinderStereo->FindObjectsInSegmentedImage(ppImages, 0, eRed, 100, false);
            m_pObjectFinderStereo->Finalize(0, 10000, false, eRed, 10, false);
            const Object3DList& simObjectList = m_pObjectFinderStereo->GetObject3DList();
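
            // If the rendered model is found exactly once in the simulated views, refine the
            // position by the observed offset and compute the size ratio and mask correlation
            // between the simulated and the real blob.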
            if (simObjectList.size() == 1)
            {
                Object3DEntry const& simEntry = simObjectList[0];

                Vec3d positionCorrection;
                Math3d::SubtractVecVec(position, simEntry.pose.translation, positionCorrection);
                Math3d::AddVecVec(initialPosition, positionCorrection, position);

                nSimulatedSize = simObjectList.at(0).region_left.nPixels;

                const int k = SegmentableBitmapWidth;
                CByteImage simulatedObject(k, k, CByteImage::eGrayScale);
                CropImageToTemplate(&image_left, simEntry.region_left, &simulatedObject);

                // Sum of absolute differences between the simulated and the observed mask.
                int sum = 0;
                for (std::size_t j = 0; j < SegmentableBitmapSize; j++)
                {
                    sum += std::abs(simulatedObject.pixels[j] - initialMask->pixels[j]);
                }

                float divisor = 255.0f * SegmentableBitmapSize;
                resultCorrelation = 1.0f - sum / divisor;
                resultSizeRatio = nSimulatedSize < nSize ? float(nSimulatedSize) / nSize
                                                         : float(nSize) / nSimulatedSize;
            }
            else
            {
                ARMARX_VERBOSE << "Could not find object '" << className << "' in simulated image";
            }
            // Accept the hypothesis only if the real and simulated blobs agree in size and shape.
            if (resultSizeRatio < paramSizeRatioThreshold)
            {
                ARMARX_VERBOSE << "Detected blob size is too different in real image compared to simulated image ("
                               << "real size: " << nSize << ", simulated size: " << nSimulatedSize
                               << ", ratio: " << resultSizeRatio << ")";
            }
            else if (resultCorrelation < paramCorrelationThreshold)
            {
                ARMARX_VERBOSE << "Detected blob does not correlate with the simulated blob ("
                               << "ratio: " << resultCorrelation << ")";
            }
            else
            {
                blob.sName = className;
                blob.quality = resultSizeRatio;
                blob.quality2 = resultCorrelation;
                blob.localizationValid = true;

                ARMARX_VERBOSE << "Found a match (overlap ratio: " << overlapRatio
                               << ", size ratio: " << resultSizeRatio
                               << ", correlation: " << resultCorrelation
                               << ", blob size: " << nSize << ")";

                classObjectList.push_back(blob);
            }
        }
        // Keep only the best match per class if SingleInstance is enabled.
        if (classObjectList.size() > 1 && paramSingleInstance)
        {
            std::sort(classObjectList.begin(), classObjectList.end(),
                      [](Object3DEntry const& left, Object3DEntry const& right)
                      {
                          return left.quality > right.quality;
                      });
            Object3DEntry& bestMatch = classObjectList.front();
            objectList.push_back(bestMatch);
        }
        else
        {
            objectList.insert(objectList.end(), classObjectList.begin(), classObjectList.end());
        }
    }
    if (objectList.size() == 0)
    {
        for (std::string const& className : objectClassNames)
        {
            // Resolve the readable color name of the requested class (diagnostics).
            std::string color;
            CColorParameterSet::Translate(database[className].color, color);
        }
    }

    visualizeResults(objectList, resultImages);

    const auto agentName = getProperty<std::string>("AgentName").getValue();
    memoryx::ObjectLocalizationResultList resultList;

    for (Object3DList::iterator iter = objectList.begin(); iter != objectList.end(); iter++)
    {
        float x = iter->pose.translation.x;
        float y = iter->pose.translation.y;
        float z = iter->pose.translation.z;

        // Per-object sequence counter for the debug observer.
        if (seq.count(iter->sName))
        {
            seq[iter->sName]++;
        }
        else
        {
            seq[iter->sName] = 0;
        }

        StringVariantBaseMap mapValues;
        mapValues["x"] = new Variant(x);
        mapValues["y"] = new Variant(y);
        mapValues["z"] = new Variant(z);
        mapValues["name"] = new Variant(iter->sName);
        mapValues["sequence"] = new Variant(seq[iter->sName]);
        mapValues["timestamp"] = new Variant(imageMetaInfo->timeProvided / 1000.0 / 1000.0);
        debugObserver->setDebugChannel("ObjectRecognition", mapValues);
        // Accept only results inside the configured bounding box.
        if (x > validResultBoundingBoxMin.x && y > validResultBoundingBoxMin.y && z > validResultBoundingBoxMin.z &&
            x < validResultBoundingBoxMax.x && y < validResultBoundingBoxMax.y && z < validResultBoundingBoxMax.z)
        {
            memoryx::ObjectLocalizationResult result;

            Eigen::Vector3f position(iter->pose.translation.x, iter->pose.translation.y, iter->pose.translation.z);
            Eigen::Matrix3f orientation;
            orientation << iter->pose.rotation.r1, iter->pose.rotation.r2, iter->pose.rotation.r3,
                           iter->pose.rotation.r4, iter->pose.rotation.r5, iter->pose.rotation.r6,
                           iter->pose.rotation.r7, iter->pose.rotation.r8, iter->pose.rotation.r9;

            result.positionNoise = calculateLocalizationUncertainty(iter->region_left.centroid, iter->region_right.centroid);

            result.recognitionCertainty = 0.5f + 0.5f * calculateRecognitionCertainty(iter->sName, *iter);
            result.objectClassName = iter->sName;

            resultList.push_back(result);
        }
        else
        {
            ARMARX_VERBOSE << "Refused unrealistic localization at position: " << x << " " << y << " " << z;
        }
    }

    ARMARX_VERBOSE << "Finished localizing " << objectClassNames.at(0);

    return resultList;
}
// Combines the two quality measures (size ratio and mask correlation) like independent
// evidence: certainty = q1*q2 / (q1*q2 + (1-q1)*(1-q2)). For example, quality = 0.8 and
// quality2 = 0.9 yield 0.72 / (0.72 + 0.02), i.e. about 0.97.
float SegmentableTemplateRecognition::calculateRecognitionCertainty(const std::string& objectClassName, const Object3DEntry& entry)
{
    float foundProb = entry.quality * entry.quality2;
    float notFoundProb = (1 - entry.quality) * (1 - entry.quality2);

    return foundProb / (foundProb + notFoundProb);
}
void SegmentableTemplateRecognition::visualizeResults(const Object3DList& objectList, CByteImage** resultImages)
{
    m_pOpenGLVisualizer->ActivateShading(true);
    glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);

    m_pOpenGLVisualizer->SetProjectionMatrix(getStereoCalibration()->GetRightCalibration());

    m_pOpenGLVisualizer->Clear();

    // Render all localized objects as wireframes into the right camera view.
    for (int i = 0; i < (int) objectList.size(); i++)
    {
        const Object3DEntry& entry = objectList.at(i);
        CFloatMatrix* model = database[entry.sName].model.get();
        m_pOpenGLVisualizer->DrawObject(model, entry.pose);
    }

    const int nImageIndex = 1;

    if (resultImages && resultImages[nImageIndex])
    {
        CByteImage tempImage(resultImages[nImageIndex]);
        m_pOpenGLVisualizer->GetImage(&tempImage);
        ::ImageProcessor::FlipY(&tempImage, &tempImage);
        const int nBytes = 3 * tempImage.width * tempImage.height;
        const unsigned char* pixels = tempImage.pixels;
        unsigned char* output = resultImages[nImageIndex]->pixels;

        // Blend the rendered wireframe into the result image.
        for (int i = 0; i < nBytes; i += 3)
        {
            const unsigned char g = pixels[i];