#include <Helpers/helpers.h>
#include <Image/ByteImage.h>
#include <Image/ImageProcessor.h>
#include <Image/PrimitivesDrawer.h>
#include <Math/FloatMatrix.h>

#include <boost/scope_exit.hpp>

#include <SimoxUtility/algorithm/string/string_tools.h>

#include <gui/GLContext.h>

#pragma GCC diagnostic ignored "-Wmissing-field-initializers"

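// Parameters for segmenting one color in HSV space. The hue/saturation/value
// bounds are taken from a CColorParameterSet entry; minRatio is the minimum
// fraction of region pixels that must match this color range.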
struct HSVSegmentationParams
{
    int hueValue = 0;
    int hueTolerance = 0;
    int saturationMin = 0;
    int saturationMax = 0;
    int valueMin = 0;
    int valueMax = 0;

    float minRatio = 0.0f;
};

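// Reads the six stored values (hue, hue tolerance, saturation min/max,
// value min/max) for the given color from the color parameter set.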
HSVSegmentationParams
getSegmentationParamsFromColor(CColorParameterSet const& colorParameters, ObjectColor color)
{
    HSVSegmentationParams result = {};
    int const* colorArray = colorParameters.GetColorParameters(color);

    result.hueValue = colorArray[0];
    result.hueTolerance = colorArray[1];
    result.saturationMin = colorArray[2];
    result.saturationMax = colorArray[3];
    result.valueMin = colorArray[4];
    result.valueMax = colorArray[5];

    return result;
}

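// Helper that segments an RGB input image by one or more HSV ranges and
// extracts connected regions (blobs) from the combined segmentation mask.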
struct BlobDetector
{
    BlobDetector(int width, int height) :
        segmented(width, height, CByteImage::eGrayScale),
        temp(width, height, CByteImage::eGrayScale),
        hsv(width, height, CByteImage::eRGB24)
    {
    }

    BlobDetector(BlobDetector const&) = delete;
    BlobDetector& operator=(BlobDetector const&) = delete;

    Object2DList
    findColoredRegions(CByteImage* inputImage,
                       int minPixelsPerRegion,
                       std::vector<HSVSegmentationParams>& segmentationParams,
                       int dilationCount = 2)
    {
        ::ImageProcessor::CalculateHSVImage(inputImage, &hsv);

        // Combine the masks of all requested color ranges.
        ::ImageProcessor::Zero(&segmented);
        for (HSVSegmentationParams const& segParam : segmentationParams)
        {
            ::ImageProcessor::FilterHSV(&hsv,
                                        &temp,
                                        segParam.hueValue,
                                        segParam.hueTolerance,
                                        segParam.saturationMin,
                                        segParam.saturationMax,
                                        segParam.valueMin,
                                        segParam.valueMax);
            ::ImageProcessor::Or(&segmented, &temp, &segmented);
        }

        // Morphological opening and closing to remove noise and fill holes.
        for (int i = 0; i < dilationCount / 2; ++i)
        {
            ::ImageProcessor::Erode(&segmented, &temp);
            ::ImageProcessor::Dilate(&temp, &segmented);
        }
        for (int i = 0; i < dilationCount / 2; ++i)
        {
            ::ImageProcessor::Dilate(&segmented, &temp);
            ::ImageProcessor::Erode(&temp, &segmented);
        }
        if (dilationCount == 0)
        {
            ::ImageProcessor::Dilate(&segmented, &temp);
            ::ImageProcessor::Dilate(&temp, &segmented);
            ::ImageProcessor::Erode(&segmented, &temp);
            ::ImageProcessor::CopyImage(&temp, &segmented);
        }

        RegionList regions;
        ::ImageProcessor::FindRegions(&segmented, regions, minPixelsPerRegion);

        Object2DList objects;
        objects.reserve(regions.size());

        for (MyRegion const& region : regions)
        {
            Object2DEntry object;

            object.region = region;

            // Require a minimum fraction of pixels of each color range that
            // specifies a minRatio (e.g. the yellow share of a yellow object).
            bool colorRatiosFulfilled = true;
            for (HSVSegmentationParams& seg : segmentationParams)
            {
                if (seg.minRatio > 0.0f)
                {
                    int min_hue = (int)seg.hueValue - (int)seg.hueTolerance;
                    int max_hue = (int)seg.hueValue + (int)seg.hueTolerance;
                    int count = 0;

                    for (int y = region.min_y; y < region.max_y; ++y)
                    {
                        for (int x = region.min_x; x < region.max_x; ++x)
                        {
                            int pixelIndex = y * hsv.width + x;
                            int h = hsv.pixels[pixelIndex * 3 + 0];
                            int s = hsv.pixels[pixelIndex * 3 + 1];
                            int v = hsv.pixels[pixelIndex * 3 + 2];
                            if (seg.saturationMin <= s && s <= seg.saturationMax &&
                                seg.valueMin <= v && v <= seg.valueMax)
                            {
                                if (max_hue >= min_hue)
                                {
                                    if (min_hue <= h && h <= max_hue)
                                    {
                                        ++count;
                                    }
                                }
                                else
                                {
                                    // Hue interval wraps around the end of the hue range.
                                    if (h <= max_hue || h >= min_hue)
                                    {
                                        ++count;
                                    }
                                }
                            }
                        }
                    }

                    float ratio = float(count) / region.nPixels;
                    if (ratio < seg.minRatio)
                    {
                        colorRatiosFulfilled = false;
                        break;
                    }
                }
            }

            if (!colorRatiosFulfilled)
            {
                continue;
            }

            object.type = eCompactObject;
            object.sName = "CompactObject";
            object.id = objectId++;

            objects.push_back(object);
        }

        return objects;
    }

    CByteImage segmented;
    CByteImage temp;
    CByteImage hsv;

    int objectId = 0;
};

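// Crops the given region from the input image and scales it into the square
// output template image, preserving the aspect ratio and padding the
// remainder with background.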
void
CropImageToTemplate(CByteImage* inputImage, MyRegion const& region, CByteImage* outputImage)
{
    int width = region.max_x - region.min_x + 1;
    int height = region.max_y - region.min_y + 1;
    int k = outputImage->width;

    if (width >= height)
    {
        // Scale the region so that its width matches the template size and
        // fill the remaining rows at the bottom.
        int new_height = int((k * height) / float(width) + 0.5f);
        CByteImage temp_image(k, new_height, CByteImage::eGrayScale);

        ::ImageProcessor::Resize(inputImage, &temp_image, &region);

        const int nPixels = k * new_height;
        unsigned char* output = outputImage->pixels;

        memcpy(output, temp_image.pixels, nPixels);

        const int nTotalPixels = k * k;

        for (int i = nPixels; i < nTotalPixels; i++)
        {
            output[i] = 0;
        }
    }
    else
    {
        // Scale the region so that its height matches the template size and
        // fill the remaining columns on the right.
        int new_width = int((k * width) / float(height) + 0.5f);
        CByteImage temp_image(new_width, k, CByteImage::eGrayScale);

        ::ImageProcessor::Resize(inputImage, &temp_image, &region);

        const unsigned char* input = temp_image.pixels;
        unsigned char* output = outputImage->pixels;

        for (int i = 0, offset = 0, offset2 = 0; i < k; i++)
        {
            int j;
            for (j = 0; j < new_width; j++, offset++, offset2++)
            {
                output[offset] = input[offset2];
            }
            for (j = new_width; j < k; j++, offset++)
            {
                output[offset] = 0;
            }
        }
    }
}

SegmentableTemplateRecognition::SegmentableTemplateRecognition()
{
}

SegmentableTemplateRecognition::~SegmentableTemplateRecognition()
{
}

void
SegmentableTemplateRecognition::onInitObjectLocalizerProcessor()
{
    offeringTopic(getProperty<std::string>("DebugObserverName").getValue());

    paramMinPixelsPerRegion = getProperty<int>("MinPixelsPerRegion").getValue();
}

void
SegmentableTemplateRecognition::onExitObjectLocalizerProcessor()
{
}

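// Recursively collects all paths below the given directory.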
std::vector<std::string>
{
    std::vector<std::string> result;

    namespace fs = std::filesystem;

    fs::path apk_path(path);
    fs::recursive_directory_iterator end;

    for (fs::recursive_directory_iterator i(apk_path); i != end; ++i)
    {
        const fs::path cp = (*i);
        result.push_back(cp.string());
    }

    return result;
}

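// Loads the localization properties, the color parameter set and the
// segmentable template files (with their .mat models and .color files), and
// sets up the OpenGL context, visualizer and stereo object finder.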
bool
SegmentableTemplateRecognition::initRecognizer()
{
    Eigen::Vector3f minPoint = getProperty<Eigen::Vector3f>("MinPoint").getValue();
    Eigen::Vector3f maxPoint = getProperty<Eigen::Vector3f>("MaxPoint").getValue();

    Math3d::SetVec(validResultBoundingBoxMin, minPoint(0), minPoint(1), minPoint(2));
    Math3d::SetVec(validResultBoundingBoxMax, maxPoint(0), maxPoint(1), maxPoint(2));

    maxEpipolarDistance = getProperty<float>("MaxEpipolarDistance").getValue();
    std::string colorParemeterFilename = getProperty<std::string>("ColorParameterFile").getValue();

    paramTemplateMatchThreshold = getProperty<float>("TemplateMatchThreshold");
    paramSizeRatioThreshold = getProperty<float>("SizeRatioThreshold");
    paramCorrelationThreshold = getProperty<float>("CorrelationThreshold");
    paramSingleInstance = getProperty<bool>("SingleInstance");

        ARMARX_ERROR << "Could not find color parameter file in ArmarXDataPath: "
                     << colorParemeterFilename;

    templatePath = getProperty<std::string>("TemplatePath").getValue();

        ARMARX_ERROR << "Could not find template path file in ArmarXDataPath: " << templatePath;

    for (std::string const& templateFile : templateFiles)
    {
        ARMARX_INFO << "Loading template: " << templateFile;

        FILE* file = fopen(templateFile.c_str(), "rb");
        BOOST_SCOPE_EXIT((file))
        {
            fclose(file);
        }
        BOOST_SCOPE_EXIT_END;

        fseek(file, 0L, SEEK_END);
        long fileSize = ftell(file);
        fseek(file, 0L, SEEK_SET);

        void* memory = operator new(fileSize);

            ARMARX_WARNING << "File is too short for template header: " << templateFile
                           << ", but got size: " << fileSize << ")";

        long read = fread(memory, 1, fileSize, file);
        if (read != fileSize)
        {
            ARMARX_WARNING << "Could not read template file completely (expected size: " << fileSize
                           << ", but read: " << read << ")";
        }

        std::string objectName = entry.data->name;

        std::string modelFilename = templatePath + "/" + objectName + ".mat";
        entry.model.reset(new CFloatMatrix());
        if (!entry.model->LoadFromFile(modelFilename.c_str()))
        {
            continue;
        }

        std::string colorString;

        std::string colorFilename = templatePath + "/" + objectName + ".color";
        std::ifstream input(colorFilename.c_str());
        if (!(input >> colorString))
        {
            continue;
        }

        ObjectColor color = CColorParameterSet::Translate(colorString.c_str());

        ARMARX_INFO << "Inserting template into database: " << objectName
                    << ", template count: " << entry.data->templateCount
                    << ", color: " << colorString << " (" << entry.color << ")";
        database.emplace(objectName, std::move(entry));
    }

    if (!colorParameters.LoadFromFile(colorParemeterFilename.c_str()))
    {
        throw armarx::LocalException("Could not read color parameter file.");
    }

    ImageFormatInfo imageFormat = getImageFormat();
    contextGL.reset(new CGLContext());
    contextGL->CreateContext(imageFormat.dimension.width, imageFormat.dimension.height);
    contextGL->MakeCurrent();

    m_pOpenGLVisualizer.reset(new COpenGLVisualizer());
    m_pOpenGLVisualizer->InitByCalibration(getStereoCalibration()->GetRightCalibration());

    m_pObjectFinderStereo.reset(new CObjectFinderStereo());
    m_pObjectFinderStereo->SetColorParameterSet(&colorParameters);
    m_pObjectFinderStereo->Init(getStereoCalibration());

    ARMARX_INFO << "SegmentableTemplateRecognition is initialized";

    return true;
}

    std::string entityName = objectClassEntity->getName();

    auto iter = database.find(entityName);
    return iter != database.end();
}

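// Localizes the requested object classes in the stereo image pair: HSV blob
// segmentation, stereo matching of the blobs, template matching, and a
// render-based verification of pose, size and correlation.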
memoryx::ObjectLocalizationResultList
SegmentableTemplateRecognition::localizeObjectClasses(
    const std::vector<std::string>& objectClassNames,
    CByteImage** cameraImages,
    armarx::MetaInfoSizeBasePtr imageMetaInfo,
    CByteImage** resultImages)
{
    if (objectClassNames.size() < 1)
    {
        ARMARX_WARNING << "objectClassNames.size() = " << objectClassNames.size()
                       << ", something is wrong here! ";
    }

    memoryx::ObjectLocalizationResultList resultList;

    contextGL->MakeCurrent();

    Object3DList objectList;

    for (std::string const& className : objectClassNames)
    {
        Object3DList classObjectList;

        int dilationCount = 2;

        auto classEntryIter = database.find(className);
        if (classEntryIter == database.end())
        {
            ARMARX_WARNING << "Could not find database entry for: " << className;
            continue;
        }
        auto const& classEntry = classEntryIter->second;

        ObjectColor color = classEntry.color;

        std::vector<HSVSegmentationParams> hsvSegmentationParams;

        if (color == eYellow3)
        {
            // Combine red and yellow segmentation and require at least 20 %
            // of the blob pixels to be yellow.
            hsvSegmentationParams.push_back(getSegmentationParamsFromColor(colorParameters, eRed));
            hsvSegmentationParams.push_back(
                getSegmentationParamsFromColor(colorParameters, eYellow));
            hsvSegmentationParams.back().minRatio = 0.2f;
        }
        else
        {
            HSVSegmentationParams hsvSegmentationParam =
                getSegmentationParamsFromColor(colorParameters, color);
            hsvSegmentationParams.push_back(hsvSegmentationParam);
        }

        CByteImage* leftInput = cameraImages[0];
        CByteImage* rightInput = cameraImages[1];
        int width = leftInput->width;
        int height = leftInput->height;

        CByteImage grayImageBig(width, height, CByteImage::eGrayScale);
        ::ImageProcessor::ConvertImage(leftInput, &grayImageBig, true);

        BlobDetector leftBlobDetector(width, height);
        BlobDetector rightBlobDetector(width, height);

        Object2DList leftObjects = leftBlobDetector.findColoredRegions(
            leftInput, paramMinPixelsPerRegion, hsvSegmentationParams, dilationCount);
        Object2DList rightObjects = rightBlobDetector.findColoredRegions(
            rightInput, paramMinPixelsPerRegion, hsvSegmentationParams, dilationCount);

        for (Object2DEntry const& entry : leftObjects)
        {
            ::PrimitivesDrawer::DrawRegion(resultImages[0], entry.region, 255, 0, 0, 2);
        }

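        // Match each left blob with the most plausible right blob (similar
        // size and aspect ratio, small epipolar distance, plausible depth)
        // and triangulate a 3D position for each accepted pair.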
        Object3DList blobList;

        float maxEpipolarDistance = this->maxEpipolarDistance;
        CStereoCalibration* stereoCalibration = getStereoCalibration();
        bool inputImagesAreRectified = false;
        bool useDistortionParameters = !getImagesAreUndistorted();
        float minZDistance = 500;
        float maxZDistance = 3000;

        for (Object2DEntry& leftObject : leftObjects)
        {
            Object2DEntry const* bestMatch = nullptr;
            float bestDiff = maxEpipolarDistance;
            for (Object2DEntry& rightObject : rightObjects)
            {
                // Similarity of the two regions in pixel count and aspect ratio.
                float pixelRatio =
                    leftObject.region.nPixels < rightObject.region.nPixels
                        ? (float)leftObject.region.nPixels / rightObject.region.nPixels
                        : (float)rightObject.region.nPixels / leftObject.region.nPixels;
                float aspectRatioRatio =
                    leftObject.region.ratio < rightObject.region.ratio
                        ? (float)leftObject.region.ratio / rightObject.region.ratio
                        : (float)rightObject.region.ratio / leftObject.region.ratio;

                Vec2d leftCentroid = leftObject.region.centroid;
                Vec2d rightCentroid = rightObject.region.centroid;

                float yDiff = 0.0f;
                if (inputImagesAreRectified)
                {
                    yDiff = fabsf(leftCentroid.y - rightCentroid.y);
                }
                else
                {
                    yDiff = stereoCalibration->CalculateEpipolarLineInLeftImageDistance(
                        leftCentroid, rightCentroid);
                }

                Vec3d position;
                stereoCalibration->Calculate3DPoint(leftCentroid,
                                                    rightCentroid,
                                                    position,
                                                    inputImagesAreRectified,
                                                    useDistortionParameters);

                if (pixelRatio > 0.5f && aspectRatioRatio > 0.5f &&
                    yDiff <= maxEpipolarDistance && position.z >= minZDistance &&
                    position.z <= maxZDistance && yDiff <= bestDiff)
                {
                    bestDiff = yDiff;
                    bestMatch = &rightObject;
                }
            }

            if (bestMatch != nullptr)
            {
                Object2DEntry const& rightObject = *bestMatch;

                Object3DEntry entry;
                entry.region_left = leftObject.region;
                entry.region_right = rightObject.region;
                entry.region_id_left = leftObject.id;
                entry.region_id_right = rightObject.id;

                stereoCalibration->Calculate3DPoint(entry.region_left.centroid,
                                                    entry.region_right.centroid,
                                                    entry.pose.translation,
                                                    inputImagesAreRectified,
                                                    useDistortionParameters);

                entry.world_point = entry.pose.translation;

                blobList.push_back(entry);
            }
        }

        // Overlay the segmentation mask onto the left result image.
        CByteImage* segmented = &leftBlobDetector.segmented;
        CByteImage* resultImage = resultImages[0];

        const unsigned char* input = segmented->pixels;
        unsigned char* output = resultImage->pixels;
        int nPixels = segmented->width * segmented->height;

        hsv2rgb(0, 255, 100, r, g, b);

        for (int i = 0; i < nPixels; i++)
        {
            if (input[i])
            {
                const int offset = 3 * i;
                output[offset + 0] = r;
                output[offset + 1] = g;
                output[offset + 2] = b;
            }
        }

        CByteImage segmentedObject(width, height, CByteImage::eGrayScale);
        ::ImageProcessor::ConvertImage(leftInput, &segmentedObject);
        ::ImageProcessor::And(segmented, &segmentedObject, &segmentedObject);

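        // Crop and binarize each blob and match it against the compressed
        // binary templates of this object class; the template with the largest
        // pixel-wise overlap wins.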
        for (Object3DEntry& blob : blobList)
        {
            const MyRegion& region = blob.region_left;

            int length = SegmentableBitmapWidth;
            CByteImage objectGray(length, length, CByteImage::eGrayScale);

            CropImageToTemplate(&segmentedObject, region, &objectGray);

            ::ImageProcessor::ThresholdBinarize(&objectGray, &objectGray, 1);

            CByteImage* input = &objectGray;

            for (std::uint32_t i = 0; i < templateHeader.templateCount; ++i)
            {
                int stride = input->width;

                for (int y = 0; y < (int)SegmentableBitmapWidth; ++y)
                {
                    std::uint32_t const* templateRow = template_.bitmap + y * 2;
                    unsigned char const* inputRow = input->pixels + y * stride;
                    for (int x = 0; x < (int)(SegmentableBitmapWidth / 32); ++x)
                    {
                        std::uint32_t compressed = templateRow[x];

                        // Count pixels where the binarized blob and the
                        // bit-packed template agree.
                        for (int i = 0; i < 32; ++i)
                        {
                            bool inputPixel = inputRow[x * 32 + i] > 0;
                            bool templatePixel = (compressed & (1 << i)) > 0;
                            if ((inputPixel && templatePixel) ||
                                (!inputPixel && !templatePixel))
                            {
                                ++overlap;
                            }
                        }
                    }
                }

                if (overlap > bestOverlap)
                {
                    bestOverlap = overlap;
                    bestTemplate = &template_;
                }
            }

            float overlapRatio =
                1.0f * bestOverlap / (SegmentableBitmapWidth * SegmentableBitmapWidth);

            if (!bestTemplate || overlapRatio < paramTemplateMatchThreshold)
            {
                ARMARX_VERBOSE
                    << "Could not find a matching template (overlap ratio: " << overlapRatio
                    << ")";
                continue;
            }

            Transformation3d& poseData = blob.pose;

            // Build the blob orientation from the convention angles (X, then Z, then Y).
            Math3d::SetRotationMatX(poseData.rotation, convention.x);
            Math3d::SetRotationMatZ(temp, convention.z);
            Math3d::MulMatMat(poseData.rotation, temp, poseData.rotation);
            Math3d::SetRotationMatY(temp, convention.y);
            Math3d::MulMatMat(poseData.rotation, temp, poseData.rotation);

            Vec3d position = blob.pose.translation;
            Mat3d orientation = blob.pose.rotation;
            Mat3d& resultOrientation = blob.pose.rotation;

            // Corrective rotation that maps the reference axis u0 = (0, 0, 1)
            // onto the (normalized) direction u.
            Vec3d u0 = {0, 0, 1};
            Math3d::NormalizeVec(u);

            Mat3d correctiveRotation;
            Math3d::CrossProduct(u0, u, axis);
            float angle = Math3d::Angle(u0, u, axis);
            Math3d::SetRotationMatAxis(correctiveRotation, axis, angle);

            Math3d::MulMatMat(correctiveRotation, orientation, resultOrientation);

            orientation = resultOrientation;

            float resultCorrelation = 0.0f;
            float resultSizeRatio = 0.0f;
            CByteImage* initialMask = &objectGray;
            int nSize = region.nPixels;
            int nSimulatedSize = 0;

            Vec3d initialPosition = position;

            // Render the matched model at the estimated pose and compare the
            // simulated views with the observed blob.
            CStereoCalibration* stereoCalibration = getStereoCalibration();
            const int width = stereoCalibration->width;
            const int height = stereoCalibration->height;

            Transformation3d pose;
            Math3d::SetVec(pose.translation, position);
            Math3d::SetMat(pose.rotation, orientation);

            CByteImage image_left(width, height, CByteImage::eGrayScale);
            CByteImage image_right(width, height, CByteImage::eGrayScale);
            CByteImage* ppImages[2] = {&image_left, &image_right};

            m_pOpenGLVisualizer->Clear();
            m_pOpenGLVisualizer->ActivateShading(false);
            m_pOpenGLVisualizer->SetProjectionMatrix(stereoCalibration->GetLeftCalibration());

            CFloatMatrix* model = database[className].model.get();
            m_pOpenGLVisualizer->DrawObject(model, pose);
            m_pOpenGLVisualizer->GetImage(&image_left);
            ::ImageProcessor::FlipY(&image_left, &image_left);

            m_pOpenGLVisualizer->Clear();
            m_pOpenGLVisualizer->SetProjectionMatrix(stereoCalibration->GetRightCalibration());

            m_pOpenGLVisualizer->DrawObject(model, pose);
            m_pOpenGLVisualizer->GetImage(&image_right);
            ::ImageProcessor::FlipY(&image_right, &image_right);

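            // Detect the rendered object in the simulated stereo pair to
            // refine the position estimate and to compare blob size and
            // appearance with the observed object.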
            m_pObjectFinderStereo->ClearObjectList();
            m_pObjectFinderStereo->FindObjectsInSegmentedImage(ppImages, 0, eRed, 100, false);
            m_pObjectFinderStereo->Finalize(0, 10000, false, eRed, 10, false);
            const Object3DList& objectList = m_pObjectFinderStereo->GetObject3DList();

            if (objectList.size() == 1)
            {
                Object3DEntry const& simEntry = objectList[0];

                // Shift the position estimate by the offset between the
                // triangulated position and the rendered object.
                Vec3d positionCorrection;
                Math3d::SubtractVecVec(position, simEntry.pose.translation, positionCorrection);
                Math3d::AddVecVec(initialPosition, positionCorrection, position);

                nSimulatedSize = objectList.at(0).region_left.nPixels;

                const int k = SegmentableBitmapWidth;
                CByteImage simulatedObject(k, k, CByteImage::eGrayScale);
                CropImageToTemplate(&image_left, simEntry.region_left, &simulatedObject);

                // Pixel-wise difference between the simulated view and the
                // observed (binarized) blob.
                int sum = 0;
                for (std::size_t j = 0; j < SegmentableBitmapSize; j++)
                {
                    sum += abs(simulatedObject.pixels[j] - initialMask->pixels[j]);
                }

                float divisor = 255.0f * SegmentableBitmapSize;
                resultCorrelation = 1.0f - sum / divisor;
                resultSizeRatio = nSimulatedSize < nSize
                                      ? float(nSimulatedSize) / nSize
                                      : float(nSize) / nSimulatedSize;
            }
            else
            {
                    << "' in simulated image";
            }

            if (resultSizeRatio < paramSizeRatioThreshold)
            {
                ARMARX_VERBOSE << "Detected blob size is too different in real image "
                                  "compared to simulated image ("
                               << "real size: " << nSize
                               << ", simulated size: " << nSimulatedSize
                               << ", ratio: " << resultSizeRatio << ")";
            }
            else if (resultCorrelation < paramCorrelationThreshold)
            {
                ARMARX_VERBOSE
                    << "Detected blob does not correlate with the simulated blob ("
                    << "ratio: " << resultCorrelation << ")";
            }
            else
            {
                blob.sName = className;

                blob.quality = resultSizeRatio;
                blob.quality2 = resultCorrelation;
                blob.localizationValid = true;

                ARMARX_VERBOSE << "Found a match (overlap ratio: " << overlapRatio
                               << ", size ratio: " << resultSizeRatio
                               << ", correlation: " << resultCorrelation
                               << ", blob size: " << nSize << ")";

                classObjectList.push_back(blob);
            }
        }

        if (classObjectList.size() > 1 && paramSingleInstance)
        {
            // Keep only the best match for this class.
            std::sort(classObjectList.begin(),
                      classObjectList.end(),
                      [](Object3DEntry const& left, Object3DEntry const& right)
                      { return left.quality > right.quality; });
            Object3DEntry& bestMatch = classObjectList.front();
            objectList.push_back(bestMatch);
        }
        else
        {
            objectList.insert(objectList.end(), classObjectList.begin(), classObjectList.end());
        }
    }

    if (objectList.size() == 0)
    {
        for (std::string const& className : objectClassNames)
        {
            CColorParameterSet::Translate(database[className].color, color);
        }
    }

    visualizeResults(objectList, resultImages);

    const auto agentName = getProperty<std::string>("AgentName").getValue();

    memoryx::ObjectLocalizationResultList resultList;

    for (Object3DList::iterator iter = objectList.begin(); iter != objectList.end(); iter++)
    {
        float x = iter->pose.translation.x;
        float y = iter->pose.translation.y;
        float z = iter->pose.translation.z;

        // Per-object sequence counter for the debug observer.
        if (seq.count(iter->sName))
        {
            seq[iter->sName]++;
        }
        else
        {
            seq[iter->sName] = 0;
        }

        mapValues["y"] = new Variant(y);
        mapValues["z"] = new Variant(z);
        mapValues["name"] = new Variant(iter->sName);
        mapValues["sequence"] = new Variant(seq[iter->sName]);
        mapValues["timestamp"] = new Variant(imageMetaInfo->timeProvided / 1000.0 / 1000.0);
        debugObserver->setDebugChannel("ObjectRecognition", mapValues);

        // Only accept results inside the configured bounding box.
        if (x > validResultBoundingBoxMin.x && y > validResultBoundingBoxMin.y &&
            z > validResultBoundingBoxMin.z && x < validResultBoundingBoxMax.x &&
            y < validResultBoundingBoxMax.y && z < validResultBoundingBoxMax.z)
        {
            memoryx::ObjectLocalizationResult result;

            Eigen::Vector3f position(
                iter->pose.translation.x, iter->pose.translation.y, iter->pose.translation.z);
            Eigen::Matrix3f orientation;
            orientation << iter->pose.rotation.r1, iter->pose.rotation.r2, iter->pose.rotation.r3,
                iter->pose.rotation.r4, iter->pose.rotation.r5, iter->pose.rotation.r6,
                iter->pose.rotation.r7, iter->pose.rotation.r8, iter->pose.rotation.r9;

            result.positionNoise = calculateLocalizationUncertainty(iter->region_left.centroid,
                                                                    iter->region_right.centroid);

            result.recognitionCertainty =
                0.5f + 0.5f * calculateRecognitionCertainty(iter->sName, *iter);
            result.objectClassName = iter->sName;

            resultList.push_back(result);
        }
        else
        {
            ARMARX_VERBOSE << "Refused unrealistic localization at position: " << x << " " << y
                           << " " << z;
        }
    }

    ARMARX_VERBOSE << "Finished localizing " << objectClassNames.at(0);

    return resultList;
}

float
SegmentableTemplateRecognition::calculateRecognitionCertainty(const std::string& objectClassName,
                                                              const Object3DEntry& entry)
{
    float foundProb = entry.quality * entry.quality2;
    float notFoundProb = (1 - entry.quality) * (1 - entry.quality2);

    return foundProb / (foundProb + notFoundProb);
}

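// Draws the localized models as wireframe overlays into the right result
// image.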
void
SegmentableTemplateRecognition::visualizeResults(const Object3DList& objectList,
                                                 CByteImage** resultImages)
{
    m_pOpenGLVisualizer->ActivateShading(true);
    glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);

    m_pOpenGLVisualizer->SetProjectionMatrix(getStereoCalibration()->GetRightCalibration());

    m_pOpenGLVisualizer->Clear();

    for (int i = 0; i < (int)objectList.size(); i++)
    {
        const Object3DEntry& entry = objectList.at(i);
        CFloatMatrix* model = database[entry.sName].model.get();
        m_pOpenGLVisualizer->DrawObject(model, entry.pose);
    }

    const int nImageIndex = 1;

    if (resultImages && resultImages[nImageIndex])
    {
        CByteImage tempImage(resultImages[nImageIndex]);
        m_pOpenGLVisualizer->GetImage(&tempImage);
        ::ImageProcessor::FlipY(&tempImage, &tempImage);
        const int nBytes = 3 * tempImage.width * tempImage.height;
        const unsigned char* pixels = tempImage.pixels;
        unsigned char* output = resultImages[nImageIndex]->pixels;

        for (int i = 0; i < nBytes; i += 3)
        {
            const unsigned char g = pixels[i];