37 #include <RobotAPI/interface/visualization/DebugDrawerInterface.h>
42 #include <MemoryX/interface/observers/ObjectMemoryObserverInterface.h>
46 #include <VisionX/interface/components/ObjectLearningByPushing.h>
47 #include <VisionX/interface/components/PointCloudAndImageAndCalibrationProviderInterface.h>
48 #include <VisionX/interface/components/RGBDImageProvider.h>
52 #include <Math/Math3d.h>
53 #include <Math/Math2d.h>
65 class CStereoCalibration;
70 class CommunicationWithRobotArmarX;
// ArmarX component properties (all optional, with defaults).
// NOTE(review): these calls are presumably inside a PropertyDefinitions
// constructor that starts above this chunk — confirm in the full file.

// Ice adapter name of the component that provides camera images.
defineOptionalProperty<std::string>(
    "ImageProviderAdapterName",
    "Armar3ImageProvider",
    "Ice Adapter name of the image provider");

// Ice adapter name of the component that provides point clouds.
defineOptionalProperty<std::string>(
    "PointCloudProviderAdapterName",
    "PointCloudProvider",
    "Ice Adapter name of the point cloud provider");

// Ice adapter name of the robot state component (kinematic state source).
defineOptionalProperty<std::string>(
    "RobotStateProxyName",
    "RobotStateComponent",
    "Ice Adapter name of the robot state proxy");

// Robot-state frame of the primary (left eye) camera.
defineOptionalProperty<std::string>(
    "CameraFrameName",
    "EyeLeftCamera",
    "Name of the robot state frame of the primary camera");

// Topic on which debug-drawer visualization updates are published.
defineOptionalProperty<std::string>(
    "DebugDrawerTopicName",
    "DebugDrawerUpdates",
    "name of DebugDrawer topic");
113 virtual public visionx::ObjectLearningByPushingInterface
122 return "ObjectLearningByPushing";
// --- Remote interface (visionx::ObjectLearningByPushingInterface), callable via Ice ---

// Returns the current scene points as a point list.
// NOTE(review): exact frame/content of the points is not visible here — confirm.
visionx::types::PointList
getScenePoints(const ::Ice::Current& c = Ice::emptyCurrent) override;

// Returns the upwards direction vector (frame not determinable from this chunk).
armarx::Vector3BasePtr
getUpwardsVector(const ::Ice::Current& c = Ice::emptyCurrent) override;

// Triggers recognition of the object with the given name.
void recognizeObject(
    const std::string& objectName,
    const ::Ice::Current& c = Ice::emptyCurrent) override;
// --- Hypothesis generation / validation pipeline (stereo image tuple in, hypotheses out) ---

// Creates the initial set of object hypotheses from a grey/color stereo image tuple.
void CreateInitialObjectHypothesesInternal(CByteImage* pImageGreyLeft, CByteImage* pImageGreyRight,
                                           CByteImage* pImageColorLeft, CByteImage* pImageColorRight,
                                           int nImageNumber);

// Validates the initial hypotheses against a new image tuple (e.g. after a push);
// return value presumably signals whether any hypothesis was confirmed — TODO confirm.
bool ValidateInitialObjectHypothesesInternal(CByteImage* pImageGreyLeft, CByteImage* pImageGreyRight,
                                             CByteImage* pImageColorLeft, CByteImage* pImageColorRight,
                                             int nImageNumber);

// Re-validates already-confirmed hypotheses on a new image tuple.
bool RevalidateConfirmedObjectHypothesesInternal(CByteImage* pImageGreyLeft, CByteImage* pImageGreyRight,
                                                 CByteImage* pImageColorLeft, CByteImage* pImageColorRight,
                                                 int nImageNumber);

// Saves the color histogram of the confirmed hypothesis under the given object name.
bool SaveHistogramOfConfirmedHypothesis(std::string sObjectName, int nDescriptorNumber = 0);

// Runs recognition on the left color image; an empty objectName presumably
// means "match against all known objects" — TODO confirm against the .cpp.
void RecognizeHypotheses(CByteImage* pImageColorLeft, const std::string objectName = "");
210 void VisualizeHypotheses(CByteImage* pImageGreyLeft, CByteImage* pImageGreyRight, CByteImage* pImageColorLeft, CByteImage* pImageColorRight,
bool bShowConfirmedHypotheses,
211 CByteImage* pResultImageLeft = NULL, CByteImage* pResultImageRight = NULL,
bool bMakeScreenshot =
false);
// Redraws the hypothesis visualization; bConfirmedHypotheses selects whether
// the confirmed or the unconfirmed set is shown — TODO confirm in the .cpp.
// Fixed: the declaration was missing its terminating semicolon as shown here.
void RefreshVisualization(bool bConfirmedHypotheses);
// Pulls fresh data (images / robot state) from the connected remote proxies.
void UpdateDataFromRobot();

// Applies a head-motion correction given as rotation + translation;
// presumably compensates ego-motion of the stored 3D points — TODO confirm.
void ApplyHeadMotionTransformation(Mat3d mRotation, Vec3d vTranslation);

// Sets the head-to-platform transformation; when bResetOldTransformation is
// true the previously stored transformation is discarded as well.
void SetHeadToPlatformTransformation(Vec3d vTranslation, Mat3d mRotation,
                                     bool bResetOldTransformation = false);

// Returns the number of not-yet-confirmed hypotheses (size of m_pObjectHypotheses).
// NOTE(review): the braces of this inline body lie outside the visible chunk.
int GetNumberOfNonconfirmedHypotheses()
return m_pObjectHypotheses->GetSize();
// Returns the position of the selected object hypothesis and writes its extent
// to fObjectExtent; with bPreferCentralObject the hypothesis closest to the
// image center is presumably preferred — TODO confirm.
Vec3d GetObjectPosition(float& fObjectExtent, bool bPreferCentralObject = true);

// Writes the 2D (image-coordinate) bounding box of the current hypothesis.
void GetHypothesisBoundingBox(int& nMinX, int& nMaxX, int& nMinY, int& nMaxY);

// Writes the principal axes, eigenvalues and maximal extent of the hypothesis.
// NOTE(review): this declaration continues beyond the visible chunk
// (trailing comma) — further parameters are declared on the following lines.
void GetHypothesisPrincipalAxesAndBoundingBox(Vec3d& vPrincipalAxis1,
                                              Vec3d& vPrincipalAxis2,
                                              Vec3d& vPrincipalAxis3,
                                              Vec3d& vEigenValues,
                                              Vec3d& vMaxExtentFromCenter,
// Publishes the current object position information to the ArmarX observer.
void ReportObjectPositionInformationToObserver();

// Loads previously stored segmentations for sObjectName and fuses them.
void LoadAndFuseObjectSegmentations(std::string sObjectName);

// Moves all "new" point arrays into the "old" slots (see m_pAllNew*/m_pAllOld* members).
void SwapAllPointsArraysToOld();

// Selects the preferred hypothesis and returns its points via pPoints;
// bPreferCentralObject presumably biases toward the image center — TODO confirm.
CObjectHypothesis* SelectPreferredHypothesis(std::vector<CHypothesisPoint*>*& pPoints,
                                             const bool bPreferCentralObject = true);

// Converts an OLP file to PCL format; candidate points are included on request.
void convertFileOLPtoPCL(std::string filename, bool includeCandidatePoints = false);

// Restricts/marks the given bounding box region in the foreground image.
// NOTE(review): exact effect (crop vs. mask) not visible here — confirm in the .cpp.
void BoundingBoxInForegroundImage(CByteImage* image, int minX, int maxX, int minY, int maxY);
// Remote component names — presumably filled from the properties defined above
// (ImageProviderAdapterName etc.); confirm in onInitComponent.
std::string imageProviderName, pointcloudProviderName, robotStateProxyName, cameraFrameName;

// Proxy to the image provider component.
ImageProviderInterfacePrx imageProviderProxy;

// Proxy to the RGB-D point cloud provider component.
RGBDPointCloudProviderInterfacePrx pointcloudProviderProxy;

// True once the provider connections have been established.
bool connected = false;

// Listener proxy for reporting learning results/events.
ObjectLearningByPushingListenerPrx listener;

// Camera calibration (non-owning or owning? not visible here — check who deletes it).
CCalibration* calibration;

// Screenshot debugging: whether to save intermediate images, and where.
bool m_bMakeIntermediateScreenshots;
std::string m_sScreenshotPath;

// Working images for the current stereo tuple and visualization results.
CByteImage* colorImageLeft, *colorImageRight, *greyImageLeft, *greyImageRight, *colorImageLeftOld, *resultImageLeft, *resultImageRight, *tempResultImage;
CByteImage** cameraImages, ** resultImages;

// Most recent RGB-D point cloud.
pcl::PointCloud<pcl::PointXYZRGBA>::Ptr pointCloudPtr;

// Head-to-platform transformation (see also m_tHeadToPlatformTransformation below;
// two similarly named members exist — verify both are actually needed).
Transformation3d tHeadToPlatformTransformation;

// Counts processing iterations (e.g. for image numbering/screenshots — TODO confirm).
int iterationCounter;

// MSER descriptors and depth-map points for the current ("new") and previous
// ("old") frame, plus correspondences between them; raw-pointer vectors —
// ownership is managed elsewhere in the class (not visible in this chunk).
std::vector<CMSERDescriptor3D*>* m_pAllNewMSERs;
std::vector<CMSERDescriptor3D*>* m_pAllOldMSERs;
std::vector<CMSERDescriptor3D*>* m_pCorrespondingMSERs;
std::vector<CHypothesisPoint*>* m_pAllNewDepthMapPoints;
std::vector<CHypothesisPoint*>* m_pAllOldDepthMapPoints;

// Current and previous head-to-platform transformations.
Transformation3d m_tHeadToPlatformTransformation, m_tHeadToPlatformTransformationOld;

// Segmentation working images.
CByteImage* m_pSegmentedBackgroundImage;
CByteImage* m_pDisparityImage;

// Serializes access to the processing pipeline (scope of protection not
// determinable from this chunk — verify which members it guards).
std::mutex processingLock;