#include <VirtualRobot/Robot.h>

#include <RobotAPI/interface/visualization/DebugDrawerInterface.h>

#include <VisionX/components/image_processor/OpenPoseEstimation/ImageKeypointBuffer.h>
#include <VisionX/components/image_processor/OpenPoseEstimation/KeypointManager.h>
#include <VisionX/components/image_processor/OpenPoseEstimation/Util.h>
#include <VisionX/interface/components/OpenPoseEstimationInterface.h>
#include <VisionX/interface/components/PointCloudAndImageAndCalibrationProviderInterface.h>

#include <Calibration/Calibration.h>
#include <Calibration/StereoCalibration.h>

#include <openpose/core/headers.hpp>
#include <openpose/pose/headers.hpp>
#include <openpose/utilities/headers.hpp>
76 "OpenPoseEstimation2D");
78 "OpenPoseEstimation3D");
82 "Radius of the circle around a target pixel in the depth image, which is used to "
83 "calculate the median around that pixel.");
86 "Whether to use distortion parameters when transforming "
87 "image coordinates into world coordinates");
90 "Name of the ImageProviderComponent");
92 "DebugDrawerTopicName",
94 "Name of the debug drawer topic that should be used");
99 "Sets the mode of this instance of OpenPoseEstimation. There are currently three "
101 "'FromDepthImage', which lets the image processor expect a rgb-image and a "
103 "'FromStereoImage', which lets the image processor expect two images as a stereo "
105 "which will be used for calculating the 3D-values and "
106 "'FromTopic', which lets the image processor listen on a topic which provides "
107 "openpose-keypoints.")
108 .setMatchRegex(
"FromDepthImage|FromStereoImage|FromTopic")
116 "Multiples of 16. If it is increased, the accuracy potentially increases. If it is "
117 "decreased, the speed increases. For maximum speed-accuracy balance, it should "
119 "closest aspect ratio possible to the images or videos to be processed.\n Using "
121 "any of the dimensions, OP will choose the optimal aspect ratio depending on the "
123 "input value.\n E.g. the default `-1x368` is equivalent to `656x368` in 16:9 "
125 "e.g. full HD (1920x1080) and HD (1280x720) resolutions.");
128 "The image resolution (display and output). Use "
129 "\"-1x-1\" to force the program to use the"
130 " input image resolution.");
133 "Scale gap between scales. No effect unless "
134 "scale_number > 1. Initial scale is always 1. "
135 "If you want to change the initial scale, "
136 "you actually want to multiply the "
137 "`net_resolution` by your desired initial scale.");
142 "Model to be used. E.g. `BODY_25` (25 keypoints, best model), `COCO` (18 "
143 "keypoints), `MPI` (15 keypoints, ~10% faster), "
144 "`MPI_4_layers` (15 keypoints, even faster but less accurate).");
147 "Folder path (absolute or relative) where the "
148 "models (pose, face, ...) are located.");
152 "Only estimated keypoints whose score confidences are "
153 "higher than this threshold will be"
154 " rendered.\n Generally, a high threshold (> 0.5) will "
155 "only render very clear body parts;"
156 " while small thresholds (~0.1) will also output guessed "
157 "and occluded keypoints, but also"
158 " more false positives (i.e. wrong detections).");
161 "Pixels with a distance higher than this value are masked "
162 "out. Only for depth camera mode.",
165 "MaxDepthDifference",
167 "Allowed difference of depth value for one keypoint to median of all keypoints.",
170 "MaskBrightnessIncrease",
172 "All masked pixels will be increased by this brightness for visualization. If "
173 "value is <0, the original mask is shown.",
178 "CameraNodeName",
"DepthCamera",
"Name of the robot node for the input camera");
180 "FilterWithWorkspacePolygonActive",
182 "Whether to use the WorkspacePolygon for filtering keypoints.");
185 "-5000,-5000;5000,-5000;5000,5000;-5000,5000;-5000,-5000",
186 "A string that describes a polygon which represents the workspace of the robot.\n"
187 "Every keypoint outside of this polygon will be masked out.\n"
188 "Every point is separated by a ';' (semicolon) and every point is described as "
189 "'x-value, y-value' (comma-separated).\n"
190 "The last point must be identical to the first point.");
193 "Minimal amount of keypoints per person. Detected persons "
194 "with less valid keypoints will be discarded.");
197 "If true, only one person is reported in the 3DTopic. The "
198 "reported person is the nearest person to the robot.");
201 "If true, poseEstimation-tasks are started after starting "
202 "the component. If false, the component idles.");
207 "Only for: 'Mode'='FromTopic'!! The dimensions of the image whose keypoints are "
208 "reported over the topic.");
225 public OpenPoseEstimationInterface,
237 return "OpenPoseEstimation";
240 void start(
const Ice::Current& = Ice::emptyCurrent)
override;
241 void stop(
const Ice::Current& = Ice::emptyCurrent)
override;
250 const CByteImage& maskedInputImage,
252 CByteImage& resultImage,
253 op::PoseModel poseModel,
254 float renderThreshold,
255 int brightnessIncrease = 100);
257 const CByteImage& maskedInputImage,
259 CByteImage& resultImage,
260 op::PoseModel poseModel,
261 float renderThreshold,
262 int brightnessIncrease = 100);
264 const CByteImage& maskedInputImage,
265 int brightnessIncrease,
266 const CByteImage& inputImage);
277 const CByteImage& rgbImage)
const;
290 int windowSize = 10)
const;
293 void onMessage(
const Texting::TextMessage& text,
294 const Ice::Current& = Ice::emptyCurrent)
override;
299 std::string providerName;
301 bool useDistortionParameters;
302 bool reportOnlyNearestPerson;
303 bool filterWithWorkspacePolygonActive;
304 std::string cameraNodeName;
306 int minimalValidKeypoints;
307 float renderThreshold;
315 std::mutex keypointManagerMutex;
318 CByteImage** imageBuffer;
319 CByteImage *rgbImageBuffer, *maskedrgbImageBuffer;
320 std::mutex rgbImageBufferMutex;
321 CByteImage* depthImageBuffer;
322 std::mutex depthImageBufferMutex;
323 CByteImage** openPoseResultImage;
324 std::mutex resultImageBufferMutex;
326 armarx::MetaInfoSizeBasePtr imageMetaInfo;
327 unsigned int numImages;
331 visionx::ImageDimension incomingKeypointDimensions;
334 const CCalibration* calibration =
nullptr;
335 CStereoCalibration* stereoCalibration =
nullptr;
339 std::string layerName;
340 int layerCounter = 0;
343 OpenPose2DListenerPrx listener2DPrx;
344 OpenPose3DListenerPrx listener3DPrx;
358 std::shared_ptr<op::ScaleAndSizeExtractor> scaleAndSizeExtractor;
359 std::shared_ptr<op::CvMatToOpInput> cvMatToOpInput;
360 std::shared_ptr<op::CvMatToOpOutput> cvMatToOpOutput;
361 std::shared_ptr<op::PoseExtractorCaffe> poseExtractorCaffe;
362 std::shared_ptr<op::OpOutputToCvMat> opOutputToCvMat;
363 void setupOpenPoseEnvironment();
365 op::PoseModel poseModel;
368 std::uint64_t timeoutCounter2d{0};
369 std::uint64_t readErrorCounter2d{0};
370 std::uint64_t sucessCounter2d{0};
Provides a ready-to-use ArViz client arviz as member variable.
OpenPoseEstimationPropertyDefinitions(std::string prefix)
Brief description of class OpenPoseEstimation.
void start3DPoseEstimation(const Ice::Current &=Ice::emptyCurrent) override
void calculate2DFromOpenPose()
void onConnectImageProcessor() override
Implement this method in the ImageProcessor in order to execute parts when the component is fully initialized.
void stop(const Ice::Current &=Ice::emptyCurrent) override
void onExitImageProcessor() override
Exit the ImageProcessor component.
DrawColor24Bit getDominantColorOfPatch(const CByteImage &image, const Vec2d &point, int windowSize=10) const
void start(const Ice::Current &=Ice::emptyCurrent) override
void stop3DPoseEstimation(const Ice::Current &=Ice::emptyCurrent) override
void calculate3DFromStereoImage(KeypointManagerPtr manager)
static void VisualizeTransparentImageMask(CByteImage &resultImage, const CByteImage &maskedInputImage, int brightnessIncrease, const CByteImage &inputImage)
int getMedianDepthFromImage(int x, int y, int radius) const
armarx::PropertyDefinitionsPtr createPropertyDefinitions() override
void visualize3DKeypoints()
void process() override
Process the vision component.
KeypointManagerPtr generate2DKeypoints(PoseKeypoints &keypoints, const CByteImage &rgbImage) const
void calculate2DFromTopic()
static void Render2DResultImage(const CByteImage &inputImage, const CByteImage &maskedInputImage, KeypointManagerPtr keypointManager, CByteImage &resultImage, op::PoseModel poseModel, float renderThreshold, int brightnessIncrease=100)
void onInitImageProcessor() override
Setup the vision component.
void calculate3DFromDepthImage(KeypointManagerPtr manager)
void maskOutBasedOnDepth(CByteImage &image, int maxDepth)
void onMessage(const Texting::TextMessage &text, const Ice::Current &=Ice::emptyCurrent) override
void onDisconnectImageProcessor() override
Implement this method in the ImageProcessor in order to execute parts when the component loses network connectivity.
op::Array< float > PoseKeypoints
void filterKeypointsBasedOnWorkspacePolygon(KeypointObjectPtr object)
std::string getDefaultName() const override
std::string prefix
Prefix of the properties such as namespace, domain, component name, etc.
PropertyDefinition< PropertyType > & defineOptionalProperty(const std::string &name, PropertyType defaultValue, const std::string &description="", PropertyDefinitionBase::PropertyConstness constness=PropertyDefinitionBase::eConstant)
PropertyDefinition< PropertyType > & defineRequiredProperty(const std::string &name, const std::string &description="", PropertyDefinitionBase::PropertyConstness constness=PropertyDefinitionBase::eConstant)
IceUtil::Handle< RunningTask< T > > pointer_type
Shared pointer type for convenience.
ImageProcessorPropertyDefinitions(std::string prefix)
The ImageProcessor class provides an interface for access to ImageProviders via Ice and shared memory...
std::shared_ptr< class Robot > RobotPtr
const std::string ModelId
This file offers overloads of toIce() and fromIce() functions for STL container types.
IceUtil::Handle< class PropertyDefinitionContainer > PropertyDefinitionsPtr
PropertyDefinitions smart pointer type.
std::shared_ptr< ImageKeypointBuffer > ImageKeypointBufferPtr
std::shared_ptr< KeypointManager > KeypointManagerPtr
::IceInternal::ProxyHandle<::IceProxy::armarx::RobotStateComponentInterface > RobotStateComponentInterfacePrx
std::shared_ptr< KeypointObject > KeypointObjectPtr
::IceInternal::ProxyHandle<::IceProxy::armarx::DebugDrawerInterface > DebugDrawerInterfacePrx