27#include <Eigen/Geometry>
29#include <pcl/filters/approximate_voxel_grid.h>
31#include <VisionX/interface/components/PointCloudAndImageAndCalibrationProviderInterface.h>
32#include <VisionX/interface/components/RGBDImageProvider.h>
35#include <Core/Model/Model.h>
37#include <Image/IplImageAdaptor.h>
38#include <pangolin/pangolin.h>
60 ARMARX_ERROR <<
"Number of images provided by " << imageProviderName <<
" ("
62 <<
") does not match the expected number of images (" << numImages <<
")";
66 std::lock_guard<std::mutex> lock(imagesMutex);
70 images[0] = imageRgb.get();
71 images[1] = imageDepth.get();
78 std::lock_guard<std::mutex> lock(imagesMutex);
82 imageRgb.reset(
nullptr);
83 imageDepth.reset(
nullptr);
85 imageProvider =
nullptr;
107 ImageProcessor::onInitComponent();
108 CapturingPointCloudProvider::onInitComponent();
113 framesSinceLastUpdate = framesBetweenUpdates;
115 resultPointCloud.reset(
new pcl::PointCloud<OutPointT>());
120 cofusionParams.timeDelta =
getProperty<int>(
"cofusion.TimeDelta").getValue();
121 cofusionParams.countThresh =
getProperty<int>(
"cofusion.CountThresh").getValue();
128 cofusionParams.initConfidenceGlobal =
130 cofusionParams.initConfidenceObject =
137 cofusionParams.frameToFrameRGB =
getProperty<bool>(
"cofusion.FrameToFrameRGB").getValue();
139 cofusionParams.matchingType =
141 cofusionParams.exportDirectory =
143 cofusionParams.exportSegmentationResults =
146#if USE_MASKFUSION == 1
147 cofusionParams.usePrecomputedMasksOnly =
149 cofusionParams.frameQueueSize =
getProperty<int>(
"cofusion.FrameQueueSize").getValue();
158 ImageProcessor::onConnectComponent();
159 CapturingPointCloudProvider::onConnectComponent();
164 visionx::StereoCalibrationProviderInterfacePrx calibrationProvider =
165 visionx::StereoCalibrationProviderInterfacePrx::checkedCast(imageProvider);
167 if (calibrationProvider)
169 ARMARX_LOG <<
"reading intrinsics from calibration provider";
170 visionx::StereoCalibration calibration = calibrationProvider->getStereoCalibration();
172 cofusionParams.fx = calibration.calibrationRight.cameraParam.focalLength[0];
173 cofusionParams.fy = calibration.calibrationRight.cameraParam.focalLength[1];
174 cofusionParams.cx = calibration.calibrationRight.cameraParam.principalPoint[0];
175 cofusionParams.cy = calibration.calibrationRight.cameraParam.principalPoint[1];
177 referenceFrameName = calibrationProvider->getReferenceFrame();
181 cofusionParams.fx = 525.0;
182 cofusionParams.fy = 525.0;
183 cofusionParams.cx = 320;
184 cofusionParams.cy = 240;
185 referenceFrameName =
"unkown";
186 ARMARX_WARNING <<
"unable to get camera intrinsics. using dummy values";
190 visionx::ImageFormatInfo imageFormat = imageProvider->getImageFormat();
192 cofusionParams.width = imageFormat.dimension.width;
193 cofusionParams.height = imageFormat.dimension.height;
198 if (stopwatchReportTask)
200 stopwatchReportTask->stop();
204 this, &CoFusionProcessor::reportStopwatchStats, 10000);
205 stopwatchReportTask->start();
214 ImageProcessor::onDisconnectComponent();
215 CapturingPointCloudProvider::onDisconnectComponent();
221 ImageProcessor::onExitComponent();
222 CapturingPointCloudProvider::onExitComponent();
243 ARMARX_WARNING <<
"Timeout or error in waitForImages(" << imageProviderName <<
")";
249 std::lock_guard<std::mutex> lock(imagesMutex);
251 armarx::MetaInfoSizeBasePtr metaInfo;
253 int numImages =
getImages(imageProviderName, images, metaInfo);
256 ARMARX_WARNING <<
"Did not receive correct number of images! (Expected 2, but was "
257 << numImages <<
".) Aborting!";
263 initializeCoFusion();
276 stopwatch.start(
"CoFusion::ProcessFrame");
278#if USE_MASKFUSION == 1
279 std::shared_ptr<FrameData> framePtr = std::make_shared<FrameData>(frame);
281 cofusion->processFrame(framePtr);
283 cofusion->processFrame(frame);
286 stopwatch.stop(
"CoFusion::ProcessFrame");
289 framesSinceLastUpdate++;
290 if (framesSinceLastUpdate >= framesBetweenUpdates)
293 framesSinceLastUpdate = 0;
300CoFusionProcessor::initializeCoFusion()
302 ARMARX_INFO <<
"Initializing CoFusion (resolution: " << cofusionParams.
width <<
"x"
303 << cofusionParams.
height <<
")";
305 pangolin::Params windowParams;
306 windowParams.Set(
"SAMPLE_BUFFERS", 0);
307 windowParams.Set(
"SAMPLES", 0);
309 pangolin::CreateWindowAndBind(
310 "Main", cofusionParams.
width, cofusionParams.
height, windowParams);
313 GLenum glewInitError = glewInit();
316 ARMARX_ERROR <<
"glewInit() failed with error: " << glewInitError;
319 int prog = glCreateProgram();
320 ARMARX_INFO <<
"Created gl program (" << prog <<
")";
327 cofusion->addNewModelListener([
this](std::shared_ptr<Model> m) {
onNewModel(m); });
328 cofusion->addInactiveModelListener([
this](std::shared_ptr<Model> m) {
onInactiveModel(m); });
330#if USE_MASKFUSION == 1
336CoFusionProcessor::constructFrameData()
338 bool copyData =
false;
339 cv::Mat matRgb = cv::cvarrToMat(IplImageAdaptor::Adapt(imageRgb.get()), copyData);
340 cv::Mat matDepth = cv::cvarrToMat(IplImageAdaptor::Adapt(imageDepth.get()), copyData);
349 int rows = matDepth.rows;
350 int cols = matDepth.cols;
354 cv::Mat matDepthCofusion(rows, cols, CV_32FC1);
356 for (
int x = 0; x < rows; ++x)
358 for (
int y = 0; y < cols; ++y)
380 cv::Vec3b depthPixel = matDepth.at<cv::Vec3b>(x, y);
383 value += depthPixel[0];
384 value += depthPixel[1] << 8;
385 value += depthPixel[2] << 16;
400 matDepthCofusion.at<
float>(x, y) = depth;
404 frame.depth = matDepthCofusion;
410CoFusionProcessor::updateSurfelMaps()
412 std::list<std::shared_ptr<Model>>& models = cofusion->getModels();
420 std::lock_guard<std::mutex> lock(surfelMapMutex);
422 ARMARX_INFO <<
"Downloading surfel maps of " << models.size() <<
" models ...";
424 for (std::shared_ptr<Model>
const& model : models)
426 unsigned int id = model->getID();
432 stopwatch.start(
"Model::downloadMap()");
433 surfelMaps[id] = model->downloadMap();
436 Model::SurfelMap& surfelMap = surfelMaps[id];
437 ARMARX_VERBOSE <<
"SurfelMap contains " << surfelMap.numPoints <<
" points "
438 <<
"(container size: " << surfelMap.data->size() <<
")";
440 stopwatch.stop(
"Model::downloadMap()");
444 newFrameAvailable =
true;
456 <<
"CoFusionProcessor does currently not support startCaptureForNumFrames(numFrames)";
467 if (newFrameAvailable)
469 newFrameAvailable =
false;
470 resultPointCloud.reset();
471 reconstructResultPointCloud();
482CoFusionProcessor::reconstructResultPointCloud()
484 std::lock_guard<std::mutex> lock(surfelMapMutex);
486 resultPointCloud.reset(
new pcl::PointCloud<OutPointT>());
489 ARMARX_INFO <<
"Reconstructing point cloud from " << surfelMaps.size() <<
" SurfelMaps.";
490 stopwatch.start(
"PointCloud construction");
492 for (
auto& it : surfelMaps)
494 unsigned int id = it.first;
495 Model::SurfelMap& surfelMap = it.second;
498 ARMARX_INFO <<
"SurfelMap of model " <<
id <<
" contains " << surfelMap.numPoints
500 <<
"(container size: " << surfelMap.data->size() <<
")";
511 resultPointCloud->reserve(resultPointCloud->size() + surfelMap.numPoints);
513 for (
unsigned int i = 0; i < surfelMap.numPoints; ++i)
515 Eigen::Vector4f& position4f = (*surfelMap.data)[i * 3 + 0];
516 position4f.head<3>() *= 1000;
518 Eigen::Vector4f& color4f = (*surfelMap.data)[i * 3 + 1];
521 uint32_t color24b =
static_cast<uint32_t
>(color4f(0));
522 uchar r = (color24b >> 16) & 0xFF;
523 uchar g = (color24b >> 8) & 0xFF;
524 uchar b = (color24b >> 0) & 0xFF;
531 pcl::PointXYZRGBL point;
532 point.x = position4f(0);
533 point.y = position4f(1);
534 point.z = position4f(2);
539 resultPointCloud->push_back(point);
545 if (downsamplingLeafSize > 0.f)
547 pcl::PointCloud<OutPointT>::Ptr filtered(
new pcl::PointCloud<OutPointT>());
548 std::size_t sizeBefore = resultPointCloud->size();
550 pcl::ApproximateVoxelGrid<OutPointT> grid;
551 grid.setLeafSize(downsamplingLeafSize, downsamplingLeafSize, downsamplingLeafSize);
552 grid.setInputCloud(resultPointCloud);
553 grid.filter(*filtered);
554 resultPointCloud.swap(filtered);
556 std::size_t sizeAfter = resultPointCloud->size();
558 ARMARX_INFO <<
"Downsampled to " << sizeAfter <<
" points (before: " << sizeBefore <<
")";
561 stopwatch.stop(
"PointCloud construction");
564visionx::MetaPointCloudFormatPtr
567 visionx::MetaPointCloudFormatPtr info =
new visionx::MetaPointCloudFormat();
568 info->capacity = 50 * 640 * 480 *
sizeof(visionx::ColoredLabeledPoint3D);
569 info->size = info->capacity;
570 info->type = visionx::eColoredLabeledPoints;
575CoFusionProcessor::reportStopwatchStats()
SpamFilterDataPtr deactivateSpam(SpamFilterDataPtr const &spamFilter, float deactivationDurationSec, const std::string &identifier, bool deactivate)
std::unique_ptr< CoFusion > makeCoFusion() const
virtual void onInitComponent() override
Pure virtual hook for the subclass.
virtual void onConnectImageProcessor() override
Allocates imageRgb and imageDepth and sets images[].
virtual bool doCapture() override
Main capturing function.
virtual void onExitCapturingPointCloudProvider() override
This is called when the Component::onExitComponent() setup is called.
virtual void onDisconnectComponent() override
Hook for subclass.
virtual void onExitImageProcessor() override
Exit the ImageProcessor component.
void onNewModel(std::shared_ptr< Model > model)
virtual void onStartCapture(float framesPerSecond) override
This is called when the point cloud provider capturing has been started.
virtual visionx::MetaPointCloudFormatPtr getDefaultPointCloudFormat() override
default point cloud format used to initialize shared memory
armarx::PropertyDefinitionsPtr createPropertyDefinitions() override
virtual void process() override
Performs the image processing.
virtual void onInitCapturingPointCloudProvider() override
This is called when the Component::onInitComponent() is called.
virtual void onInitImageProcessor() override
Setup the vision component.
virtual void onConnectComponent() override
Pure virtual hook for the subclass.
virtual void onStopCapture() override
This is called when the point cloud provider capturing has been stopped.
virtual void onDisconnectImageProcessor() override
Frees imageRgb and imageDepth.
virtual void onExitComponent() override
Hook for subclass.
virtual void startCaptureForNumFrames(Ice::Int numFrames, const Ice::Current &c=Ice::emptyCurrent) override
void onInactiveModel(std::shared_ptr< Model > model)
std::string getConfigIdentifier()
Retrieve config identifier for this component as set in constructor.
Property< PropertyType > getProperty(const std::string &name)
Ice::ObjectPrx getProxy(long timeoutMs=0, bool waitForScheduler=true) const
Returns the proxy of this object (optionally it waits for the proxy)
The periodic task executes one thread method repeatedly using the time period specified in the constructor.
Property< PropertyType > getProperty(const std::string &name)
Property creation and retrieval.
void setPointCloudSyncMode(ImageSyncMode pointCloudSyncMode)
Sets the point cloud synchronization mode.
void usingImageProvider(std::string name)
Registers a delayed topic subscription and a delayed provider proxy retrieval, which will all be available once the component is connected.
bool waitForImages(int milliseconds=1000)
Wait for new images.
ImageProviderInfo getImageProvider(std::string name, ImageType destinationImageType=eRgb, bool waitForProxy=false)
Select an ImageProvider.
int getImages(CByteImage **ppImages)
Poll images from provider.
int numberImages
Number of images.
void providePointCloud(PointCloudPtrT pointCloudPtr)
offer the new point cloud.
#define ARMARX_INFO
The normal logging level.
#define ARMARX_ERROR
The logging level for unexpected behaviour, that must be fixed.
#define ARMARX_WARNING
The logging level for unexpected behaviour, but not a serious problem.
#define ARMARX_VERBOSE
The logging level for verbose information.
T getValue(nlohmann::json &userConfig, nlohmann::json &defaultConfig, const std::string &entryName)
This file offers overloads of toIce() and fromIce() functions for STL container types.
IceUtil::Handle< class PropertyDefinitionContainer > PropertyDefinitionsPtr
PropertyDefinitions smart pointer type.
std::shared_ptr< Value > value()