IntelRealSenseProvider.cpp
Go to the documentation of this file.
1/*
2 * This file is part of ArmarX.
3 *
4 * ArmarX is free software; you can redistribute it and/or modify
5 * it under the terms of the GNU General Public License version 2 as
6 * published by the Free Software Foundation.
7 *
8 * ArmarX is distributed in the hope that it will be useful, but
9 * WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 * GNU General Public License for more details.
12 *
13 * You should have received a copy of the GNU General Public License
14 * along with this program. If not, see <http://www.gnu.org/licenses/>.
15 *
16 * @package VisionX::ArmarXObjects::IntelRealSenseProvider
17 * @author Simon Thelen ( urday at student dot kit dot edu )
18 * @date 2018
19 * @copyright http://www.gnu.org/licenses/gpl-2.0.txt
20 * GNU General Public License
21 */
22
24
25#include <stdio.h>
26
27#include <filesystem>
28
32
35
36#include <Image/ImageProcessor.h>
37#include <librealsense2/h/rs_sensor.h>
38#include <librealsense2/rs.hpp>
39#include <librealsense2/rs_advanced_mode.hpp>
40#include <librealsense2/rsutil.h>
41
42using namespace armarx;
43using namespace visionx;
44
void
// NOTE(review): the signature line was lost in the extraction; judging from the
// reference index this is an init hook of the provider (likely
// onInitImageProvider(), which configures the image format) — confirm against
// the header before relying on this name.
{
    // Field of view (x, y in degrees or radians — cannot tell from here) for
    // the depth helper utilities.
    Eigen::Vector2f fov = getProperty<Eigen::Vector2f>("FieldOfView");
    depthUtils.setFieldOfView(fov(0), fov(1));
    // Advertise the configured resolution as an RGB image format.
    Eigen::Vector2i dimensions = getProperty<Eigen::Vector2i>("Resolution");
    setImageFormat(visionx::ImageDimension(dimensions(0), dimensions(1)),
                   visionx::eRgb,
                   visionx::eBayerPatternGr);
    // Allocate the RGB-encoded depth image buffer, the output point cloud,
    // and the filter that aligns depth frames to the color stream.
    depthImage.reset(visionx::tools::createByteImage(getImageFormat(), visionx::eRgb));
    pointcloud.reset(new pcl::PointCloud<CloudPointType>());

    alignFilter.reset(new rs2::align(RS2_STREAM_COLOR));
}
60
61void
65
66void
70
71void
73{
74 ARMARX_VERBOSE << "starting pipe";
75
76 //Create a configuration for configuring the pipeline with a non default profile
77 rs2::config cfg;
78
79 Eigen::Vector2i dimensions = getProperty<Eigen::Vector2i>("Resolution").getValue();
80 //Add desired streams to configuration
81 cfg.enable_stream(
82 RS2_STREAM_COLOR, dimensions(0), dimensions(1), RS2_FORMAT_RGB8, framesPerSecond);
83 cfg.enable_stream(
84 RS2_STREAM_DEPTH, dimensions(0), dimensions(1), RS2_FORMAT_Z16, framesPerSecond);
85 rs2::pipeline_profile profile = pipe.start(cfg);
86
87 rs2::device dev = profile.get_device();
88
89 rs2::depth_sensor ds = dev.query_sensors().front().as<rs2::depth_sensor>();
90
91 if (getProperty<std::string>("ConfigFile").isSet())
92 {
93 using namespace rs2;
94 context ctx;
95 auto devices = ctx.query_devices();
96 size_t device_count = devices.size();
97 if (!device_count)
98 {
99 throw armarx::LocalException("No device detected. Is it plugged in?\n");
100 }
101
102 // Get the first connected device
103 auto dev = devices[0];
104
105 // Enter advanced mode
106 if (dev.is<rs400::advanced_mode>())
107 {
108 // Get the advanced mode functionality
109 auto advanced_mode_dev = dev.as<rs400::advanced_mode>();
110
111 // Load and configure .json file to device
112 std::string path = getProperty<std::string>("ConfigFile");
113 std::string absPath;
114 auto configFileFound = armarx::ArmarXDataPath::getAbsolutePath(path, absPath);
115 ARMARX_CHECK_EXPRESSION(configFileFound) << path;
116 ARMARX_INFO << "Loading realsense config from " << path;
117 std::ifstream t(path);
118 std::string str((std::istreambuf_iterator<char>(t)), std::istreambuf_iterator<char>());
119 advanced_mode_dev.load_json(str);
120 }
121 else
122 {
123 throw armarx::LocalException() << "Current device doesn't support advanced-mode!\n";
124 }
125 }
126 else
127 {
128 auto preset = getProperty<rs2_rs400_visual_preset>("Preset").getValue();
129 ARMARX_INFO << "Setting preset to: " << rs2_rs400_visual_preset_to_string(preset);
130 ds.set_option(RS2_OPTION_VISUAL_PRESET, preset);
131 }
132
133 depthScale = ds.get_depth_scale();
134 ARMARX_VERBOSE << "pipe started";
135 ARMARX_VERBOSE << "depthScale: " << depthScale;
136 cloudFormat = getPointCloudFormat();
137 calibration = createStereoCalibration(profile);
138
140 "Baseline",
141 new Variant(std::to_string(calibration.calibrationRight.cameraParam.translation[0]) + ", " +
142 std::to_string(calibration.calibrationRight.cameraParam.translation[1]) + ", " +
143 std::to_string(calibration.calibrationRight.cameraParam.translation[2])));
144}
145
void
// NOTE(review): the signature line was lost in the extraction; the reference
// index lists onStopCapture() ("called when the point cloud provider
// capturing has been stopped") — confirm against the header.
{
    // Stop streaming from the realsense pipeline.
    pipe.stop();
}
151
bool
// NOTE(review): the signature line was lost in the extraction; the reference
// index lists "bool doCapture() override" (main capturing function).
{
    // Grab the next synchronized frameset (color + depth) from the pipeline.
    rs2::frameset data;
    int timeout = 1000; // milliseconds
    try
    {
        data = pipe.wait_for_frames(timeout);
    }
    catch (...)
    {
        // No frameset arrived within the timeout: report (rate limited) and
        // tell the caller that nothing was provided this cycle.
        ARMARX_INFO << deactivateSpam(30) << "Did not get frame until timeout of " << timeout
                    << " ms";
        return false;
    }

    // Use the device timestamp only if it lives in the global (host
    // synchronized) time domain; otherwise it is not comparable to host time.
    auto rstimestamp = data.get_timestamp();
    IceUtil::Time timestamp;
    if (data.get_frame_timestamp_domain() == RS2_TIMESTAMP_DOMAIN_GLOBAL_TIME)
    {
        timestamp = IceUtil::Time::milliSeconds(rstimestamp);
    }
    else
    {
        // NOTE(review): a line is missing here in the extracted source —
        // presumably the fallback assignment of 'timestamp' from host time
        // (the index references TimeUtil::GetTime()); confirm in the repo.
        ARMARX_WARNING << deactivateSpam(999999999)
                       << "Timestamp of realsense is not global - measuring time myself";
    }
    // NOTE(review): the logging macro opening this statement was lost in the
    // extraction (the index lists ARMARX_DEBUG).
        << deactivateSpam(1) << "frame age: "
        << (IceUtil::Time::now() - IceUtil::Time::milliSeconds(rstimestamp)).toMilliSeconds()
        << " ms";


    // Optionally reproject the depth frame into the color camera's viewpoint.
    if (getProperty<bool>("ApplyAlignmentFilter").getValue())
    {
        // TIMING_START(alignment);
        data = alignFilter->process(data);
        // TIMING_END(alignment);
    }

    auto depthFrame = data.get_depth_frame();

    // Spatial/temporal filtering is done in disparity space when the
    // disparity filter is enabled: transform in, filter, transform back.
    const bool applyDisparityFilter = getProperty<bool>("ApplyDisparityFilter").getValue();
    if (applyDisparityFilter)
    {
        // TIMING_START(disparityFilter);
        depthFrame = depth_to_disparity_filter.process(depthFrame);
        // TIMING_END(disparityFilter);
    }


    if (getProperty<bool>("ApplySpatialFilter").getValue())
    {
        // TIMING_START(filter);
        depthFrame = spat_filter.process(depthFrame);
        // TIMING_END(filter);
    }
    if (getProperty<bool>("ApplyTemporalFilter").getValue())
    {
        // TIMING_START(tempFilter);
        depthFrame = temporal_filter.process(depthFrame);
        // TIMING_END(tempFilter);
    }

    if (applyDisparityFilter)
    {
        // Transform filtered disparity back into depth.
        // TIMING_START(disparityFilterBack);
        depthFrame = disparity_to_depth_filter.process(depthFrame);
        // TIMING_END(disparityFilterBack);
    }

    rs2::frame color = data.get_color_frame();


    // Fall back to the infrared stream if no color frame is available.
    if (!color)
    {
        color = data.get_infrared_frame();
    }


    // Wrap the realsense color buffer in an IVT image header without copying;
    // the pixel memory stays owned by the realsense frame.
    const int cw = color.as<rs2::video_frame>().get_width();
    const int ch = color.as<rs2::video_frame>().get_height();
    auto colorBuffer = reinterpret_cast<const unsigned char*>(color.get_data());
    CByteImage rgbImage(cw, ch, CByteImage::eRGB24, true);
    rgbImage.pixels = const_cast<unsigned char*>(colorBuffer);


    // Convert the 16-bit depth buffer (device units * depthScale = meters)
    // into the RGB-encoded depth image, in millimeters.
    int index = 0;  // byte index into the 3-channel output image
    int index2 = 0; // element index into the 16-bit depth buffer
    auto resultDepthBuffer = depthImage->pixels;
    auto depthBuffer = reinterpret_cast<const unsigned short*>(depthFrame.get_data());
    const int dw = depthFrame.as<rs2::video_frame>().get_width();
    const int dh = depthFrame.as<rs2::video_frame>().get_height();

    for (int y = 0; y < dh; ++y)
    {
        for (int x = 0; x < dw; ++x)
        {
            int depthValue = 1000.f * depthBuffer[index2] * depthScale;
            // NOTE(review): the call opening this statement was lost in the
            // extraction — the index lists visionx::tools::depthValueToRGB(
            // depthInMM, r, g, b), which matches these arguments.
                resultDepthBuffer[index],
                resultDepthBuffer[index + 1],
                resultDepthBuffer[index + 2]);
            index += 3;
            index2++;
        }
    }

    // Publish the color image and the RGB-encoded depth image as a pair.
    CByteImage* images[2] = {&rgbImage, depthImage.get()};
    provideImages(images, timestamp);


    // Tell pointcloud object to map to this color frame
    pc.map_to(color);

    // Generate the pointcloud and texture mappings

    points = pc.calculate(depthFrame);


    //convert to pcl cloud
    {
        ARMARX_CHECK_EXPRESSION(pointcloud);
        pointcloud->width = dw;
        pointcloud->height = dh;
        pointcloud->is_dense = false;
        pointcloud->points.resize(points.size());
        auto ptr = points.get_vertices();
        auto texPtr = points.get_texture_coordinates();
        float maxDepth = getProperty<float>("MaxDepth").getValue() / 1000.f; // in m
        if (ptr && texPtr)
        {
            for (auto& p : pointcloud->points)
            {
                // Realsense vertices are in meters; the published cloud is mm.
                p.x = ptr->x * 1000.f; // m to mm
                p.y = ptr->y * 1000.f;
                // Points beyond MaxDepth are invalidated with NaN.
                p.z = ptr->z <= maxDepth ? ptr->z * 1000.f : std::nan("");
                if (texPtr)
                {
                    // Fetch the point's color via its normalized texture
                    // coordinates into the color image.
                    int x = texPtr->u * cw;
                    int y = texPtr->v * ch;
                    if (x >= 0 && x < cw && y >= 0 && y < ch)
                    {
                        auto colorIndex = int((x + y * cw) * 3);
                        p.r = colorBuffer[colorIndex];
                        p.g = colorBuffer[colorIndex + 1];
                        p.b = colorBuffer[colorIndex + 2];
                    }
                    else
                    {
                        // Texture coordinate outside the image: paint white.
                        p.r = p.g = p.b = 255;
                    }
                }
                ptr++;
                texPtr++;
            }
        }
    }

    pointcloud->header.stamp = timestamp.toMicroSeconds();
    providePointCloud(pointcloud);
    return true;
}
318
std::string
// NOTE(review): the signature line was lost in the extraction; the reference
// index lists "virtual std::string getDefaultName() const override".
{
    // Default component name used when none is configured.
    return "IntelRealSenseProvider";
}
324
331
332void
338
339void
346
347void
355
356void
362
visionx::StereoCalibration
// NOTE(review): the signature line was lost in the extraction; the reference
// index lists getStereoCalibration(const Ice::Current&) override.
{
    // Returns the calibration computed by createStereoCalibration() when
    // capturing started.
    return calibration;
}
368
bool
// NOTE(review): the signature line was lost in the extraction; the reference
// index lists getImagesAreUndistorted(const ::Ice::Current&) override.
{
    // Always reports the images as not undistorted.
    return false;
}
374
std::string
// NOTE(review): the signature line was lost in the extraction; the reference
// index lists getReferenceFrame(const Ice::Current&) override.
{
    // Reference frame name is configurable via the "frameName" property.
    return getProperty<std::string>("frameName");
}
380
381StereoCalibration
382IntelRealSenseProvider::createStereoCalibration(const rs2::pipeline_profile& selection) const
383{
384
385
386 auto getIntrinsics =
387 [](visionx::CameraParameters& params, rs2::video_stream_profile& streamProfile)
388 {
389 // auto resolution = std::make_pair(streamProfile.width(), streamProfile.height());
390 auto i = streamProfile.get_intrinsics();
391 // auto principal_point = std::make_pair(i.ppx, i.ppy);
392 // auto focal_length = std::make_pair(i.fx, i.fy);
393 // rs2_distortion model = i.model;
394
395
396 params.focalLength.resize(2, 0.0f);
397 params.principalPoint.resize(2, 0.0f);
398 params.distortion.resize(4, 0.0f);
399
400 //Eigen::Matrix3f id = Eigen::Matrix3f::Identity();
401 params.rotation = visionx::tools::convertEigenMatToVisionX(Eigen::Matrix3f::Identity());
402 params.translation = visionx::tools::convertEigenVecToVisionX(Eigen::Vector3f::Zero());
403
404 params.principalPoint[0] = i.ppx;
405 params.principalPoint[1] = i.ppy;
406 params.focalLength[0] = i.fx;
407 params.focalLength[1] = i.fy;
408 params.width = streamProfile.width();
409 params.height = streamProfile.height();
410 };
411 rs2::video_stream_profile rgb_stream =
412 selection.get_stream(RS2_STREAM_COLOR).as<rs2::video_stream_profile>();
413 rs2::video_stream_profile depth_stream =
414 selection.get_stream(RS2_STREAM_DEPTH).as<rs2::video_stream_profile>();
415
416 visionx::CameraParameters rgbParams;
417 visionx::CameraParameters depthParams;
418
419 getIntrinsics(rgbParams, rgb_stream);
420 getIntrinsics(depthParams, depth_stream);
421
422 StereoCalibration calibration;
423 {
424 auto depth_stream = selection.get_stream(RS2_STREAM_DEPTH);
425 auto color_stream = selection.get_stream(RS2_STREAM_COLOR);
426 rs2_extrinsics e = depth_stream.get_extrinsics_to(color_stream);
427 // Apply extrinsics to the origin
428 float origin[3]{0.f, 0.f, 0.f};
429 float target[3];
430 rs2_transform_point_to_point(target, &e, origin);
431
432
433 calibration.calibrationLeft = visionx::tools::createDefaultMonocularCalibration();
434 calibration.calibrationRight = visionx::tools::createDefaultMonocularCalibration();
435 calibration.calibrationLeft.cameraParam = rgbParams;
436 calibration.calibrationRight.cameraParam = depthParams;
437 calibration.rectificationHomographyLeft =
438 visionx::tools::convertEigenMatToVisionX(Eigen::Matrix3f::Zero());
439 calibration.rectificationHomographyRight =
440 visionx::tools::convertEigenMatToVisionX(Eigen::Matrix3f::Zero());
441 calibration.calibrationRight.cameraParam.translation = {target[0], target[1], target[2]};
442 }
443 return calibration;
444}
std::string timestamp()
uint8_t index
SpamFilterDataPtr deactivateSpam(SpamFilterDataPtr const &spamFilter, float deactivationDurationSec, const std::string &identifier, bool deactivate)
Definition Logging.cpp:75
constexpr T c
std::string str(const T &t)
static bool getAbsolutePath(const std::string &relativeFilename, std::string &storeAbsoluteFilename, const std::vector< std::string > &additionalSearchPaths={}, bool verbose=true)
std::string getConfigIdentifier()
Retrieve config identifier for this component as set in constructor.
Definition Component.cpp:90
Property< PropertyType > getProperty(const std::string &name)
virtual void onExitComponent()
Hook for subclass.
virtual void onDisconnectComponent()
Hook for subclass.
virtual void onConnectComponent()=0
Pure virtual hook for the subclass.
virtual void onInitComponent()=0
Pure virtual hook for the subclass.
void setMetaInfo(const std::string &id, const VariantBasePtr &value)
Allows to set meta information that can be queried live via Ice interface on the ArmarXManager.
static IceUtil::Time GetTime(TimeMode timeMode=TimeMode::VirtualTime)
Get the current time.
Definition TimeUtil.cpp:42
The Variant class is described here: Variants.
Definition Variant.h:224
std::atomic_bool captureEnabled
Indicates that capturing is enabled and running.
void onInitComponent() override
void onDisconnectComponent() override
Hook for subclass.
ImageFormatInfo getImageFormat(const Ice::Current &c=Ice::emptyCurrent) override
Returns the entire image format info struct via Ice.
void setImageFormat(ImageDimension imageDimension, ImageType imageType, BayerPatternType bayerPatternType=visionx::eBayerPatternRg)
Sets the image basic format data.
void provideImages(void **inputBuffers, const IceUtil::Time &imageTimestamp=IceUtil::Time())
send images raw.
void onConnectComponent() override
void setNumberImages(int numberImages)
Sets the number of images on each capture.
void onExitComponent() override
std::string getReferenceFrame(const Ice::Current &c=Ice::emptyCurrent) override
void onInitComponent() override
Pure virtual hook for the subclass.
visionx::StereoCalibration createStereoCalibration(const rs2::pipeline_profile &selection) const
bool doCapture() override
Main capturing function.
void onExitCapturingPointCloudProvider() override
This is called when the Component::onExitComponent() setup is called.
void onDisconnectComponent() override
Hook for subclass.
void onStopCapture()
This is called when the point cloud provider capturing has been stopped.
visionx::StereoCalibration getStereoCalibration(const Ice::Current &c=Ice::emptyCurrent) override
virtual armarx::PropertyDefinitionsPtr createPropertyDefinitions() override
void onStartCapture(float framesPerSecond)
This is called when the point cloud provider capturing has been started.
void onInitCapturingPointCloudProvider() override
This is called when the Component::onInitComponent() is called.
void onConnectComponent() override
Pure virtual hook for the subclass.
void onInitImageProvider() override
This is called when the Component::onInitComponent() is called.
virtual std::string getDefaultName() const override
bool getImagesAreUndistorted(const ::Ice::Current &c=Ice::emptyCurrent) override
MetaPointCloudFormatPtr getPointCloudFormat(const Ice::Current &c=Ice::emptyCurrent) override
Returns the point cloud format info struct via Ice.
void providePointCloud(PointCloudPtrT pointCloudPtr)
offer the new point cloud.
#define ARMARX_CHECK_EXPRESSION(expression)
This macro evaluates the expression and if it turns out to be false it will throw an ExpressionExcept...
#define ARMARX_INFO
The normal logging level.
Definition Logging.h:181
#define ARMARX_DEBUG
The logging level for output that is only interesting while debugging.
Definition Logging.h:184
#define ARMARX_WARNING
The logging level for unexpected behaviour, but not a serious problem.
Definition Logging.h:193
#define ARMARX_VERBOSE
The logging level for verbose information.
Definition Logging.h:187
This file offers overloads of toIce() and fromIce() functions for STL container types.
IceUtil::Handle< class PropertyDefinitionContainer > PropertyDefinitionsPtr
PropertyDefinitions smart pointer type.
void depthValueToRGB(unsigned int depthInMM, unsigned char &r, unsigned char &g, unsigned char &b, bool noiseResistant=false)
Definition ImageUtil.h:183
visionx::types::Vec convertEigenVecToVisionX(Eigen::VectorXf v)
CByteImage * createByteImage(const ImageFormatInfo &imageFormat, const ImageType imageType)
Creates a ByteImage for the destination type specified in the given imageProviderInfo.
visionx::types::Mat convertEigenMatToVisionX(Eigen::MatrixXf m)
MonocularCalibration createDefaultMonocularCalibration()
Creates a MonocularCalibration with all parameters set to a neutral value.
ArmarX headers.