IntelRealSenseProvider.cpp
/*
 * This file is part of ArmarX.
 *
 * ArmarX is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * ArmarX is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 *
 * @package    VisionX::ArmarXObjects::IntelRealSenseProvider
 * @author     Simon Thelen ( urday at student dot kit dot edu )
 * @date       2018
 * @copyright  http://www.gnu.org/licenses/gpl-2.0.txt
 *             GNU General Public License
 */

#include "IntelRealSenseProvider.h"


#include <stdio.h>
#include <filesystem>

#include <Image/ImageProcessor.h>

#include <librealsense2/rs.hpp>
#include <librealsense2/rs_advanced_mode.hpp>
#include <librealsense2/h/rs_sensor.h>
#include <librealsense2/rsutil.h>

#include <fstream>

// ArmarX / VisionX helpers used below (include paths assume the standard ArmarX source layout)
#include <ArmarXCore/core/time/TimeUtil.h>
#include <ArmarXCore/core/system/ArmarXDataPath.h>
#include <ArmarXCore/observers/variant/Variant.h>

#include <VisionX/tools/ImageUtil.h>
#include <VisionX/tools/TypeMapping.h>

using namespace armarx;
using namespace visionx;

void IntelRealSenseProvider::onInitImageProvider()
{
    Eigen::Vector2f fov = getProperty<Eigen::Vector2f>("FieldOfView");
    depthUtils.setFieldOfView(fov(0), fov(1));
    setNumberImages(2);
    Eigen::Vector2i dimensions = getProperty<Eigen::Vector2i>("Resolution");
    setImageFormat(visionx::ImageDimension(dimensions(0), dimensions(1)), visionx::eRgb, visionx::eBayerPatternGr);
    depthImage.reset(visionx::tools::createByteImage(getImageFormat(), visionx::eRgb));
    pointcloud.reset(new pcl::PointCloud<CloudPointType>());

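    // The align filter reprojects the depth stream into the color camera's frame,
    // so that depth and color pixels correspond one-to-one.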
    alignFilter.reset(new rs2::align(RS2_STREAM_COLOR));
}

void IntelRealSenseProvider::onInitCapturingPointCloudProvider()
{
}

void IntelRealSenseProvider::onExitCapturingPointCloudProvider()
{
}


void IntelRealSenseProvider::onStartCapture(float framesPerSecond)
{
    ARMARX_VERBOSE << "starting pipe";

    // Create a configuration for configuring the pipeline with a non-default profile
    rs2::config cfg;

    Eigen::Vector2i dimensions = getProperty<Eigen::Vector2i>("Resolution").getValue();
    // Add desired streams to configuration
    cfg.enable_stream(RS2_STREAM_COLOR, dimensions(0), dimensions(1), RS2_FORMAT_RGB8, framesPerSecond);
    cfg.enable_stream(RS2_STREAM_DEPTH, dimensions(0), dimensions(1), RS2_FORMAT_Z16, framesPerSecond);
    rs2::pipeline_profile profile = pipe.start(cfg);

    rs2::device dev = profile.get_device();

    rs2::depth_sensor ds = dev.query_sensors().front().as<rs2::depth_sensor>();

    if (getProperty<std::string>("ConfigFile").isSet())
    {
        using namespace rs2;
        context ctx;
        auto devices = ctx.query_devices();
        size_t device_count = devices.size();
        if (!device_count)
        {
            throw armarx::LocalException("No device detected. Is it plugged in?\n");
        }

        // Get the first connected device
        auto dev = devices[0];

        // Enter advanced mode
        if (dev.is<rs400::advanced_mode>())
        {
            // Get the advanced mode functionality
            auto advanced_mode_dev = dev.as<rs400::advanced_mode>();

            // Load the .json configuration file and apply it to the device
            std::string path = getProperty<std::string>("ConfigFile");
            std::string absPath;
            auto configFileFound = armarx::ArmarXDataPath::getAbsolutePath(path, absPath);
            ARMARX_CHECK_EXPRESSION(configFileFound) << path;
            ARMARX_INFO << "Loading realsense config from " << absPath;
            std::ifstream t(absPath);
            std::string str((std::istreambuf_iterator<char>(t)), std::istreambuf_iterator<char>());
            advanced_mode_dev.load_json(str);
        }
        else
        {
            throw armarx::LocalException() << "Current device doesn't support advanced mode!\n";
        }
    }
    else
    {
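        // No config file given: fall back to one of the factory-tuned visual presets
        // (e.g. high accuracy or high density) of the RS400/D400 depth engine.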
        auto preset = getProperty<rs2_rs400_visual_preset>("Preset").getValue();
        ARMARX_INFO << "Setting preset to: " << rs2_rs400_visual_preset_to_string(preset);
        ds.set_option(RS2_OPTION_VISUAL_PRESET, preset);
    }

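    // get_depth_scale() returns the factor that converts the raw 16-bit depth units
    // to meters (typically 0.001 for D400-series devices).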
    depthScale = ds.get_depth_scale();
    ARMARX_VERBOSE << "pipe started";
    ARMARX_VERBOSE << "depthScale: " << depthScale;
    cloudFormat = getPointCloudFormat();
    calibration = createStereoCalibration(profile);

    setMetaInfo("Baseline", new Variant(std::to_string(calibration.calibrationRight.cameraParam.translation[0])
                                        + ", " + std::to_string(calibration.calibrationRight.cameraParam.translation[1])
                                        + ", " + std::to_string(calibration.calibrationRight.cameraParam.translation[2])));

}


void IntelRealSenseProvider::onStopCapture()
{
    pipe.stop();
}


bool IntelRealSenseProvider::doCapture()
{
    rs2::frameset data;
    int timeout = 1000;
    try
    {
        data = pipe.wait_for_frames(timeout);
    }
    catch (...)
    {
        ARMARX_INFO << deactivateSpam(30) << "Did not get a frame within the timeout of " << timeout << " ms";
        return false;
    }

    auto rstimestamp = data.get_timestamp();
    IceUtil::Time timestamp;
    if (data.get_frame_timestamp_domain() == RS2_TIMESTAMP_DOMAIN_GLOBAL_TIME)
    {
        timestamp = IceUtil::Time::milliSeconds(rstimestamp);
    }
    else
    {
        timestamp = armarx::TimeUtil::GetTime();
        ARMARX_WARNING << deactivateSpam(999999999) << "Timestamp of realsense is not global - measuring time myself";
    }
    ARMARX_DEBUG << deactivateSpam(1) << "frame age: " << (IceUtil::Time::now() - IceUtil::Time::milliSeconds(rstimestamp)).toMilliSeconds() << " ms";


    if (getProperty<bool>("ApplyAlignmentFilter").getValue())
    {
        // TIMING_START(alignment);
        data = alignFilter->process(data);
        // TIMING_END(alignment);
    }

    auto depthFrame = data.get_depth_frame();

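    // The spatial and temporal filters work best in disparity space, so the depth frame
    // is converted to disparity first and converted back to depth after filtering.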
    const bool applyDisparityFilter = getProperty<bool>("ApplyDisparityFilter").getValue();
    if (applyDisparityFilter)
    {
        // TIMING_START(disparityFilter);
        depthFrame = depth_to_disparity_filter.process(depthFrame);
        // TIMING_END(disparityFilter);
    }

    if (getProperty<bool>("ApplySpatialFilter").getValue())
    {
        // TIMING_START(filter);
        depthFrame = spat_filter.process(depthFrame);
        // TIMING_END(filter);
    }
    if (getProperty<bool>("ApplyTemporalFilter").getValue())
    {
        // TIMING_START(tempFilter);
        depthFrame = temporal_filter.process(depthFrame);
        // TIMING_END(tempFilter);
    }

    if (applyDisparityFilter)
    {
        // TIMING_START(disparityFilterBack);
        depthFrame = disparity_to_depth_filter.process(depthFrame);
        // TIMING_END(disparityFilterBack);
    }

    rs2::frame color = data.get_color_frame();

    if (!color)
    {
        color = data.get_infrared_frame();
    }

    const int cw = color.as<rs2::video_frame>().get_width();
    const int ch = color.as<rs2::video_frame>().get_height();
    auto colorBuffer = reinterpret_cast<const unsigned char*>(color.get_data());
    CByteImage rgbImage(cw, ch, CByteImage::eRGB24, true);
    rgbImage.pixels = const_cast<unsigned char*>(colorBuffer);

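    // Convert the raw 16-bit depth values to millimetres (via depthScale) and encode each
    // value into the three RGB channels of the provided depth image.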
    int index = 0;
    int index2 = 0;
    auto resultDepthBuffer = depthImage->pixels;
    auto depthBuffer = reinterpret_cast<const unsigned short*>(depthFrame.get_data());
    const int dw = depthFrame.as<rs2::video_frame>().get_width();
    const int dh = depthFrame.as<rs2::video_frame>().get_height();

    for (int y = 0; y < dh; ++y)
    {
        for (int x = 0; x < dw; ++x)
        {
            int depthValue = 1000.f * depthBuffer[index2] * depthScale;
            visionx::tools::depthValueToRGB(depthValue, resultDepthBuffer[index], resultDepthBuffer[index + 1], resultDepthBuffer[index + 2]);
            index += 3;
            index2++;
        }
    }

    CByteImage* images[2] = {&rgbImage, depthImage.get()};
    provideImages(images, timestamp);


    // Tell pointcloud object to map to this color frame
    pc.map_to(color);

    // Generate the pointcloud and texture mappings
    points = pc.calculate(depthFrame);


    // convert to pcl cloud
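    // RealSense vertices are in meters; convert them to millimetres for the ArmarX cloud,
    // mark points beyond MaxDepth as invalid (NaN z) and look up each point's color via its
    // texture coordinates in the color frame.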
    {
        ARMARX_CHECK_EXPRESSION(pointcloud);
        pointcloud->width = dw;
        pointcloud->height = dh;
        pointcloud->is_dense = false;
        pointcloud->points.resize(points.size());
        auto ptr = points.get_vertices();
        auto texPtr = points.get_texture_coordinates();
        ARMARX_CHECK_EXPRESSION(texPtr);
        float maxDepth = getProperty<float>("MaxDepth").getValue() / 1000.f; // in m
        if (ptr && texPtr)
        {
            for (auto& p : pointcloud->points)
            {
                p.x = ptr->x * 1000.f; // m to mm
                p.y = ptr->y * 1000.f;
                p.z = ptr->z <= maxDepth ? ptr->z * 1000.f : std::nan("");
                if (texPtr)
                {
                    int x = texPtr->u * cw;
                    int y = texPtr->v * ch;
                    if (x >= 0 && x < cw && y >= 0 && y < ch)
                    {
                        auto colorIndex = int((x + y * cw) * 3);
                        p.r = colorBuffer[colorIndex];
                        p.g = colorBuffer[colorIndex + 1];
                        p.b = colorBuffer[colorIndex + 2];
                    }
                    else
                    {
                        p.r = p.g = p.b = 255;
                    }
                }
                ptr++;
                texPtr++;
            }
        }
    }

    pointcloud->header.stamp = timestamp.toMicroSeconds();
    providePointCloud(pointcloud);
    return true;
}


std::string IntelRealSenseProvider::getDefaultName() const
{
    return "IntelRealSenseProvider";
}

armarx::PropertyDefinitionsPtr IntelRealSenseProvider::createPropertyDefinitions()
{
    return armarx::PropertyDefinitionsPtr(new IntelRealSenseProviderPropertyDefinitions(
                                              getConfigIdentifier()));
}

void IntelRealSenseProvider::onInitComponent()
{
    ImageProvider::onInitComponent();
    CapturingPointCloudProvider::onInitComponent();
}

void IntelRealSenseProvider::onConnectComponent()
{
    ImageProvider::onConnectComponent();
    CapturingPointCloudProvider::onConnectComponent();
}

void IntelRealSenseProvider::onDisconnectComponent()
{
    captureEnabled = false;

    ImageProvider::onDisconnectComponent();
    CapturingPointCloudProvider::onDisconnectComponent();
}

void IntelRealSenseProvider::onExitComponent()
{
    ImageProvider::onExitComponent();
    CapturingPointCloudProvider::onExitComponent();
}

visionx::StereoCalibration IntelRealSenseProvider::getStereoCalibration(const Ice::Current&)
{
    return calibration;
}

bool IntelRealSenseProvider::getImagesAreUndistorted(const Ice::Current& c)
{
    return false;
}

std::string IntelRealSenseProvider::getReferenceFrame(const Ice::Current& c)
{
    return getProperty<std::string>("frameName");
}

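// Builds the visionx::StereoCalibration from the active pipeline profile: the color stream
// intrinsics go into the left calibration, the depth stream intrinsics into the right one,
// and the depth-to-color extrinsics provide the translation between the two sensors.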
StereoCalibration IntelRealSenseProvider::createStereoCalibration(const rs2::pipeline_profile& selection) const
{
    auto getIntrinsics = [](visionx::CameraParameters& params, rs2::video_stream_profile& streamProfile)
    {
        // auto resolution = std::make_pair(streamProfile.width(), streamProfile.height());
        auto i = streamProfile.get_intrinsics();
        // auto principal_point = std::make_pair(i.ppx, i.ppy);
        // auto focal_length = std::make_pair(i.fx, i.fy);
        // rs2_distortion model = i.model;

        params.focalLength.resize(2, 0.0f);
        params.principalPoint.resize(2, 0.0f);
        params.distortion.resize(4, 0.0f);

        // Rotation: identity, translation: zero (the intrinsics carry no extrinsic pose)
        params.rotation = visionx::tools::convertEigenMatToVisionX(Eigen::Matrix3f::Identity());
        params.translation = visionx::tools::convertEigenVecToVisionX(Eigen::Vector3f::Zero());

        params.principalPoint[0] = i.ppx;
        params.principalPoint[1] = i.ppy;
        params.focalLength[0] = i.fx;
        params.focalLength[1] = i.fy;
        params.width = streamProfile.width();
        params.height = streamProfile.height();
    };
    rs2::video_stream_profile rgb_stream = selection.get_stream(RS2_STREAM_COLOR)
                                           .as<rs2::video_stream_profile>();
    rs2::video_stream_profile depth_stream = selection.get_stream(RS2_STREAM_DEPTH)
                                             .as<rs2::video_stream_profile>();

    visionx::CameraParameters rgbParams;
    visionx::CameraParameters depthParams;

    getIntrinsics(rgbParams, rgb_stream);
    getIntrinsics(depthParams, depth_stream);

    StereoCalibration calibration;
    {
        auto depth_stream = selection.get_stream(RS2_STREAM_DEPTH);
        auto color_stream = selection.get_stream(RS2_STREAM_COLOR);
        rs2_extrinsics e = depth_stream.get_extrinsics_to(color_stream);
        // Apply extrinsics to the origin
        float origin[3] { 0.f, 0.f, 0.f };
        float target[3];
        rs2_transform_point_to_point(target, &e, origin);
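        // Transforming the origin of the depth frame into the color frame yields the
        // translation between the two sensors; it is stored below in the "right" (depth)
        // calibration and later reported as the baseline meta info.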

        calibration.calibrationLeft = visionx::tools::createDefaultMonocularCalibration();
        calibration.calibrationRight = visionx::tools::createDefaultMonocularCalibration();
        calibration.calibrationLeft.cameraParam = rgbParams;
        calibration.calibrationRight.cameraParam = depthParams;
        calibration.rectificationHomographyLeft = visionx::tools::convertEigenMatToVisionX(Eigen::Matrix3f::Zero());
        calibration.rectificationHomographyRight = visionx::tools::convertEigenMatToVisionX(Eigen::Matrix3f::Zero());
        calibration.calibrationRight.cameraParam.translation = {target[0], target[1], target[2]};
    }
    return calibration;
}