OpticalFlow.cpp
Go to the documentation of this file.
1/*
2 * This file is part of ArmarX.
3 *
4 * ArmarX is free software; you can redistribute it and/or modify
5 * it under the terms of the GNU General Public License version 2 as
6 * published by the Free Software Foundation.
7 *
8 * ArmarX is distributed in the hope that it will be useful, but
9 * WITHOUT ANY WARRANTY; without even the implied warranty of
10 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 * GNU General Public License for more details.
12 *
13 * You should have received a copy of the GNU General Public License
14 * along with this program. If not, see <http://www.gnu.org/licenses/>.
15 *
16 * @package VisionX::ArmarXObjects::OpticalFlow
17 * @author David Sippel ( uddoe at student dot kit dot edu )
18 * @date 2016
19 * @copyright http://www.gnu.org/licenses/gpl-2.0.txt
20 * GNU General Public License
21 */
22
23#include "OpticalFlow.h"
24
26
27using namespace armarx;
28
35
36void
38{
39 providerName = getProperty<std::string>("providerName").getValue();
40 usingImageProvider(providerName);
41
42 offeringTopic(getProperty<std::string>("OpticalFlowTopicName").getValue());
43 offeringTopic(getProperty<std::string>("DebugObserverName").getValue());
44
45 frameRate = getProperty<float>("Framerate").getValue();
46
47 method = getProperty<OPTICAL_FLOW_METHOD>("Method").getValue();
48
49 chessboardWidth = getProperty<int>("Chessboard.Width").getValue();
50 chessboardHeight = getProperty<int>("Chessboard.Height").getValue();
51
52 usingTopic(getProperty<std::string>("CalibrationUpdateTopicName").getValue());
53}
54
55void
57{
59 getProperty<std::string>("DebugObserverName").getValue());
60
61
62 visionx::ImageProviderInfo imageProviderInfo = getImageProvider(providerName);
63 imageDimension = imageProviderInfo.imageFormat.dimension;
64
65 imageProviderPrx = getProxy<visionx::ImageProviderInterfacePrx>(providerName);
66
67 cameraImages = new CByteImage*[2];
68 cameraImages[0] = visionx::tools::createByteImage(imageProviderInfo);
69 cameraImages[1] = visionx::tools::createByteImage(imageProviderInfo);
70
71
73 getProperty<std::string>("OpticalFlowTopicName").getValue());
74
75 enableResultImages(1, imageDimension, visionx::ImageType::eRgb);
76
77
78 visionx::StereoCalibrationProviderInterfacePrx calibrationProvider =
79 visionx::StereoCalibrationProviderInterfacePrx::checkedCast(imageProviderPrx);
80
81 if (calibrationProvider)
82 {
83 reportStereoCalibrationChanged(calibrationProvider->getStereoCalibration(), false, "");
84 }
85 else
86 {
87 ARMARX_WARNING << "unable to obtain calibration parameters. using default values";
88
89 unitFactorX = (1.0 / 640.0) * (59.7);
90 unitFactorY = (1.0 / 480.0) * (44.775);
91 }
92}
93
94void
98
99void
101{
102 /******************
103 * Get gray image
104 ******************/
105
106 std::unique_lock lock(imageMutex);
107
108 if (!waitForImages(getProperty<std::string>("providerName").getValue(), 1000))
109 {
110 ARMARX_WARNING << "Timeout while waiting for camera images (>1000ms)";
111 return;
112 }
113
114 armarx::MetaInfoSizeBasePtr info;
115 int numImages =
116 getImages(getProperty<std::string>("providerName").getValue(), cameraImages, info);
117
118 if (numImages == 0)
119 {
120 ARMARX_WARNING << "Didn't receive one image! Aborting!";
121 return;
122 }
123
124 IplImage* ppIplImages[1] = {IplImageAdaptor::Adapt(cameraImages[1])};
125
126 // convert to gray
127 cv::Mat grayImage;
128 cv::Mat resultImage = cv::cvarrToMat(ppIplImages[0]);
129 cv::cvtColor(resultImage, grayImage, cv::COLOR_BGR2GRAY);
130
131 /******************
132 * Compute optical flow
133 ******************/
134
135 std::vector<cv::Point2f> features;
136
137 timeDiff = (info->timeProvided - previousTime) / 1000000.0;
138
139 if ((timeDiff > 1.0) || !previousFeatures.size())
140 {
141 if (timeDiff > 1.0)
142 {
143 ARMARX_INFO << "time delta is larger than 1 sec. discarding previous features.";
144 }
145
146 if (!previousFeatures.size())
147 {
148 ARMARX_INFO << "no feature found on previous image. discarding previous image.";
149 }
150
151 resultX = 0.;
152 resultY = 0.;
153 }
154 else
155 {
156 ComputeOpticalFlow(resultImage, grayImage);
157
158
159 // ARMARX_LOG << deactivateSpam(5) << "avgDiffX: " << resultX << " avgDiffY: " << resultY << " time " << timeDiff;
160 }
161
162
163 prx->reportNewOpticalFlow(resultX, resultY, timeDiff, previousTime);
164
165 StringVariantBaseMap debugValues;
166 debugValues["x"] = new Variant(resultX);
167 debugValues["y"] = new Variant(resultY);
168 debugValues["delta_t"] = new Variant((timeDiff));
169
170
171 debugObserver->setDebugChannel(getName(), debugValues);
172
173
174 /******************
175 * Get Next Features
176 ******************/
177
178 if (method == Chessboard)
179 {
180 cv::Size size = cv::Size(chessboardWidth, chessboardHeight);
181
182 // + cv::CALIB_CB_FAST_CHECK)
183 bool foundCorners = cv::findChessboardCorners(
184 grayImage, size, features, cv::CALIB_CB_ADAPTIVE_THRESH + cv::CALIB_CB_NORMALIZE_IMAGE);
185
186 cv::drawChessboardCorners(resultImage, size, features, foundCorners);
187 if (!foundCorners)
188 {
189 ARMARX_WARNING << "Chessboard NOT found!";
190 }
191 }
192
193 else if (method == Feature)
194 {
195 int maxCorners = 100;
196 double qualityLevel = 0.01;
197 double minDistance = 10.0;
198 cv::Mat mask1;
199 int blockSize = 3;
200 bool useHarrisDetector = true;
201 double k = 0.04;
202
203 cv::goodFeaturesToTrack(grayImage,
204 features,
205 maxCorners,
206 qualityLevel,
207 minDistance,
208 mask1,
209 blockSize,
210 useHarrisDetector,
211 k);
212 }
213
214 if (method == Dense)
215 {
216 cv::Mat flow = cv::Mat(grayImage.rows, grayImage.cols, CV_32FC1);
217 double mean_flow = 0.;
218 double rmse_flow = 0.;
219 if (method == Dense)
220 {
221 if (previousImage.size() == grayImage.size())
222 {
223 cv::calcOpticalFlowFarneback(
224 previousImage, grayImage, flow, 0.5, 10, 45, 3, 9, 1.2, 0);
225 flow = (59.7 / 640.) * flow; // pixel to deg
226 flow = (1.0 / timeDiff) * flow; // per frame to per s
227 computeAverageDenseFlow(flow, mean_flow, rmse_flow);
228 }
229 }
230
231 // drawn optical flow
232 drawDenseFlowMap(flow, resultImage, 10, CV_RGB(0, 255, 0));
233
234 // send values for plotting
235 StringVariantBaseMap debugValues;
236 debugValues["mean_dense"] = new Variant((mean_flow));
237 debugValues["rmse_dense"] = new Variant((rmse_flow));
238 debugObserver->setDebugChannel(getName(), debugValues);
239 }
240
241
242 CByteImage* resultImages[1] = {IplImageAdaptor::Adapt(ppIplImages[0])};
243 provideResultImages(resultImages);
244
245 if (method != Dense)
246 {
247 if (!features.size())
248 {
249 ARMARX_WARNING << "no features found";
250 //return;
251 }
252 }
253
254 previousImage = grayImage;
255 previousFeatures = features;
256 previousTime = info->timeProvided;
257
258 lock.unlock();
259
260 if (frameRate > 0.0)
261 {
262 fpsCounter.assureFPS(frameRate);
263 }
264}
265
266void
267OpticalFlow::ComputeOpticalFlow(cv::Mat resultImage, cv::Mat grayImage)
268{
269 std::vector<uchar> status;
270 std::vector<float> error;
271
272 std::vector<cv::Point2f> trackedFeatures;
273 cv::calcOpticalFlowPyrLK(previousImage,
274 grayImage,
275 previousFeatures,
276 trackedFeatures,
277 status,
278 error,
279 cv::Size(21, 21),
280 cv::OPTFLOW_USE_INITIAL_FLOW);
281
282 std::vector<float> avgDiffx, avgDiffy;
283 for (size_t i = 0; i < trackedFeatures.size(); i++)
284 {
285 if (status[i])
286 {
287 avgDiffx.push_back(previousFeatures[i].x - trackedFeatures[i].x);
288 avgDiffy.push_back(previousFeatures[i].y - trackedFeatures[i].y);
289
290 cv::circle(resultImage, previousFeatures[i], 2, CV_RGB(255, 0, 0), -1);
291 cv::circle(resultImage, trackedFeatures[i], 2, CV_RGB(0, 0, 255), -1);
292 cv::line(resultImage, previousFeatures[i], trackedFeatures[i], CV_RGB(255, 0, 0), 1);
293 }
294 }
295
296 if (method == Feature)
297 {
298 //ARMARX_LOG << deactivateSpam(1) << "Removing " << (avgDiffx.size() / 20) << " from x with size: " << avgDiffx.size();
299 //ARMARX_LOG << deactivateSpam(1) << "Removing " << (avgDiffy.size() / 20) << " from y with size: " << avgDiffy.size();
300
301 for (size_t i = 0; i < (avgDiffx.size() / 20); i++)
302 {
303 avgDiffx = removeHighestAndLowestMember(avgDiffx);
304 }
305 for (size_t i = 0; i < (avgDiffy.size() / 20); i++)
306 {
307 avgDiffy = removeHighestAndLowestMember(avgDiffy);
308 }
309 }
310
311 if (!avgDiffx.size() || !avgDiffy.size())
312 {
313 ARMARX_WARNING << "No features found in the image, return = optical Flow!";
314 resultX = 0.;
315 resultY = 0.;
316 return;
317 }
318
319 Eigen::Map<Eigen::ArrayXf> diffX(avgDiffx.data(), avgDiffx.size());
320 Eigen::Map<Eigen::ArrayXf> diffY(avgDiffy.data(), avgDiffy.size());
321
322 // convert to rad/sec
323 resultX = diffX.mean() / timeDiff;
324 resultY = diffY.mean() / timeDiff;
325
326 resultX *= unitFactorX;
327 resultY *= unitFactorY;
328}
329
330std::vector<float>
331OpticalFlow::removeHighestAndLowestMember(std::vector<float>& input)
332{
333 std::sort(input.begin(), input.end());
334 return std::vector<float>(&input[1], &input[input.size() - 1]);
335}
336
337void
338OpticalFlow::drawDenseFlowMap(const cv::Mat& flow,
339 cv::Mat& cflowmap,
340 int step,
341 const cv::Scalar& color)
342{
343
344 for (int y = 0; y < cflowmap.rows; y += step)
345 for (int x = 0; x < cflowmap.cols; x += step)
346 {
347 const cv::Point2f& fxy = flow.at<cv::Point2f>(y, x);
348 cv::line(cflowmap,
349 cv::Point(x, y),
350 cv::Point(cvRound(x + fxy.x), cvRound(y + fxy.y)),
351 color);
352 cv::circle(cflowmap, cv::Point(cvRound(x + fxy.x), cvRound(y + fxy.y)), 1, color, -1);
353 }
354}
355
356// average optical flow norm on a window of W/2 x H/2 centered in the image
357void
358OpticalFlow::computeAverageDenseFlow(const cv::Mat& flow, double& mean, double& rmse)
359{
360
361 double dist = 0.;
362 double dist2 = 0.;
363 double d;
364 double count = 0.;
365
366 for (int y = round(flow.rows / 4); y < round(flow.rows * 3 / 4); y++)
367 {
368 for (int x = round(flow.cols / 4); x < round(flow.cols * 3 / 4); x++)
369 {
370 const cv::Point2f& fxy = flow.at<cv::Point2f>(y, x);
371 d = fxy.x * fxy.x + fxy.y * fxy.y;
372 dist += sqrt(d);
373 dist2 += d;
374 count++;
375 }
376 }
377
378 rmse = sqrt(dist2 / count);
379 mean = dist / count;
380}
std::string getConfigIdentifier()
Retrieve config identifier for this component as set in constructor.
Definition Component.cpp:90
Property< PropertyType > getProperty(const std::string &name)
void offeringTopic(const std::string &name)
Registers a topic for retrieval after initialization.
TopicProxyType getTopic(const std::string &name)
Returns a proxy of the specified topic.
void usingTopic(const std::string &name, bool orderedPublishing=false)
Registers a proxy for subscription after initialization.
std::string getName() const
Retrieve name of object.
Ice::ObjectPrx getProxy(long timeoutMs=0, bool waitForScheduler=true) const
Returns the proxy of this object (optionally it waits for the proxy)
void onConnectImageProcessor() override
Implement this method in the ImageProcessor in order execute parts when the component is fully initia...
void onExitImageProcessor() override
Exit the ImageProcessor component.
armarx::PropertyDefinitionsPtr createPropertyDefinitions() override
void process() override
Process the vision component.
void onInitImageProcessor() override
Setup the vision component.
void reportStereoCalibrationChanged(const visionx::StereoCalibration &stereoCalibration, bool, const std::string &, const Ice::Current &c=Ice::emptyCurrent) override
The Variant class is described here: Variants.
Definition Variant.h:224
void enableResultImages(int numberImages, ImageDimension imageDimension, ImageType imageType, const std::string &name="")
Enables visualization.
void usingImageProvider(std::string name)
Registers a delayed topic subscription and a delayed provider proxy retrieval which all will be avail...
bool waitForImages(int milliseconds=1000)
Wait for new images.
ImageProviderInfo getImageProvider(std::string name, ImageType destinationImageType=eRgb, bool waitForProxy=false)
Select an ImageProvider.
int getImages(CByteImage **ppImages)
Poll images from provider.
void provideResultImages(CByteImage **images, armarx::MetaInfoSizeBasePtr info=nullptr)
sends result images for visualization
ImageFormatInfo imageFormat
Image format struct that contains all necessary image information.
#define ARMARX_INFO
The normal logging level.
Definition Logging.h:181
#define ARMARX_WARNING
The logging level for unexpected behaviour, but not a serious problem.
Definition Logging.h:193
VectorXD< D, T > sqrt(const VectorXD< D, T > &a)
Definition VectorXD.h:704
This file offers overloads of toIce() and fromIce() functions for STL container types.
std::optional< float > mean(const boost::circular_buffer< NameValueMap > &buffer, const std::string &key)
std::map< std::string, VariantBasePtr > StringVariantBaseMap
@ Chessboard
Definition OpticalFlow.h:50
IceUtil::Handle< class PropertyDefinitionContainer > PropertyDefinitionsPtr
PropertyDefinitions smart pointer type.
CByteImage * createByteImage(const ImageFormatInfo &imageFormat, const ImageType imageType)
Creates a ByteImage for the destination type specified in the given imageProviderInfo.