StreamReceiver.cpp
Go to the documentation of this file.
1 /**
2  * This file is part of ArmarX.
3  *
4  * Copyright (C) 2011-2016, High Performance Humanoid Technologies (H2T), Karlsruhe Institute of Technology (KIT), all rights reserved.
5  *
6  * ArmarX is free software; you can redistribute it and/or modify
7  * it under the terms of the GNU General Public License version 2 as
8  * published by the Free Software Foundation.
9  *
10  * ArmarX is distributed in the hope that it will be useful, but
11  * WITHOUT ANY WARRANTY; without even the implied warranty of
12  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13  * GNU General Public License for more details.
14  *
15  * You should have received a copy of the GNU General Public License
16  * along with this program. If not, see <http://www.gnu.org/licenses/>.
17  *
18  * @package TabletTeleoperation::ArmarXObjects::StreamReceiver
19  * @author Mirko Waechter ( mirko dot waechter at kit dot edu )
20  * @date 2013
21  * @copyright http://www.gnu.org/licenses/gpl-2.0.txt
22  * GNU General Public License
23  */
24 
25 #include "StreamReceiver.h"
26 
28 
29 #include <Image/ByteImage.h>
30 
31 extern "C"
32 {
33 #include <glib-object.h>
34 #include <glib.h>
35 #include <gst/app/gstappbuffer.h>
36 #include <gst/app/gstappsink.h>
37 #include <gst/app/gstappsrc.h>
38 #include <gst/gst.h>
39 }
40 
41 using namespace armarx;
42 using namespace Stream;
43 
44 
45 #define VP8DECODE_ELEMENT " vp8dec ! "
46 #define RTPVP8_ELEMENT \
47  "application/x-rtp, media=(string)video, clock-rate=(int)90000, " \
48  "encoding-name=(string)VP8-DRAFT-IETF-01, payload=(int)96 ! " \
49  "rtpvp8depay ! "
50 // "gstrtpjitterbuffer latency=500 drop-on-latency=true ! "
51 
52 #define H264DECODE_ELEMENT " ffdec_h264 ! "
53 #define RTPH264_ELEMENT \
54  "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, " \
55  "payload=(int)96 ! " \
56  "rtph264depay ! "
57 
58 // "gstrtpjitterbuffer latency=500 drop-on-latency=true ! "
59 
60 void
62 {
63  pCombinedDecodedImage = NULL;
64  usingProxy(getProperty<std::string>("UsedStreamProvider").getValue());
65  usingTopic("ImageStream");
66  numberImages = 0;
67  gst_init(NULL, NULL);
68 
69 
70  avcodec_register_all();
71  av_init_packet(&m_packet);
72  m_decoder = avcodec_find_decoder(AV_CODEC_ID_H264);
73  if (!m_decoder)
74  {
75  ARMARX_ERROR << ("Can't find H264 decoder!");
76  }
77  m_decoderContext = avcodec_alloc_context3(m_decoder);
78 
79  if (m_decoder->capabilities & CODEC_CAP_TRUNCATED)
80  {
81  m_decoderContext->flags |= CODEC_FLAG_TRUNCATED;
82  }
83 
84 
85  //we can receive truncated frames
86  m_decoderContext->flags2 |= CODEC_FLAG2_CHUNKS;
87  m_decoderContext->thread_count =
88  4; //TODO: random value. May be changing can make decoding faster
89 
90  AVDictionary* dictionary = nullptr;
91  if (avcodec_open2(m_decoderContext, m_decoder, &dictionary) < 0)
92  {
93  ARMARX_ERROR << "Could not open decoder";
94  }
95  ARMARX_INFO << "H264 Decoder successfully opened";
96  m_picture = avcodec_alloc_frame();
97 }
98 
99 //unused function
100 //static gboolean
101 //sink_event_handler(GstPad* pad,
102 // GstEvent* event)
103 //{
104 // gboolean ret;
105 
106 // switch (GST_EVENT_TYPE(event))
107 // {
108 // case GST_EVENT_QOS:
109 // ARMARX_DEBUG_S << "QOS event";
110 // ret = gst_pad_event_default(pad, event);
111 // break;
112 
113 // default:
114 // /* just call the default handler */
115 // ret = gst_pad_event_default(pad, event);
116 // break;
117 // }
118 
119 // return ret;
120 //}
121 
122 
123 void
125 {
126  ScopedLock lock(decodedImageMutex);
127  ppDecodedImages = new CByteImage*[numberImages];
128 
129  streamProvider = getProxy<Stream::StreamProviderPrx>(
130  getProperty<std::string>("UsedStreamProvider").getValue());
131  codec = streamProvider->getCodecType();
132  numberImages = streamProvider->getNumberOfImages();
133  int imgWidth, imgHeight, imgType;
134  streamProvider->getImageInformation(imgWidth, imgHeight, imgType);
135  ARMARX_INFO << "Images: " << numberImages;
136  for (int i = 0; i < numberImages; i++)
137  {
138  ppDecodedImages[i] = new CByteImage(imgWidth, imgHeight, CByteImage::eRGB24);
139  }
140  pCombinedDecodedImage = new CByteImage(imgWidth, imgHeight * numberImages, CByteImage::eRGB24);
141  return;
142 
143  // put dummy string names
144  // for (int i = 0; i < numberImages ; i++)
145  // {
146  // std::stringstream ss;
147  // ss << "Camera" << i;
148  // ARMARX_WARNING << "Creating dummy stream name for image " << ss.str();
149  // streamSources[ss.str()].reset(new StreamElements(ss.str(), i, streamProvider));
150  // streamSources[ss.str()]->streamName = ss.str();
151 
152  // }
153 
154  // /*
155  // std::string sourcesStr = getProperty<std::string>("UsedSources").getValue();
156  // std::vector<std::string> sources;
157  // boost::split(sources,
158  // sourcesStr,
159  // boost::is_any_of("\t ,"),
160  // boost::token_compress_on);
161  // for(unsigned int i=0; i< sources.size(); i++){
162  // if(sources[i].empty())
163  // continue;
164  // ARMARX_DEBUG << "Creating stream for " << sources[i];
165  // ARMARX_WARNING << "xxCreating stream for " << sources[i];
166  // streamSources[sources[i]].reset(new StreamElements( sources[i], streamProvider));
167  // streamSources[sources[i]]->streamName = sources[i];
168 
169  // }
170  // */
171 
172  // std::string camPipelineString = "appsrc name=dec ! ";
173 
174  // switch (codec)
175  // {
176  // case Stream::eVP8:
177  // ARMARX_INFO << "Selected Codec: VP8";
178  // camPipelineString += RTPVP8_ELEMENT
179  // VP8DECODE_ELEMENT;
180  // break;
181 
182  // case Stream::eH264:
183  // ARMARX_INFO << "Selected Codec: H264";
184  // camPipelineString += RTPH264_ELEMENT
185  // H264DECODE_ELEMENT;
186  // break;
187 
188  // default:
189  // throw armarx::LocalException("Codec not yet supported");
190  // }
191 
192  // camPipelineString +=
193  // // " tee name=t ! "
194  // // " queue !"
195  // // " fpsdisplaysink t. ! "
196  // " queue !"
197  // " ffmpegcolorspace ! "
198  // " video/x-raw-rgb,bpp=24,depth=24,endianness=4321,red_mask=16711680,green_mask=65280,blue_mask=255 ! "
199  // " appsink name=raw drop=true max-buffers=100";
200 
201 
202  // start = IceUtil::Time::now();
203  // transferredBits = 0;
204 
205 
206  // StreamSourceMap::iterator it = streamSources.begin();
207 
208  // for (; it != streamSources.end(); it++)
209  // {
210 
211  // StreamElementsPtr elem = it->second;
212 
213  // elem->pipeline = gst_parse_launch(
214  // camPipelineString.c_str()
215  // , NULL
216  // );
217 
218  // if (!elem->pipeline)
219  // {
220  // ARMARX_ERROR << "pipeline is ZERO";
221  // terminate();
222  // return;
223  // }
224 
225  // // get the appsrc
226  // elem->appsrc = gst_bin_get_by_name(GST_BIN(elem->pipeline), "dec");
227 
228  // elem->appsink = gst_bin_get_by_name(GST_BIN(elem->pipeline), "raw");
229 
230  // // set the pipeline to playing state.
231  // gst_element_set_state(elem->pipeline, GST_STATE_PLAYING);
232 
233  // elem->taskReceive = new PeriodicTask<StreamElements>(elem.get(), &StreamReceiver::StreamElements::receive, 1);
234  //// elem->taskReceive->start();
235  // ARMARX_INFO << it->first << " pipeline created";
236 
237  // }
238 
239  // taskSave = new PeriodicTask<StreamReceiver>(this, &StreamReceiver::store, 5, false, "StreamConverterToIVT");
240  // taskSave->start();
241 }
242 
243 void
245 {
246  // ARMARX_IMPORTANT << "onDisconnectComponent";
247  StreamSourceMap::iterator it = streamSources.begin();
248 
249  for (; it != streamSources.end(); it++)
250  {
251  gst_element_send_event(it->second->pipeline, gst_event_new_eos());
252  gst_element_send_event(it->second->appsink, gst_event_new_eos());
253  gst_element_set_state(it->second->appsink, GST_STATE_NULL);
254 
255  if (it->second->taskReceive)
256  {
257  it->second->taskReceive->stop();
258  }
259  }
260 
261  // ScopedLock lock(pipelineMutex);
262  // ARMARX_IMPORTANT << "stopping tasks";
263  if (taskSave)
264  {
265  taskSave->stop();
266  }
267 
268  // ARMARX_IMPORTANT << "taskSave stopped";
269 
270  it = streamSources.begin();
271 
272  for (; it != streamSources.end(); it++)
273  {
274 
275  // ARMARX_INFO << "Stopping : " << it->first;
276  gst_element_set_state(it->second->pipeline, GST_STATE_NULL);
277  gst_element_set_state(it->second->appsink, GST_STATE_NULL);
278  g_object_unref(it->second->pipeline);
279  it->second->pipeline = NULL;
280  // ARMARX_INFO << it->first << " stopped ";
281  }
282 
283  it = streamSources.begin();
284 
285  for (; it != streamSources.end(); it++)
286  {
287 
288  try
289  {
290  if (streamProvider)
291  {
292  streamProvider->stopCapture();
293  }
294  }
295  catch (Ice::NotRegisteredException& e)
296  {
297  ARMARX_DEBUG << "no proxy available";
298  continue;
299  }
300  catch (Ice::ConnectionRefusedException& e)
301  {
302  ARMARX_DEBUG << "no proxy available";
303  continue;
304  }
305 
306  // ARMARX_INFO << it->first << "remote stopped ";
307  }
308  delete pCombinedDecodedImage;
309  pCombinedDecodedImage = nullptr;
310  if (ppDecodedImages)
311  {
312  for (int i = 0; i < numberImages; i++)
313  {
314  delete ppDecodedImages[i];
315  }
316 
317  delete[] ppDecodedImages;
318  ppDecodedImages = nullptr;
319  }
320 
321 
322  ARMARX_IMPORTANT << "onDisconnectComponent DONE";
323 }
324 
325 void
327 {
328 }
329 
330 StreamReceiver::StreamElementsPtr
331 StreamReceiver::getStreamElements(std::string streamName)
332 {
333  StreamSourceMap::iterator it = streamSources.find(streamName);
334 
335  if (it == streamSources.end())
336  {
337  throw armarx::LocalException("No Stream registered under the name: '") << streamName << "'";
338  }
339 
340  return it->second;
341 }
342 
343 void
344 StreamReceiver::getImageFormat(StreamElementsPtr elem, int& height, int& width)
345 {
346  GstPad* pad = gst_element_get_static_pad(elem->appsink, "sink");
347 
348  if (!pad)
349  {
350  ARMARX_WARNING << "Could not get pad";
351  return;
352  }
353 
354  GstCaps* caps = gst_pad_get_negotiated_caps(pad);
355 
356  if (!caps)
357  {
358  ARMARX_WARNING << "Could not get caps";
359  return;
360  }
361 
362  GstStructure* s = gst_caps_get_structure(caps, 0);
363  gst_structure_get_int(s, "width", &width);
364  gst_structure_get_int(s, "height", &height);
365 
366  gst_caps_unref(caps);
367  gst_object_unref(pad);
368 }
369 
void
StreamReceiver::reportNewStreamData(const DataChunk& chunk, const Ice::Current&)
{
    // Decode an incoming chunk of H264 data. Decoded frames are converted to
    // RGB24 into pCombinedDecodedImage and then split into the per-camera
    // images (the provider stacks all camera images vertically in one frame).
    ScopedLock lock(streamDecodeMutex);
    m_packet.size = chunk.size();
    m_packet.data = const_cast<Ice::Byte*>(chunk.data());

    // A chunk may contain several (or partial) frames; feed the packet to the
    // decoder until everything is consumed (CODEC_FLAG2_CHUNKS is set).
    while (m_packet.size > 0)
    {
        int got_picture;
        int len = avcodec_decode_video2(m_decoderContext, m_picture, &got_picture, &m_packet);
        if (len < 0)
        {
            std::string err("Decoding error");
            ARMARX_INFO << err;
            return;
        }
        if (got_picture)
        {
            ARMARX_INFO << deactivateSpam(1) << "H264Decoder: frame decoded!";

            if (m_picture->format == AV_PIX_FMT_YUV420P)
            {
                // Cached scaler context; sws_getCachedContext reuses it while
                // the parameters stay the same.
                static SwsContext* m_swsCtx = NULL;
                m_swsCtx = sws_getCachedContext(m_swsCtx,
                                                m_picture->width,
                                                m_picture->height,
                                                AV_PIX_FMT_YUV420P,
                                                m_picture->width,
                                                m_picture->height,
                                                AV_PIX_FMT_RGB24,
                                                SWS_GAUSS,
                                                NULL,
                                                NULL,
                                                NULL);
                ScopedLock lock(decodedImageMutex);

                // Scale/convert directly into the combined RGB image.
                uint8_t* dstSlice[] = {pCombinedDecodedImage->pixels};
                int dstStride = m_picture->width * 3;
                if (sws_scale(m_swsCtx,
                              m_picture->data,
                              m_picture->linesize,
                              0,
                              m_picture->height,
                              dstSlice,
                              &dstStride) != m_picture->height)
                {
                    ARMARX_INFO << "SCALING FAILED!";
                    return;
                }
                // Split the stacked frame into the individual camera images.
                for (int i = 0; i < numberImages; ++i)
                {
                    size_t imageByteSize = ppDecodedImages[i]->width * ppDecodedImages[i]->height *
                                           ppDecodedImages[i]->bytesPerPixel;
                    memcpy(ppDecodedImages[i]->pixels,
                           pCombinedDecodedImage->pixels + i * imageByteSize,
                           imageByteSize);
                }
            }
            else
            {
                // Only YUV420P is handled; anything else is logged and skipped.
                ARMARX_INFO << "Other format: " << m_picture->format;
            }
            // (Legacy commented-out QImage conversion paths removed.)
        }
        // Advance past the bytes the decoder consumed.
        m_packet.size -= len;
        m_packet.data += len;
    }
}
469 
void
StreamReceiver::getImages(std::vector<CByteImage*>& imagesOut)
{
    // Copy the most recently decoded images into the caller-provided images.
    // NOTE(review): the logging macro line that opens this statement (e.g.
    // "ARMARX_* << deactivateSpam(...)") was lost in extraction at original
    // line 475 — restore it from version control; the stream continuation
    // below belongs to it.
        << "FPS: " << 1 / (IceUtil::Time::now() - lastReceiveTimestamp).toSecondsDouble();
    lastReceiveTimestamp = IceUtil::Time::now();

    // Nothing to deliver before the component is fully started.
    if (getState() < eManagedIceObjectStarted)
    {
        return;
    }
    ScopedLock lock2(decodedImageMutex);
    // Copy at most as many images as the caller provided slots for.
    for (int i = 0; i < numberImages && i < (int)imagesOut.size(); ++i)
    {

        memcpy(imagesOut.at(i)->pixels,
               ppDecodedImages[i]->pixels,
               ppDecodedImages[i]->width * ppDecodedImages[i]->height *
                   ppDecodedImages[i]->bytesPerPixel);
    }
    return;

    // --- Unreachable legacy GStreamer pull path below (kept as in original). ---
    ScopedLock lock(pipelineMutex);
    int i = 0;
    StreamSourceMap::iterator it = streamSources.begin();

    for (; it != streamSources.end(); it++, i++)
    {
        CByteImage* image = imagesOut.at(i);

        if (!image)
        {
            continue;
        }

        if (!image->m_bOwnMemory)
        {
            throw armarx::LocalException("Output images need to have there own memory");
        }

        StreamElementsPtr elem = it->second;

        if (!elem->appsink || !elem->pipeline)
        {
            continue;
        }

        GstState state;

        if (gst_element_get_state(elem->appsink, &state, NULL, 1000000000) ==
                GST_STATE_CHANGE_FAILURE ||
            state != GST_STATE_PLAYING)
        {
            ARMARX_ERROR << deactivateSpam(5) << "state of appsink for " << elem->streamName
                         << " not playing state: " << state << " - skipping" << std::endl;
            continue;
        }

        GstBuffer* buffer = gst_app_sink_pull_buffer(GST_APP_SINK(elem->appsink));

        if (!buffer)
        {
            ARMARX_DEBUG << "Received Buffer is empty";
            return;
        }

        int width;
        int height;
        getImageFormat(elem, height, width);

        int buf_size = GST_BUFFER_SIZE(buffer);

        // Only copy when the buffer matches the expected RGB24 size.
        if (width * height * 3 != buf_size)
        {
            ARMARX_WARNING << "Invalid buffer size: actual " << buf_size << " vs. expected "
                           << width * height * 3;
        }
        else
        {
            memcpy(image->pixels, GST_BUFFER_DATA(buffer), width * height * 3);
        }

        gst_buffer_unref(buffer);
    }
}
571 
572 int
574 {
575  // return streamSources.size();
576  return numberImages;
577 }
578 
579 void
580 StreamReceiver::getImageInformation(int& imageWidth, int& imageHeight, int& imageType)
581 {
582  streamProvider->getImageInformation(imageWidth, imageHeight, imageType);
583 }
584 
void
StreamReceiver::receive()
{
    // Intentionally empty: the original pull-based implementation (fetching a
    // DataChunk from the provider and pushing it into a GStreamer appsrc via
    // gst_app_src_push_buffer) is disabled. Data now arrives pushed through
    // reportNewStreamData(). The disabled code was removed from the comments
    // here; see version control for the full legacy implementation.
}
630 
631 StreamReceiver::StreamElements::StreamElements(std::string streamName,
632  int streamID,
633  StreamProviderPrx streamProvider)
634 {
635  this->streamName = streamName;
636  this->streamProvider = streamProvider;
637  receivedIndex = 0;
638  realPosition = 0;
639  this->streamID = streamID;
640 }
641 
void
StreamReceiver::StreamElements::receive()
{
    // Intentionally empty: the original implementation (pulling encoded images
    // from the provider's ring buffer via getEncodedImagesFromBuffer, wrapping
    // them in a GstBuffer with timestamp/duration and pushing them into the
    // appsrc) is disabled. The disabled code was removed from the comments
    // here; see version control for the full legacy implementation.
}
705 
void
StreamReceiver::store()
{
    // Intentionally empty: the original debugging implementation (pulling a
    // buffer from the appsink and saving it as /tmp/images/snapNNN.bmp) is
    // disabled. The disabled code was removed from the comments here; see
    // version control for the full legacy implementation.
}
armarx::StreamReceiver::onExitComponent
virtual void onExitComponent()
Definition: StreamReceiver.cpp:326
armarx::StreamReceiver::getImageInformation
void getImageInformation(int &imageWidth, int &imageHeight, int &imageType)
Definition: StreamReceiver.cpp:580
ARMARX_IMPORTANT
#define ARMARX_IMPORTANT
Definition: Logging.h:190
armarx::StreamReceiver::getImages
void getImages(std::vector< CByteImage * > &imagesOut)
getImages retrieves the next buffered images for all streams.
Definition: StreamReceiver.cpp:471
armarx::ScopedLock
Mutex::scoped_lock ScopedLock
Definition: Synchronization.h:150
deactivateSpam
SpamFilterDataPtr deactivateSpam(SpamFilterDataPtr const &spamFilter, float deactivationDurationSec, const std::string &identifier, bool deactivate)
Definition: Logging.cpp:75
ARMARX_DEBUG
#define ARMARX_DEBUG
Definition: Logging.h:184
armarx::StreamReceiver::getNumberOfImages
int getNumberOfImages()
getNumberOfImages can be called after the component was initialized.
Definition: StreamReceiver.cpp:573
Stream
Definition: StreamProviderI.cpp:38
ARMARX_ERROR
#define ARMARX_ERROR
Definition: Logging.h:196
armarx::StreamReceiver::reportNewStreamData
void reportNewStreamData(const Stream::DataChunk &chunk, const Ice::Current &)
Definition: StreamReceiver.cpp:371
StreamReceiver.h
armarx::StreamReceiver::onConnectComponent
virtual void onConnectComponent()
Definition: StreamReceiver.cpp:124
ARMARX_INFO
#define ARMARX_INFO
Definition: Logging.h:181
ImageUtil.h
ARMARX_WARNING
#define ARMARX_WARNING
Definition: Logging.h:193
armarx::StreamReceiver::onDisconnectComponent
virtual void onDisconnectComponent()
Definition: StreamReceiver.cpp:244
armarx::StreamReceiver::onInitComponent
virtual void onInitComponent()
Definition: StreamReceiver.cpp:61
armarx::ctrlutil::s
double s(double t, double s0, double v0, double a0, double j)
Definition: CtrlUtil.h:33
armarx
This file offers overloads of toIce() and fromIce() functions for STL container types.
Definition: ArmarXTimeserver.cpp:27