Argus Camera Sample
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
VideoPipeline.cpp
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2016, NVIDIA CORPORATION. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * * Redistributions of source code must retain the above copyright
8  * notice, this list of conditions and the following disclaimer.
9  * * Redistributions in binary form must reproduce the above copyright
10  * notice, this list of conditions and the following disclaimer in the
11  * documentation and/or other materials provided with the distribution.
12  * * Neither the name of NVIDIA CORPORATION nor the names of its
13  * contributors may be used to endorse or promote products derived
14  * from this software without specific prior written permission.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
17  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
19  * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
20  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
21  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
22  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
23  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
24  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27  */
28 
29 #include <stdio.h>
30 
31 #include <string>
32 
33 #include "Error.h"
34 #include "VideoPipeline.h"
35 #include "Renderer.h"
36 
37 namespace ArgusSamples
38 {
39 
// NOTE(review): the constructor signature line (presumably
// VideoPipeline::VideoPipeline()) was lost in extraction; what follows is its
// member-initializer list and empty body.
#ifdef GST_SUPPORTED
    : m_state(GST_STATE_NULL)  // pipeline starts in the GStreamer NULL state
    , m_pipeline(NULL)         // no pipeline until setupForRecording()/setupForPlayback()
#endif
{
}

// NOTE(review): the destructor signature line (presumably
// VideoPipeline::~VideoPipeline()) was lost in extraction; it tears the
// pipeline down via destroy().
{
    destroy();
}
52 
///! give the video encoder a fixed element name so stop() can find it again
///! with gst_bin_get_by_name() to send EOS before tearing down the pipeline
static const char *s_videoEncoderName = "video encoder";
55 
56 /**
57  * RAII helper class for calling gst_object_unref on exit from a block or function.
58  */
59 template <typename T> class GstUnrefer
60 {
61 public:
62  explicit GstUnrefer(T * p)
63  : m_p(p)
64  {
65  }
67  : m_p(NULL)
68  {
69  }
71  {
72  release();
73  }
74 
75  /// Cancel the unref.
76  void cancel()
77  {
78  m_p = NULL;
79  }
80 
81  /// Unref the object now.
82  void release()
83  {
84  if (m_p)
85  gst_object_unref(m_p);
86  m_p = NULL;
87  }
88 
89  /// Set the object to be unrefed.
90  void set(T* p)
91  {
92  release();
93  m_p = p;
94  }
95 
96  /// Get the object.
97  T * get() const
98  {
99  return m_p;
100  }
101 
102 private:
103  T *m_p;
104 
105  /// Not implemented -- use default constructor
106  GstUnrefer(GstUnrefer& other);
107  /// Not implemented
109 };
110 
111 bool VideoPipeline::setupForRecording(EGLStreamKHR videoStream, uint32_t width, uint32_t height,
112  float frameRate, const char *fileName, VideoFormat videoFormat,
113  VideoFileType videoFileType, uint32_t bitRate)
114 {
115 #ifdef GST_SUPPORTED
116  // Init gstreamer
117  gst_init(NULL, NULL);
118 
119  // create the pipeline
120  m_pipeline = gst_pipeline_new("video_pipeline");
121  if (!m_pipeline)
122  ORIGINATE_ERROR("Failed to create video pipeline");
123 
124  // Create the capture source element
125  GstElement *videoSource = gst_element_factory_make("nveglstreamsrc", NULL);
126  if (!videoSource)
127  ORIGINATE_ERROR("Failed to create capture source element");
128  GstUnrefer<GstElement> unrefer(videoSource);
129  if (!gst_bin_add(GST_BIN(m_pipeline), videoSource))
130  ORIGINATE_ERROR("Failed to add video source to pipeline");
131  unrefer.cancel();
132 
133  g_object_set(G_OBJECT(videoSource), "display", Renderer::getInstance().getEGLDisplay(), NULL);
134  g_object_set(G_OBJECT(videoSource), "eglstream", videoStream, NULL);
135 
136  // Create queue
137  GstElement *queue = gst_element_factory_make("queue", NULL);
138  if (!queue)
139  ORIGINATE_ERROR("Failed to create queue");
140  unrefer.set(queue);
141  if (!gst_bin_add(GST_BIN(m_pipeline), queue))
142  ORIGINATE_ERROR("Failed to add queue to pipeline");
143  unrefer.cancel();
144 
145  // create the encoder
146  GstElement *videoEncoder = NULL;
147  switch (videoFormat)
148  {
149  case VIDEO_FORMAT_H263:
150  videoEncoder = gst_element_factory_make("avenc_h263", s_videoEncoderName);
151  break;
152  case VIDEO_FORMAT_H264:
153  videoEncoder = gst_element_factory_make("omxh264enc", s_videoEncoderName);
154  break;
155  case VIDEO_FORMAT_H265:
156  videoEncoder = gst_element_factory_make("omxh265enc", s_videoEncoderName);
157  break;
158  case VIDEO_FORMAT_VP8:
159  videoEncoder = gst_element_factory_make("omxvp8enc", s_videoEncoderName);
160  break;
161  case VIDEO_FORMAT_MPEG4:
162  videoEncoder = gst_element_factory_make("avenc_mpeg4", s_videoEncoderName);
163  break;
164  default:
165  ORIGINATE_ERROR("Unhandled video format");
166  }
167  if (!videoEncoder)
168  ORIGINATE_ERROR("Failed to create video encoder");
169  unrefer.set(videoEncoder);
170  if (!gst_bin_add(GST_BIN(m_pipeline), videoEncoder))
171  ORIGINATE_ERROR("Failed to add video encoder to pipeline");
172  unrefer.cancel();
173 
174  // if no bitrate is given select from reasonable presets
175  if (bitRate == 0)
176  {
177  if (height < 720)
178  bitRate = 4000000;
179  else if (height < 1080)
180  bitRate = 8000000;
181  else if (height <= 2160)
182  bitRate = 14000000;
183  else
184  bitRate = 20000000;
185  }
186 
187  g_object_set(G_OBJECT(videoEncoder), "bitrate", bitRate, NULL);
188 
189  // create the muxer
190  if ((videoFileType == VIDEO_FILE_TYPE_MP4) && (videoFormat == VIDEO_FORMAT_H263))
191  {
192  REPORT_ERROR("The video format H263 is not supported by the video file type MP4, "
193  "using AVI instead.");
194  videoFileType = VIDEO_FILE_TYPE_AVI;
195  }
196 
197  if ((videoFormat == VIDEO_FORMAT_H265) && (videoFileType != VIDEO_FILE_TYPE_MKV))
198  {
199  REPORT_ERROR("The video format H265 is only supported with MKV in current GST version. "
200  "Selecting MKV as container");
201  videoFileType = VIDEO_FILE_TYPE_MKV;
202  }
203 
204  GstElement *videoMuxer = NULL;
205  switch (videoFileType)
206  {
207  case VIDEO_FILE_TYPE_MP4:
208  videoMuxer = gst_element_factory_make("qtmux", NULL);
209  break;
210  case VIDEO_FILE_TYPE_3GP:
211  videoMuxer = gst_element_factory_make("3gppmux", NULL);
212  break;
213  case VIDEO_FILE_TYPE_AVI:
214  videoMuxer = gst_element_factory_make("avimux", NULL);
215  break;
216  case VIDEO_FILE_TYPE_MKV:
217  videoMuxer = gst_element_factory_make("matroskamux", NULL);
218  break;
220  videoMuxer = gst_element_factory_make("identity", NULL);
221  break;
222  default:
223  ORIGINATE_ERROR("Unhandled video file type");
224  }
225  if (!videoMuxer)
226  ORIGINATE_ERROR("Failed to create video muxer");
227  unrefer.set(videoMuxer);
228  if (!gst_bin_add(GST_BIN(m_pipeline), videoMuxer))
229  ORIGINATE_ERROR("Failed to add video muxer to pipeline");
230  unrefer.cancel();
231 
232  // create the sink
233  GstElement *videoSink = gst_element_factory_make("filesink", NULL);
234  if (!videoSink)
235  ORIGINATE_ERROR("Failed to create video sink");
236  unrefer.set(videoSink);
237  if (!gst_bin_add(GST_BIN(m_pipeline), videoSink))
238  ORIGINATE_ERROR("Failed to add video sink to pipeline");
239  unrefer.cancel();
240 
241  // set the filename
242  std::string videoFileName(fileName);
243  if (videoFileName != "/dev/null")
244  {
245  videoFileName += ".";
246  videoFileName += getFileExtension(videoFileType);
247  }
248 
249  g_object_set(G_OBJECT(videoSink), "location", videoFileName.c_str(), NULL);
250 
251  // @todo 'Floating point exception' and error 'Framerate set to : 0 at
252  // NvxVideoEncoderSetParameter' when no setting the framerate. '0' should be VFR, use 30
253  // instead
254  if (frameRate == 0.0f)
255  frameRate = 30.0f;
256 
257  // create a caps filter
258  GstCaps *caps = gst_caps_new_simple("video/x-raw",
259  "format", G_TYPE_STRING, "I420",
260  "width", G_TYPE_INT, width,
261  "height", G_TYPE_INT, height,
262  "framerate", GST_TYPE_FRACTION, static_cast<gint>(frameRate * 100.f), 100,
263  NULL);
264  if (!caps)
265  ORIGINATE_ERROR("Failed to create caps");
266 
267  GstCapsFeatures *feature = gst_caps_features_new("memory:NVMM", NULL);
268  if (!feature)
269  {
270  gst_caps_unref(caps);
271  ORIGINATE_ERROR("Failed to create caps feature");
272  }
273 
274  gst_caps_set_features(caps, 0, feature);
275 
276  // link the source to the queue via the capture filter
277  if (!gst_element_link_filtered(videoSource, queue, caps))
278  {
279  gst_caps_unref(caps);
280  ORIGINATE_ERROR("Failed to link source to queue");
281  }
282  gst_caps_unref(caps);
283 
284  // link the queue to the encoder
285  if (!gst_element_link(queue, videoEncoder))
286  ORIGINATE_ERROR("Failed to link queue to encoder");
287 
288  // link the encoder pad to the muxer
289  if (videoFileType == VIDEO_FILE_TYPE_H265)
290  {
291  // H265 has a identity muxer, need to link directly
292  if (!gst_element_link(videoEncoder, videoMuxer))
293  ORIGINATE_ERROR("Failed to link encoder to muxer");
294  }
295  else
296  {
297  if (!gst_element_link_pads(videoEncoder, "src", videoMuxer, "video_%u"))
298  ORIGINATE_ERROR("Failed to link encoder to muxer pad");
299  }
300 
301  // link the muxer to the sink
302  if (!gst_element_link(videoMuxer, videoSink))
303  ORIGINATE_ERROR("Failed to link muxer to sink");
304 
305  return true;
306 #else // GST_SUPPORTED
307  ORIGINATE_ERROR("Not supported");
308 #endif // GST_SUPPORTED
309 }
310 
#ifdef GST_SUPPORTED
/**
 * Modify object flag values by name.
 *
 * Looks up the property @a flagName on @a obj; the flag bit whose GFlags
 * nick is @a valueName is set or cleared depending on @a set, and the
 * property is written back.
 *
 * @return true on success, false (via ORIGINATE_ERROR) if the property does
 *         not exist, is not a flags property, or the value nick is unknown
 */
static bool objectModifyFlags(GObject *obj, const char *flagName, const char *valueName, bool set)
{
    guint count;
    GParamSpec **specs = g_object_class_list_properties(G_OBJECT_GET_CLASS(obj), &count);

    // find the param spec matching 'flagName'
    GParamSpec *param = NULL;
    for (guint index = 0; index < count; ++index)
    {
        if (strcmp(specs[index]->name, flagName) == 0)
        {
            param = specs[index];
            break;
        }
    }

    // Per GObject docs the returned array (not the specs it points to) is
    // owned by the caller and must be freed; the original code leaked it on
    // every return path. The specs stay valid after freeing the array.
    g_free(specs);

    if (!param)
        ORIGINATE_ERROR("Param '%s' not found", flagName);

    if (!G_IS_PARAM_SPEC_FLAGS(param))
        ORIGINATE_ERROR("Param '%s' is not a flag", flagName);

    GParamSpecFlags *pflags = G_PARAM_SPEC_FLAGS(param);
    GFlagsValue *value = g_flags_get_value_by_nick(pflags->flags_class, valueName);
    if (!value)
        ORIGINATE_ERROR("Value '%s' of flag '%s' not found", valueName, flagName);

    // read-modify-write the flags property
    gint flags;
    g_object_get(obj, flagName, &flags, NULL);
    if (set)
        flags |= value->value;
    else
        flags &= ~value->value;
    g_object_set(obj, flagName, flags, NULL);

    return true;
}
#endif // GST_SUPPORTED
348 
/**
 * Build a GStreamer playback pipeline: a 'playbin' whose video sink is a bin
 * of nvvidconv -> nvvideosink, which renders into an EGL stream.
 *
 * @param[out] videoStream receives the EGL stream created by the video sink
 * @param      fileName    path of the video file to play
 */
bool VideoPipeline::setupForPlayback(EGLStreamKHR *videoStream, const char *fileName)
{
#ifdef GST_SUPPORTED
    // Init gstreamer
    gst_init(NULL, NULL);

    // Create the source element
    m_pipeline = gst_element_factory_make("playbin", "play");
    if (!m_pipeline)
        ORIGINATE_ERROR("Failed to create playback pipeline");

    // set the uri
    char *uri = gst_filename_to_uri(fileName, NULL);
    g_object_set(G_OBJECT(m_pipeline), "uri", uri, NULL);
    g_free(uri);
    uri = NULL;

    // disable the 'text' (subtitle) flag and enable 'native-video' on playbin
    PROPAGATE_ERROR(objectModifyFlags(G_OBJECT(m_pipeline), "flags", "text", false));
    PROPAGATE_ERROR(objectModifyFlags(G_OBJECT(m_pipeline), "flags", "native-video", true));

    // create the audio sink
    GstElement *audioSink = gst_element_factory_make("autoaudiosink", "audio_sink");
    if (!audioSink)
        ORIGINATE_ERROR("Failed to create audio sink");

    // set the audio sink of the pipeline
    // NOTE(review): presumably playbin sinks the floating ref here -- no
    // explicit unref is done; confirm against GStreamer ownership rules
    g_object_set(G_OBJECT(m_pipeline), "audio-sink", audioSink, NULL);

    // Create the sink bin, this will hold the video converter and the video sink
    GstElement *videoSinkBin = gst_bin_new("video_sink_bin");
    if (!videoSinkBin)
        ORIGINATE_ERROR("Failed to create video sink bin");

    // set the video sink of the pipeline
    g_object_set(G_OBJECT(m_pipeline), "video-sink", videoSinkBin, NULL);

    // Create the video converter
    GstElement *videoConvert = gst_element_factory_make("nvvidconv", "video converter");
    if (!videoConvert)
        ORIGINATE_ERROR("Failed to create video converter");
    GstUnrefer<GstElement> unrefer(videoConvert);
    if (!gst_bin_add(GST_BIN(videoSinkBin), videoConvert))
        ORIGINATE_ERROR("Failed to add video convert to video sink bin");
    unrefer.cancel();   // the bin now owns the element

    // Create the video sink
    GstElement *videoSink = gst_element_factory_make("nvvideosink", "video sink");
    if (!videoSink)
        ORIGINATE_ERROR("Failed to create video sink");
    unrefer.set(videoSink);
    if (!gst_bin_add(GST_BIN(videoSinkBin), videoSink))
        ORIGINATE_ERROR("Failed to add video sink to video sink bin");
    unrefer.cancel();

    // configure video sink
    g_object_set(G_OBJECT(videoSink), "display", Renderer::getInstance().getEGLDisplay(), NULL);
    // get the EGL stream created by the video sink
    *videoStream = EGL_NO_STREAM_KHR;
    g_object_get(G_OBJECT(videoSink), "stream", videoStream, NULL);
    if (*videoStream == EGL_NO_STREAM_KHR)
        ORIGINATE_ERROR("Failed to get EGL stream from video sink");

    if (!gst_element_link(videoConvert, videoSink))
        ORIGINATE_ERROR("Failed to link video convert to video sink");

    // create a ghost pad so that the pipeline can connect to the bin as a sink
    GstPad *pad = gst_element_get_static_pad(videoConvert, "sink");
    if (!pad)
        ORIGINATE_ERROR("Failed to get sink pad of video convert");
    GstUnrefer<GstPad> padUnrefer(pad);
    GstPad *ghostPad = gst_ghost_pad_new("sink", pad);
    if (!ghostPad)
        ORIGINATE_ERROR("Failed to create the ghost pad");
    GstUnrefer<GstPad> ghostPadUnrefer(ghostPad);
    if (!gst_pad_set_active(ghostPad, TRUE))
        ORIGINATE_ERROR("Failed to set pad active");
    if (!gst_element_add_pad(videoSinkBin, ghostPad))
        ORIGINATE_ERROR("Failed to add pad");
    ghostPadUnrefer.cancel();   // the bin took ownership of the ghost pad
    padUnrefer.release();       // done with the static-pad reference

    return true;
#else // GST_SUPPORTED
    ORIGINATE_ERROR("Not supported");
#endif // GST_SUPPORTED
}
435 
// NOTE(review): the signature line was lost in extraction -- from the body
// this is presumably bool VideoPipeline::start(): transition the pipeline to
// the PLAYING state (no-op if already playing).
{
#ifdef GST_SUPPORTED
    if (!m_pipeline)
        ORIGINATE_ERROR("Video pipeline is not set up");

    if (m_state != GST_STATE_PLAYING)
    {
        // set to playing state
        if (gst_element_set_state(m_pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE)
            ORIGINATE_ERROR("Failed to set playing state");

        m_state = GST_STATE_PLAYING;

        /* Dump Capture - Playing Pipeline into the dot file
         * Set environment variable "export GST_DEBUG_DUMP_DOT_DIR=/tmp"
         * Run argus_camera and 0.00.00.*-argus_camera.dot
         * file will be generated.
         * Run "dot -Tpng 0.00.00.*-argus_camera.dot > image.png"
         * image.png will display the running capture pipeline.
         * */
        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline),
            GST_DEBUG_GRAPH_SHOW_ALL, "argus_camera");
    }

    return true;
#else // GST_SUPPORTED
    ORIGINATE_ERROR("Not supported");
#endif // GST_SUPPORTED
}
466 
// NOTE(review): the signature line was lost in extraction -- presumably
// bool VideoPipeline::pause(): transition the pipeline to the PAUSED state
// (no-op if already paused).
{
#ifdef GST_SUPPORTED
    if (!m_pipeline)
        ORIGINATE_ERROR("Video pipeline is not set up");

    if (m_state != GST_STATE_PAUSED)
    {
        if (gst_element_set_state(m_pipeline, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE)
            ORIGINATE_ERROR("Failed to set pause state");
        m_state = GST_STATE_PAUSED;
    }

    return true;
#else // GST_SUPPORTED
    ORIGINATE_ERROR("Not supported");
#endif // GST_SUPPORTED
}
485 
486 
// NOTE(review): the signature line was lost in extraction -- presumably
// bool VideoPipeline::togglePlayback(): swap between the PLAYING and PAUSED
// states; fails if the pipeline is in any other state.
{
#ifdef GST_SUPPORTED
    if (!m_pipeline)
        ORIGINATE_ERROR("Video pipeline is not set up");

    GstState newState = GST_STATE_NULL;
    if (m_state == GST_STATE_PLAYING)
        newState = GST_STATE_PAUSED;
    else if (m_state == GST_STATE_PAUSED)
        newState = GST_STATE_PLAYING;
    else
        ORIGINATE_ERROR("Invalid state");

    // NOTE(review): message says "pause state" even when toggling to PLAYING
    if (gst_element_set_state(m_pipeline, newState) == GST_STATE_CHANGE_FAILURE)
        ORIGINATE_ERROR("Failed to set pause state");

    m_state = newState;

    return true;
#else // GST_SUPPORTED
    ORIGINATE_ERROR("Not supported");
#endif // GST_SUPPORTED
}
511 
// NOTE(review): the signature line was lost in extraction -- presumably
// bool VideoPipeline::rewind(): seek back to position 0, flushing the
// pipeline and snapping to the nearest key frame.
{
#ifdef GST_SUPPORTED
    if (!m_pipeline)
        ORIGINATE_ERROR("Video pipeline is not set up");

    if (!gst_element_seek_simple(m_pipeline, GST_FORMAT_TIME,
            static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT), 0))
    {
        ORIGINATE_ERROR("Failed to rewind");
    }

    return true;
#else // GST_SUPPORTED
    ORIGINATE_ERROR("Not supported");
#endif // GST_SUPPORTED
}
529 
// NOTE(review): the signature line was lost in extraction -- presumably
// bool VideoPipeline::stop(): if recording, push EOS through the encoder so
// the muxer can finalize the file, then drop the pipeline to the NULL state.
{
#ifdef GST_SUPPORTED
    if (!m_pipeline)
        ORIGINATE_ERROR("Video pipeline is not set up");

    if ((m_state == GST_STATE_PLAYING) || (m_state == GST_STATE_PAUSED))
    {
        // check if there is a video encoder (only recording pipelines name one)
        GstElement *videoEncoder = gst_bin_get_by_name(GST_BIN(m_pipeline), s_videoEncoderName);
        if (videoEncoder)
        {
            // send the end of stream event
            GstPad *pad = gst_element_get_static_pad(videoEncoder, "sink");
            if (!pad)
                ORIGINATE_ERROR("Failed to get 'sink' pad");
            GstUnrefer<GstPad> padUnrefer(pad);
            if (!gst_pad_send_event(pad, gst_event_new_eos()))
                ORIGINATE_ERROR("Failed to send end of stream event encoder");
            padUnrefer.release();

            // wait (blocking, no timeout) for the EOS event to reach the bus
            GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
            if (!bus)
                ORIGINATE_ERROR("Failed to get bus");
            GstUnrefer<GstBus> busUnrefer(bus);
            if (!gst_bus_poll(bus, GST_MESSAGE_EOS, GST_CLOCK_TIME_NONE))
                ORIGINATE_ERROR("Failed to wait for the eof event");
            busUnrefer.release();
        }

        // stop the pipeline
        if (gst_element_set_state(m_pipeline, GST_STATE_NULL) != GST_STATE_CHANGE_SUCCESS)
            ORIGINATE_ERROR("Failed to stop pipeline");

        m_state = GST_STATE_NULL;
    }

    return true;
#else // GST_SUPPORTED
    ORIGINATE_ERROR("Not supported");
#endif // GST_SUPPORTED
}
573 
// NOTE(review): the signature line was lost in extraction -- presumably
// bool VideoPipeline::destroy(): stop the pipeline (finalizing any recording)
// and release it. Safe to call when no pipeline exists; called from the
// destructor.
{
#ifdef GST_SUPPORTED
    if (m_pipeline)
    {
        PROPAGATE_ERROR(stop());

        // delete pipeline
        gst_object_unref(GST_OBJECT(m_pipeline));

        m_pipeline = NULL;
    }

    return true;
#else // GST_SUPPORTED
    ORIGINATE_ERROR("Not supported");
#endif // GST_SUPPORTED
}
592 
593 /*static*/ const char* VideoPipeline::getFileExtension(VideoFileType fileType)
594 {
595  switch (fileType)
596  {
597  case VIDEO_FILE_TYPE_MP4:
598  return "mp4";
599  case VIDEO_FILE_TYPE_3GP:
600  return "3gp";
601  case VIDEO_FILE_TYPE_AVI:
602  return "avi";
603  case VIDEO_FILE_TYPE_MKV:
604  return "mkv";
606  return "h265";
607  default:
608  break;
609  }
610 
611  return "Unhandled video file type";
612 }
613 
614 bool VideoPipeline::getAspectRatio(float *aspectRatio) const
615 {
616  if (aspectRatio == NULL)
617  ORIGINATE_ERROR("'aspectRatio' is NULL");
618 #ifdef GST_SUPPORTED
619  if ((m_state != GST_STATE_PLAYING) && (m_state != GST_STATE_PAUSED))
620  ORIGINATE_ERROR("Must be in paused or playing state.");
621 
622  GstState state = GST_STATE_NULL;
623  while ((state != GST_STATE_PLAYING) && (state != GST_STATE_PAUSED))
624  {
625  if (gst_element_get_state(m_pipeline, &state, NULL, GST_CLOCK_TIME_NONE) ==
626  GST_STATE_CHANGE_FAILURE)
627  {
628  ORIGINATE_ERROR("gst_element_get_state failed");
629  }
630  }
631 
632  // Retrieve the Caps at the entrance of the video sink
633  GstElement *videoSink;
634  g_object_get(m_pipeline, "video-sink", &videoSink, NULL);
635  if (!videoSink)
636  ORIGINATE_ERROR("Failed to get video-sink");
637  GstUnrefer<GstElement> videoSinkUnrefer(videoSink);
638 
639  GstPad *videoSinkPad = gst_element_get_static_pad(videoSink, "sink");
640  if (!videoSinkPad)
641  ORIGINATE_ERROR("Failed to get video-sink pad");
642 
643  GstCaps *caps = gst_pad_get_current_caps(videoSinkPad);
644  if (!caps)
645  ORIGINATE_ERROR("Failed to get video-sink pad caps");
646 
647  *aspectRatio = 1.0f;
648 
649  GstStructure *structure = gst_caps_get_structure(caps, 0);
650  if (!structure)
651  {
652  gst_caps_unref(caps);
653  ORIGINATE_ERROR("Failed to get caps structure");
654  }
655 
656  gint width, height;
657  gint pixelAspectRatioNumerator, pixelAspectRatioDenominator;
658 
659  if (!gst_structure_get_int(structure, "width", &width) ||
660  !gst_structure_get_int(structure, "height", &height) ||
661  !gst_structure_get_fraction(structure, "pixel-aspect-ratio",
662  &pixelAspectRatioNumerator, &pixelAspectRatioDenominator))
663  {
664  gst_caps_unref(caps);
665  ORIGINATE_ERROR("Failed to get structure values");
666  }
667 
668  *aspectRatio = (float)width / (float)height;
669  *aspectRatio *= (float)pixelAspectRatioNumerator / (float)pixelAspectRatioDenominator;
670 
671  gst_caps_unref(caps);
672 
673  return true;
674 #else // GST_SUPPORTED
675  ORIGINATE_ERROR("Not supported");
676 #endif // GST_SUPPORTED
677 }
678 
679 }; // namespace ArgusSamples