Argus Camera Sample
Argus Camera Sample
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
VideoPipeline.cpp
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2016, NVIDIA CORPORATION. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * * Redistributions of source code must retain the above copyright
8  * notice, this list of conditions and the following disclaimer.
9  * * Redistributions in binary form must reproduce the above copyright
10  * notice, this list of conditions and the following disclaimer in the
11  * documentation and/or other materials provided with the distribution.
12  * * Neither the name of NVIDIA CORPORATION nor the names of its
13  * contributors may be used to endorse or promote products derived
14  * from this software without specific prior written permission.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
17  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
19  * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
20  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
21  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
22  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
23  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
24  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27  */
28 
29 #include <stdio.h>
30 
31 #include <string>
32 
33 #include "Error.h"
34 #include "VideoPipeline.h"
35 #include "Renderer.h"
36 
37 namespace ArgusSamples
38 {
39 
41 #ifdef GST_SUPPORTED
42  : m_state(GST_STATE_NULL)
43  , m_pipeline(NULL)
44 #endif
45 {
46 }
47 
49 {
50  destroy();
51 }
52 
///! Give the video encoder a name so we can find it at stop().
static const char *s_videoEncoderName = "video encoder";
55 
56 /**
57  * RAII helper class for calling gst_object_unref on exit from a block or function.
58  */
59 template <typename T> class GstUnrefer
60 {
61 public:
62  explicit GstUnrefer(T * p)
63  : m_p(p)
64  {
65  }
67  : m_p(NULL)
68  {
69  }
71  {
72  release();
73  }
74 
75  /// Cancel the unref.
76  void cancel()
77  {
78  m_p = NULL;
79  }
80 
81  /// Unref the object now.
82  void release()
83  {
84  if (m_p)
85  gst_object_unref(m_p);
86  m_p = NULL;
87  }
88 
89  /// Set the object to be unrefed.
90  void set(T* p)
91  {
92  release();
93  m_p = p;
94  }
95 
96  /// Get the object.
97  T * get() const
98  {
99  return m_p;
100  }
101 
102 private:
103  T *m_p;
104 
105  /// Not implemented -- use default constructor
106  GstUnrefer(GstUnrefer& other);
107  /// Not implemented
109 };
110 
111 bool VideoPipeline::setupForRecording(EGLStreamKHR videoStream, uint32_t width, uint32_t height,
112  float frameRate, const char *fileName, VideoFormat videoFormat,
113  VideoFileType videoFileType, uint32_t bitRate)
114 {
115 #ifdef GST_SUPPORTED
116  // Init gstreamer
117  gst_init(NULL, NULL);
118 
119  // create the pipeline
120  m_pipeline = gst_pipeline_new("video_pipeline");
121  if (!m_pipeline)
122  ORIGINATE_ERROR("Failed to create video pipeline");
123 
124  // Create the capture source element
125  GstElement *videoSource = gst_element_factory_make("nveglstreamsrc", NULL);
126  if (!videoSource)
127  ORIGINATE_ERROR("Failed to create capture source element");
128  GstUnrefer<GstElement> unrefer(videoSource);
129  if (!gst_bin_add(GST_BIN(m_pipeline), videoSource))
130  ORIGINATE_ERROR("Failed to add video source to pipeline");
131  unrefer.cancel();
132 
133  g_object_set(G_OBJECT(videoSource), "display", Renderer::getInstance().getEGLDisplay(), NULL);
134  g_object_set(G_OBJECT(videoSource), "eglstream", videoStream, NULL);
135 
136  // Create queue
137  GstElement *queue = gst_element_factory_make("queue", NULL);
138  if (!queue)
139  ORIGINATE_ERROR("Failed to create queue");
140  unrefer.set(queue);
141  if (!gst_bin_add(GST_BIN(m_pipeline), queue))
142  ORIGINATE_ERROR("Failed to add queue to pipeline");
143  unrefer.cancel();
144 
145  // create the encoder
146  GstElement *videoEncoder = NULL;
147  switch (videoFormat)
148  {
149  case VIDEO_FORMAT_H264:
150  videoEncoder = gst_element_factory_make("omxh264enc", s_videoEncoderName);
151  break;
152  case VIDEO_FORMAT_H265:
153  videoEncoder = gst_element_factory_make("omxh265enc", s_videoEncoderName);
154  break;
155  case VIDEO_FORMAT_VP8:
156  videoEncoder = gst_element_factory_make("omxvp8enc", s_videoEncoderName);
157  break;
158  default:
159  ORIGINATE_ERROR("Unhandled video format");
160  }
161  if (!videoEncoder)
162  ORIGINATE_ERROR("Failed to create video encoder");
163  unrefer.set(videoEncoder);
164  if (!gst_bin_add(GST_BIN(m_pipeline), videoEncoder))
165  ORIGINATE_ERROR("Failed to add video encoder to pipeline");
166  unrefer.cancel();
167 
168  // if no bitrate is given select from reasonable presets
169  if (bitRate == 0)
170  {
171  if (height < 720)
172  bitRate = 4000000;
173  else if (height < 1080)
174  bitRate = 8000000;
175  else if (height <= 2160)
176  bitRate = 14000000;
177  else
178  bitRate = 20000000;
179  }
180 
181  g_object_set(G_OBJECT(videoEncoder), "bitrate", bitRate, NULL);
182 
183  // create the muxer
184  if ((videoFormat == VIDEO_FORMAT_H265) && (videoFileType != VIDEO_FILE_TYPE_MKV))
185  {
186  REPORT_ERROR("The video format H265 is only supported with MKV in current GST version. "
187  "Selecting MKV as container");
188  videoFileType = VIDEO_FILE_TYPE_MKV;
189  }
190 
191  GstElement *videoMuxer = NULL;
192  switch (videoFileType)
193  {
194  case VIDEO_FILE_TYPE_MP4:
195  videoMuxer = gst_element_factory_make("qtmux", NULL);
196  break;
197  case VIDEO_FILE_TYPE_3GP:
198  videoMuxer = gst_element_factory_make("3gppmux", NULL);
199  break;
200  case VIDEO_FILE_TYPE_AVI:
201  videoMuxer = gst_element_factory_make("avimux", NULL);
202  break;
203  case VIDEO_FILE_TYPE_MKV:
204  videoMuxer = gst_element_factory_make("matroskamux", NULL);
205  break;
207  videoMuxer = gst_element_factory_make("identity", NULL);
208  break;
209  default:
210  ORIGINATE_ERROR("Unhandled video file type");
211  }
212  if (!videoMuxer)
213  ORIGINATE_ERROR("Failed to create video muxer");
214  unrefer.set(videoMuxer);
215  if (!gst_bin_add(GST_BIN(m_pipeline), videoMuxer))
216  ORIGINATE_ERROR("Failed to add video muxer to pipeline");
217  unrefer.cancel();
218 
219  // create the sink
220  GstElement *videoSink = gst_element_factory_make("filesink", NULL);
221  if (!videoSink)
222  ORIGINATE_ERROR("Failed to create video sink");
223  unrefer.set(videoSink);
224  if (!gst_bin_add(GST_BIN(m_pipeline), videoSink))
225  ORIGINATE_ERROR("Failed to add video sink to pipeline");
226  unrefer.cancel();
227 
228  // set the filename
229  std::string videoFileName(fileName);
230  if (videoFileName != "/dev/null")
231  {
232  videoFileName += ".";
233  videoFileName += getFileExtension(videoFileType);
234  }
235 
236  g_object_set(G_OBJECT(videoSink), "location", videoFileName.c_str(), NULL);
237 
238  // @todo 'Floating point exception' and error 'Framerate set to : 0 at
239  // NvxVideoEncoderSetParameter' when no setting the framerate. '0' should be VFR, use 30
240  // instead
241  if (frameRate == 0.0f)
242  frameRate = 30.0f;
243 
244  // create a caps filter
245  GstCaps *caps = gst_caps_new_simple("video/x-raw",
246  "format", G_TYPE_STRING, "I420",
247  "width", G_TYPE_INT, width,
248  "height", G_TYPE_INT, height,
249  "framerate", GST_TYPE_FRACTION, static_cast<gint>(frameRate * 100.f), 100,
250  NULL);
251  if (!caps)
252  ORIGINATE_ERROR("Failed to create caps");
253 
254  GstCapsFeatures *feature = gst_caps_features_new("memory:NVMM", NULL);
255  if (!feature)
256  {
257  gst_caps_unref(caps);
258  ORIGINATE_ERROR("Failed to create caps feature");
259  }
260 
261  gst_caps_set_features(caps, 0, feature);
262 
263  // link the source to the queue via the capture filter
264  if (!gst_element_link_filtered(videoSource, queue, caps))
265  {
266  gst_caps_unref(caps);
267  ORIGINATE_ERROR("Failed to link source to queue");
268  }
269  gst_caps_unref(caps);
270 
271  // link the queue to the encoder
272  if (!gst_element_link(queue, videoEncoder))
273  ORIGINATE_ERROR("Failed to link queue to encoder");
274 
275  // link the encoder pad to the muxer
276  if (videoFileType == VIDEO_FILE_TYPE_H265)
277  {
278  // H265 has a identity muxer, need to link directly
279  if (!gst_element_link(videoEncoder, videoMuxer))
280  ORIGINATE_ERROR("Failed to link encoder to muxer");
281  }
282  else
283  {
284  if (!gst_element_link_pads(videoEncoder, "src", videoMuxer, "video_%u"))
285  ORIGINATE_ERROR("Failed to link encoder to muxer pad");
286  }
287 
288  // link the muxer to the sink
289  if (!gst_element_link(videoMuxer, videoSink))
290  ORIGINATE_ERROR("Failed to link muxer to sink");
291 
292  return true;
293 #else // GST_SUPPORTED
294  ORIGINATE_ERROR("Not supported");
295 #endif // GST_SUPPORTED
296 }
297 
#ifdef GST_SUPPORTED
/**
 * Modify object flag values by name.
 *
 * @param obj [in] object whose flags-type property is modified
 * @param flagName [in] name of the flags property
 * @param valueName [in] nick of the flag value to set or clear
 * @param set [in] if true the flag value is set, otherwise it is cleared
 */
static bool objectModifyFlags(GObject *obj, const char *flagName, const char *valueName, bool set)
{
    // Look the property up directly. The previous implementation iterated
    // the array returned by g_object_class_list_properties() and leaked it
    // on every path (the array must be released with g_free()); the direct
    // lookup allocates nothing.
    GParamSpec *param = g_object_class_find_property(G_OBJECT_GET_CLASS(obj), flagName);
    if (!param)
        ORIGINATE_ERROR("Param '%s' not found", flagName);

    if (!G_IS_PARAM_SPEC_FLAGS(param))
        ORIGINATE_ERROR("Param '%s' is not a flag", flagName);

    GParamSpecFlags *pflags = G_PARAM_SPEC_FLAGS(param);
    GFlagsValue *value = g_flags_get_value_by_nick(pflags->flags_class, valueName);
    if (!value)
        ORIGINATE_ERROR("Value '%s' of flag '%s' not found", valueName, flagName);

    // read-modify-write the current flags value
    gint flags;
    g_object_get(obj, flagName, &flags, NULL);
    if (set)
        flags |= value->value;
    else
        flags &= ~value->value;
    g_object_set(obj, flagName, flags, NULL);

    return true;
}
#endif // GST_SUPPORTED
335 
/**
 * Set up a 'playbin' pipeline playing a video file back into an EGL stream.
 * A custom video sink bin (nvvidconv -> nvvideosink) is attached to playbin
 * and the EGL stream produced by 'nvvideosink' is returned to the caller.
 *
 * @param videoStream [out] set to the EGL stream delivering the decoded frames
 * @param fileName [in] name of the video file to play
 */
bool VideoPipeline::setupForPlayback(EGLStreamKHR *videoStream, const char *fileName)
{
#ifdef GST_SUPPORTED
    // Init gstreamer
    gst_init(NULL, NULL);

    // Create the source element
    m_pipeline = gst_element_factory_make("playbin", "play");
    if (!m_pipeline)
        ORIGINATE_ERROR("Failed to create playback pipeline");

    // set the uri (playbin expects a URI, not a plain file name)
    char *uri = gst_filename_to_uri(fileName, NULL);
    g_object_set(G_OBJECT(m_pipeline), "uri", uri, NULL);
    g_free(uri);
    uri = NULL;

    // disable subtitle rendering and request native video output
    PROPAGATE_ERROR(objectModifyFlags(G_OBJECT(m_pipeline), "flags", "text", false));
    PROPAGATE_ERROR(objectModifyFlags(G_OBJECT(m_pipeline), "flags", "native-video", true));

    // create the audio sink
    GstElement *audioSink = gst_element_factory_make("autoaudiosink", "audio_sink");
    if (!audioSink)
        ORIGINATE_ERROR("Failed to create audio sink");

    // set the audio sink of the pipeline
    g_object_set(G_OBJECT(m_pipeline), "audio-sink", audioSink, NULL);

    // Create the sink bin, this will hold the video converter and the video sink
    GstElement *videoSinkBin = gst_bin_new("video_sink_bin");
    if (!videoSinkBin)
        ORIGINATE_ERROR("Failed to create video sink bin");

    // set the video sink of the pipeline
    g_object_set(G_OBJECT(m_pipeline), "video-sink", videoSinkBin, NULL);

    // Create the video converted
    GstElement *videoConvert = gst_element_factory_make("nvvidconv", "video converter");
    if (!videoConvert)
        ORIGINATE_ERROR("Failed to create video converter");
    // the unrefer guards the element on the error paths; after gst_bin_add()
    // the bin owns it and the unrefer is cancelled
    GstUnrefer<GstElement> unrefer(videoConvert);
    if (!gst_bin_add(GST_BIN(videoSinkBin), videoConvert))
        ORIGINATE_ERROR("Failed to add video convert to video sink bin");
    unrefer.cancel();

    // Create the video sink
    GstElement *videoSink = gst_element_factory_make("nvvideosink", "video sink");
    if (!videoSink)
        ORIGINATE_ERROR("Failed to create video sink");
    unrefer.set(videoSink);
    if (!gst_bin_add(GST_BIN(videoSinkBin), videoSink))
        ORIGINATE_ERROR("Failed to add video sink to video sink bin");
    unrefer.cancel();

    // configure video sink
    g_object_set(G_OBJECT(videoSink), "display", Renderer::getInstance().getEGLDisplay(), NULL);
    // get the EGL stream
    *videoStream = EGL_NO_STREAM_KHR;
    g_object_get(G_OBJECT(videoSink), "stream", videoStream, NULL);
    if (*videoStream == EGL_NO_STREAM_KHR)
        ORIGINATE_ERROR("Failed to get EGL stream from video sink");

    if (!gst_element_link(videoConvert, videoSink))
        ORIGINATE_ERROR("Failed to link video convert to video sink");

    // create a ghost pad so that the pipeline can connect to the bin as a sink
    GstPad *pad = gst_element_get_static_pad(videoConvert, "sink");
    if (!pad)
        ORIGINATE_ERROR("Failed to get sink pad of video convert");
    GstUnrefer<GstPad> padUnrefer(pad);
    GstPad *ghostPad = gst_ghost_pad_new("sink", pad);
    if (!ghostPad)
        ORIGINATE_ERROR("Failed to create the ghost pad");
    GstUnrefer<GstPad> ghostPadUnrefer(ghostPad);
    if (!gst_pad_set_active(ghostPad, TRUE))
        ORIGINATE_ERROR("Failed to set pad active");
    if (!gst_element_add_pad(videoSinkBin, ghostPad))
        ORIGINATE_ERROR("Failed to add pad");
    // the bin took ownership of the ghost pad; the static pad reference is
    // no longer needed
    ghostPadUnrefer.cancel();
    padUnrefer.release();

    return true;
#else // GST_SUPPORTED
    ORIGINATE_ERROR("Not supported");
#endif // GST_SUPPORTED
}
422 
424 {
425 #ifdef GST_SUPPORTED
426  if (!m_pipeline)
427  ORIGINATE_ERROR("Video pipeline is not set up");
428 
429  if (m_state != GST_STATE_PLAYING)
430  {
431  // set to playing state
432  if (gst_element_set_state(m_pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE)
433  ORIGINATE_ERROR("Failed to set playing state");
434 
435  m_state = GST_STATE_PLAYING;
436 
437  /* Dump Capture - Playing Pipeline into the dot file
438  * Set environment variable "export GST_DEBUG_DUMP_DOT_DIR=/tmp"
439  * Run argus_camera and 0.00.00.*-argus_camera.dot
440  * file will be generated.
441  * Run "dot -Tpng 0.00.00.*-argus_camera.dot > image.png"
442  * image.png will display the running capture pipeline.
443  * */
444  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline),
445  GST_DEBUG_GRAPH_SHOW_ALL, "argus_camera");
446  }
447 
448  return true;
449 #else // GST_SUPPORTED
450  ORIGINATE_ERROR("Not supported");
451 #endif // GST_SUPPORTED
452 }
453 
455 {
456 #ifdef GST_SUPPORTED
457  if (!m_pipeline)
458  ORIGINATE_ERROR("Video pipeline is not set up");
459 
460  if (m_state != GST_STATE_PAUSED)
461  {
462  if (gst_element_set_state(m_pipeline, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE)
463  ORIGINATE_ERROR("Failed to set pause state");
464  m_state = GST_STATE_PAUSED;
465  }
466 
467  return true;
468 #else // GST_SUPPORTED
469  ORIGINATE_ERROR("Not supported");
470 #endif // GST_SUPPORTED
471 }
472 
473 
475 {
476 #ifdef GST_SUPPORTED
477  if (!m_pipeline)
478  ORIGINATE_ERROR("Video pipeline is not set up");
479 
480  GstState newState = GST_STATE_NULL;
481  if (m_state == GST_STATE_PLAYING)
482  newState = GST_STATE_PAUSED;
483  else if (m_state == GST_STATE_PAUSED)
484  newState = GST_STATE_PLAYING;
485  else
486  ORIGINATE_ERROR("Invalid state");
487 
488  if (gst_element_set_state(m_pipeline, newState) == GST_STATE_CHANGE_FAILURE)
489  ORIGINATE_ERROR("Failed to set pause state");
490 
491  m_state = newState;
492 
493  return true;
494 #else // GST_SUPPORTED
495  ORIGINATE_ERROR("Not supported");
496 #endif // GST_SUPPORTED
497 }
498 
500 {
501 #ifdef GST_SUPPORTED
502  if (!m_pipeline)
503  ORIGINATE_ERROR("Video pipeline is not set up");
504 
505  if (!gst_element_seek_simple(m_pipeline, GST_FORMAT_TIME,
506  static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT), 0))
507  {
508  ORIGINATE_ERROR("Failed to rewind");
509  }
510 
511  return true;
512 #else // GST_SUPPORTED
513  ORIGINATE_ERROR("Not supported");
514 #endif // GST_SUPPORTED
515 }
516 
518 {
519 #ifdef GST_SUPPORTED
520  if (!m_pipeline)
521  ORIGINATE_ERROR("Video pipeline is not set up");
522 
523  if ((m_state == GST_STATE_PLAYING) || (m_state == GST_STATE_PAUSED))
524  {
525  // check if there is a video encoder
526  GstElement *videoEncoder = gst_bin_get_by_name(GST_BIN(m_pipeline), s_videoEncoderName);
527  if (videoEncoder)
528  {
529  // send the end of stream event
530  GstPad *pad = gst_element_get_static_pad(videoEncoder, "sink");
531  if (!pad)
532  ORIGINATE_ERROR("Failed to get 'sink' pad");
533  GstUnrefer<GstPad> padUnrefer(pad);
534  if (!gst_pad_send_event(pad, gst_event_new_eos()))
535  ORIGINATE_ERROR("Failed to send end of stream event encoder");
536  padUnrefer.release();
537 
538  // wait for the event to go through
539  GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
540  if (!bus)
541  ORIGINATE_ERROR("Failed to get bus");
542  GstUnrefer<GstBus> busUnrefer(bus);
543  if (!gst_bus_poll(bus, GST_MESSAGE_EOS, GST_CLOCK_TIME_NONE))
544  ORIGINATE_ERROR("Failed to wait for the eof event");
545  busUnrefer.release();
546  }
547 
548  // stop the pipeline
549  if (gst_element_set_state(m_pipeline, GST_STATE_NULL) != GST_STATE_CHANGE_SUCCESS)
550  ORIGINATE_ERROR("Failed to stop pipeline");
551 
552  m_state = GST_STATE_NULL;
553  }
554 
555  return true;
556 #else // GST_SUPPORTED
557  ORIGINATE_ERROR("Not supported");
558 #endif // GST_SUPPORTED
559 }
560 
562 {
563 #ifdef GST_SUPPORTED
564  if (m_pipeline)
565  {
566  PROPAGATE_ERROR(stop());
567 
568  // delete pipeline
569  gst_object_unref(GST_OBJECT(m_pipeline));
570 
571  m_pipeline = NULL;
572  }
573 
574  return true;
575 #else // GST_SUPPORTED
576  ORIGINATE_ERROR("Not supported");
577 #endif // GST_SUPPORTED
578 }
579 
580 /*static*/ const char* VideoPipeline::getFileExtension(VideoFileType fileType)
581 {
582  switch (fileType)
583  {
584  case VIDEO_FILE_TYPE_MP4:
585  return "mp4";
586  case VIDEO_FILE_TYPE_3GP:
587  return "3gp";
588  case VIDEO_FILE_TYPE_AVI:
589  return "avi";
590  case VIDEO_FILE_TYPE_MKV:
591  return "mkv";
593  return "h265";
594  default:
595  break;
596  }
597 
598  return "Unhandled video file type";
599 }
600 
601 bool VideoPipeline::getAspectRatio(float *aspectRatio) const
602 {
603  if (aspectRatio == NULL)
604  ORIGINATE_ERROR("'aspectRatio' is NULL");
605 #ifdef GST_SUPPORTED
606  if ((m_state != GST_STATE_PLAYING) && (m_state != GST_STATE_PAUSED))
607  ORIGINATE_ERROR("Must be in paused or playing state.");
608 
609  GstState state = GST_STATE_NULL;
610  while ((state != GST_STATE_PLAYING) && (state != GST_STATE_PAUSED))
611  {
612  if (gst_element_get_state(m_pipeline, &state, NULL, GST_CLOCK_TIME_NONE) ==
613  GST_STATE_CHANGE_FAILURE)
614  {
615  ORIGINATE_ERROR("gst_element_get_state failed");
616  }
617  }
618 
619  // Retrieve the Caps at the entrance of the video sink
620  GstElement *videoSink;
621  g_object_get(m_pipeline, "video-sink", &videoSink, NULL);
622  if (!videoSink)
623  ORIGINATE_ERROR("Failed to get video-sink");
624  GstUnrefer<GstElement> videoSinkUnrefer(videoSink);
625 
626  GstPad *videoSinkPad = gst_element_get_static_pad(videoSink, "sink");
627  if (!videoSinkPad)
628  ORIGINATE_ERROR("Failed to get video-sink pad");
629 
630  GstCaps *caps = gst_pad_get_current_caps(videoSinkPad);
631  if (!caps)
632  ORIGINATE_ERROR("Failed to get video-sink pad caps");
633 
634  *aspectRatio = 1.0f;
635 
636  GstStructure *structure = gst_caps_get_structure(caps, 0);
637  if (!structure)
638  {
639  gst_caps_unref(caps);
640  ORIGINATE_ERROR("Failed to get caps structure");
641  }
642 
643  gint width, height;
644  gint pixelAspectRatioNumerator, pixelAspectRatioDenominator;
645 
646  if (!gst_structure_get_int(structure, "width", &width) ||
647  !gst_structure_get_int(structure, "height", &height) ||
648  !gst_structure_get_fraction(structure, "pixel-aspect-ratio",
649  &pixelAspectRatioNumerator, &pixelAspectRatioDenominator))
650  {
651  gst_caps_unref(caps);
652  ORIGINATE_ERROR("Failed to get structure values");
653  }
654 
655  *aspectRatio = (float)width / (float)height;
656  *aspectRatio *= (float)pixelAspectRatioNumerator / (float)pixelAspectRatioDenominator;
657 
658  gst_caps_unref(caps);
659 
660  return true;
661 #else // GST_SUPPORTED
662  ORIGINATE_ERROR("Not supported");
663 #endif // GST_SUPPORTED
664 }
665 
666 }; // namespace ArgusSamples