Argus Camera Sample
All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
VideoPipeline.cpp
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2016-2021, NVIDIA CORPORATION. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * * Redistributions of source code must retain the above copyright
8  * notice, this list of conditions and the following disclaimer.
9  * * Redistributions in binary form must reproduce the above copyright
10  * notice, this list of conditions and the following disclaimer in the
11  * documentation and/or other materials provided with the distribution.
12  * * Neither the name of NVIDIA CORPORATION nor the names of its
13  * contributors may be used to endorse or promote products derived
14  * from this software without specific prior written permission.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
17  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
19  * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
20  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
21  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
22  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
23  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
24  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27  */
28 
29 #include <stdio.h>
30 
31 #include <string>
32 
33 #include "Error.h"
34 #include "VideoPipeline.h"
35 #include "Composer.h"
36 #include "Util.h"
37 
38 namespace ArgusSamples
39 {
40 
42 #ifdef GST_SUPPORTED
43  : m_state(GST_STATE_NULL)
44  , m_pipeline(NULL)
45 #endif
46 {
47 }
48 
50 {
51  destroy();
52 }
53 
///! Give the video encoder a name so we can find it with gst_bin_get_by_name() at stop()
static const char *s_videoEncoderName = "video encoder";
56 
57 /**
58  * RAII helper class for calling gst_object_unref on exit from a block or function.
59  */
60 template <typename T> class GstUnrefer
61 {
62 public:
63  explicit GstUnrefer(T * p)
64  : m_p(p)
65  {
66  }
68  : m_p(NULL)
69  {
70  }
72  {
73  release();
74  }
75 
76  /// Cancel the unref.
77  void cancel()
78  {
79  m_p = NULL;
80  }
81 
82  /// Unref the object now.
83  void release()
84  {
85  if (m_p)
86  gst_object_unref(m_p);
87  m_p = NULL;
88  }
89 
90  /// Set the object to be unrefed.
91  void set(T* p)
92  {
93  release();
94  m_p = p;
95  }
96 
97  /// Get the object.
98  T * get() const
99  {
100  return m_p;
101  }
102 
103 private:
104  T *m_p;
105 
106  /// Not implemented -- use default constructor
107  GstUnrefer(GstUnrefer& other);
108  /// Not implemented
110 };
111 
/**
 * Build a recording pipeline:
 *   nveglstreamsrc -> (caps filter) -> queue -> encoder [-> parser] -> muxer -> filesink
 *
 * @param videoStream     EGL stream to capture frames from
 * @param width, height   frame size, applied via the caps filter
 * @param frameRate       frames per second; 0 selects a 30 fps default (see below)
 * @param fileName        output path without extension ("/dev/null" is used as-is)
 * @param videoFormat     H264/H265/VP8/VP9 encoder selection
 * @param videoFileType   container selection (mp4/3gp/avi/mkv)
 * @param bitRate         target bit rate; 0 selects a preset based on height
 * @param controlRate     encoder rate-control mode
 * @param enableTwoPassCBR enable two-pass CBR on the encoder
 * @return true on success; on failure an error is originated and false returned
 */
bool VideoPipeline::setupForRecording(EGLStreamKHR videoStream, uint32_t width, uint32_t height,
    float frameRate, const char *fileName, VideoFormat videoFormat,
    VideoFileType videoFileType, uint32_t bitRate, VideoControlRateMode controlRate,
    bool enableTwoPassCBR)
{
#ifdef GST_SUPPORTED
    // set the filename: append the container extension, except for "/dev/null"
    std::string videoFileName(fileName);
    if (videoFileName != "/dev/null")
    {
        videoFileName += ".";
        videoFileName += getFileExtension(videoFileType);
        PROPAGATE_ERROR(validateOutputPath(videoFileName.c_str()));
    }

    // Init gstreamer
    gst_init(NULL, NULL);

    // create the pipeline
    m_pipeline = gst_pipeline_new("video_pipeline");
    if (!m_pipeline)
        ORIGINATE_ERROR("Failed to create video pipeline");

    // Create the capture source element.
    // Pattern used throughout: 'unrefer' guards the new element so it is
    // unrefed if gst_bin_add() fails; on success the bin takes ownership
    // and the guard is cancelled.
    GstElement *videoSource = gst_element_factory_make("nveglstreamsrc", NULL);
    if (!videoSource)
        ORIGINATE_ERROR("Failed to create capture source element");
    GstUnrefer<GstElement> unrefer(videoSource);
    if (!gst_bin_add(GST_BIN(m_pipeline), videoSource))
        ORIGINATE_ERROR("Failed to add video source to pipeline");
    unrefer.cancel();

    g_object_set(G_OBJECT(videoSource), "display", Composer::getInstance().getEGLDisplay(), NULL);
    g_object_set(G_OBJECT(videoSource), "eglstream", videoStream, NULL);

    // Create queue
    GstElement *queue = gst_element_factory_make("queue", NULL);
    if (!queue)
        ORIGINATE_ERROR("Failed to create queue");
    unrefer.set(queue);
    if (!gst_bin_add(GST_BIN(m_pipeline), queue))
        ORIGINATE_ERROR("Failed to add queue to pipeline");
    unrefer.cancel();

    // create the encoder; it is given a well-known name so stop() can
    // find it again with gst_bin_get_by_name()
    GstElement *videoEncoder = NULL;
    switch (videoFormat)
    {
    case VIDEO_FORMAT_H264:
        videoEncoder = gst_element_factory_make("nvv4l2h264enc", s_videoEncoderName);
        break;
    case VIDEO_FORMAT_H265:
        videoEncoder = gst_element_factory_make("nvv4l2h265enc", s_videoEncoderName);
        break;
    case VIDEO_FORMAT_VP8:
        printf("\n***vp8 encode is not supported for Jetson-Xavier & beyond\n");
        videoEncoder = gst_element_factory_make("nvv4l2vp8enc", s_videoEncoderName);
        break;
    case VIDEO_FORMAT_VP9:
        videoEncoder = gst_element_factory_make("nvv4l2vp9enc", s_videoEncoderName);
        break;
    default:
        ORIGINATE_ERROR("Unhandled video format");
    }
    if (!videoEncoder)
        ORIGINATE_ERROR("Failed to create video encoder");
    unrefer.set(videoEncoder);
    if (!gst_bin_add(GST_BIN(m_pipeline), videoEncoder))
        ORIGINATE_ERROR("Failed to add video encoder to pipeline");
    unrefer.cancel();

    // if no bitrate is given select from reasonable presets by output height
    if (bitRate == 0)
    {
        if (height < 720)
            bitRate = VIDEO_BITRATE_4M;
        else if (height < 1080)
            bitRate = VIDEO_BITRATE_8M;
        else if (height <= 2160)
            bitRate = VIDEO_BITRATE_14M;
        else
            bitRate = VIDEO_BITRATE_20M;
    }

    g_object_set(G_OBJECT(videoEncoder), "bitrate", bitRate, NULL);
    g_object_set(G_OBJECT(videoEncoder), "control-rate", controlRate, NULL);
    g_object_set(G_OBJECT(videoEncoder), "EnableTwopassCBR", enableTwoPassCBR, NULL);

    /*
     * Currently, of all the supported videoEncoders above: H264, H265, VP8 and VP9, Only H265
     * supports resolution > 4k.
     */
    const uint32_t WIDTH_4K = 3840;
    if (width > WIDTH_4K && videoFormat != VIDEO_FORMAT_H265)
    {
        ORIGINATE_ERROR("\n Resolution > 4k requires videoformat H265 \n");
    }
    // set video encoding profile for h.264 to high to get optimized video quality
    if (videoFormat == VIDEO_FORMAT_H264)
    {
        g_object_set(G_OBJECT(videoEncoder), "profile", VIDEO_AVC_PROFILE_HIGH, NULL);
    }

    // create the muxer
    if (videoFormat == VIDEO_FORMAT_VP9)
    {
        printf("\nThe VP9 video format is not supported on Jetson-tx1.\n");
    }

    // 3GP muxing is only available for H264; fall back to MKV otherwise
    if ((videoFileType == VIDEO_FILE_TYPE_3GP) &&
        !(videoFormat == VIDEO_FORMAT_H264))
    {
        printf("\nThe 3GP is only supported with H264 in current GST version. "
            "Selecting other containers.\n");
        videoFileType = VIDEO_FILE_TYPE_MKV;
    }

    // H264/H265 need a parser between encoder and muxer; VP8/VP9 do not
    GstElement *videoParse = NULL;
    switch (videoFormat)
    {
    case VIDEO_FORMAT_H264:
        videoParse = gst_element_factory_make("h264parse", NULL);
        if (!videoParse)
            ORIGINATE_ERROR("Failed to create video parser");
        break;
    case VIDEO_FORMAT_H265:
        videoParse = gst_element_factory_make("h265parse", NULL);
        if (!videoParse)
            ORIGINATE_ERROR("Failed to create video parser");
        break;
    case VIDEO_FORMAT_VP9:
    case VIDEO_FORMAT_VP8:
        break;
    default:
        ORIGINATE_ERROR("Unhandled video file type");
    }
    if (videoParse)
    {
        unrefer.set(videoParse);
        if (!gst_bin_add(GST_BIN(m_pipeline), videoParse))
            ORIGINATE_ERROR("Failed to add video parser to pipeline");
        unrefer.cancel();
    }

    GstElement *videoMuxer = NULL;
    switch (videoFileType)
    {
    case VIDEO_FILE_TYPE_MP4:
        videoMuxer = gst_element_factory_make("qtmux", NULL);
        break;
    case VIDEO_FILE_TYPE_3GP:
        videoMuxer = gst_element_factory_make("3gppmux", NULL);
        break;
    case VIDEO_FILE_TYPE_AVI:
        videoMuxer = gst_element_factory_make("avimux", NULL);
        break;
    case VIDEO_FILE_TYPE_MKV:
        videoMuxer = gst_element_factory_make("matroskamux", NULL);
        break;
    default:
        ORIGINATE_ERROR("Unhandled video file type");
    }
    if (!videoMuxer)
        ORIGINATE_ERROR("Failed to create video muxer");
    unrefer.set(videoMuxer);
    if (!gst_bin_add(GST_BIN(m_pipeline), videoMuxer))
        ORIGINATE_ERROR("Failed to add video muxer to pipeline");
    unrefer.cancel();

    // create the sink
    GstElement *videoSink = gst_element_factory_make("filesink", NULL);
    if (!videoSink)
        ORIGINATE_ERROR("Failed to create video sink");
    unrefer.set(videoSink);
    if (!gst_bin_add(GST_BIN(m_pipeline), videoSink))
        ORIGINATE_ERROR("Failed to add video sink to pipeline");
    unrefer.cancel();

    g_object_set(G_OBJECT(videoSink), "location", videoFileName.c_str(), NULL);

    // @todo 'Floating point exception' and error 'Framerate set to : 0 at
    // NvxVideoEncoderSetParameter' when no setting the framerate. '0' should be VFR, use 30
    // instead
    if (frameRate == 0.0f)
        frameRate = 30.0f;

    // create a caps filter; the framerate fraction keeps two decimal places
    // of precision (rate*100 / 100)
    GstCaps *caps = gst_caps_new_simple("video/x-raw",
        "format", G_TYPE_STRING, "NV12",
        "width", G_TYPE_INT, width,
        "height", G_TYPE_INT, height,
        "framerate", GST_TYPE_FRACTION, static_cast<gint>(frameRate * 100.f), 100,
        NULL);
    if (!caps)
        ORIGINATE_ERROR("Failed to create caps");

    // request NVMM (NVIDIA hardware) buffers from the source
    GstCapsFeatures *feature = gst_caps_features_new("memory:NVMM", NULL);
    if (!feature)
    {
        gst_caps_unref(caps);
        ORIGINATE_ERROR("Failed to create caps feature");
    }

    // caps takes ownership of 'feature' here
    gst_caps_set_features(caps, 0, feature);

    // link the source to the queue via the capture filter
    if (!gst_element_link_filtered(videoSource, queue, caps))
    {
        gst_caps_unref(caps);
        ORIGINATE_ERROR("Failed to link source to queue");
    }
    gst_caps_unref(caps);

    // link the queue to the encoder
    if (!gst_element_link(queue, videoEncoder))
        ORIGINATE_ERROR("Failed to link queue to encoder");

    // link the encoder pad to the muxer, through the parser when one exists
    if (videoParse)
    {
        if (!gst_element_link(videoEncoder, videoParse))
            ORIGINATE_ERROR("Failed to link encoder to parser");

        if (!gst_element_link(videoParse, videoMuxer))
            ORIGINATE_ERROR("Failed to link parser to muxer");
    }
    else
    {
        if (!gst_element_link(videoEncoder, videoMuxer))
            ORIGINATE_ERROR("Failed to link encoder to muxer");
    }

    // link the muxer to the sink
    if (!gst_element_link(videoMuxer, videoSink))
        ORIGINATE_ERROR("Failed to link muxer to sink");

    return true;
#else // GST_SUPPORTED
    ORIGINATE_ERROR("Not supported");
#endif // GST_SUPPORTED
}
353 
#ifdef GST_SUPPORTED
/**
 * Modify object flag values by name.
 *
 * Looks up the flags-typed property @a flagName on @a obj, then sets or clears
 * the flag bit whose nick is @a valueName.
 *
 * @param obj       object whose property is modified
 * @param flagName  name of a flags-typed property on @a obj
 * @param valueName nick of the flag value to set/clear
 * @param set       true to set the bit, false to clear it
 * @return true on success; originates an error (returning false) if the
 *         property is missing, not a flags type, or the value nick is unknown
 */
static bool objectModifyFlags(GObject *obj, const char *flagName, const char *valueName, bool set)
{
    guint count = 0;
    GParamSpec **specs = g_object_class_list_properties(G_OBJECT_GET_CLASS(obj), &count);

    // Find the named property first, then free the array: the array itself is
    // owned by the caller (it was leaked on every path in the original code),
    // while the GParamSpec pointers inside stay owned by the class and remain
    // valid after g_free().
    GParamSpec *param = NULL;
    for (guint index = 0; index < count; ++index)
    {
        if (strcmp(specs[index]->name, flagName) == 0)
        {
            param = specs[index];
            break;
        }
    }
    g_free(specs);

    if (!param)
        ORIGINATE_ERROR("Param '%s' not found", flagName);

    if (!G_IS_PARAM_SPEC_FLAGS(param))
        ORIGINATE_ERROR("Param '%s' is not a flag", flagName);

    GParamSpecFlags *pflags = G_PARAM_SPEC_FLAGS(param);
    GFlagsValue *value = g_flags_get_value_by_nick(pflags->flags_class, valueName);
    if (!value)
        ORIGINATE_ERROR("Value '%s' of flag '%s' not found", valueName, flagName);

    // read-modify-write the flags property
    gint flags = 0;
    g_object_get(obj, flagName, &flags, NULL);
    if (set)
        flags |= value->value;
    else
        flags &= ~value->value;
    g_object_set(obj, flagName, flags, NULL);

    return true;
}
#endif // GST_SUPPORTED
391 
/**
 * Build a playback pipeline around 'playbin' that renders video into an EGL
 * stream via a custom sink bin (nvvidconv -> nvvideosink).
 *
 * @param videoStream [out] receives the EGL stream created by the video sink
 * @param fileName    media file to play (converted to a URI for playbin)
 * @return true on success; on failure an error is originated and false returned
 */
bool VideoPipeline::setupForPlayback(EGLStreamKHR *videoStream, const char *fileName)
{
#ifdef GST_SUPPORTED
    // Init gstreamer
    gst_init(NULL, NULL);

    // Create the source element; playbin is itself a pipeline
    m_pipeline = gst_element_factory_make("playbin", "play");
    if (!m_pipeline)
        ORIGINATE_ERROR("Failed to create playback pipeline");

    // set the uri
    char *uri = gst_filename_to_uri(fileName, NULL);
    g_object_set(G_OBJECT(m_pipeline), "uri", uri, NULL);
    g_free(uri);
    uri = NULL;

    // disable subtitles, force native (non-converted) video
    PROPAGATE_ERROR(objectModifyFlags(G_OBJECT(m_pipeline), "flags", "text", false));
    PROPAGATE_ERROR(objectModifyFlags(G_OBJECT(m_pipeline), "flags", "native-video", true));

    // create the audio sink
    GstElement *audioSink = gst_element_factory_make("autoaudiosink", "audio_sink");
    if (!audioSink)
        ORIGINATE_ERROR("Failed to create audio sink");

    // set the audio sink of the pipeline
    g_object_set(G_OBJECT(m_pipeline), "audio-sink", audioSink, NULL);

    // Create the sink bin, this will hold the video converter and the video sink
    GstElement *videoSinkBin = gst_bin_new("video_sink_bin");
    if (!videoSinkBin)
        ORIGINATE_ERROR("Failed to create video sink bin");

    // set the video sink of the pipeline
    g_object_set(G_OBJECT(m_pipeline), "video-sink", videoSinkBin, NULL);

    // Create the video converter
    GstElement *videoConvert = gst_element_factory_make("nvvidconv", "video converter");
    if (!videoConvert)
        ORIGINATE_ERROR("Failed to create video converter");
    // guard the floating ref until the bin takes ownership
    GstUnrefer<GstElement> unrefer(videoConvert);
    if (!gst_bin_add(GST_BIN(videoSinkBin), videoConvert))
        ORIGINATE_ERROR("Failed to add video convert to video sink bin");
    unrefer.cancel();

    // Create the video sink
    GstElement *videoSink = gst_element_factory_make("nvvideosink", "video sink");
    if (!videoSink)
        ORIGINATE_ERROR("Failed to create video sink");
    unrefer.set(videoSink);
    if (!gst_bin_add(GST_BIN(videoSinkBin), videoSink))
        ORIGINATE_ERROR("Failed to add video sink to video sink bin");
    unrefer.cancel();

    // configure video sink
    g_object_set(G_OBJECT(videoSink), "display", Composer::getInstance().getEGLDisplay(), NULL);
    // get the EGL stream created by nvvideosink
    *videoStream = EGL_NO_STREAM_KHR;
    g_object_get(G_OBJECT(videoSink), "stream", videoStream, NULL);
    if (*videoStream == EGL_NO_STREAM_KHR)
        ORIGINATE_ERROR("Failed to get EGL stream from video sink");

    if (!gst_element_link(videoConvert, videoSink))
        ORIGINATE_ERROR("Failed to link video convert to video sink");

    // create a ghost pad so that the pipeline can connect to the bin as a sink
    GstPad *pad = gst_element_get_static_pad(videoConvert, "sink");
    if (!pad)
        ORIGINATE_ERROR("Failed to get sink pad of video convert");
    GstUnrefer<GstPad> padUnrefer(pad);
    GstPad *ghostPad = gst_ghost_pad_new("sink", pad);
    if (!ghostPad)
        ORIGINATE_ERROR("Failed to create the ghost pad");
    GstUnrefer<GstPad> ghostPadUnrefer(ghostPad);
    if (!gst_pad_set_active(ghostPad, TRUE))
        ORIGINATE_ERROR("Failed to set pad active");
    if (!gst_element_add_pad(videoSinkBin, ghostPad))
        ORIGINATE_ERROR("Failed to add pad");
    // the bin now owns the ghost pad; the static pad ref can be dropped
    ghostPadUnrefer.cancel();
    padUnrefer.release();

    return true;
#else // GST_SUPPORTED
    ORIGINATE_ERROR("Not supported");
#endif // GST_SUPPORTED
}
478 
480 {
481 #ifdef GST_SUPPORTED
482  if (!m_pipeline)
483  ORIGINATE_ERROR("Video pipeline is not set up");
484 
485  if (m_state != GST_STATE_PLAYING)
486  {
487  // set to playing state
488  if (gst_element_set_state(m_pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE)
489  ORIGINATE_ERROR("Failed to set playing state");
490 
491  m_state = GST_STATE_PLAYING;
492 
493  /* Dump Capture - Playing Pipeline into the dot file
494  * Set environment variable "export GST_DEBUG_DUMP_DOT_DIR=/tmp"
495  * Run argus_camera and 0.00.00.*-argus_camera.dot
496  * file will be generated.
497  * Run "dot -Tpng 0.00.00.*-argus_camera.dot > image.png"
498  * image.png will display the running capture pipeline.
499  * */
500  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline),
501  GST_DEBUG_GRAPH_SHOW_ALL, "argus_camera");
502  }
503 
504  return true;
505 #else // GST_SUPPORTED
506  ORIGINATE_ERROR("Not supported");
507 #endif // GST_SUPPORTED
508 }
509 
511 {
512 #ifdef GST_SUPPORTED
513  if (!m_pipeline)
514  ORIGINATE_ERROR("Video pipeline is not set up");
515 
516  if (m_state != GST_STATE_PAUSED)
517  {
518  if (gst_element_set_state(m_pipeline, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE)
519  ORIGINATE_ERROR("Failed to set pause state");
520  m_state = GST_STATE_PAUSED;
521  }
522 
523  return true;
524 #else // GST_SUPPORTED
525  ORIGINATE_ERROR("Not supported");
526 #endif // GST_SUPPORTED
527 }
528 
529 
531 {
532 #ifdef GST_SUPPORTED
533  if (!m_pipeline)
534  ORIGINATE_ERROR("Video pipeline is not set up");
535 
536  GstState newState = GST_STATE_NULL;
537  if (m_state == GST_STATE_PLAYING)
538  newState = GST_STATE_PAUSED;
539  else if (m_state == GST_STATE_PAUSED)
540  newState = GST_STATE_PLAYING;
541  else
542  ORIGINATE_ERROR("Invalid state");
543 
544  if (gst_element_set_state(m_pipeline, newState) == GST_STATE_CHANGE_FAILURE)
545  ORIGINATE_ERROR("Failed to set pause state");
546 
547  m_state = newState;
548 
549  return true;
550 #else // GST_SUPPORTED
551  ORIGINATE_ERROR("Not supported");
552 #endif // GST_SUPPORTED
553 }
554 
556 {
557 #ifdef GST_SUPPORTED
558  if (!m_pipeline)
559  ORIGINATE_ERROR("Video pipeline is not set up");
560 
561  if (!gst_element_seek_simple(m_pipeline, GST_FORMAT_TIME,
562  static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT), 0))
563  {
564  ORIGINATE_ERROR("Failed to rewind");
565  }
566 
567  return true;
568 #else // GST_SUPPORTED
569  ORIGINATE_ERROR("Not supported");
570 #endif // GST_SUPPORTED
571 }
572 
574 {
575 #ifdef GST_SUPPORTED
576  if (!m_pipeline)
577  ORIGINATE_ERROR("Video pipeline is not set up");
578 
579  if ((m_state == GST_STATE_PLAYING) || (m_state == GST_STATE_PAUSED))
580  {
581  // check if there is a video encoder
582  GstElement *videoEncoder = gst_bin_get_by_name(GST_BIN(m_pipeline), s_videoEncoderName);
583  if (videoEncoder)
584  {
585  // send the end of stream event
586  GstPad *pad = gst_element_get_static_pad(videoEncoder, "sink");
587  if (!pad)
588  ORIGINATE_ERROR("Failed to get 'sink' pad");
589  GstUnrefer<GstPad> padUnrefer(pad);
590  if (!gst_pad_send_event(pad, gst_event_new_eos()))
591  ORIGINATE_ERROR("Failed to send end of stream event encoder");
592  padUnrefer.release();
593 
594  // wait for the event to go through
595  GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
596  if (!bus)
597  ORIGINATE_ERROR("Failed to get bus");
598  GstUnrefer<GstBus> busUnrefer(bus);
599  if (!gst_bus_poll(bus, GST_MESSAGE_EOS, GST_CLOCK_TIME_NONE))
600  ORIGINATE_ERROR("Failed to wait for the eof event");
601  busUnrefer.release();
602  }
603 
604  // stop the pipeline
605  if (gst_element_set_state(m_pipeline, GST_STATE_NULL) != GST_STATE_CHANGE_SUCCESS)
606  ORIGINATE_ERROR("Failed to stop pipeline");
607 
608  m_state = GST_STATE_NULL;
609  }
610 
611  return true;
612 #else // GST_SUPPORTED
613  ORIGINATE_ERROR("Not supported");
614 #endif // GST_SUPPORTED
615 }
616 
618 {
619 #ifdef GST_SUPPORTED
620  if (m_pipeline)
621  {
622  PROPAGATE_ERROR(stop());
623 
624  // delete pipeline
625  gst_object_unref(GST_OBJECT(m_pipeline));
626 
627  m_pipeline = NULL;
628  }
629 
630  return true;
631 #else // GST_SUPPORTED
632  ORIGINATE_ERROR("Not supported");
633 #endif // GST_SUPPORTED
634 }
635 
636 /*static*/ const char* VideoPipeline::getFileExtension(VideoFileType fileType)
637 {
638  switch (fileType)
639  {
640  case VIDEO_FILE_TYPE_MP4:
641  return "mp4";
642  case VIDEO_FILE_TYPE_3GP:
643  return "3gp";
644  case VIDEO_FILE_TYPE_AVI:
645  return "avi";
646  case VIDEO_FILE_TYPE_MKV:
647  return "mkv";
649  return "h265";
650  default:
651  break;
652  }
653 
654  return "Unhandled video file type";
655 }
656 
657 bool VideoPipeline::getAspectRatio(float *aspectRatio) const
658 {
659  if (aspectRatio == NULL)
660  ORIGINATE_ERROR("'aspectRatio' is NULL");
661 #ifdef GST_SUPPORTED
662  if ((m_state != GST_STATE_PLAYING) && (m_state != GST_STATE_PAUSED))
663  ORIGINATE_ERROR("Must be in paused or playing state.");
664 
665  GstState state = GST_STATE_NULL;
666  while ((state != GST_STATE_PLAYING) && (state != GST_STATE_PAUSED))
667  {
668  if (gst_element_get_state(m_pipeline, &state, NULL, GST_CLOCK_TIME_NONE) ==
669  GST_STATE_CHANGE_FAILURE)
670  {
671  ORIGINATE_ERROR("gst_element_get_state failed");
672  }
673  }
674 
675  // Retrieve the Caps at the entrance of the video sink
676  GstElement *videoSink;
677  g_object_get(m_pipeline, "video-sink", &videoSink, NULL);
678  if (!videoSink)
679  ORIGINATE_ERROR("Failed to get video-sink");
680  GstUnrefer<GstElement> videoSinkUnrefer(videoSink);
681 
682  GstPad *videoSinkPad = gst_element_get_static_pad(videoSink, "sink");
683  if (!videoSinkPad)
684  ORIGINATE_ERROR("Failed to get video-sink pad");
685 
686  GstCaps *caps = gst_pad_get_current_caps(videoSinkPad);
687  if (!caps)
688  ORIGINATE_ERROR("Failed to get video-sink pad caps");
689 
690  *aspectRatio = 1.0f;
691 
692  GstStructure *structure = gst_caps_get_structure(caps, 0);
693  if (!structure)
694  {
695  gst_caps_unref(caps);
696  ORIGINATE_ERROR("Failed to get caps structure");
697  }
698 
699  gint width, height;
700  gint pixelAspectRatioNumerator, pixelAspectRatioDenominator;
701 
702  if (!gst_structure_get_int(structure, "width", &width) ||
703  !gst_structure_get_int(structure, "height", &height) ||
704  !gst_structure_get_fraction(structure, "pixel-aspect-ratio",
705  &pixelAspectRatioNumerator, &pixelAspectRatioDenominator))
706  {
707  gst_caps_unref(caps);
708  ORIGINATE_ERROR("Failed to get structure values");
709  }
710 
711  *aspectRatio = (float)width / (float)height;
712  *aspectRatio *= (float)pixelAspectRatioNumerator / (float)pixelAspectRatioDenominator;
713 
714  gst_caps_unref(caps);
715 
716  return true;
717 #else // GST_SUPPORTED
718  ORIGINATE_ERROR("Not supported");
719 #endif // GST_SUPPORTED
720 }
721 
723 {
724 #ifdef GST_SUPPORTED
725  return true;
726 #else // GST_SUPPORTED
727  return false;
728 #endif // GST_SUPPORTED
729 }
730 
731 }; // namespace ArgusSamples