Argus Camera Sample
Argus Camera Sample
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
VideoPipeline.cpp
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2016-2021, NVIDIA CORPORATION. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * * Redistributions of source code must retain the above copyright
8  * notice, this list of conditions and the following disclaimer.
9  * * Redistributions in binary form must reproduce the above copyright
10  * notice, this list of conditions and the following disclaimer in the
11  * documentation and/or other materials provided with the distribution.
12  * * Neither the name of NVIDIA CORPORATION nor the names of its
13  * contributors may be used to endorse or promote products derived
14  * from this software without specific prior written permission.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
17  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
19  * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
20  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
21  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
22  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
23  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
24  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27  */
28 
29 #include <stdio.h>
30 
31 #include <string>
32 
33 #include "Error.h"
34 #include "VideoPipeline.h"
35 #include "Composer.h"
36 #include "Util.h"
37 
38 namespace ArgusSamples
39 {
40 
42 #ifdef GST_SUPPORTED
43  : m_state(GST_STATE_NULL)
44  , m_pipeline(NULL)
45 #endif
46 {
47 }
48 
50 {
51  destroy();
52 }
53 
///! Give the video encoder a name so we can find it again at stop().
static const char *s_videoEncoderName = "video encoder";
56 
57 /**
58  * RAII helper class for calling gst_object_unref on exit from a block or function.
59  */
60 template <typename T> class GstUnrefer
61 {
62 public:
63  explicit GstUnrefer(T * p)
64  : m_p(p)
65  {
66  }
68  : m_p(NULL)
69  {
70  }
72  {
73  release();
74  }
75 
76  /// Cancel the unref.
77  void cancel()
78  {
79  m_p = NULL;
80  }
81 
82  /// Unref the object now.
83  void release()
84  {
85  if (m_p)
86  gst_object_unref(m_p);
87  m_p = NULL;
88  }
89 
90  /// Set the object to be unrefed.
91  void set(T* p)
92  {
93  release();
94  m_p = p;
95  }
96 
97  /// Get the object.
98  T * get() const
99  {
100  return m_p;
101  }
102 
103 private:
104  T *m_p;
105 
106  /// Not implemented -- use default constructor
107  GstUnrefer(GstUnrefer& other);
108  /// Not implemented
110 };
111 
112 bool VideoPipeline::setupForRecording(EGLStreamKHR videoStream, uint32_t width, uint32_t height,
113  float frameRate, const char *fileName, VideoFormat videoFormat,
114  VideoFileType videoFileType, uint32_t bitRate, VideoControlRateMode controlRate,
115  bool enableTwoPassCBR)
116 {
117 #ifdef GST_SUPPORTED
118  // set the filename
119  std::string videoFileName(fileName);
120  if (videoFileName != "/dev/null")
121  {
122  videoFileName += ".";
123  videoFileName += getFileExtension(videoFileType);
124  PROPAGATE_ERROR(validateOutputPath(videoFileName.c_str()));
125  }
126 
127  // Init gstreamer
128  gst_init(NULL, NULL);
129 
130  // create the pipeline
131  m_pipeline = gst_pipeline_new("video_pipeline");
132  if (!m_pipeline)
133  ORIGINATE_ERROR("Failed to create video pipeline");
134 
135  // Create the capture source element
136  GstElement *videoSource = gst_element_factory_make("nveglstreamsrc", NULL);
137  if (!videoSource)
138  ORIGINATE_ERROR("Failed to create capture source element");
139  GstUnrefer<GstElement> unrefer(videoSource);
140  if (!gst_bin_add(GST_BIN(m_pipeline), videoSource))
141  ORIGINATE_ERROR("Failed to add video source to pipeline");
142  unrefer.cancel();
143 
144  g_object_set(G_OBJECT(videoSource), "display", Composer::getInstance().getEGLDisplay(), NULL);
145  g_object_set(G_OBJECT(videoSource), "eglstream", videoStream, NULL);
146 
147  // Create queue
148  GstElement *queue = gst_element_factory_make("queue", NULL);
149  if (!queue)
150  ORIGINATE_ERROR("Failed to create queue");
151  unrefer.set(queue);
152  if (!gst_bin_add(GST_BIN(m_pipeline), queue))
153  ORIGINATE_ERROR("Failed to add queue to pipeline");
154  unrefer.cancel();
155 
156  // create the encoder
157  GstElement *videoEncoder = NULL;
158  switch (videoFormat)
159  {
160  case VIDEO_FORMAT_H264:
161  videoEncoder = gst_element_factory_make("nvv4l2h264enc", s_videoEncoderName);
162  break;
163  case VIDEO_FORMAT_H265:
164  videoEncoder = gst_element_factory_make("nvv4l2h265enc", s_videoEncoderName);
165  break;
166  case VIDEO_FORMAT_VP8:
167  printf("\n***vp8 encode is not supported for Jetson-Xavier & beyond\n");
168  videoEncoder = gst_element_factory_make("nvv4l2vp8enc", s_videoEncoderName);
169  break;
170  case VIDEO_FORMAT_VP9:
171  videoEncoder = gst_element_factory_make("nvv4l2vp9enc", s_videoEncoderName);
172  break;
173  default:
174  ORIGINATE_ERROR("Unhandled video format");
175  }
176  if (!videoEncoder)
177  ORIGINATE_ERROR("Failed to create video encoder");
178  unrefer.set(videoEncoder);
179  if (!gst_bin_add(GST_BIN(m_pipeline), videoEncoder))
180  ORIGINATE_ERROR("Failed to add video encoder to pipeline");
181  unrefer.cancel();
182 
183  // if no bitrate is given select from reasonable presets
184  if (bitRate == 0)
185  {
186  if (height < 720)
187  bitRate = VIDEO_BITRATE_4M;
188  else if (height < 1080)
189  bitRate = VIDEO_BITRATE_8M;
190  else if (height <= 2160)
191  bitRate = VIDEO_BITRATE_14M;
192  else
193  bitRate = VIDEO_BITRATE_20M;
194  }
195 
196  g_object_set(G_OBJECT(videoEncoder), "bitrate", bitRate, NULL);
197  g_object_set(G_OBJECT(videoEncoder), "control-rate", controlRate, NULL);
198  g_object_set(G_OBJECT(videoEncoder), "EnableTwopassCBR", enableTwoPassCBR, NULL);
199 
200  /*
201  * Currently, of all the supported videoEncoders above: H264, H265, VP8 and VP9, Only H265
202  * supports resolution > 4k.
203  */
204  const uint32_t WIDTH_4K = 3840;
205  if (width > WIDTH_4K && videoFormat != VIDEO_FORMAT_H265)
206  {
207  ORIGINATE_ERROR("\n Resolution > 4k requires videoformat H265 \n");
208  }
209  // set video encoding profile for h.264 to high to get optmized video quality
210  if (videoFormat == VIDEO_FORMAT_H264)
211  {
212  g_object_set(G_OBJECT(videoEncoder), "profile", VIDEO_AVC_PROFILE_HIGH, NULL);
213  }
214 
215  // create the muxer
216  if (videoFormat == VIDEO_FORMAT_VP9)
217  {
218  printf("\nThe VP9 video format is not supported on Jetson-tx1.\n");
219  }
220 
221  if ((videoFileType == VIDEO_FILE_TYPE_3GP) &&
222  !(videoFormat == VIDEO_FORMAT_H264))
223  {
224  printf("\nThe 3GP is only supported with H264 in current GST version. "
225  "Selecting other containers.\n");
226  videoFileType = VIDEO_FILE_TYPE_MKV;
227  }
228 
229  if ((videoFileType == VIDEO_FILE_TYPE_AVI) &&
230  (videoFormat == VIDEO_FORMAT_H265))
231  {
232  printf("\nThe AVI is not supported with H265 in current GST version. "
233  "Selecting other containers.\n");
234  videoFileType = VIDEO_FILE_TYPE_MKV;
235  }
236 
237  GstElement *videoParse = NULL;
238  switch (videoFormat)
239  {
240  case VIDEO_FORMAT_H264:
241  videoParse = gst_element_factory_make("h264parse", NULL);
242  if (!videoParse)
243  ORIGINATE_ERROR("Failed to create video parser");
244  break;
245  case VIDEO_FORMAT_H265:
246  videoParse = gst_element_factory_make("h265parse", NULL);
247  if (!videoParse)
248  ORIGINATE_ERROR("Failed to create video parser");
249  break;
250  case VIDEO_FORMAT_VP9:
251  case VIDEO_FORMAT_VP8:
252  break;
253  default:
254  ORIGINATE_ERROR("Unhandled video file type");
255  }
256  if (videoParse)
257  {
258  unrefer.set(videoParse);
259  if (!gst_bin_add(GST_BIN(m_pipeline), videoParse))
260  ORIGINATE_ERROR("Failed to add video parser to pipeline");
261  unrefer.cancel();
262  }
263 
264  GstElement *videoMuxer = NULL;
265  switch (videoFileType)
266  {
267  case VIDEO_FILE_TYPE_MP4:
268  videoMuxer = gst_element_factory_make("qtmux", NULL);
269  break;
270  case VIDEO_FILE_TYPE_3GP:
271  videoMuxer = gst_element_factory_make("3gppmux", NULL);
272  break;
273  case VIDEO_FILE_TYPE_AVI:
274  videoMuxer = gst_element_factory_make("avimux", NULL);
275  break;
276  case VIDEO_FILE_TYPE_MKV:
277  videoMuxer = gst_element_factory_make("matroskamux", NULL);
278  break;
279  default:
280  ORIGINATE_ERROR("Unhandled video file type");
281  }
282  if (!videoMuxer)
283  ORIGINATE_ERROR("Failed to create video muxer");
284  unrefer.set(videoMuxer);
285  if (!gst_bin_add(GST_BIN(m_pipeline), videoMuxer))
286  ORIGINATE_ERROR("Failed to add video muxer to pipeline");
287  unrefer.cancel();
288 
289  // create the sink
290  GstElement *videoSink = gst_element_factory_make("filesink", NULL);
291  if (!videoSink)
292  ORIGINATE_ERROR("Failed to create video sink");
293  unrefer.set(videoSink);
294  if (!gst_bin_add(GST_BIN(m_pipeline), videoSink))
295  ORIGINATE_ERROR("Failed to add video sink to pipeline");
296  unrefer.cancel();
297 
298  g_object_set(G_OBJECT(videoSink), "location", videoFileName.c_str(), NULL);
299 
300  // @todo 'Floating point exception' and error 'Framerate set to : 0 at
301  // NvxVideoEncoderSetParameter' when no setting the framerate. '0' should be VFR, use 30
302  // instead
303  if (frameRate == 0.0f)
304  frameRate = 30.0f;
305 
306  // create a caps filter
307  GstCaps *caps = gst_caps_new_simple("video/x-raw",
308  "format", G_TYPE_STRING, "NV12",
309  "width", G_TYPE_INT, width,
310  "height", G_TYPE_INT, height,
311  "framerate", GST_TYPE_FRACTION, static_cast<gint>(frameRate * 100.f), 100,
312  NULL);
313  if (!caps)
314  ORIGINATE_ERROR("Failed to create caps");
315 
316  GstCapsFeatures *feature = gst_caps_features_new("memory:NVMM", NULL);
317  if (!feature)
318  {
319  gst_caps_unref(caps);
320  ORIGINATE_ERROR("Failed to create caps feature");
321  }
322 
323  gst_caps_set_features(caps, 0, feature);
324 
325  // link the source to the queue via the capture filter
326  if (!gst_element_link_filtered(videoSource, queue, caps))
327  {
328  gst_caps_unref(caps);
329  ORIGINATE_ERROR("Failed to link source to queue");
330  }
331  gst_caps_unref(caps);
332 
333  // link the queue to the encoder
334  if (!gst_element_link(queue, videoEncoder))
335  ORIGINATE_ERROR("Failed to link queue to encoder");
336 
337  // link the encoder pad to the muxer
338  if (videoParse)
339  {
340  if (!gst_element_link(videoEncoder, videoParse))
341  ORIGINATE_ERROR("Failed to link encoder to parser");
342 
343  if (!gst_element_link(videoParse, videoMuxer))
344  ORIGINATE_ERROR("Failed to link parser to muxer");
345  }
346  else
347  {
348  if (!gst_element_link(videoEncoder, videoMuxer))
349  ORIGINATE_ERROR("Failed to link encoder to muxer");
350  }
351 
352  // link the muxer to the sink
353  if (!gst_element_link(videoMuxer, videoSink))
354  ORIGINATE_ERROR("Failed to link muxer to sink");
355 
356  return true;
357 #else // GST_SUPPORTED
358  ORIGINATE_ERROR("Not supported");
359 #endif // GST_SUPPORTED
360 }
361 
#ifdef GST_SUPPORTED
/**
 * Modify object flag values by name.
 *
 * @param obj       object whose flags-typed property is modified
 * @param flagName  name of the flags property (e.g. "flags")
 * @param valueName nick of the flag bit to modify (e.g. "text")
 * @param set       true to set the bit, false to clear it
 */
static bool objectModifyFlags(GObject *obj, const char *flagName, const char *valueName, bool set)
{
    guint count = 0;
    GParamSpec **specs = g_object_class_list_properties(G_OBJECT_GET_CLASS(obj), &count);

    // Locate the requested property. The individual GParamSpecs are owned by
    // the class, but the returned array itself must be freed with g_free()
    // (previously leaked on every call).
    GParamSpec *param = NULL;
    for (guint index = 0; index < count; ++index)
    {
        if (strcmp(specs[index]->name, flagName) == 0)
        {
            param = specs[index];
            break;
        }
    }
    g_free(specs);

    if (!param)
        ORIGINATE_ERROR("Param '%s' not found", flagName);

    if (!G_IS_PARAM_SPEC_FLAGS(param))
        ORIGINATE_ERROR("Param '%s' is not a flag", flagName);

    GParamSpecFlags *pflags = G_PARAM_SPEC_FLAGS(param);
    GFlagsValue *value = g_flags_get_value_by_nick(pflags->flags_class, valueName);
    if (!value)
        ORIGINATE_ERROR("Value '%s' of flag '%s' not found", valueName, flagName);

    // Read-modify-write the flags property.
    gint flags = 0;
    g_object_get(obj, flagName, &flags, NULL);
    if (set)
        flags |= value->value;
    else
        flags &= ~value->value;
    g_object_set(obj, flagName, flags, NULL);

    return true;
}
#endif // GST_SUPPORTED
399 
400 bool VideoPipeline::setupForPlayback(EGLStreamKHR *videoStream, const char *fileName)
401 {
402 #ifdef GST_SUPPORTED
403  // Init gstreamer
404  gst_init(NULL, NULL);
405 
406  // Create the source element
407  m_pipeline = gst_element_factory_make("playbin", "play");
408  if (!m_pipeline)
409  ORIGINATE_ERROR("Failed to create playback pipeline");
410 
411  // set the uri
412  char *uri = gst_filename_to_uri(fileName, NULL);
413  g_object_set(G_OBJECT(m_pipeline), "uri", uri, NULL);
414  g_free(uri);
415  uri = NULL;
416 
417  PROPAGATE_ERROR(objectModifyFlags(G_OBJECT(m_pipeline), "flags", "text", false));
418  PROPAGATE_ERROR(objectModifyFlags(G_OBJECT(m_pipeline), "flags", "native-video", true));
419 
420  // create the audio sink
421  GstElement *audioSink = gst_element_factory_make("autoaudiosink", "audio_sink");
422  if (!audioSink)
423  ORIGINATE_ERROR("Failed to create audio sink");
424 
425  // set the audio sink of the pipeline
426  g_object_set(G_OBJECT(m_pipeline), "audio-sink", audioSink, NULL);
427 
428  // Create the sink bin, this will hold the video converter and the video sink
429  GstElement *videoSinkBin = gst_bin_new("video_sink_bin");
430  if (!videoSinkBin)
431  ORIGINATE_ERROR("Failed to create video sink bin");
432 
433  // set the video sink of the pipeline
434  g_object_set(G_OBJECT(m_pipeline), "video-sink", videoSinkBin, NULL);
435 
436  // Create the video converted
437  GstElement *videoConvert = gst_element_factory_make("nvvidconv", "video converter");
438  if (!videoConvert)
439  ORIGINATE_ERROR("Failed to create video converter");
440  GstUnrefer<GstElement> unrefer(videoConvert);
441  if (!gst_bin_add(GST_BIN(videoSinkBin), videoConvert))
442  ORIGINATE_ERROR("Failed to add video convert to video sink bin");
443  unrefer.cancel();
444 
445  // Create the video sink
446  GstElement *videoSink = gst_element_factory_make("nvvideosink", "video sink");
447  if (!videoSink)
448  ORIGINATE_ERROR("Failed to create video sink");
449  unrefer.set(videoSink);
450  if (!gst_bin_add(GST_BIN(videoSinkBin), videoSink))
451  ORIGINATE_ERROR("Failed to add video sink to video sink bin");
452  unrefer.cancel();
453 
454  // configure video sink
455  g_object_set(G_OBJECT(videoSink), "display", Composer::getInstance().getEGLDisplay(), NULL);
456  // get the EGL stream
457  *videoStream = EGL_NO_STREAM_KHR;
458  g_object_get(G_OBJECT(videoSink), "stream", videoStream, NULL);
459  if (*videoStream == EGL_NO_STREAM_KHR)
460  ORIGINATE_ERROR("Failed to get EGL stream from video sink");
461 
462  if (!gst_element_link(videoConvert, videoSink))
463  ORIGINATE_ERROR("Failed to link video convert to video sink");
464 
465  // create a ghost pad so that the pipeline can connect to the bin as a sink
466  GstPad *pad = gst_element_get_static_pad(videoConvert, "sink");
467  if (!pad)
468  ORIGINATE_ERROR("Failed to get sink pad of video convert");
469  GstUnrefer<GstPad> padUnrefer(pad);
470  GstPad *ghostPad = gst_ghost_pad_new("sink", pad);
471  if (!ghostPad)
472  ORIGINATE_ERROR("Failed to create the ghost pad");
473  GstUnrefer<GstPad> ghostPadUnrefer(ghostPad);
474  if (!gst_pad_set_active(ghostPad, TRUE))
475  ORIGINATE_ERROR("Failed to set pad active");
476  if (!gst_element_add_pad(videoSinkBin, ghostPad))
477  ORIGINATE_ERROR("Failed to add pad");
478  ghostPadUnrefer.cancel();
479  padUnrefer.release();
480 
481  return true;
482 #else // GST_SUPPORTED
483  ORIGINATE_ERROR("Not supported");
484 #endif // GST_SUPPORTED
485 }
486 
488 {
489 #ifdef GST_SUPPORTED
490  if (!m_pipeline)
491  ORIGINATE_ERROR("Video pipeline is not set up");
492 
493  if (m_state != GST_STATE_PLAYING)
494  {
495  // set to playing state
496  if (gst_element_set_state(m_pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE)
497  ORIGINATE_ERROR("Failed to set playing state");
498 
499  m_state = GST_STATE_PLAYING;
500 
501  /* Dump Capture - Playing Pipeline into the dot file
502  * Set environment variable "export GST_DEBUG_DUMP_DOT_DIR=/tmp"
503  * Run argus_camera and 0.00.00.*-argus_camera.dot
504  * file will be generated.
505  * Run "dot -Tpng 0.00.00.*-argus_camera.dot > image.png"
506  * image.png will display the running capture pipeline.
507  * */
508  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline),
509  GST_DEBUG_GRAPH_SHOW_ALL, "argus_camera");
510  }
511 
512  return true;
513 #else // GST_SUPPORTED
514  ORIGINATE_ERROR("Not supported");
515 #endif // GST_SUPPORTED
516 }
517 
519 {
520 #ifdef GST_SUPPORTED
521  if (!m_pipeline)
522  ORIGINATE_ERROR("Video pipeline is not set up");
523 
524  if (m_state != GST_STATE_PAUSED)
525  {
526  if (gst_element_set_state(m_pipeline, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE)
527  ORIGINATE_ERROR("Failed to set pause state");
528  m_state = GST_STATE_PAUSED;
529  }
530 
531  return true;
532 #else // GST_SUPPORTED
533  ORIGINATE_ERROR("Not supported");
534 #endif // GST_SUPPORTED
535 }
536 
537 
539 {
540 #ifdef GST_SUPPORTED
541  if (!m_pipeline)
542  ORIGINATE_ERROR("Video pipeline is not set up");
543 
544  GstState newState = GST_STATE_NULL;
545  if (m_state == GST_STATE_PLAYING)
546  newState = GST_STATE_PAUSED;
547  else if (m_state == GST_STATE_PAUSED)
548  newState = GST_STATE_PLAYING;
549  else
550  ORIGINATE_ERROR("Invalid state");
551 
552  if (gst_element_set_state(m_pipeline, newState) == GST_STATE_CHANGE_FAILURE)
553  ORIGINATE_ERROR("Failed to set pause state");
554 
555  m_state = newState;
556 
557  return true;
558 #else // GST_SUPPORTED
559  ORIGINATE_ERROR("Not supported");
560 #endif // GST_SUPPORTED
561 }
562 
564 {
565 #ifdef GST_SUPPORTED
566  if (!m_pipeline)
567  ORIGINATE_ERROR("Video pipeline is not set up");
568 
569  if (!gst_element_seek_simple(m_pipeline, GST_FORMAT_TIME,
570  static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT), 0))
571  {
572  ORIGINATE_ERROR("Failed to rewind");
573  }
574 
575  return true;
576 #else // GST_SUPPORTED
577  ORIGINATE_ERROR("Not supported");
578 #endif // GST_SUPPORTED
579 }
580 
582 {
583 #ifdef GST_SUPPORTED
584  if (!m_pipeline)
585  ORIGINATE_ERROR("Video pipeline is not set up");
586 
587  if ((m_state == GST_STATE_PLAYING) || (m_state == GST_STATE_PAUSED))
588  {
589  // check if there is a video encoder
590  GstElement *videoEncoder = gst_bin_get_by_name(GST_BIN(m_pipeline), s_videoEncoderName);
591  if (videoEncoder)
592  {
593  // send the end of stream event
594  GstPad *pad = gst_element_get_static_pad(videoEncoder, "sink");
595  if (!pad)
596  ORIGINATE_ERROR("Failed to get 'sink' pad");
597  GstUnrefer<GstPad> padUnrefer(pad);
598  if (!gst_pad_send_event(pad, gst_event_new_eos()))
599  ORIGINATE_ERROR("Failed to send end of stream event encoder");
600  padUnrefer.release();
601 
602  // wait for the event to go through
603  GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
604  if (!bus)
605  ORIGINATE_ERROR("Failed to get bus");
606  GstUnrefer<GstBus> busUnrefer(bus);
607  if (!gst_bus_poll(bus, GST_MESSAGE_EOS, GST_CLOCK_TIME_NONE))
608  ORIGINATE_ERROR("Failed to wait for the eof event");
609  busUnrefer.release();
610  }
611 
612  // stop the pipeline
613  if (gst_element_set_state(m_pipeline, GST_STATE_NULL) != GST_STATE_CHANGE_SUCCESS)
614  ORIGINATE_ERROR("Failed to stop pipeline");
615 
616  m_state = GST_STATE_NULL;
617  }
618 
619  return true;
620 #else // GST_SUPPORTED
621  ORIGINATE_ERROR("Not supported");
622 #endif // GST_SUPPORTED
623 }
624 
626 {
627 #ifdef GST_SUPPORTED
628  if (m_pipeline)
629  {
630  PROPAGATE_ERROR(stop());
631 
632  // delete pipeline
633  gst_object_unref(GST_OBJECT(m_pipeline));
634 
635  m_pipeline = NULL;
636  }
637 
638  return true;
639 #else // GST_SUPPORTED
640  ORIGINATE_ERROR("Not supported");
641 #endif // GST_SUPPORTED
642 }
643 
644 /*static*/ const char* VideoPipeline::getFileExtension(VideoFileType fileType)
645 {
646  switch (fileType)
647  {
648  case VIDEO_FILE_TYPE_MP4:
649  return "mp4";
650  case VIDEO_FILE_TYPE_3GP:
651  return "3gp";
652  case VIDEO_FILE_TYPE_AVI:
653  return "avi";
654  case VIDEO_FILE_TYPE_MKV:
655  return "mkv";
657  return "h265";
658  default:
659  break;
660  }
661 
662  return "Unhandled video file type";
663 }
664 
665 bool VideoPipeline::getAspectRatio(float *aspectRatio) const
666 {
667  if (aspectRatio == NULL)
668  ORIGINATE_ERROR("'aspectRatio' is NULL");
669 #ifdef GST_SUPPORTED
670  if ((m_state != GST_STATE_PLAYING) && (m_state != GST_STATE_PAUSED))
671  ORIGINATE_ERROR("Must be in paused or playing state.");
672 
673  GstState state = GST_STATE_NULL;
674  while ((state != GST_STATE_PLAYING) && (state != GST_STATE_PAUSED))
675  {
676  if (gst_element_get_state(m_pipeline, &state, NULL, GST_CLOCK_TIME_NONE) ==
677  GST_STATE_CHANGE_FAILURE)
678  {
679  ORIGINATE_ERROR("gst_element_get_state failed");
680  }
681  }
682 
683  // Retrieve the Caps at the entrance of the video sink
684  GstElement *videoSink;
685  g_object_get(m_pipeline, "video-sink", &videoSink, NULL);
686  if (!videoSink)
687  ORIGINATE_ERROR("Failed to get video-sink");
688  GstUnrefer<GstElement> videoSinkUnrefer(videoSink);
689 
690  GstPad *videoSinkPad = gst_element_get_static_pad(videoSink, "sink");
691  if (!videoSinkPad)
692  ORIGINATE_ERROR("Failed to get video-sink pad");
693 
694  GstCaps *caps = gst_pad_get_current_caps(videoSinkPad);
695  if (!caps)
696  ORIGINATE_ERROR("Failed to get video-sink pad caps");
697 
698  *aspectRatio = 1.0f;
699 
700  GstStructure *structure = gst_caps_get_structure(caps, 0);
701  if (!structure)
702  {
703  gst_caps_unref(caps);
704  ORIGINATE_ERROR("Failed to get caps structure");
705  }
706 
707  gint width, height;
708  gint pixelAspectRatioNumerator, pixelAspectRatioDenominator;
709 
710  if (!gst_structure_get_int(structure, "width", &width) ||
711  !gst_structure_get_int(structure, "height", &height) ||
712  !gst_structure_get_fraction(structure, "pixel-aspect-ratio",
713  &pixelAspectRatioNumerator, &pixelAspectRatioDenominator))
714  {
715  gst_caps_unref(caps);
716  ORIGINATE_ERROR("Failed to get structure values");
717  }
718 
719  *aspectRatio = (float)width / (float)height;
720  *aspectRatio *= (float)pixelAspectRatioNumerator / (float)pixelAspectRatioDenominator;
721 
722  gst_caps_unref(caps);
723 
724  return true;
725 #else // GST_SUPPORTED
726  ORIGINATE_ERROR("Not supported");
727 #endif // GST_SUPPORTED
728 }
729 
731 {
732 #ifdef GST_SUPPORTED
733  return true;
734 #else // GST_SUPPORTED
735  return false;
736 #endif // GST_SUPPORTED
737 }
738 
739 }; // namespace ArgusSamples