Gstreamer to stream local video over IP streaming on imx6q which is not displaying any video out

cancel
Showing results for 
Show  only  | Search instead for 
Did you mean: 

Gstreamer to stream local video over IP streaming on imx6q which is not displaying any video out

Jump to solution
1,118 Views
Naveenprasad
Contributor I

I need to stream a locally stored video over IP using GStreamer and receive the streamed video in the VLC player.

I am using IMX6Q-SL EVK board

The code compiles and executes without any error, but it shows no output and no packets are transmitted.

Kindly help me to get resolved. Code reference given below.

As i am new to the processor kindly bear with my knowledge.

 

 

#include <gst/gst.h>

int main(int argc, char *argv[]) {
  GstElement *pipeline, *source, *decode, *convert, *scale, *encoder, *pay, *sink;
  GstCaps *filtercaps;
  GstBus *bus;
  GstMessage *msg;
  GMainLoop *loop;

  // Initialize GStreamer
  gst_init(&argc, &argv);

  gst_debug_set_threshold_for_name("x264enc", GST_LEVEL_LOG);

  // Create the pipeline
  pipeline = gst_pipeline_new("my-pipeline");

  // Create the source element for reading the video frames from a local file
  source = gst_element_factory_make("filesrc", "source");
  g_object_set(G_OBJECT(source), "location", "Video.mp4", NULL); // set the path to the video file as needed

  // Create the decode element for decoding the video
  decode = gst_element_factory_make("decodebin", "decode");

  // Create the convert element for converting the video format
  convert = gst_element_factory_make("videoconvert", "convert");

  // Create the scale element for resizing the video
  scale = gst_element_factory_make("videoscale", "scale");
  g_object_set(G_OBJECT(scale), "method", 0, NULL);
  g_object_set(G_OBJECT(scale), "add-borders", FALSE, NULL);
  g_object_set(G_OBJECT(scale), "skip-canvas", TRUE, NULL);

  // Create the encoder element for encoding the video with H.264
  encoder = gst_element_factory_make("x264enc", "encoder");
  g_object_set(G_OBJECT(encoder), "tune", 5, NULL); // set encoding parameters as needed

  // Create the pay element for packaging the encoded video into RTP packets
  pay = gst_element_factory_make("rtph264pay", "pay");

  // Create the sink element for sending the RTP packets over UDP
  sink = gst_element_factory_make("udpsink", "sink");
  g_object_set(G_OBJECT(sink), "host", "192.168.100.232", NULL); // set receiver IP address
  g_object_set(G_OBJECT(sink), "port", 554, NULL); // set receiver port number

  // Add all elements to the pipeline
  gst_bin_add_many(GST_BIN(pipeline), source, decode, convert, scale, encoder, pay, sink, NULL);

  // Link the elements together
  gst_element_link(source, decode);
  gst_element_link_many(convert, scale, encoder, pay, sink, NULL);

  // Set the caps filter for the decode element
  filtercaps = gst_caps_new_simple("video/x-h264",
      "stream-format", G_TYPE_STRING, "byte-stream",
      NULL);
  gst_element_link_filtered(decode, convert, filtercaps);
  gst_caps_unref(filtercaps);

  // Start playing the pipeline
  gst_element_set_state(pipeline, GST_STATE_PLAYING);

  // Wait for completion or error
  loop = g_main_loop_new(NULL, FALSE);
  bus = gst_element_get_bus(pipeline);
  msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE, GST_MESSAGE_ERROR);
# if (msg != NULL) {
      gst_message_unref(msg);
  }
  gst_object_unref(bus);
  gst_element_set_state(pipeline, GST_STATE_NULL);
  gst_object_unref(pipeline);
  g_main_loop_unref(loop);

  return 0;
}

 

The program is compiled with the command below:

 

g++ -o Stream Stream_Local.cpp `pkg-config --cflags --libs gstreamer-1.0 gstreamer-app-1.0`

 

0 Kudos
1 Solution
1,067 Views
Naveenprasad
Contributor I

Solution to the problem. for future reference.

#include <gst/gst.h>
#include <glib.h>
#include <string>
#include <iostream>
/* Bus watch callback: quits the main loop on end-of-stream or on an
 * error (printing the error message first).  Always returns TRUE so the
 * watch stays installed. */
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;
  GstMessageType type = GST_MESSAGE_TYPE (msg);

  if (type == GST_MESSAGE_EOS) {
    g_print ("End of stream\n");
    g_main_loop_quit (loop);
  } else if (type == GST_MESSAGE_ERROR) {
    GError *error;
    gchar *debug;

    gst_message_parse_error (msg, &error, &debug);
    g_free (debug);
    g_printerr ("Error: %s\n", error->message);
    g_error_free (error);
    g_main_loop_quit (loop);
  }

  return TRUE;
}

/* "pad-added" handler for qtdemux: links the newly exposed demuxer pad
 * to the h264parse sink pad.
 *
 * Fixes over the original: every gst_caps_to_string() result is a
 * transfer-full allocation and must be g_free'd (the original leaked
 * three of them plus two caps refs per pad); the caps may be NULL and
 * must be checked; only the first (unlinked) pad is connected so an
 * audio pad from the same file cannot steal the link; the link result
 * is checked. */
static void on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
  GstElement *parser = (GstElement *) data;
  GstPad *sinkpad;
  GstCaps *caps;

  g_print ("Dynamic pad created, linking demuxer/parser\n");

  /* Debug: print the new pad's caps, releasing both the string and the
   * caps reference afterwards. */
  caps = gst_pad_get_current_caps (pad);
  if (caps != NULL) {
    gchar *caps_str = gst_caps_to_string (caps);
    g_print ("%s\n", caps_str);
    g_free (caps_str);
    gst_caps_unref (caps);
  }

  sinkpad = gst_element_get_static_pad (parser, "sink");
  if (!gst_pad_is_linked (sinkpad)) {
    if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
      g_printerr ("Failed to link demuxer pad to h264parse\n");
  }
  gst_object_unref (sinkpad);
}

/* Placeholder "pad-added" handler connected to h264parse in main().
 * h264parse's src pad is already linked statically there, so there is
 * nothing to do; the original declared unused locals that only caused
 * compiler warnings. */
static void
on_pad_added_parser (GstElement *element, GstPad *pad, gpointer data)
{
  (void) element;
  (void) pad;
  (void) data;
}

int 
main (int argc, 
char *argv[]) 
{ 
  GMainLoop *loop; 
  GstElement *pipeline, *source, *demuxer, *parser, *decoder, *convert, *encoder, *pay, *sink; 
  GstBus *bus; 
  /* gst Initialisation */ 
  gst_init (&argc, &argv); 
  loop = g_main_loop_new (NULL, FALSE); 
  /* Check input arguments */ 
  std::string filename;
  if (argc != 2) { 
    filename = "/media/Test.mp4";
  }else
    filename = argv[1];

  /* Create gstreamer elements */ 
  pipeline = gst_pipeline_new ("mp4-player"); 
  source = gst_element_factory_make ("filesrc", "file-source"); 
  demuxer = gst_element_factory_make ("qtdemux", "demuxer"); 
  parser = gst_element_factory_make ("h264parse", "parser"); 
  decoder = gst_element_factory_make ("imxvpudec", "decoder"); 
  convert = gst_element_factory_make("imxipuvideotransform", "convert");
  encoder = gst_element_factory_make("imxvpuenc_h264", "encoder");
  pay = gst_element_factory_make("rtph264pay", "pay");
  sink = gst_element_factory_make ("udpsink", "sink"); 
  
  if (!pipeline || !source || !demuxer || !parser || !decoder || !convert || !encoder || !pay || !sink) { 
  g_printerr ("One element could not be created. Exiting.\n"); 
  return -1; 
  } 
  
  /* Set up the pipeline */ 
  /* Set the input to the element */   
  g_object_set (G_OBJECT (source), "location", filename.c_str(),  NULL); 
  g_object_set(G_OBJECT(encoder), "tune", 4, NULL); // set encoding parameters as needed
  g_object_set(G_OBJECT(pay), "config-interval", 1, NULL);
  g_object_set(G_OBJECT(pay), "pt", 96, NULL);
  g_object_set(G_OBJECT(sink), "host", "192.168.100.232", NULL); // set receiver IP address
  g_object_set(G_OBJECT(sink), "port", 5000, NULL); // set receiver port number
  g_object_set(G_OBJECT(sink), "auto-multicast", 0, NULL); // set receiver port number
 
  /* Message handler */ 
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); 
  gst_bus_add_watch (bus, bus_call, loop); 
  gst_object_unref (bus); 
  
  /* Add all elements into the pipeline */ 
  gst_bin_add_many (GST_BIN (pipeline), source, demuxer, parser, decoder, convert, encoder, pay, sink, NULL); 
  
  /* Link the elements together */ 
  gst_element_link (source, demuxer); 
  gst_element_link_many (parser, decoder, convert, encoder, pay, sink, NULL); 
  g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), parser); 
  g_signal_connect (parser, "pad-added", G_CALLBACK (on_pad_added_parser), decoder); 
  
  /* note that the demuxer will be linked to the decoder dynamically. 
  The reason is that Mp4 may contain various streams (for example 
  audio and video). The source pad(s) will be created at run time, 
  by the demuxer when it detects the amount and nature of streams. 
  Therefore we connect a callback function which will be executed 
  when the "pad-added" is emitted.*/ 
  
  /* Set the pipeline to "playing" state*/ 
  g_print ("Now playing: %s\n", filename.c_str()); 
  gst_element_set_state (pipeline, GST_STATE_PLAYING); 
  
  /* Iterate */ 
  g_print ("Running...\n"); 
  g_main_loop_run (loop); 
  
  /* Out of the main loop, clean up nicely */ 
  g_print ("Returned, stopping playback\n"); 
  gst_element_set_state (pipeline, GST_STATE_NULL); 
  g_print ("Deleting pipeline\n"); 
  gst_object_unref (GST_OBJECT (pipeline)); 
  return 0; 
}

View solution in original post

0 Kudos
2 Replies
1,068 Views
Naveenprasad
Contributor I

Solution to the problem. for future reference.

#include <gst/gst.h>
#include <glib.h>
#include <string>
#include <iostream>
/* Bus watch callback: quits the main loop on end-of-stream or on an
 * error (printing the error message first).  Always returns TRUE so the
 * watch stays installed. */
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;
  GstMessageType type = GST_MESSAGE_TYPE (msg);

  if (type == GST_MESSAGE_EOS) {
    g_print ("End of stream\n");
    g_main_loop_quit (loop);
  } else if (type == GST_MESSAGE_ERROR) {
    GError *error;
    gchar *debug;

    gst_message_parse_error (msg, &error, &debug);
    g_free (debug);
    g_printerr ("Error: %s\n", error->message);
    g_error_free (error);
    g_main_loop_quit (loop);
  }

  return TRUE;
}

/* "pad-added" handler for qtdemux: links the newly exposed demuxer pad
 * to the h264parse sink pad.
 *
 * Fixes over the original: every gst_caps_to_string() result is a
 * transfer-full allocation and must be g_free'd (the original leaked
 * three of them plus two caps refs per pad); the caps may be NULL and
 * must be checked; only the first (unlinked) pad is connected so an
 * audio pad from the same file cannot steal the link; the link result
 * is checked. */
static void on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
  GstElement *parser = (GstElement *) data;
  GstPad *sinkpad;
  GstCaps *caps;

  g_print ("Dynamic pad created, linking demuxer/parser\n");

  /* Debug: print the new pad's caps, releasing both the string and the
   * caps reference afterwards. */
  caps = gst_pad_get_current_caps (pad);
  if (caps != NULL) {
    gchar *caps_str = gst_caps_to_string (caps);
    g_print ("%s\n", caps_str);
    g_free (caps_str);
    gst_caps_unref (caps);
  }

  sinkpad = gst_element_get_static_pad (parser, "sink");
  if (!gst_pad_is_linked (sinkpad)) {
    if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
      g_printerr ("Failed to link demuxer pad to h264parse\n");
  }
  gst_object_unref (sinkpad);
}

/* Placeholder "pad-added" handler connected to h264parse in main().
 * h264parse's src pad is already linked statically there, so there is
 * nothing to do; the original declared unused locals that only caused
 * compiler warnings. */
static void
on_pad_added_parser (GstElement *element, GstPad *pad, gpointer data)
{
  (void) element;
  (void) pad;
  (void) data;
}

int 
main (int argc, 
char *argv[]) 
{ 
  GMainLoop *loop; 
  GstElement *pipeline, *source, *demuxer, *parser, *decoder, *convert, *encoder, *pay, *sink; 
  GstBus *bus; 
  /* gst Initialisation */ 
  gst_init (&argc, &argv); 
  loop = g_main_loop_new (NULL, FALSE); 
  /* Check input arguments */ 
  std::string filename;
  if (argc != 2) { 
    filename = "/media/Test.mp4";
  }else
    filename = argv[1];

  /* Create gstreamer elements */ 
  pipeline = gst_pipeline_new ("mp4-player"); 
  source = gst_element_factory_make ("filesrc", "file-source"); 
  demuxer = gst_element_factory_make ("qtdemux", "demuxer"); 
  parser = gst_element_factory_make ("h264parse", "parser"); 
  decoder = gst_element_factory_make ("imxvpudec", "decoder"); 
  convert = gst_element_factory_make("imxipuvideotransform", "convert");
  encoder = gst_element_factory_make("imxvpuenc_h264", "encoder");
  pay = gst_element_factory_make("rtph264pay", "pay");
  sink = gst_element_factory_make ("udpsink", "sink"); 
  
  if (!pipeline || !source || !demuxer || !parser || !decoder || !convert || !encoder || !pay || !sink) { 
  g_printerr ("One element could not be created. Exiting.\n"); 
  return -1; 
  } 
  
  /* Set up the pipeline */ 
  /* Set the input to the element */   
  g_object_set (G_OBJECT (source), "location", filename.c_str(),  NULL); 
  g_object_set(G_OBJECT(encoder), "tune", 4, NULL); // set encoding parameters as needed
  g_object_set(G_OBJECT(pay), "config-interval", 1, NULL);
  g_object_set(G_OBJECT(pay), "pt", 96, NULL);
  g_object_set(G_OBJECT(sink), "host", "192.168.100.232", NULL); // set receiver IP address
  g_object_set(G_OBJECT(sink), "port", 5000, NULL); // set receiver port number
  g_object_set(G_OBJECT(sink), "auto-multicast", 0, NULL); // set receiver port number
 
  /* Message handler */ 
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); 
  gst_bus_add_watch (bus, bus_call, loop); 
  gst_object_unref (bus); 
  
  /* Add all elements into the pipeline */ 
  gst_bin_add_many (GST_BIN (pipeline), source, demuxer, parser, decoder, convert, encoder, pay, sink, NULL); 
  
  /* Link the elements together */ 
  gst_element_link (source, demuxer); 
  gst_element_link_many (parser, decoder, convert, encoder, pay, sink, NULL); 
  g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), parser); 
  g_signal_connect (parser, "pad-added", G_CALLBACK (on_pad_added_parser), decoder); 
  
  /* note that the demuxer will be linked to the decoder dynamically. 
  The reason is that Mp4 may contain various streams (for example 
  audio and video). The source pad(s) will be created at run time, 
  by the demuxer when it detects the amount and nature of streams. 
  Therefore we connect a callback function which will be executed 
  when the "pad-added" is emitted.*/ 
  
  /* Set the pipeline to "playing" state*/ 
  g_print ("Now playing: %s\n", filename.c_str()); 
  gst_element_set_state (pipeline, GST_STATE_PLAYING); 
  
  /* Iterate */ 
  g_print ("Running...\n"); 
  g_main_loop_run (loop); 
  
  /* Out of the main loop, clean up nicely */ 
  g_print ("Returned, stopping playback\n"); 
  gst_element_set_state (pipeline, GST_STATE_NULL); 
  g_print ("Deleting pipeline\n"); 
  gst_object_unref (GST_OBJECT (pipeline)); 
  return 0; 
}
0 Kudos
1,099 Views
joanxie
NXP TechSupport
NXP TechSupport

It seems NXP does not offer an "IMX6Q-SL EVK" board — are you using a third-party board? Please first test with the official NXP BSP, which already supports this use case. You can refer to the document below for guidance:

https://community.nxp.com/t5/i-MX-Processors-Knowledge-Base/Audio-Video-trough-Camera-stream-and-gst...