Hi, I wrote a GStreamer C application based on the following working GStreamer command, which records video with audio:
gst-launch -e mfw_v4lsrc capture-mode=1 fps-n=30 ! vpuenc codec=0 ! queue ! mux. alsasrc ! 'audio/x-raw-int,rate=48000,channels=1' ! mfw_mp3encoder ! queue ! mux. avimux name=mux ! filesink location=audio_video.avi sync=false
This is my main code. It records only video; no audio is captured. Where did I go wrong? Please point out my mistake.
int
main (int argc, char *argv[])
{
  GMainLoop *loop;
  GstElement *pipeline, *video_source, *video_encoder, *video_queue,
      *audio_source, *audio_encoder, *audio_queue, *audio_muxer,
      *sink, *filter;
  GstBus *bus;
  guint bus_watch_id;
  GstCaps *audio_caps;

  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, FALSE);

  if (argc != 2) {
    g_printerr ("Usage: %s <output AVI filename>\n", argv[0]);
    return -1;
  }

  /* Create the pipeline */
  pipeline = gst_pipeline_new ("pipeline");

  /* Video branch: source -> encoder -> queue */
  video_source = gst_element_factory_make ("mfw_v4lsrc", "video_source");
  video_encoder = gst_element_factory_make ("vpuenc", "video_encoder");
  video_queue = gst_element_factory_make ("queue", "video_queue");

  /* Audio branch: source -> capsfilter -> encoder -> queue */
  audio_source = gst_element_factory_make ("alsasrc", "audio_source");
  filter = gst_element_factory_make ("capsfilter", "filter");
  audio_encoder = gst_element_factory_make ("mfw_mp3encoder", "audio_encoder");
  audio_queue = gst_element_factory_make ("queue", "audio_queue");

  /* Muxer and file sink */
  audio_muxer = gst_element_factory_make ("avimux", "audio_muxer");
  sink = gst_element_factory_make ("filesink", "sink");

  if (!pipeline || !video_source || !video_encoder || !video_queue
      || !audio_source || !audio_encoder || !audio_queue || !audio_muxer
      || !filter || !sink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  g_object_set (G_OBJECT (sink), "location", argv[1], NULL);
  g_object_set (G_OBJECT (sink), "sync", FALSE, NULL);

  audio_caps = gst_caps_new_simple ("audio/x-raw-int",
      "rate", G_TYPE_INT, 44100,
      "channels", G_TYPE_INT, 1,
      NULL);
  g_object_set (G_OBJECT (filter), "caps", audio_caps, NULL);
  gst_caps_unref (audio_caps);

  g_object_set (G_OBJECT (video_source), "capture-mode", 1, NULL);
  g_object_set (G_OBJECT (video_source), "fps-n", 30, NULL);
  g_object_set (G_OBJECT (video_encoder), "codec", 0, NULL);

  /* Watch the bus for errors and EOS */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  gst_bin_add_many (GST_BIN (pipeline), video_source, video_encoder,
      video_queue, audio_source, audio_encoder, audio_queue, audio_muxer,
      sink, filter, NULL);

  gst_element_link_many (video_source, video_encoder, video_queue,
      audio_muxer, NULL);
  gst_element_link_many (audio_source, filter, audio_encoder, audio_queue,
      audio_muxer, NULL);
  gst_element_link (audio_muxer, sink);

  g_signal_connect (video_source, "pad-added", G_CALLBACK (on_pad_added), sink);

  g_print ("Now recording to: %s\n", argv[1]);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  g_print ("Running...\n");
  g_main_loop_run (loop);

  g_print ("Returned, stopping recording\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);

  return 0;
}
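One thing the code above never checks: gst_element_link_many() and gst_element_link() return a gboolean, and a failed link (for example, a caps negotiation problem on the audio branch) would silently leave you with a video-only file. Note also that the working command requests rate=48000 while the C code asks for 44100, which is worth double-checking against what alsasrc actually delivers. A minimal diagnostic sketch, reusing the element variables from main() above:

  if (!gst_element_link_many (video_source, video_encoder, video_queue,
          audio_muxer, NULL))
    g_printerr ("Failed to link the video branch to the muxer.\n");

  if (!gst_element_link_many (audio_source, filter, audio_encoder,
          audio_queue, audio_muxer, NULL))
    g_printerr ("Failed to link the audio branch to the muxer; "
        "compare the capsfilter rate/channels with what alsasrc provides.\n");

  if (!gst_element_link (audio_muxer, sink))
    g_printerr ("Failed to link the muxer to the filesink.\n");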
In fact, to capture raw audio and raw video you can use GStreamer directly. You can refer to the source code of the Freescale GStreamer plugins and use a command like the one below:
gst-launch videotestsrc num-buffers=250 ! 'video/x-raw-yuv,format=(fourcc)I420,width=320,height=240,framerate=(fraction)25/1' ! queue ! mux. audiotestsrc num-buffers=440 ! audioconvert ! 'audio/x-raw-int,rate=44100,channels=2' ! queue ! mux. avimux name=mux ! filesink location=test.avi
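If you want to run that same test pipeline from a C program, a minimal sketch using gst_parse_launch() is shown below (assuming GStreamer 0.10, which these caps names belong to). The pipeline string and the test.avi file name are taken from the command above; everything else is just illustrative.

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;
  GError *error = NULL;

  gst_init (&argc, &argv);

  /* Same test pipeline as the gst-launch command above, built with
   * gst_parse_launch(). */
  pipeline = gst_parse_launch (
      "videotestsrc num-buffers=250 ! "
      "video/x-raw-yuv,format=(fourcc)I420,width=320,height=240,framerate=(fraction)25/1 ! "
      "queue ! mux. "
      "audiotestsrc num-buffers=440 ! audioconvert ! "
      "audio/x-raw-int,rate=44100,channels=2 ! queue ! mux. "
      "avimux name=mux ! filesink location=test.avi", &error);
  if (pipeline == NULL) {
    g_printerr ("Could not build pipeline: %s\n",
        error ? error->message : "unknown error");
    return -1;
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Wait until the test sources run out of buffers (EOS) or an error occurs. */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}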