/* Called when the appsink notifies us that there is a new buffer ready for
 * processing.
 *
 * @param appsink    the GstAppSink that has a sample queued
 * @param user_data  the Gstpipeline instance passed at g_signal_connect time
 *                   (currently unused)
 * @return GST_FLOW_OK on success, GST_FLOW_EOS when no sample could be
 *         pulled (stream ended or appsink shutting down)
 */
GstFlowReturn Gstpipeline::newSampleCallback2 (GstAppSink *appsink, gpointer user_data)
{
    GstSample *sample;
    GstBuffer *buffer;
    GstMapInfo info;

    (void) user_data;

    /* Pull the sample from the appsink. This returns NULL at EOS or while
     * the appsink is being shut down — must not be dereferenced blindly. */
    sample = gst_app_sink_pull_sample (GST_APP_SINK (appsink));
    if (sample == NULL)
        return GST_FLOW_EOS;

    /* The buffer is owned by the sample; it stays valid until the sample is
     * unreffed. Map it read-only and only unmap if the map succeeded. */
    buffer = gst_sample_get_buffer (sample);
    if (buffer != NULL && gst_buffer_map (buffer, &info, GST_MAP_READ))
    {
        //do something with the buffer. E.g, send it to AI NPU unit
        //.....
        gst_buffer_unmap (buffer, &info);
    }

    /* we don't need the appsink sample anymore */
    gst_sample_unref (sample);
    return GST_FLOW_OK;
}
int Gstpipeline::gstpipelineInit()
{
GError *error = NULL;
gchar *descr;
GstStateChangeReturn ret;
qInfo() << "gstpipelineInit()";
/* Initialize GStreamer */
gst_init (NULL,NULL);
#if 0
descr = g_strdup_printf ("v4l2src device=/dev/v4l/by-path/platform-vvcam-video.0-video-index0 ! video/x-raw,width=3840,height=2160 ! "
"videocrop name=video_crop ! imxvideoconvert_g2d ! glimagesink rotate-method=3 sync=false name=sink ");
#endif
#if 0
descr = g_strdup_printf ("v4l2src name=video_source device=/dev/v4l/by-path/platform-vvcam-video.0-video-index0 ! video/x-raw, width=3840,height=2160 ! tee name=t "
"t. ! queue ! imxvideoconvert_g2d name=g2d rotation=3 ! video/x-raw, width=480, height=800 ! appsink name=app_sink1 max-buffers=2 drop=True "
"t. ! queue ! appsink name=app_sink2 max-buffers=2 drop=True ");
#endif
#if 1
descr = g_strdup_printf ("v4l2src name=video_source device=/dev/v4l/by-path/platform-vvcam-video.0-video-index0 ! video/x-raw, width=3840,height=2160 ! "
"videocrop name=video_crop ! imxvideoconvert_g2d rotation=3 ! video/x-raw, width=480, height=800 ! appsink name=app_sink1 max-buffers=1 drop=True ");
#endif
#if 0
descr = g_strdup_printf ("v4l2src name=video_source device=/dev/v4l/by-path/platform-vvcam-video.0-video-index0 ! video/x-raw, width=3840,height=2160 ! "
"videocrop name=video_crop ! imxvideoconvert_g2d ! video/x-raw, width=800, height=480 ! intervideosink sync=false ");
#endif
#if 0
descr = g_strdup_printf ("v4l2src name=video_source io-mode=5 device=/dev/v4l/by-path/platform-vvcam-video.0-video-index0 ! video/x-raw, width=3840,height=2160 ! "
"videocrop name=video_crop ! imxvideoconvert_g2d ! video/x-raw, width=800, height=480 ! interpipesink name=src_1 sync=false ");
#endif
#if 0
descr = g_strdup_printf ("v4l2src name=video_source device=/dev/v4l/by-path/platform-vvcam-video.0-video-index0 ! video/x-raw, width=3840,height=2160 ! tee name=t "
"t. ! queue ! imxvideoconvert_g2d name=g2d ! video/x-raw, width=800, height=480 ! interpipesink name=src_1 sync=false "
"t. ! queue ! appsink name=app_sink2 max-buffers=2 drop=True ");
#endif
#if 0
descr = g_strdup_printf ("v4l2src name=video_source device=/dev/v4l/by-path/platform-vvcam-video.0-video-index0 ! video/x-raw, width=3840,height=2160 ! tee name=t "
"t. ! queue ! imxvideoconvert_g2d name=g2d ! video/x-raw, width=800, height=480 ! interpipesink name=src_1 sync=false "
"t. ! queue ! appsink name=app_sink2 max-buffers=2 drop=True ");
#endif
data.pipeline = gst_parse_launch (descr, &error);
if (error != NULL)
{
qDebug() << "launch_gst_pipeline: could not construct gstreamer pipeline:" << error->message;
g_clear_error (&error);
return 0;
}
data.v4l2src = gst_bin_get_by_name( GST_BIN( data.pipeline), "video_source");
data.videocrop = gst_bin_get_by_name( GST_BIN( data.pipeline), "video_crop");
g_object_set(data.videocrop, "left", left_crop, "right", right_crop, "top", top_crop, "bottom", bottom_crop, NULL);
//data.g2d = gst_bin_get_by_name( GST_BIN( data.pipeline), "g2d");
#if 1
/* we use appsink in push mode, it sends us a signal when data is available
* and we pull out the data in the signal callback. We want the appsink to
* push as fast as it can, hence the sync=false */
data.appsink1 = gst_bin_get_by_name (GST_BIN (data.pipeline), "app_sink1");
g_object_set (G_OBJECT (data.appsink1), "emit-signals", TRUE, "sync", FALSE, NULL);
g_signal_connect (data.appsink1, "new-sample", G_CALLBACK (newSampleCallback1), this);
//gst_object_unref (data.appsink1);
#endif
#if 0
data.appsink2 = gst_bin_get_by_name (GST_BIN (data.pipeline), "app_sink2");
g_object_set (G_OBJECT (data.appsink2), "emit-signals", TRUE, "sync", FALSE, NULL);
g_signal_connect (data.appsink2, "new-sample", G_CALLBACK (newSampleCallback2), this);
#endif
ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE)
{
qDebug() << "launch_gst_pipeline: Unable to set the gstreamer pipeline to the playing state.";
gst_object_unref (data.pipeline);
return 0;
}
return 1;
}
// ---------------------------------------------------------------------------
// QML frontend (lives in a separate .qml file; included below for reference)
// ---------------------------------------------------------------------------
import QtQuick 2.6
import QtQuick.Controls 1.4
import QtMultimedia 5.4
import QtQuick.Window 2.12
Window {
id : cameraUI
visible: true
color: "black"
property real zoomValue : 1.5
property int zoomCntr : 0
property int illuScenNr: 3
property string snaptext: ""
property bool showMenu: false
Item { //needed for keypad event handler
anchors.fill: parent
focus: true
/*MediaPlayer {
id: mediaplayer
autoPlay: true
//source: "gst-pipeline: intervideosrc ! queue ! qtvideosink sync=false"
source: "gst-pipeline: interpipesrc is-live=true listen-to=src_1 ! queue ! qtvideosink sync=false async=false"
//videoOutput: [v1, v2]
}
VideoOutput {
id: output
anchors.fill: parent
source: mediaplayer
focus : visible // to receive focus and capture key events when visible
transformOrigin: Item.Center
orientation: 90
MouseArea {
anchors.fill: parent
onClicked: {
showMenu = true
}
}
}*/
//preview camera on LCD
// Live camera preview on the LCD.
// "frameProvider" is not defined in this file — presumably a context property
// exposed from the C++ side that feeds the appsink frames into Qt Multimedia;
// TODO confirm against the C++ setup code.
VideoOutput {
source: frameProvider
anchors.fill: parent
focus : visible // to receive focus and capture key events when visible
//transformOrigin: Item.Center
//orientation: 90
//fillMode: VideoOutput.PreserveAspectCrop
// Tapping anywhere on the preview raises the on-screen menu.
MouseArea {
anchors.fill: parent
onClicked: {
showMenu = true
}
}
}