Using a frame buffer to display Aravis video

I derived this code from the Aravis viewer in order to display video on a Linux frame buffer instead of using GTK.

int main (int argc, char **argv){
   gst_init(&argc, &argv);
   ArvViewer* viewer = new ArvViewer;	
   ApplicationData data;
   GstElement *videoconvert;
   GstElement *videosink;
   GstCaps *caps;
   ArvPixelFormat pixel_format;
   double frame_rate;
   double gain_min, gain_max;
   unsigned payload;
   gint width, height;
   char *string;
   const char *caps_string;
   gboolean auto_gain, auto_exposure;
   gboolean is_frame_rate_available;
   gboolean is_exposure_available;
   gboolean is_exposure_auto_available;
   gboolean is_gain_available;
   gboolean is_gain_auto_available;	
   data.buffer_count = 0;
   /* Instantiation of the first available camera */
   viewer->camera = arv_camera_new (NULL);
   if (viewer->camera != NULL) {
       void (*old_sigint_handler)(int);
       gint payload;
       arv_camera_set_region (viewer->camera, 0, 0, 2592, 2048);
       arv_camera_set_frame_rate (viewer->camera, 50.0);
       payload = arv_camera_get_payload (viewer->camera);
       viewer->stream = arv_camera_create_stream (viewer->camera, NULL, NULL);
       if (viewer->stream != NULL) {
            for (int i = 0; i < 50; i++)
                 arv_stream_push_buffer (viewer->stream, arv_buffer_new (payload, NULL));
            arv_camera_get_region (viewer->camera, NULL, NULL, &width, &height);
        pixel_format = arv_camera_get_pixel_format (viewer->camera);
        arv_camera_get_exposure_time_bounds (viewer->camera, &viewer->exposure_min, &viewer- 
           arv_camera_get_gain_bounds (viewer->camera, &gain_min, &gain_max);
           frame_rate = arv_camera_get_frame_rate (viewer->camera);	        
   	caps_string = arv_pixel_format_to_gst_caps_string (pixel_format);
           if (caps_string == NULL) {
            	g_message ("GStreamer cannot understand the camera pixel format: 0x%x!\n", (int) 
   	        stop_video (viewer);
   	       return -1;
             arv_camera_start_acquisition (viewer->camera);
       viewer->pipeline = gst_pipeline_new ("pipeline");
           viewer->appsrc = gst_element_factory_make ("appsrc", NULL);
       videoconvert = gst_element_factory_make ("videoconvert", NULL);
       viewer->transform = gst_element_factory_make ("videoflip", NULL);
           gst_bin_add_many (GST_BIN (viewer->pipeline), viewer->appsrc, videoconvert, viewer- 
                 >transform, NULL);
           if (g_str_has_prefix (caps_string, "video/x-bayer")) {
   	       GstElement *bayer2rgb;
                     bayer2rgb = gst_element_factory_make ("bayer2rgb", NULL);
   	       gst_bin_add (GST_BIN (viewer->pipeline), bayer2rgb);
   	       gst_element_link_many (viewer->appsrc, bayer2rgb, videoconvert, viewer->transform, 
   	else {
   	        gst_element_link_many (viewer->appsrc, videoconvert, viewer->transform, NULL);
           videosink = gst_element_factory_make ("fbdevsink", NULL);			
       g_object_set(G_OBJECT (videosink), "device", "/dev/fb0", NULL);
       gst_bin_add (GST_BIN (viewer->pipeline), videosink)	;
          gst_element_link_many (viewer->transform, videosink, NULL);	
           MyStruct myst;
           g_signal_connect (viewer->stream, "new-buffer", G_CALLBACK (new_buffer_cb),&myst);         
           arv_stream_set_emit_signals (viewer->stream, TRUE);
           g_signal_connect (arv_camera_get_device (viewer->camera), "control-lost",G_CALLBACK (control_lost_cb), NULL);       
           g_timeout_add_seconds (1, periodic_task_cb, &data);      
           gst_element_set_state (viewer->pipeline, GST_STATE_PLAYING);
   		data.main_loop = g_main_loop_new (NULL, FALSE);
           old_sigint_handler = signal (SIGINT, set_cancel);         
           g_main_loop_run (data.main_loop);
   	    signal (SIGINT, old_sigint_handler);
           g_main_loop_unref (data.main_loop);            
           arv_camera_stop_acquisition (viewer->camera);            
           arv_stream_set_emit_signals (viewer->stream, FALSE);
           g_object_unref (viewer->stream);
       } else
           printf ("Can't create stream thread (check if the device is not already used)\n");

       g_object_unref (viewer->camera);
   } else
       printf ("No camera found\n");

   return 0;

And I know that the point where frames enter the pipeline is in this function:

static void new_buffer_cb (ArvStream *stream, MyStruct *myst){
	ArvBuffer *arv_buffer;
	gint n_input_buffers, n_output_buffers;
	arv_buffer = arv_stream_pop_buffer (stream);
	if (arv_buffer == NULL)
	arv_stream_get_n_buffers (stream, &n_input_buffers, &n_output_buffers);	
	if (arv_buffer_get_status (arv_buffer) == ARV_BUFFER_STATUS_SUCCESS) {
		gst_app_src_push_buffer (GST_APP_SRC (myst->arvViewer->appsrc), arv_to_gst_buffer 
                   (arv_buffer, stream));       
	else {
		arv_stream_push_buffer (stream, arv_buffer);

But my problem is that the frame rate is zero and nothing is displayed. I also noticed that after a few seconds the arv_to_gst_buffer function is no longer called, and I don't really know why.

Hi Vahid,

Try to enable debugging by setting ARV_DEBUG to all.

export ARV_DEBUG=all

Thank you for your response.
I don't quite understand what you mean.
Do you mean that to debug my code I should type the above command in a Linux shell?
Is that correct?

Yes, run it just before you launch your program.