GStreamer: applying an audio delay to a live stream

Problem description

I want to add an audio delay (up to 10/15 seconds) to a live stream sent with the gstreamer rtmpsink. This is my launch line:

gst-launch-1.0 -vvv flvmux streamable=true name=mux ! rtmpsink location="rtmp://localhost/live" \
 souphttpsrc location="http://<url video h264>" ! tsdemux ! h264parse ! queue ! mux. \
 souphttpsrc location="https://<url audio aac>" ! icydemux ! aacparse ! queue ! mux.

Directly on the launch line I tried adding "queue max-size-buffers=0 max-size-time=0 max-size-bytes=0 min-threshold-time=15000000000" after aacparse, but with this approach the whole stream gets blocked.
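For reference, the attempted line would have looked roughly like this (a sketch reconstructed from the description above; min-threshold-time=15000000000 ns corresponds to 15 seconds):

gst-launch-1.0 -vvv flvmux streamable=true name=mux ! rtmpsink location="rtmp://localhost/live" \
 souphttpsrc location="http://<url video h264>" ! tsdemux ! h264parse ! queue ! mux. \
 souphttpsrc location="https://<url audio aac>" ! icydemux ! aacparse ! \
 queue max-size-buffers=0 max-size-time=0 max-size-bytes=0 min-threshold-time=15000000000 ! mux.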

From C code I then tried to modify the PTS timestamps on the buffers leaving the aacparse pad, but I can change them to any value and it has no effect:

gst_pad_add_probe(line->aacparse_srcpad, GST_PAD_PROBE_TYPE_BUFFER, cb_have_data_audio, NULL, NULL);

..

static GstPadProbeReturn
cb_have_data_audio (GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
  GstMapInfo map;
  GstBuffer *buffer;

  buffer = GST_PAD_PROBE_INFO_BUFFER (info);
  if (buffer == NULL)
    return GST_PAD_PROBE_OK;

  /* take a writable copy before touching the timestamps */
  buffer = gst_buffer_make_writable (buffer);

  GstClockTime pts = GST_BUFFER_PTS (buffer);
  GST_BUFFER_PTS (buffer) = pts + 100000000000;  /* shift the PTS by 100 s */

  GST_PAD_PROBE_INFO_DATA (info) = buffer;

  return GST_PAD_PROBE_OK;
}

I also tried using gst_pad_set_offset(), but again with no effect:

gst_pad_set_offset(line->aacparse_srcpad, 1000000000);

Even working on the flvmux pads and setting "streamable=false" had no effect. What is the right approach to add a delay to the audio only?
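For context, a minimal sketch of what the "flvmux pad" attempt could look like, assuming the pipeline is built with gst_parse_launch() and that the audio branch is linked to the flvmux request pad named "audio"; the 15 s value and the file layout are illustrative, and as noted above this kind of offset-based attempt did not produce the desired delay in practice:

/* Hypothetical sketch of the attempt described above: build the same pipeline
 * programmatically and apply a pad offset to the audio branch only. */
#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GError *error = NULL;

  gst_init (&argc, &argv);

  GstElement *pipeline = gst_parse_launch (
      "flvmux streamable=true name=mux ! rtmpsink location=rtmp://localhost/live "
      "souphttpsrc location=\"http://<url video h264>\" ! tsdemux ! h264parse ! queue ! mux. "
      "souphttpsrc location=\"https://<url audio aac>\" ! icydemux ! aacparse ! queue ! mux.",
      &error);
  if (pipeline == NULL) {
    g_printerr ("Parse error: %s\n", error->message);
    return 1;
  }

  /* The audio queue was linked to the flvmux request pad named "audio" when
   * the launch string was parsed; shift its running time by 15 seconds. */
  GstElement *mux = gst_bin_get_by_name (GST_BIN (pipeline), "mux");
  GstPad *audio_pad = gst_element_get_static_pad (mux, "audio");
  gst_pad_set_offset (audio_pad, 15 * GST_SECOND);
  gst_object_unref (audio_pad);
  gst_object_unref (mux);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* run until error or EOS */
  GstBus *msg_bus = gst_element_get_bus (pipeline);
  GstMessage *msg = gst_bus_timed_pop_filtered (msg_bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (msg_bus);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}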

Tags: audio, stream, gstreamer, delay, rtmp

Solution
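The example below (apparently adapted from the preroll/pad-probe example in the GStreamer documentation) demonstrates the general technique: the pipeline is prerolled with blocking probes on the uridecodebin source pads, and an identity element with signal-handoffs enabled rewrites the PTS/DTS of each buffer in its "handoff" callback (change_time) before the data reaches the sink. The same idea should, in principle, carry over to the audio branch of the RTMP pipeline by placing the identity element between aacparse and flvmux and adding the desired 10-15 second offset to the timestamps.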


#include <gst/gst.h>
#include <stdio.h>

static GMainLoop *loop;
static gint counter;
static GstBus *bus;
static gboolean prerolled = FALSE;
static GstPad *sinkpad, *ident_sink;
static GstClockTime ptimestamp = (guint64) 1000;
static GstClockTime dtimestamp = 0;
/* unused example probe: it would inspect/modify buffers on the videoconvert
 * sink pad if the (commented-out) gst_pad_add_probe() call in main() were
 * enabled; the experiments below are kept for reference */
static GstPadProbeReturn
display_data (GstPad          *pad,
              GstPadProbeInfo *info,
              gpointer         user_data)
{
  // GstBuffer *apsInfo = GST_PAD_PROBE_INFO_BUFFER (info);
  // apsInfo = gst_buffer_ref (apsInfo);
  // apsInfo = gst_buffer_make_writable (apsInfo);
  // int fps = 30;
  // ptimestamp += gst_util_uint64_scale_int (1, GST_SECOND, fps);
  // int a = GST_BUFFER_PTS (apsInfo);
  // int b = GST_BUFFER_DTS (apsInfo);
  // GST_BUFFER_PTS (apsInfo) = ptimestamp;
  // GST_BUFFER_DTS (apsInfo) = ptimestamp;
  // printf ("%d %d \n", a, b);
  // GST_BUFFER_DURATION (apsInfo) = gst_util_uint64_scale_int (1, GST_SECOND, fps);

  return GST_PAD_PROBE_OK;
}
/* "handoff" callback of the identity element: rewrites the PTS/DTS of every
 * buffer so the timestamps are regenerated at a fixed frame rate */
static void
change_time (GstElement *identity,
             GstBuffer  *apsInfo,
             gpointer    user_data)
{
  // printf ("changing time");
  // apsInfo = gst_buffer_ref (apsInfo);
  // apsInfo = gst_buffer_make_writable (apsInfo);
  /* When the two commented lines above are enabled, gst_buffer_is_writable()
   * reports the buffer as writable but the timestamp changes below have no
   * effect on the video; when they are removed the buffer is reported as not
   * writable, yet the changes do take effect. */
  GST_BUFFER_FLAG_SET (apsInfo, GST_BUFFER_FLAG_DISCONT);

  int fps = 30;

  dtimestamp += gst_util_uint64_scale_int (1, GST_SECOND, fps);
  ptimestamp += gst_util_uint64_scale_int (1, GST_SECOND, fps);

  /* regenerate PTS/DTS at a fixed 30 fps pace */
  GST_BUFFER_PTS (apsInfo) = ptimestamp;
  GST_BUFFER_DTS (apsInfo) = dtimestamp;
  printf ("%" G_GUINT64_FORMAT " %" G_GUINT64_FORMAT "\n", ptimestamp, dtimestamp);
  GST_BUFFER_DURATION (apsInfo) = gst_util_uint64_scale_int (1, GST_SECOND, fps);
}
static void
dec_counter (GstElement * pipeline)
{
  if (prerolled)
    return;

  if (g_atomic_int_dec_and_test (&counter)) {
    /* all probes blocked and no-more-pads signaled, post
     * message on the bus. */
    prerolled = TRUE;

    gst_bus_post (bus, gst_message_new_application (
          GST_OBJECT_CAST (pipeline),
          gst_structure_new_empty ("ExPrerolled")));
  }
}

/* called when a source pad of uridecodebin is blocked */
static GstPadProbeReturn
cb_blocked (GstPad          *pad,
            GstPadProbeInfo *info,
            gpointer         user_data)
{
  GstElement *pipeline = GST_ELEMENT (user_data);

  if (prerolled)
    return GST_PAD_PROBE_REMOVE;

  dec_counter (pipeline);

  return GST_PAD_PROBE_OK;
}

/* called when uridecodebin has a new pad */
static void
cb_pad_added (GstElement *element,
              GstPad     *pad,
              gpointer    user_data)
{
  GstElement *pipeline = GST_ELEMENT (user_data);

  if (prerolled)
    return;

  g_atomic_int_inc (&counter);

  gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
      (GstPadProbeCallback) cb_blocked, pipeline, NULL);

  /* try to link to the video pad */
  gst_pad_link (pad, sinkpad);
  

}

/* called when uridecodebin has created all pads */
static void
cb_no_more_pads (GstElement *element,
                 gpointer    user_data)
{
  GstElement *pipeline = GST_ELEMENT (user_data);

  if (prerolled)
    return;

  dec_counter (pipeline);
}

/* called when a new message is posted on the bus */
static void
cb_message (GstBus     *bus,
            GstMessage *message,
            gpointer    user_data)
{
  GstElement *pipeline = GST_ELEMENT (user_data);

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:
      g_print ("we received an error!\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_EOS:
      g_print ("we reached EOS\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_APPLICATION:
    {
      if (gst_message_has_name (message, "ExPrerolled")) {
        /* it's our message */
        g_print ("we are all prerolled, do seek\n");
        gst_element_seek (pipeline, 1.0, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH,
            GST_SEEK_TYPE_SET, 0,
            GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);

        gst_element_set_state (pipeline, GST_STATE_PLAYING);
      }
      break;
    }
    default:
      break;
  }
}


gint
main (gint   argc,
      gchar *argv[])
{
  GstElement *pipeline, *src, *csp, *vs, *sink ,*idelem;

  /* init GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);



  /* build */
  pipeline = gst_pipeline_new ("my-pipeline");

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", (GCallback) cb_message,
      pipeline);

  src = gst_element_factory_make ("uridecodebin", "src");

  if (src == NULL)
    g_error ("Could not create 'uridecodebin' element");

  g_object_set (src, "uri", "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4", NULL);

  csp = gst_element_factory_make ("videoconvert", "csp");
  if (csp == NULL)
    g_error ("Could not create 'videoconvert' element");

  vs = gst_element_factory_make ("videoscale", "vs");
  if (vs == NULL)
    g_error ("Could not create 'videoscale' element");

  idelem = gst_element_factory_make ("identity", "identity-elem");
  if (idelem == NULL)
    g_error ("Could not create 'idelem' ");

  sink = gst_element_factory_make ("autovideosink", "sink");
  if (sink == NULL)
    g_error ("Could not create 'autovideosink' element");

  gst_bin_add_many (GST_BIN (pipeline), src, csp, vs,idelem, sink, NULL);

  /* can't link src yet, it has no pads */
  gst_element_link_many (csp, vs,idelem, sink, NULL);

  sinkpad = gst_element_get_static_pad (csp, "sink");
  //ident_sink = gst_element_get_static_pad (idelem, "sink");

  // //  
  // gst_pad_add_probe (sinkpad, GST_PAD_PROBE_TYPE_BUFFER,
  //     (GstPadProbeCallback) display_data, NULL, NULL); //displayinfo data
  // gst_object_unref (sinkpad);



  /* enable handoffs on the identity element so change_time() can rewrite the
   * buffer timestamps as they pass through */
  g_object_set (G_OBJECT (idelem), "signal-handoffs", TRUE, NULL);
  g_signal_connect (idelem, "handoff", (GCallback) change_time, pipeline);



  g_atomic_int_set (&counter, 1);

  g_signal_connect (src, "pad-added",
      (GCallback) cb_pad_added, pipeline);
  g_signal_connect (src, "no-more-pads",
      (GCallback) cb_no_more_pads, pipeline);

  gst_element_set_state (pipeline, GST_STATE_PAUSED);

  g_main_loop_run (loop);

  gst_element_set_state (pipeline, GST_STATE_NULL);

  gst_object_unref (sinkpad);
  gst_object_unref (bus);
  gst_object_unref (pipeline);
  g_main_loop_unref (loop);

  return 0;
}
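The example builds like any other GStreamer application, e.g. with the source saved as delay-example.c (a name chosen here only for illustration) and assuming pkg-config and the gstreamer-1.0 development files are installed:

gcc delay-example.c -o delay-example $(pkg-config --cflags --libs gstreamer-1.0)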


