Plugin Development (1): A Plugin Example

Environment:

  • system: centos8.1, arch: x86_64, kernel: 4.18.0, gcc: 8.5.0
  • gstreamer version: 1.18.6

1. Overview

GStreamer is built around plugins: in theory, a user only needs to combine plugins like building blocks to get different functionality. In practice, however, business requirements sometimes force us to modify an existing plugin or implement one of our own.
Modifying or writing a plugin means digging into GStreamer's lower-level design. Leaving aside the fact that GObject is already a headache to learn by itself, GStreamer's internals are complex and full of concepts, and reading the documentation alone often isn't enough to understand how a plugin actually works.
So the best way to learn is to implement a simple plugin by hand.

2. Pipeline layout

This series implements a plugin named learn, similar to tee: an FLV file is read from local disk, the H.264 video stream is demuxed out of it, and the learn element then fans the stream out to multiple RTP outputs. The pipeline looks like this (a rough gst-launch sketch follows the diagram):

                                                                            +-------+             +------------+           +---------+
                                                                     -----> | queue | ---h264---> | rtph264pay | ---rtp--> | udpsink |  
                                                                     |      +-------+             +------------+           +---------+ 
+---------+               +----------+                +-------+      |
| filesrc |  ---file--->  | flvdemux |  ---h264---->  | learn | -----
+---------+               +----------+                +-------+      |
                                                                     |
                                                                     ----->  ...
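
Before the learn element exists, a rough equivalent of this topology can be tried with tee. This is only a sketch; the file path, host and ports are placeholders:

gst-launch-1.0 filesrc location=./test.flv ! flvdemux ! h264parse ! tee name=t \
    t. ! queue ! rtph264pay ! udpsink host=127.0.0.1 port=10002 \
    t. ! queue ! rtph264pay ! udpsink host=127.0.0.1 port=10004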

3. Implementing the learn plugin

3.1 Plugin implementation

The learn plugin is implemented as follows:

#include <gst/gst.h>
#include <gst/base/gstbytewriter.h>
#include <stdio.h>

//
// define log
//

GST_DEBUG_CATEGORY_STATIC (gst_learn_debug);
#define GST_CAT_DEFAULT gst_learn_debug
#define _do_init \
    GST_DEBUG_CATEGORY_INIT (gst_learn_debug, "learn", 0, "learn element");

//
// define gst_learn_pad
//

#define GST_TYPE_LEARN_PAD (gst_learn_pad_get_type())
#define GST_LEARN_PAD(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_LEARN_PAD, GstLearnPad))
#define GST_LEARN_PAD_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_LEARN_PAD, GstLearnPadClass))
#define GST_IS_LEARN_PAD(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_LEARN_PAD))
#define GST_IS_LEARN_PAD_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_LEARN_PAD))
#define GST_LEARN_PAD_CAST(obj) ((GstLearnPad*)obj)

typedef struct _GstLearnPad {
  // parent
  GstPad parent;

  // private
  float scale;
  guint index;
  gboolean negotiation;
} GstLearnPad;

typedef struct _GstLearnPadClass {
  // parent
  GstPadClass parent;
} GstLearnPadClass;

G_DEFINE_TYPE (GstLearnPad, gst_learn_pad, GST_TYPE_PAD);

enum {
  PROP_SCALE = 1,
};

static void gst_learn_pad_set_property(GObject* object, guint prop_id, const GValue* value,
    GParamSpec* pspec);
static void gst_learn_pad_get_property(GObject* object, guint prop_id, GValue* value,
    GParamSpec* pspec);

static void gst_learn_pad_class_init(GstLearnPadClass* klass) {
  GObjectClass* gobj_class = G_OBJECT_CLASS(klass);
  gobj_class->set_property = gst_learn_pad_set_property;
  gobj_class->get_property = gst_learn_pad_get_property;
  g_object_class_install_property(gobj_class, PROP_SCALE,
      g_param_spec_float("scale", "scale of video",
          "set scale for video", 0, 10, 1,
          G_PARAM_READABLE | G_PARAM_WRITABLE));
}

static void gst_learn_pad_init(GstLearnPad* pad) {
  pad->scale = 1.0;
  pad->negotiation = FALSE;
  pad->index = 0;
}

static void gst_learn_pad_set_property(GObject* object, guint prop_id, const GValue* value,
    GParamSpec* pspec) {
  GstLearnPad *learn_pad = GST_LEARN_PAD(object);
  switch (prop_id) {
    case (PROP_SCALE): {
      learn_pad->scale = g_value_get_float(value);
      GST_WARNING_OBJECT(learn_pad, "pad: %s set scale: %f",
          GST_OBJECT_NAME(learn_pad), learn_pad->scale);
      break;
    }
    default: {
      G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
      break;
    }
  }
}

static void gst_learn_pad_get_property(GObject* object, guint prop_id, GValue* value,
    GParamSpec * pspec) {
  GstLearnPad *learn_pad = GST_LEARN_PAD(object);
  switch (prop_id) {
    case (PROP_SCALE): {
      g_value_set_float(value, learn_pad->scale);
      break;
    }
    default: {
      G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
      break;
    }
  }
}

//
// define gst_learn
//

#define GST_TYPE_LEARN (gst_learn_get_type())
#define GST_LEARN(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_LEARN,GstLearn))
#define GST_LEARN_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_LEARN,GstLearnClass))
#define GST_IS_LEARN(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_LEARN))
#define GST_IS_LEARN_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_LEARN))
#define GST_LEARN_CAST(obj) ((GstLearn*)obj)

typedef struct _GstLearn {
  // parent
  GstElement parent;

  // private
  GstPad* sinkpad;
  GHashTable* pad_indexes;
  guint next_pad_index;
  GstBuffer* codec_data; 
  gboolean negotiation;
  guint nal_length_size;
} GstLearn;

typedef struct _GstLearnClass {
  // parent
  GstElementClass parent;
} GstLearnClass;

G_DEFINE_TYPE_WITH_CODE (GstLearn, gst_learn, GST_TYPE_ELEMENT, _do_init);
// #define gst_learn_parent_class parent_class

static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE("sink", 
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS("video/x-h264, "
        "stream-format = (string) \"avc\""));

static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE(
    "src_%u",
    GST_PAD_SRC,
    GST_PAD_REQUEST,
    GST_STATIC_CAPS("video/x-h264, "
        "stream-format = (string) \"byte-stream\", "
        "alignment = (string) \"nal\""));

enum {
  PROP_NUM_SRC_PADS = 1,
};

static void gst_learn_finalize(GObject* object);
static void gst_learn_dispose(GObject* object);
static GstStateChangeReturn gst_learn_change_state(GstElement* element, GstStateChange transition);
static gboolean gst_learn_sink_event(GstPad* pad, GstObject* parent, GstEvent* event);
static gboolean gst_learn_sink_query(GstPad* pad, GstObject* parent, GstQuery* query);
static gboolean gst_learn_sink_activate_mode(GstPad* pad, GstObject* parent, 
    GstPadMode mode, gboolean active);
static GstFlowReturn gst_learn_chain(GstPad* pad, GstObject* parent, GstBuffer* buffer);
static GstFlowReturn gst_learn_chain_list (GstPad * pad, GstObject * parent, GstBufferList * list);

static gboolean gst_learn_src_query(GstPad* pad, GstObject* parent, GstQuery* query);
static gboolean gst_learn_src_event(GstPad* pad, GstObject* parent, GstEvent* event);
static gboolean gst_learn_src_activate_mode(GstPad * pad, GstObject * parent,
    GstPadMode mode, gboolean active);

static GstPad* gst_learn_request_new_pad(GstElement* element,
    GstPadTemplate* temp, const gchar* unused, const GstCaps* caps);
static void gst_learn_release_pad(GstElement* element, GstPad* pad);

static void gst_learn_set_property(GObject * object, guint prop_id, const GValue * value,
    GParamSpec * pspec);
static void gst_learn_get_property(GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec);

static void gst_learn_class_init(GstLearnClass* klass) {
  GObjectClass* gobj_class = G_OBJECT_CLASS(klass);
  GstElementClass* gstelement_class = GST_ELEMENT_CLASS(klass);

  gobj_class->finalize = gst_learn_finalize;
  gobj_class->dispose = gst_learn_dispose;
  gobj_class->set_property = gst_learn_set_property;
  gobj_class->get_property = gst_learn_get_property;

  g_object_class_install_property(gobj_class, PROP_NUM_SRC_PADS,
      g_param_spec_int ("num-src-pads", "Num Src Pads",
          "The number of source pads", 0, G_MAXINT, 0,
          G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));

  gst_element_class_set_static_metadata(gstelement_class,
      "Just for learn",
      "Generic",
      "Learn just",
      "helloworld@test.com");

  gst_element_class_add_static_pad_template(gstelement_class, &sink_template);
  gst_element_class_add_static_pad_template(gstelement_class, &src_template);

  gstelement_class->change_state = GST_DEBUG_FUNCPTR(gst_learn_change_state);
  gstelement_class->request_new_pad = GST_DEBUG_FUNCPTR(gst_learn_request_new_pad);
  gstelement_class->release_pad = GST_DEBUG_FUNCPTR(gst_learn_release_pad);
}

static void gst_learn_init(GstLearn* learn) {
  learn->sinkpad = gst_pad_new_from_static_template(&sink_template, "sink");

  gst_pad_set_event_function(learn->sinkpad, GST_DEBUG_FUNCPTR(gst_learn_sink_event));
  gst_pad_set_query_function(learn->sinkpad, GST_DEBUG_FUNCPTR(gst_learn_sink_query));
  gst_pad_set_activatemode_function(learn->sinkpad, GST_DEBUG_FUNCPTR(gst_learn_sink_activate_mode));
  gst_pad_set_chain_function(learn->sinkpad, GST_DEBUG_FUNCPTR(gst_learn_chain));
  gst_pad_set_chain_list_function(learn->sinkpad, GST_DEBUG_FUNCPTR(gst_learn_chain_list));

  gst_element_add_pad(GST_ELEMENT(learn), learn->sinkpad);

  learn->pad_indexes = g_hash_table_new(NULL, NULL);
  learn->nal_length_size = 0;
  learn->next_pad_index = 0;
  learn->codec_data = NULL;
  learn->negotiation = FALSE;
}

static void gst_learn_dispose(GObject* object) {
  GList* item;
  for (item = GST_ELEMENT_PADS(object); item; item = g_list_next(item)) {
    GstPad* pad = GST_PAD(item->data);
    if (GST_PAD_IS_SRC(pad)) {
      gst_element_release_request_pad(GST_ELEMENT(object), pad);
    }
  }

  G_OBJECT_CLASS(gst_learn_parent_class)->dispose(object);
}

static void gst_learn_finalize(GObject* object) {
  GstLearn* learn = GST_LEARN(object);
  g_hash_table_unref(learn->pad_indexes);

  G_OBJECT_CLASS(gst_learn_parent_class)->finalize(object);
}

static gboolean forward_sticky_events(GstPad * pad, GstEvent ** event, gpointer user_data)
{
  GstPad *srcpad = GST_PAD_CAST(user_data);
  GstFlowReturn ret = gst_pad_store_sticky_event(srcpad, *event);
  if (ret != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (srcpad, "storing sticky event %p (%s) failed: %s", *event,
        GST_EVENT_TYPE_NAME (*event), gst_flow_get_name (ret));
  }

  return TRUE;
}

static gboolean gst_learn_src_negotiation(GstPad* src_pad) {
  GstCaps* peer_caps = gst_pad_peer_query_caps(src_pad, NULL);
  GST_DEBUG_OBJECT(src_pad, "all peer supported caps %" GST_PTR_FORMAT, peer_caps);
  gst_caps_unref(peer_caps);

  GstCaps* fixed_caps = gst_static_pad_template_get_caps(&src_template);
  if (!gst_pad_peer_query_accept_caps(src_pad, fixed_caps)) {
    GST_ERROR_OBJECT(src_pad, "peer not support out src caps %" GST_PTR_FORMAT, fixed_caps); 
    gst_caps_unref(fixed_caps);
    return FALSE;
  }

  // set caps to peer
  if (!gst_pad_set_caps(src_pad, fixed_caps)) {
    GST_ERROR_OBJECT(src_pad, "set caps %" GST_PTR_FORMAT " to peer failed", 
        fixed_caps);
    gst_caps_unref(fixed_caps);
    return FALSE;
  }

  gst_caps_unref(fixed_caps);
  return TRUE;
}
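
//
// request pad handling: allocate a free "src_%u" index, create a GstLearnPad,
// activate it in push mode, store the sink pad's sticky events on it and add
// it to the element, so a branch can be attached while the pipeline is running.
//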

static GstPad* gst_learn_request_new_pad(GstElement* element, GstPadTemplate* pad_temp,
    const gchar* pad_name, const GstCaps* caps) {
  GstLearn* learn = GST_LEARN(element);
  GST_DEBUG_OBJECT(learn, "requesting new pad");

  guint index = 0;
  if (pad_name && sscanf(pad_name, "src_%u", &index) == 1) {
    GST_LOG_OBJECT(learn, "name: %s (index %d)", pad_name, index);
    if (g_hash_table_contains(learn->pad_indexes, GUINT_TO_POINTER(index))) {
      GST_ERROR_OBJECT(element, "pad name %s is not unique", pad_name); 
      return NULL;
    }
    if (index >= learn->next_pad_index) {
      learn->next_pad_index = index + 1;
    }
  } else {
    index = learn->next_pad_index;
    while (g_hash_table_contains(learn->pad_indexes, GUINT_TO_POINTER(index))) {
      index++;
    }
    learn->next_pad_index = index + 1;
  }

  gchar* name = g_strdup_printf("src_%u", index);
  GstPad* src_pad = GST_PAD_CAST(g_object_new(GST_TYPE_LEARN_PAD, 
      "name", name, "direction", pad_temp->direction, 
      "template", pad_temp, NULL));
  GST_LEARN_PAD_CAST(src_pad)->index = index;

  gboolean res = gst_pad_activate_mode(src_pad, GST_PAD_MODE_PUSH, TRUE);
  if (!res) {
    GST_ERROR_OBJECT(learn, "failed to activate request pad");
    gst_object_unref(src_pad);
    g_free(name);
    return NULL;
  }
  GST_DEBUG_OBJECT(learn, "create new pad: %s succeed", name);
  g_free(name);

  gst_pad_set_activatemode_function(src_pad, GST_DEBUG_FUNCPTR(gst_learn_src_activate_mode));
  gst_pad_set_query_function(src_pad, GST_DEBUG_FUNCPTR(gst_learn_src_query));
  gst_pad_set_event_function(src_pad, GST_DEBUG_FUNCPTR(gst_learn_src_event));

  // forward sticky events to the new srcpad
  gst_pad_sticky_events_foreach(learn->sinkpad, forward_sticky_events, src_pad);

  g_hash_table_insert(learn->pad_indexes, GUINT_TO_POINTER(index), NULL);
  gst_element_add_pad(GST_ELEMENT_CAST(learn), src_pad);

  return src_pad;
}

static void gst_learn_release_pad(GstElement* element, GstPad* pad) {
  GstLearn* learn = GST_LEARN(element);
  GST_DEBUG_OBJECT(learn, "releasing pad");

  gst_pad_set_active(pad, FALSE);
  gst_element_remove_pad(GST_ELEMENT_CAST(learn), pad);

  guint index = GST_LEARN_PAD_CAST(pad)->index;
  g_hash_table_remove(learn->pad_indexes, GUINT_TO_POINTER(index));
}

static void gst_learn_set_property (GObject * object, guint prop_id, const GValue * value,
    GParamSpec * pspec) {
  // nothing to do
}

static void gst_learn_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec) {
  GstLearn* learn = GST_LEARN(object);

  switch (prop_id) {
    case PROP_NUM_SRC_PADS:
      g_value_set_int (value, GST_ELEMENT(learn)->numsrcpads);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
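
//
// codec_data carries an avcC box (AVCDecoderConfigurationRecord):
//   byte 0: configurationVersion
//   byte 1: AVCProfileIndication
//   byte 2: profile_compatibility
//   byte 3: AVCLevelIndication
//   byte 4: reserved (6 bits) + lengthSizeMinusOne (2 bits)
//   byte 5: reserved (3 bits) + numOfSequenceParameterSets (5 bits)
//   then each SPS as a 16-bit big-endian size + NAL, one byte for the PPS
//   count, and each PPS as a 16-bit big-endian size + NAL.
//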

static gboolean gst_learn_avcc_get_nal_len_size(GstLearn* learn) {
  GstMapInfo codec_data_map;
  if (!gst_buffer_map(learn->codec_data, &codec_data_map, GST_MAP_READ)) {
    GST_ERROR_OBJECT(learn, "map codec data failed");
    return FALSE;
  }

  int offset = 4;
  // get nalu length size
  learn->nal_length_size = (codec_data_map.data[offset] & 0x03) + 1;
  GST_INFO_OBJECT(learn, "codec data get nal length size: %d", learn->nal_length_size);
  return TRUE;
}
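
//
// sink pad event handling: on CAPS, require video/x-h264 with codec_data,
// keep the avcC blob and its NAL length size, then try to fix byte-stream
// caps on every requested src pad.
//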

static gboolean gst_learn_sink_event(GstPad* pad, GstObject* parent, GstEvent* event) {
  GstLearn* learn = GST_LEARN(parent);
  gboolean res = TRUE;

  GST_DEBUG_OBJECT(learn, "sink pad recved event: %s", 
      gst_event_type_get_name(GST_EVENT_TYPE(event)));

  switch (GST_EVENT_TYPE(event)) {
    case (GST_EVENT_CAPS): {
      GstCaps* caps;
      gst_event_parse_caps(event, &caps);
      GST_DEBUG_OBJECT(pad, "upstream setting caps %" GST_PTR_FORMAT, caps);

      GstStructure* s = gst_caps_get_structure(caps, 0);
      if (!gst_structure_has_name(s, "video/x-h264")) {
        GST_ERROR_OBJECT(learn, "not video/x-h264, mark negotiation failed");
        gst_event_unref(event);  // caps are owned by the event; drop the event instead
        learn->negotiation = FALSE;
        break;
      }
      
      const GValue* value = gst_structure_get_value(s, "codec_data");
      if (!value) {
        GST_ERROR_OBJECT(learn, "no codec_data found in h264-avcc, mark negotiation failed");
        gst_event_unref(event);  // caps are owned by the event; drop the event instead
        learn->negotiation = FALSE;
        break;
      }
      learn->codec_data = gst_buffer_ref(gst_value_get_buffer(value));
      gst_event_unref(event);

      if (!gst_learn_avcc_get_nal_len_size(learn)) {
        gst_buffer_unref(learn->codec_data);
        learn->codec_data = NULL;
        learn->negotiation = FALSE;
        break;
      }
      learn->negotiation = TRUE;

      // start to negotiation src pads

      GList* src_pads = GST_ELEMENT_CAST(learn)->srcpads;
      while (src_pads) {
        GstPad* src_pad = GST_PAD_CAST(src_pads->data);
        GstLearnPad* learn_pad = GST_LEARN_PAD(src_pad);
        if (!gst_learn_src_negotiation(src_pad)) {
          GST_ERROR_OBJECT(learn_pad, "failed to negotiation request pad");
        } else {
          learn_pad->negotiation = TRUE;
        }
        src_pads = g_list_next(src_pads);
      }
      break;
    }
    case (GST_EVENT_STREAM_START): {
      GST_INFO_OBJECT(learn, "new stream start...");
      res = gst_pad_event_default(pad, parent, event);
      break;
    }
    case (GST_EVENT_TAG): {
      GstTagList* taglist;
      gst_event_parse_tag(event, &taglist);
      if (taglist) {
        gchar* tag_str = gst_tag_list_to_string(taglist);
        GST_INFO_OBJECT(learn, "upstream tag msg: %s", tag_str);
        g_free(tag_str);
      }
      // fall through so the default handler forwards the tag event
    }
    default: {
      res = gst_pad_event_default(pad, parent, event);
      break;
    } 
  }
  return res;
}

static gboolean gst_learn_sink_query(GstPad* pad, GstObject* parent, GstQuery* query) {
  GstLearn* learn = GST_LEARN(parent);
  gboolean res = TRUE;

  switch (GST_QUERY_TYPE(query)) {
    case (GST_QUERY_CAPS): {
      GstCaps* caps = NULL;
      gst_query_parse_caps(query, &caps);
      GST_DEBUG_OBJECT(learn, "upstream negotiation query filter caps %" GST_PTR_FORMAT, caps);

      GstCaps* fixed_caps = gst_static_pad_template_get_caps(&sink_template);
      gst_query_set_caps_result(query, fixed_caps);
      gst_caps_unref(fixed_caps);
      break;
    }
    default: {
      res = gst_pad_query_default(pad, parent, query);
      break;
    }
  }

  return res;
}

static gboolean gst_learn_src_event(GstPad* pad, GstObject* parent, GstEvent* event) {
  GstLearn* learn = GST_LEARN(parent);
  gboolean res = TRUE;

  GST_DEBUG_OBJECT(learn, "src pad recved event: %s", 
      gst_event_type_get_name(GST_EVENT_TYPE(event)));

  switch (GST_EVENT_TYPE(event)) {
    case (GST_EVENT_RECONFIGURE): {
      if (!learn->negotiation) {
        GST_INFO_OBJECT(learn, "wait for sink pad negotiation success");
        break;
      }
      // start to negotiation src pads
      GList* src_pads = GST_ELEMENT_CAST(learn)->srcpads;
      while (src_pads) {
        GstPad* src_pad = GST_PAD_CAST(src_pads->data);
        GstLearnPad* learn_pad = GST_LEARN_PAD(src_pad);
        if (learn_pad->negotiation) {
          src_pads = g_list_next(src_pads);
          continue;
        }
        if (!gst_learn_src_negotiation(src_pad)) {
          GST_ERROR_OBJECT(learn_pad, "failed to negotiation request pad");
        } else {
          learn_pad->negotiation = TRUE;
        }
        src_pads = g_list_next(src_pads);
      }
      break;
    }
    default: {
      res = gst_pad_event_default(pad, parent, event);
      break;
    } 
  }
  return res;
}

static gboolean gst_learn_sink_activate_mode(GstPad* pad, GstObject* parent, 
    GstPadMode mode, gboolean active) {
  gboolean res;
  GstLearn* learn = GST_LEARN(parent);
  switch (mode) {
    case GST_PAD_MODE_PUSH: {
      GST_DEBUG_OBJECT(learn, "activate sink pad to push mode");
      res = TRUE;
      break;
    }
    default: {
      GST_ERROR_OBJECT(learn, "only support push mode on sink pad");
      res = FALSE;
      break;
    }
  }

  return res;
}

static gboolean gst_learn_src_query(GstPad* pad, GstObject* parent, GstQuery* query) {
  GstLearn* learn = GST_LEARN(parent);
  gboolean res = TRUE;

  switch (GST_QUERY_TYPE(query)) {
    default: {
      res = gst_pad_query_default(pad, parent, query);
      break;
    }
  }

  return res;
}

static gboolean gst_learn_src_activate_mode(GstPad * pad, GstObject * parent,
    GstPadMode mode, gboolean active) {
  GstLearn* learn = GST_LEARN(parent);
  gboolean res; 

  switch (mode) {
    case GST_PAD_MODE_PULL: {
      GST_ERROR_OBJECT(learn, "only support activate src pad in push mode"); 
      res = FALSE;
      break;
    }
    case GST_PAD_MODE_PUSH: {
      GST_DEBUG_OBJECT(learn, "activate src pad to push mode");
      // fall through: push mode is accepted by the default branch below
    }
    default: {
      res = TRUE;
      break;
    }
  }

  return res;
}

static GstStateChangeReturn gst_learn_change_state(GstElement* element, 
    GstStateChange transition) {
  GstLearn* learn = GST_LEARN(element);

  GstStateChangeReturn ret = GST_ELEMENT_CLASS(gst_learn_parent_class)->change_state(element, transition);
  
  return ret;
}

static gboolean gst_learn_append_spspps(GstLearn* learn, GstByteWriter* writer) {
  GstMapInfo codec_data_map;
  if (!gst_buffer_map(learn->codec_data, &codec_data_map, GST_MAP_READ)) {
    GST_ERROR_OBJECT(learn, "map codec data failed, mark negotiation failed");
    learn->negotiation = FALSE;
    return FALSE;
  }

  // skip the 5-byte avcC header (version, profile, compat, level, NAL length size)
  int offset = 5;
  // get number sps
  int nb_sps = codec_data_map.data[offset] & 0x1f;
  offset++;

  int i = 0;
  for (i = 0; i < nb_sps; ++i) {
    guint16 sps_size = GST_READ_UINT16_BE(codec_data_map.data + offset);
    offset += 2;
    gst_byte_writer_put_uint32_be(writer, 0x00000001);
    gst_byte_writer_put_data(writer, codec_data_map.data + offset, sps_size);
    offset += sps_size;
  }

  // get number pps
  int nb_pps = codec_data_map.data[offset];
  offset++;
  for (i = 0; i < nb_pps; ++i) {
    guint16 pps_size = GST_READ_UINT16_BE(codec_data_map.data + offset);
    offset += 2;
    gst_byte_writer_put_uint32_be(writer, 0x00000001);
    gst_byte_writer_put_data(writer, codec_data_map.data + offset, pps_size);
    offset += pps_size;
  }
  gst_buffer_unmap(learn->codec_data, &codec_data_map);

  return TRUE;
}

enum GstLearnNaluType {
  nalu_type_unknown = 0,
  nalu_type_sps = 1,
  nalu_type_pps = 2,
  nalu_type_sei = 3,
  nalu_type_idr = 4,
  nalu_type_non_idr = 5,
  nalu_type_aud = 6,
};

static enum GstLearnNaluType gst_learn_h264_nal_type(guint8 naltype) {
  if ((naltype & 0x1f) == 1) {
    return nalu_type_non_idr;
  } else if ((naltype & 0x1f) == 5) {
    return nalu_type_idr;
  } else if ((naltype & 0x1f) == 6) {
    return nalu_type_sei;
  } else if ((naltype & 0x1f) == 7) {
    return nalu_type_sps;
  } else if ((naltype & 0x1f) == 8) {
    return nalu_type_pps;
  } else if ((naltype & 0x1f) == 9) {
    return nalu_type_aud; 
  } else {
    return nalu_type_unknown; 
  }
}
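
//
// Convert one AVC (length-prefixed) buffer to Annex B: each NAL length field
// is replaced by a 0x00000001 start code, and the SPS/PPS from codec_data are
// re-emitted in front of IDR slices so every keyframe is self-describing.
//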

static GstBuffer* gst_learn_h264_avcc_to_annexb(GstLearn* learn, GstBuffer* in) {
  int i;
  GstBuffer* out = NULL;
  GstByteWriter writer;

  gst_byte_writer_init_with_size(&writer, gst_buffer_get_size(in) * 2, FALSE);

  GstMapInfo in_map;
  if (!gst_buffer_map(in, &in_map, GST_MAP_READ)) {
    GST_ERROR_OBJECT(learn, "map input buffer failed");
    gst_byte_writer_reset(&writer);  // free the writer's buffer before bailing out
    return out; 
  }

  int buf_offset = 0;
  while (buf_offset < in_map.size) {
    guint32 nal_size = 0;
    switch (learn->nal_length_size) {
      case 1:
        nal_size = GST_READ_UINT8(in_map.data + buf_offset);
        break;
      case 2:
        nal_size = GST_READ_UINT16_BE(in_map.data + buf_offset);
        break;
      case 4:
        nal_size = GST_READ_UINT32_BE(in_map.data + buf_offset);
        break;
      default:
        GST_WARNING_OBJECT(learn, "unsupport nal length size: %d", learn->nal_length_size);
        break;
    }
    buf_offset += learn->nal_length_size;

    enum GstLearnNaluType nal_type = gst_learn_h264_nal_type(*(in_map.data+buf_offset));
    if (nal_type == nalu_type_idr) {
      if (!gst_learn_append_spspps(learn, &writer)) {
        GST_WARNING_OBJECT(learn, "append spspps failed");
      }
    }

    gst_byte_writer_put_uint32_be(&writer, 0x00000001);
    gst_byte_writer_put_data(&writer, in_map.data + buf_offset, MIN(nal_size, in_map.size - buf_offset));
    buf_offset += nal_size;
  }
  gst_buffer_unmap(in, &in_map);

  out = gst_byte_writer_reset_and_get_buffer(&writer);
  gst_buffer_copy_into(out, in, GST_BUFFER_COPY_METADATA, 0, 0);

  return out;
}

static GstFlowReturn gst_learn_chain(GstPad* pad, GstObject* parent, GstBuffer* buffer) {
  GstLearn* learn = GST_LEARN(parent);
  GST_DEBUG_OBJECT(learn, "received buffer %p", buffer);

  if (!learn->negotiation) {
    GST_ERROR_OBJECT(learn, "not negotiated with upstream");
    gst_buffer_unref(buffer);
    return GST_FLOW_ERROR;
  }

  GstBuffer* annexb_buffer = gst_learn_h264_avcc_to_annexb(learn, buffer);
  if (!annexb_buffer) {
    GST_WARNING_OBJECT(learn, "convert h264 from avcc to annexb failed");
    gst_buffer_unref(buffer);
    return GST_FLOW_OK;
  }

  GstFlowReturn res = GST_FLOW_OK;
  GList* pads = GST_ELEMENT_CAST(learn)->srcpads;
  while (pads) {
    GstPad* pad = GST_PAD_CAST(pads->data);
    GstLearnPad* src_pad = GST_LEARN_PAD(pad);

    if (!src_pad->negotiation) {
      GST_ERROR_OBJECT(src_pad, "src pad negotiation failed");
      pads = g_list_next(pads);
      continue;
    }

    GST_LOG_OBJECT(pad, "starting to push buffer %p at pts %" GST_TIME_FORMAT
        ", dts %" GST_TIME_FORMAT"", annexb_buffer, 
        GST_TIME_ARGS(GST_BUFFER_PTS(annexb_buffer)), 
        GST_TIME_ARGS(GST_BUFFER_DTS(annexb_buffer)));

    res = gst_pad_push(pad, gst_buffer_ref(annexb_buffer));
    if (res != GST_FLOW_OK) {
      GST_ERROR_OBJECT(pad, "push buffer received error %s", gst_flow_get_name(res));
      break;  // annexb_buffer is unreffed once after the loop
    }

    pads = g_list_next(pads);
  }
  gst_mini_object_unref(GST_MINI_OBJECT_CAST(annexb_buffer));
  gst_mini_object_unref(GST_MINI_OBJECT_CAST(buffer));

  return res;
}

static GstFlowReturn gst_learn_chain_list (GstPad * pad, GstObject * parent, 
    GstBufferList * list) {
  GST_DEBUG_OBJECT(pad, "received buffer list %p, discard", list);
  gst_mini_object_unref(GST_MINI_OBJECT_CAST(list));
  return GST_FLOW_OK;
}

gboolean gstlearn_plugin_init(GstPlugin* plugin) {
  return gst_element_register(plugin, "learn", 
      GST_RANK_SECONDARY, gst_learn_get_type());
}

3.2 Test code

The test code reads an FLV file from disk, demuxes the H.264 stream out of it, packetizes it as RTP over UDP and sends it out over the network.
The program adds one branch at startup and a second branch one second later, so it streams to two different destination ports.

#include <gst/gst.h>
#include <stdlib.h>

GstElement* pipeline;
GstElement* source;
GstElement* flvdemux;
GstElement* learn;
GstElement* first_queue;
GstElement* first_rtppack;
GstElement* first_udpsink;
GstElement* second_queue;
GstElement* second_rtppack;
GstElement* second_udpsink;

static GMainLoop* main_loop;

gchar* host;
guint16 start_port;

static void pipeline_message(GstBus* _bus, GstMessage* msg, void* data) {
  switch (GST_MESSAGE_TYPE(msg)) {
    case (GST_MESSAGE_STATE_CHANGED): {
      GstState old_state, new_state, pending_state;
      gst_message_parse_state_changed(msg, 
          &old_state, &new_state, &pending_state);
      g_print("bus message: pipeline state changed from %s to %s\n", 
          gst_element_state_get_name(old_state), 
          gst_element_state_get_name(new_state));
      break;
    }
    case (GST_MESSAGE_ASYNC_DONE): {
      g_print("bus message: return async done\n");
      break;
    }
    case (GST_MESSAGE_ERROR): {
      GError* err;
      gchar* debug_info;
      gst_message_parse_error(msg, &err, &debug_info);
      g_print("error received from element %s : %s\n",
	  GST_OBJECT_NAME(msg->src), 
	  err->message);
      g_print("debugging information %s\n", (debug_info ? debug_info : "none"));
      g_clear_error(&err);
      g_free(debug_info);
      break;
    }
    case GST_MESSAGE_EOS:
      g_print ("end-of-stream reached\n");
      break;
    default: {
      g_print("bus message: %s send %s\n", 
          GST_MESSAGE_SRC_NAME(msg), GST_MESSAGE_TYPE_NAME(msg));
      break;
    }
  }
}
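
//
// build one output branch (queue ! rtph264pay ! udpsink), add it to the running
// pipeline, request a src_%u pad from learn, link it to the queue and sync the
// new elements with the pipeline state
//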

gboolean add_new_branch(GstElement** queue, GstElement** rtppack, 
    GstElement** sink, guint rtp_port, const char* name) {
  g_print("\nstart add new branch: %s\n\n", name);

  gchar* tmp_name = g_strdup_printf("%s_queue", name);
  *queue = gst_element_factory_make("queue", tmp_name);
  g_free(tmp_name);
  /**
  g_object_set(*queue, "max-size-time", 0, NULL);
  g_object_set(*queue, "max-size-bytes", 0, NULL);
  g_object_set(*queue, "max-size-buffers", 0, NULL);
  **/

  tmp_name = g_strdup_printf("%s_rtppack", name);
  *rtppack = gst_element_factory_make("rtph264pay", tmp_name);
  g_free(tmp_name);

  tmp_name = g_strdup_printf("%s_sink", name);
  *sink = gst_element_factory_make("udpsink", tmp_name);
  g_free(tmp_name);

  g_object_set(*rtppack, "ssrc", 0x1111, NULL);
  g_object_set(*rtppack, "pt", 96, NULL);
  g_object_set(*rtppack, "config-interval", -1, NULL);
  g_object_set(*rtppack, "aggregate-mode", 1, NULL);

  g_object_set(*sink, "host", host, NULL);
  g_object_set(*sink, "port", rtp_port, NULL);

  gst_bin_add_many(GST_BIN(pipeline), *queue, *rtppack, *sink, NULL);
  if (!gst_element_link_many(*queue, *rtppack, *sink, NULL)) {
    g_printerr("new branch link element failed\n");
    return FALSE;
  }

  GstPad* learn_pad = gst_element_get_request_pad(learn, "src_%u");
  GstPad* queue_pad = gst_element_get_static_pad(*queue, "sink");
  if (gst_pad_link(learn_pad, queue_pad) != GST_PAD_LINK_OK) {
    g_printerr("elements pads link failed\n");
    return FALSE;
  }
  gst_object_unref(learn_pad);
  gst_object_unref(queue_pad);

  gst_element_sync_state_with_parent(*queue);
  gst_element_sync_state_with_parent(*rtppack);
  gst_element_sync_state_with_parent(*sink);

  g_print("new branch: %s add succeed\n", name);

  return TRUE;
}

static gboolean add_second_branch(gpointer userdata) {
  if (!add_new_branch(&second_queue, &second_rtppack, &second_udpsink, start_port+2, "second branch")) {
    g_printerr("add second branch failed\n");
  }
  return FALSE;
}

static void pad_added_handler(GstElement *src, GstPad *new_pad, void* userdata) {
  g_print("received new pad '%s' from '%s':\n", GST_PAD_NAME(new_pad), GST_ELEMENT_NAME(src));

  GstCaps* new_pad_caps = NULL;
  GstPad *sink_pad = gst_element_get_static_pad(learn, "sink");
  // if already linked, we have nothing to do here
  if (gst_pad_is_linked(sink_pad)) {
    g_print("we are already linked. ignoring.\n");
    goto exit;
  }

  // check the new pad's type
  new_pad_caps = gst_pad_get_current_caps(new_pad);
  GstStructure* new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
  const gchar* new_pad_type = gst_structure_get_name(new_pad_struct);
  // if (!g_str_has_prefix (new_pad_type, "audio/mpeg")) 
  if (!g_str_has_prefix(new_pad_type, "video/x-h264")) {
    g_print("it has type '%s' which is not raw audio, ignoring\n", new_pad_type);
    goto exit;
  }

  // try link
  GstPadLinkReturn ret = gst_pad_link(new_pad, sink_pad);
  if (GST_PAD_LINK_FAILED(ret)) {
    g_print("type is '%s' but link failed\n", new_pad_type);
  } else {
    g_print("link succeeded (type '%s')\n", new_pad_type);
  }

exit:
  // unreference the new pad's caps, if we got them
  if (new_pad_caps != NULL)
    gst_caps_unref(new_pad_caps);

  // unreference the sink pad
  gst_object_unref(sink_pad);
}
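
//
// register the learn plugin statically so the "learn" factory is available to
// gst_element_factory_make() without installing a separate plugin .so
//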

extern gboolean gstlearn_plugin_init(GstPlugin* plugin);
void load_learn_plugin() {
  gst_plugin_register_static(
    GST_VERSION_MAJOR, 
    GST_VERSION_MINOR, 
    "learn",
    "gst plugin for learn",
    gstlearn_plugin_init,
    "1.0",
    "LGPL",
    "test just",
    "good",
    "url"
  );
}

int main(int argc, char *argv[]) {
  if (argc < 3) {
    g_print("usage: ./mytest 127.0.0.1 10002\n");
    exit(0);
  }

  // fetch host and port
  host = g_strdup(argv[1]);
  start_port = atoi(argv[2]);

  // initialize gstreamer
  gst_init(&argc, &argv);

  load_learn_plugin();

  pipeline = gst_pipeline_new("test-pipeline");
  source = gst_element_factory_make("filesrc", "file_src");
  flvdemux = gst_element_factory_make("flvdemux", "flvdemux");
  learn = gst_element_factory_make("learn", "mylearn");

  gst_bin_add_many(GST_BIN(pipeline), source, flvdemux, learn, NULL);
  if (!gst_element_link(source, flvdemux)) {
    g_printerr ("elements could not be linked\n");
    return -1;
  }

  // set the uri to play
  g_object_set(source, "location", "./test.flv", NULL);

  // connect to the pad-added signal
  g_signal_connect(flvdemux, "pad-added", G_CALLBACK(pad_added_handler), NULL);

  // add first branch
  if (!add_new_branch(&first_queue, &first_rtppack, &first_udpsink, start_port, "first branch")) {
    g_printerr("add first branch failed\n");
    return -1;
  }

  // add timer for second branch
  guint timeout = 1000;
  guint second_branch_timer_id = g_timeout_add(timeout, add_second_branch, NULL);

  // start playing
  GstStateChangeReturn ret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr("unable to set the pipeline to the playing state %d\n", ret);
    return -1;
  }

  GstBus* bus = gst_element_get_bus(pipeline);
  gst_bus_add_signal_watch(bus);
  gulong bus_signal_handler = g_signal_connect(bus, "message", G_CALLBACK(pipeline_message), NULL);

  g_print("start main looping...\n");
  main_loop = g_main_loop_new(NULL, FALSE);
  g_main_loop_run(main_loop);

  gst_object_unref(bus);
  gst_element_set_state(pipeline, GST_STATE_NULL);
  gst_object_unref(pipeline);

  return 0;
}

3.3 Building and running

First build the GStreamer shared libraries, then compile with the following shell script:

#!/bin/bash

export PKG_CONFIG_PATH=/your_path/gstreamer/lib64/pkgconfig
export GST_PLUGIN_PATH=/your_path/gstreamer/lib64:/your_path/gstreamer/lib64/gstreamer-1.0
export LD_LIBRARY_PATH=/your_path/gstreamer/lib64:/your_path/gstreamer/lib64/gstreamer-1.0:/your_path/gstreamer/lib

gcc learn_test.c gstlearn.c -o mytest `pkg-config --cflags --libs gstreamer-1.0 gstreamer-app-1.0`

Before running, export the LD_LIBRARY_PATH shown above in your terminal so the shared libraries can be loaded at runtime (a run sketch follows below).
By default the test program opens test.flv from the current directory; the file can be downloaded here (it is the test video from SRS):
https://files.cnblogs.com/files/blogs/721434/test.zip?t=1657779132
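
For example, assuming GStreamer was installed under /your_path/gstreamer as in the script above (the paths are placeholders), a run could look like this; GST_DEBUG enables the plugin's own "learn" debug category:

export LD_LIBRARY_PATH=/your_path/gstreamer/lib64:/your_path/gstreamer/lib64/gstreamer-1.0:/your_path/gstreamer/lib
GST_DEBUG=learn:5 ./mytest 127.0.0.1 10002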

To play the stream with ffplay, create an .sdp file with the following contents:

v=0
o=- 0 0 IN IP4 192.168.0.106
s=rtp
c=IN IP4 192.168.0.106
m=video 10002 RTP/AVP 96
a=rtpmap:96 H264/90000
a=fmtp:96 packetization-mode=1

Then play it with ffplay:

ffplay -protocol_whitelist "file,rtp,udp,tcp" rtph264_over_udp.sdp

4. Summary

The learn plugin above still ignores a number of issues, such as remaining memory leaks, crashes under multithreading, and graceful shutdown, but none of that stops it from serving as a simple example to learn from.
Later articles in this series will use this plugin as the reference for further discussion.
