diff options
author | Alfred E. Heggestad <aeh@db.org> | 2014-12-25 17:36:37 +0100 |
---|---|---|
committer | Alfred E. Heggestad <aeh@db.org> | 2014-12-25 17:36:37 +0100 |
commit | f55560474869956b374606973e4633206e9bf0da (patch) | |
tree | 21f0e05840a94e5a2598c1b2e3b43ec6e279b594 /modules/gst_video | |
parent | 5e4499b1543f62ba82f9e4f66c921bafe24bf497 (diff) |
gst_video: new vidcodec module using gstreamer
The original code was based on avcodec, and was written by
Victor Sergienko and Fadeev Alexander. Later the code was
reviewed by Alfred and cleaned up a bit.
This module can use hardware encoding of H.264 for
devices that support it (via GStreamer), by tweaking
the pipeline string a bit.
Diffstat (limited to 'modules/gst_video')
-rw-r--r-- | modules/gst_video/encode.c | 534 | ||||
-rw-r--r-- | modules/gst_video/gst_video.c | 64 | ||||
-rw-r--r-- | modules/gst_video/gst_video.h | 32 | ||||
-rw-r--r-- | modules/gst_video/h264.c | 160 | ||||
-rw-r--r-- | modules/gst_video/module.mk | 12 | ||||
-rw-r--r-- | modules/gst_video/sdp.c | 53 |
6 files changed, 855 insertions, 0 deletions
diff --git a/modules/gst_video/encode.c b/modules/gst_video/encode.c new file mode 100644 index 0000000..50b9e48 --- /dev/null +++ b/modules/gst_video/encode.c @@ -0,0 +1,534 @@ +/** + * @file gst_video/encode.c Video codecs using Gstreamer video pipeline + * + * Copyright (C) 2010 - 2013 Creytiv.com + * Copyright (C) 2014 Fadeev Alexander + */ + +#define __USE_POSIX199309 +#define _BSD_SOURCE 1 +#include <stdlib.h> +#include <string.h> +#include <sys/time.h> +#include <unistd.h> +#include <pthread.h> +#include <re.h> +#include <rem.h> +#include <baresip.h> +#include <gst/gst.h> +#include <gst/video/video.h> +#include <gst/app/gstappsrc.h> +#include "gst_video.h" + + +struct videnc_state { + + struct vidsz size; + unsigned fps; + unsigned bitrate; + unsigned pktsize; + + struct { + uint32_t packetization_mode; + uint32_t profile_idc; + uint32_t profile_iop; + uint32_t level_idc; + uint32_t max_fs; + uint32_t max_smbps; + } h264; + + videnc_packet_h *pkth; + void *pkth_arg; + + /* Gstreamer */ + GstElement *pipeline, *source, *sink; + GstBus *bus; + gulong need_data_handler; + gulong enough_data_handler; + gulong new_buffer_handler; + bool gst_inited; + + /* Main loop thread. */ + int run; + pthread_t tid; + + /* Thread synchronization. */ + pthread_mutex_t mutex; + pthread_cond_t wait; + int bwait; +}; + + +static void gst_encoder_close(struct videnc_state *st); + + +static void internal_bus_watch_handler(struct videnc_state *st) +{ + GError *err; + gchar *d; + GstMessage *msg = gst_bus_pop(st->bus); + + if (!msg) { + /* take a nap (300ms) */ + usleep(300 * 1000); + return; + } + + switch (GST_MESSAGE_TYPE(msg)) { + + case GST_MESSAGE_EOS: + + /* XXX decrementing repeat count? 
*/ + + /* Re-start stream */ + gst_element_set_state(st->pipeline, GST_STATE_NULL); + gst_element_set_state(st->pipeline, GST_STATE_PLAYING); + break; + + case GST_MESSAGE_ERROR: + gst_message_parse_error(msg, &err, &d); + + warning("gst_video: Error: %d(%m) message=%s\n", err->code, + err->code, err->message); + warning("gst_video: Debug: %s\n", d); + + g_free(d); + g_error_free(err); + + st->run = FALSE; + break; + + default: + break; + } + + gst_message_unref(msg); +} + + +static void *internal_thread(void *arg) +{ + struct videnc_state *st = arg; + + /* Now set to playing and iterate. */ + debug("gst_video: Setting pipeline to PLAYING\n"); + + gst_element_set_state(st->pipeline, GST_STATE_PLAYING); + + while (st->run) { + internal_bus_watch_handler(st); + } + + debug("gst_video: Pipeline thread was stopped.\n"); + + return NULL; +} + + +static void internal_appsrc_start_feed(GstElement * pipeline, guint size, + struct videnc_state *st) +{ + (void)pipeline; + (void)size; + + if (!st) + return; + + pthread_mutex_lock(&st->mutex); + st->bwait = FALSE; + pthread_cond_signal(&st->wait); + pthread_mutex_unlock(&st->mutex); +} + + +static void internal_appsrc_stop_feed(GstElement * pipeline, + struct videnc_state *st) +{ + (void)pipeline; + + if (!st) + return; + + pthread_mutex_lock(&st->mutex); + st->bwait = TRUE; + pthread_mutex_unlock(&st->mutex); +} + + +/* The appsink has received a buffer */ +static void internal_appsink_new_buffer(GstElement *sink, + struct videnc_state *st) +{ + GstBuffer *buffer; + + if (!st) + return; + + /* Retrieve the buffer */ + g_signal_emit_by_name(sink, "pull-buffer", &buffer); + + if (buffer) { + guint8 *data = GST_BUFFER_DATA(buffer); + guint size = GST_BUFFER_SIZE(buffer); + + h264_packetize(data, size, st->pktsize, + st->pkth, st->pkth_arg); + + gst_buffer_unref(buffer); + } +} + + +/** + * Set up the Gstreamer pipeline. Appsrc gets raw frames, and appsink takes + * encoded frames. 
+ * + * The pipeline looks like this: + * + * <pre> + * .--------. .-----------. .----------. + * | appsrc | | x264enc | | appsink | + * | .----| |----. .---| |----. | + * | |src |-->|sink| |src|-->|sink|-----+-->handoff + * | '----| |----' '---| |----' | handler + * '--------' '-----------' '----------' + * </pre> + */ +static int gst_encoder_init(struct videnc_state *st, int width, int height, + int framerate, int bitrate) +{ + GError* gerror = NULL; + char pipeline[1024]; + int err = 0; + + gst_encoder_close(st); + + snprintf(pipeline, sizeof(pipeline), + "appsrc name=source is-live=TRUE block=TRUE do-timestamp=TRUE ! " + "videoparse width=%d height=%d format=i420 framerate=%d/1 ! " + "x264enc byte-stream=TRUE rc-lookahead=0" + " sync-lookahead=0 bitrate=%d ! " + "appsink name=sink emit-signals=TRUE drop=TRUE", + width, height, framerate, bitrate / 1000 /* kbit/s */); + + debug("gst_video: format: yu12 = yuv420p = i420\n"); + + /* Initialize pipeline. */ + st->pipeline = gst_parse_launch(pipeline, &gerror); + if (gerror) { + warning("gst_video: launch error: %s: %s\n", + gerror->message, pipeline); + err = gerror->code; + g_error_free(gerror); + goto out; + } + + st->source = gst_bin_get_by_name(GST_BIN(st->pipeline), "source"); + st->sink = gst_bin_get_by_name(GST_BIN(st->pipeline), "sink"); + if (!st->source || !st->sink) { + warning("gst_video: failed to get source or sink" + " pipeline elements\n"); + err = ENOMEM; + goto out; + } + + /* Configure appsource */ + st->need_data_handler = g_signal_connect(st->source, "need-data", + G_CALLBACK(internal_appsrc_start_feed), st); + st->enough_data_handler = g_signal_connect(st->source, "enough-data", + G_CALLBACK(internal_appsrc_stop_feed), st); + + /* Configure appsink. 
*/ + st->new_buffer_handler = g_signal_connect(st->sink, "new-buffer", + G_CALLBACK(internal_appsink_new_buffer), st); + + /********************* Misc **************************/ + + /* Bus watch */ + st->bus = gst_pipeline_get_bus(GST_PIPELINE(st->pipeline)); + + /********************* Thread **************************/ + + /* Synchronization primitives. */ + pthread_mutex_init(&st->mutex, NULL); + pthread_cond_init(&st->wait, NULL); + st->bwait = FALSE; + + err = gst_element_set_state(st->pipeline, GST_STATE_PLAYING); + if (GST_STATE_CHANGE_FAILURE == err) { + g_warning("set state returned GST_STATE_CHANGE_FAILUER\n"); + } + + /* Launch thread with gstreamer loop. */ + st->run = true; + err = pthread_create(&st->tid, NULL, internal_thread, st); + if (err) { + st->run = false; + goto out; + } + + st->gst_inited = true; + + out: + return err; +} + + +static int gst_video_push(struct videnc_state *st, const uint8_t *src, + size_t size) +{ + GstBuffer *buffer; + int ret = 0; + + if (!st) { + return EINVAL; + } + + if (!size) { + warning("gst_video: push: eos returned %d at %d\n", + ret, __LINE__); + gst_app_src_end_of_stream((GstAppSrc *)st->source); + return ret; + } + + /* Wait "start feed". */ + pthread_mutex_lock(&st->mutex); + if (st->bwait) { +#define WAIT_TIME_SECONDS 5 + struct timespec ts; + struct timeval tp; + gettimeofday(&tp, NULL); + ts.tv_sec = tp.tv_sec; + ts.tv_nsec = tp.tv_usec * 1000; + ts.tv_sec += WAIT_TIME_SECONDS; + /* Wait. 
*/ + ret = pthread_cond_timedwait(&st->wait, &st->mutex, &ts); + if (ETIMEDOUT == ret) { + warning("gst_video: Raw frame is lost" + " because of timeout\n"); + return ret; + } + } + pthread_mutex_unlock(&st->mutex); + + /* Create a new empty buffer */ + buffer = gst_buffer_new(); + GST_BUFFER_MALLOCDATA(buffer) = (guint8 *)src; + GST_BUFFER_SIZE(buffer) = (guint)size; + GST_BUFFER_DATA(buffer) = GST_BUFFER_MALLOCDATA(buffer); + + ret = gst_app_src_push_buffer((GstAppSrc *)st->source, buffer); + + if (ret != GST_FLOW_OK) { + warning("gst_video: push buffer returned" + " %d for %d bytes \n", ret, size); + return ret; + } + + return ret; +} + + +static void gst_encoder_close(struct videnc_state *st) +{ + if (!st) + return; + + st->gst_inited = false; + + /* Remove asynchronous callbacks to prevent using gst_video_t + context ("st") after releasing. */ + if (st->source) { + g_signal_handler_disconnect(st->source, + st->need_data_handler); + g_signal_handler_disconnect(st->source, + st->enough_data_handler); + } + if (st->sink) { + g_signal_handler_disconnect(st->sink, st->new_buffer_handler); + } + + /* Stop thread. 
*/ + if (st->run) { + st->run = false; + pthread_join(st->tid, NULL); + } + + if (st->source) { + gst_object_unref(GST_OBJECT(st->source)); + st->source = NULL; + } + if (st->sink) { + gst_object_unref(GST_OBJECT(st->sink)); + st->sink = NULL; + } + if (st->bus) { + gst_object_unref(GST_OBJECT(st->bus)); + st->bus = NULL; + } + + if (st->pipeline) { + gst_element_set_state(st->pipeline, GST_STATE_NULL); + gst_object_unref(GST_OBJECT(st->pipeline)); + st->pipeline = NULL; + } +} + + +static void encode_destructor(void *arg) +{ + struct videnc_state *st = arg; + + gst_encoder_close(st); +} + + +static int decode_sdpparam_h264(struct videnc_state *st, const struct pl *name, + const struct pl *val) +{ + if (0 == pl_strcasecmp(name, "packetization-mode")) { + st->h264.packetization_mode = pl_u32(val); + + if (st->h264.packetization_mode != 0) { + warning("gst_video: illegal packetization-mode %u\n", + st->h264.packetization_mode); + return EPROTO; + } + } + else if (0 == pl_strcasecmp(name, "profile-level-id")) { + struct pl prof = *val; + if (prof.l != 6) { + warning("gst_video: invalid profile-level-id (%r)\n", + val); + return EPROTO; + } + + prof.l = 2; + st->h264.profile_idc = pl_x32(&prof); prof.p += 2; + st->h264.profile_iop = pl_x32(&prof); prof.p += 2; + st->h264.level_idc = pl_x32(&prof); + } + else if (0 == pl_strcasecmp(name, "max-fs")) { + st->h264.max_fs = pl_u32(val); + } + else if (0 == pl_strcasecmp(name, "max-smbps")) { + st->h264.max_smbps = pl_u32(val); + } + + return 0; +} + + +static void param_handler(const struct pl *name, const struct pl *val, + void *arg) +{ + struct videnc_state *st = arg; + + (void)decode_sdpparam_h264(st, name, val); +} + + +int gst_video_encode_update(struct videnc_state **vesp, + const struct vidcodec *vc, + struct videnc_param *prm, const char *fmtp) +{ + struct videnc_state *ves; + int err = 0; + + if (!vesp || !vc || !prm) + return EINVAL; + + ves = *vesp; + + if (!ves) { + + ves = mem_zalloc(sizeof(*ves), 
encode_destructor); + if (!ves) + return ENOMEM; + + *vesp = ves; + } + else { + if (ves->gst_inited && (ves->bitrate != prm->bitrate || + ves->pktsize != prm->pktsize || + ves->fps != prm->fps)) { + gst_encoder_close(ves); + } + } + + if (str_isset(fmtp)) { + struct pl sdp_fmtp; + + pl_set_str(&sdp_fmtp, fmtp); + + fmt_param_apply(&sdp_fmtp, param_handler, ves); + } + + ves->bitrate = prm->bitrate; + ves->pktsize = prm->pktsize; + ves->fps = prm->fps; + + info("gst_video: video encoder %s: %d fps, %d bit/s, pktsize=%u\n", + vc->name, prm->fps, prm->bitrate, prm->pktsize); + + return err; +} + + +int gst_video_encode(struct videnc_state *st, bool update, + const struct vidframe *frame, + videnc_packet_h *pkth, void *arg) +{ + uint8_t *data; + size_t size; + int height; + int err; + + if (!st || !frame || !pkth || frame->fmt != VID_FMT_YUV420P) + return EINVAL; + + if (!st->gst_inited || !vidsz_cmp(&st->size, &frame->size)) { + + err = gst_encoder_init(st, frame->size.w, frame->size.h, + st->fps, st->bitrate); + + if (err) { + warning("gst_video codec: gst_video_alloc failed\n"); + return err; + } + + st->pkth = pkth; + st->pkth_arg = arg; + + /* To detect if requested size was changed. */ + st->size = frame->size; + } + + if (update) { + debug("gst_video: gstreamer picture update" + ", it's not implemented...\n"); + } + + height = frame->size.h; + + /* NOTE: I420 (YUV420P): hardcoded. */ + size = frame->linesize[0] * height + + frame->linesize[1] * height * 0.5 + + frame->linesize[2] * height * 0.5; + + data = malloc(size); /* XXX: memory-leak ? */ + if (!data) + return ENOMEM; + + size = 0; + + /* XXX: avoid memcpy here ? 
*/ + memcpy(&data[size], frame->data[0], frame->linesize[0] * height); + size += frame->linesize[0] * height; + memcpy(&data[size], frame->data[1], frame->linesize[1] * height * 0.5); + size += frame->linesize[1] * height * 0.5; + memcpy(&data[size], frame->data[2], frame->linesize[2] * height * 0.5); + size += frame->linesize[2] * height * 0.5; + + return gst_video_push(st, data, size); +} diff --git a/modules/gst_video/gst_video.c b/modules/gst_video/gst_video.c new file mode 100644 index 0000000..b04dbe4 --- /dev/null +++ b/modules/gst_video/gst_video.c @@ -0,0 +1,64 @@ +/** + * @file gst_video.c Video codecs using Gstreamer + * + * Copyright (C) 2010 Creytiv.com + * Copyright (C) 2014 Fadeev Alexander + */ + +#include <re.h> +#include <rem.h> +#include <baresip.h> +#include <gst/gst.h> +#include "gst_video.h" + + +/** + * @defgroup gst_video gst_video + * + * This module implements video codecs using Gstreamer. + * + * Currently only H.264 encoding is supported, but this can be extended + * if needed. No decoding is done by this module, so that must be done by + * another video-codec module. + * + * Thanks to Victor Sergienko and Fadeev Alexander for the + * initial version, which was based on avcodec module. 
+ */ + + +static struct vidcodec h264 = { + .name = "H264", + .variant = "packetization-mode=0", + .encupdh = gst_video_encode_update, + .ench = gst_video_encode, + .fmtp_ench = gst_video_fmtp_enc, + .fmtp_cmph = gst_video_fmtp_cmp, +}; + + +static int module_init(void) +{ + gst_init(NULL, NULL); + + vidcodec_register(&h264); + + info("gst_video: using gstreamer H.264 encoder\n"); + + return 0; +} + + +static int module_close(void) +{ + vidcodec_unregister(&h264); + + return 0; +} + + +EXPORT_SYM const struct mod_export DECL_EXPORTS(gst_video) = { + "gst_video", + "vidcodec", + module_init, + module_close +}; diff --git a/modules/gst_video/gst_video.h b/modules/gst_video/gst_video.h new file mode 100644 index 0000000..b77e979 --- /dev/null +++ b/modules/gst_video/gst_video.h @@ -0,0 +1,32 @@ +/** + * @file gst_video.h Gstreamer video pipeline -- internal API + * + * Copyright (C) 2010 - 2014 Creytiv.com + * Copyright (C) 2014 Fadeev Alexander + */ + + +/* Encode */ +struct videnc_state; + +int gst_video_encode_update(struct videnc_state **vesp, + const struct vidcodec *vc, + struct videnc_param *prm, const char *fmtp); +int gst_video_encode(struct videnc_state *st, bool update, + const struct vidframe *frame, + videnc_packet_h *pkth, void *arg); + + +/* SDP */ +uint32_t gst_video_h264_packetization_mode(const char *fmtp); +int gst_video_fmtp_enc(struct mbuf *mb, const struct sdp_format *fmt, + bool offer, void *arg); +bool gst_video_fmtp_cmp(const char *fmtp1, const char *fmtp2, void *data); + + +/* H.264 */ +extern const uint8_t h264_level_idc; + +int h264_packetize(const uint8_t *buf, size_t len, + size_t pktsize, + videnc_packet_h *pkth, void *arg); diff --git a/modules/gst_video/h264.c b/modules/gst_video/h264.c new file mode 100644 index 0000000..6f314e8 --- /dev/null +++ b/modules/gst_video/h264.c @@ -0,0 +1,160 @@ +/** + * @file gst_video/h264.c H.264 Packetization + * + * Copyright (C) 2010 Creytiv.com + */ +#include <string.h> +#include <re.h> +#include 
<rem.h> +#include <baresip.h> +#include "gst_video.h" + + +/** NAL unit types (RFC 3984, Table 1) */ +enum { + H264_NAL_UNKNOWN = 0, + /* 1-23 NAL unit Single NAL unit packet per H.264 */ + H264_NAL_SLICE = 1, + H264_NAL_DPA = 2, + H264_NAL_DPB = 3, + H264_NAL_DPC = 4, + H264_NAL_IDR_SLICE = 5, + H264_NAL_SEI = 6, + H264_NAL_SPS = 7, + H264_NAL_PPS = 8, + H264_NAL_AUD = 9, + H264_NAL_END_SEQUENCE = 10, + H264_NAL_END_STREAM = 11, + H264_NAL_FILLER_DATA = 12, + H264_NAL_SPS_EXT = 13, + H264_NAL_AUX_SLICE = 19, + + H264_NAL_STAP_A = 24, /**< Single-time aggregation packet */ + H264_NAL_STAP_B = 25, /**< Single-time aggregation packet */ + H264_NAL_MTAP16 = 26, /**< Multi-time aggregation packet */ + H264_NAL_MTAP24 = 27, /**< Multi-time aggregation packet */ + H264_NAL_FU_A = 28, /**< Fragmentation unit */ + H264_NAL_FU_B = 29, /**< Fragmentation unit */ +}; + + +const uint8_t h264_level_idc = 0x0c; + + +/* + * Find the NAL start sequence in a H.264 byte stream + * + * @note: copied from ffmpeg source + */ +static const uint8_t *h264_find_startcode(const uint8_t *p, const uint8_t *end) +{ + const uint8_t *a = p + 4 - ((long)p & 3); + + for (end -= 3; p < a && p < end; p++ ) { + if (p[0] == 0 && p[1] == 0 && p[2] == 1) + return p; + } + + for (end -= 3; p < end; p += 4) { + uint32_t x = *(const uint32_t*)(void *)p; + if ( (x - 0x01010101) & (~x) & 0x80808080 ) { + if (p[1] == 0 ) { + if ( p[0] == 0 && p[2] == 1 ) + return p; + if ( p[2] == 0 && p[3] == 1 ) + return p+1; + } + if ( p[3] == 0 ) { + if ( p[2] == 0 && p[4] == 1 ) + return p+2; + if ( p[4] == 0 && p[5] == 1 ) + return p+3; + } + } + } + + for (end += 3; p < end; p++) { + if (p[0] == 0 && p[1] == 0 && p[2] == 1) + return p; + } + + return end + 3; +} + + +static int rtp_send_data(const uint8_t *hdr, size_t hdr_sz, + const uint8_t *buf, size_t sz, bool eof, + videnc_packet_h *pkth, void *arg) +{ + return pkth(eof, hdr, hdr_sz, buf, sz, arg); +} + + +static int h264_nal_send(bool first, bool last, + bool 
marker, uint32_t ihdr, const uint8_t *buf, + size_t size, size_t maxsz, + videnc_packet_h *pkth, void *arg) +{ + uint8_t hdr = (uint8_t)ihdr; + int err = 0; + + if (first && last && size <= maxsz) { + err = rtp_send_data(&hdr, 1, buf, size, marker, + pkth, arg); + } + else { + uint8_t fu_hdr[2]; + const uint8_t type = hdr & 0x1f; + const uint8_t nri = hdr & 0x60; + const size_t sz = maxsz - 2; + + fu_hdr[0] = nri | H264_NAL_FU_A; + fu_hdr[1] = first ? (1<<7 | type) : type; + + while (size > sz) { + err |= rtp_send_data(fu_hdr, 2, buf, sz, false, + pkth, arg); + buf += sz; + size -= sz; + fu_hdr[1] &= ~(1 << 7); + } + + if (last) + fu_hdr[1] |= 1<<6; /* end bit */ + + err |= rtp_send_data(fu_hdr, 2, buf, size, marker && last, + pkth, arg); + } + + return err; +} + + +int h264_packetize(const uint8_t *buf, size_t len, + size_t pktsize, + videnc_packet_h *pkth, void *arg) +{ + const uint8_t *start = buf; + const uint8_t *end = start + len; + const uint8_t *r; + int err = 0; + + r = h264_find_startcode(buf, end); + + while (r < end) { + const uint8_t *r1; + + /* skip zeros */ + while (!*(r++)) + ; + + r1 = h264_find_startcode(r, end); + + err |= h264_nal_send(true, true, (r1 >= end), r[0], + r+1, r1-r-1, pktsize, + pkth, arg); + r = r1; + } + + return err; +} diff --git a/modules/gst_video/module.mk b/modules/gst_video/module.mk new file mode 100644 index 0000000..64e0bad --- /dev/null +++ b/modules/gst_video/module.mk @@ -0,0 +1,12 @@ +# +# module.mk +# +# Copyright (C) 2010 Creytiv.com +# + +MOD := gst_video +$(MOD)_SRCS += gst_video.c h264.c encode.c sdp.c +$(MOD)_LFLAGS += `pkg-config --libs gstreamer-0.10 gstreamer-app-0.10` +CFLAGS += `pkg-config --cflags gstreamer-0.10 gstreamer-app-0.10` + +include mk/mod.mk diff --git a/modules/gst_video/sdp.c b/modules/gst_video/sdp.c new file mode 100644 index 0000000..d489de7 --- /dev/null +++ b/modules/gst_video/sdp.c @@ -0,0 +1,53 @@ +/** + * @file gst_video/sdp.c H.264 SDP Functions + * + * Copyright (C) 2010 Creytiv.com 
+ */ + +#include <re.h> +#include <baresip.h> +#include "gst_video.h" + + +uint32_t gst_video_h264_packetization_mode(const char *fmtp) +{ + struct pl pl, mode; + + if (!fmtp) + return 0; + + pl_set_str(&pl, fmtp); + + if (fmt_param_get(&pl, "packetization-mode", &mode)) + return pl_u32(&mode); + + return 0; +} + + +int gst_video_fmtp_enc(struct mbuf *mb, const struct sdp_format *fmt, + bool offer, void *arg) +{ + struct vidcodec *vc = arg; + const uint8_t profile_idc = 0x42; /* baseline profile */ + const uint8_t profile_iop = 0x80; + (void)offer; + + if (!mb || !fmt || !vc) + return 0; + + return mbuf_printf(mb, "a=fmtp:%s" + " packetization-mode=0" + ";profile-level-id=%02x%02x%02x" + "\r\n", + fmt->id, profile_idc, profile_iop, h264_level_idc); +} + + +bool gst_video_fmtp_cmp(const char *fmtp1, const char *fmtp2, void *data) +{ + (void)data; + + return gst_video_h264_packetization_mode(fmtp1) == + gst_video_h264_packetization_mode(fmtp2); +} |