The Intel driver unhelpfully injects SEI messages that the user did not
ask for into the stream when running in constant-bitrate mode.  This
change disables that "feature" by writing a zero-length packed SEI
header, which the driver then uses in place of its own.
---
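Note for reviewers: the generic vaapi_encode layer is expected to drive the
new .write_extra_header callback by calling it with increasing index values,
submitting each returned header as a packed header, and stopping once the
callback returns AVERROR_EOF - which is why index 0 carries the empty SEI and
every other index returns AVERROR_EOF.  The loop below is a rough sketch of
that contract only; HEADER_BUFFER_SIZE, submit_packed_header() and the exact
access through ctx->codec are stand-ins, not the actual code in
vaapi_encode.c.

/* Illustrative sketch (not part of this patch) of the expected caller
 * side of .write_extra_header.  HEADER_BUFFER_SIZE and
 * submit_packed_header() are hypothetical stand-ins for the real
 * buffer handling in vaapi_encode.c. */
#define HEADER_BUFFER_SIZE 1024

static int submit_packed_header(AVCodecContext *avctx,
                                VAAPIEncodePicture *pic,
                                int type, char *data, size_t data_len);

static int write_extra_headers(AVCodecContext *avctx,
                               VAAPIEncodePicture *pic)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    char data[HEADER_BUFFER_SIZE];
    size_t data_len;
    int type, i, err;

    for (i = 0;; i++) {
        data_len = sizeof(data);
        err = ctx->codec->write_extra_header(avctx, pic, i, &type,
                                             data, &data_len);
        if (err == AVERROR_EOF)
            break;      /* no further extra headers for this picture */
        if (err < 0)
            return err;

        /* A zero data_len still submits a packed header of the given
         * type, which is what keeps the driver from inserting its own
         * SEI in constant-bitrate mode. */
        err = submit_packed_header(avctx, pic, type, data, data_len);
        if (err < 0)
            return err;
    }
    return 0;
}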
 libavcodec/vaapi_encode_h264.c | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)

diff --git a/libavcodec/vaapi_encode_h264.c b/libavcodec/vaapi_encode_h264.c
index 347dc90..366a008 100644
--- a/libavcodec/vaapi_encode_h264.c
+++ b/libavcodec/vaapi_encode_h264.c
@@ -487,6 +487,24 @@ static int vaapi_encode_h264_write_slice_header(AVCodecContext *avctx,
                                                         tmp, header_len);
 }

+static int vaapi_encode_h264_write_extra_header(AVCodecContext *avctx,
+                                                VAAPIEncodePicture *pic,
+                                                int index, int *type,
+                                                char *data, size_t *data_len)
+{
+    if (index == 0) {
+        // The Intel driver will inject an unhelpful SEI message if we
+        // don't provide one, so just give it an empty buffer to chew on
+        // harmlessly.
+        *type     = VAEncPackedHeaderH264_SEI;
+        *data_len = 0;
+        return 0;
+
+    } else {
+        return AVERROR_EOF;
+    }
+}
+
 static int vaapi_encode_h264_init_sequence_params(AVCodecContext *avctx)
 {
     VAAPIEncodeContext                 *ctx = avctx->priv_data;
@@ -877,6 +895,8 @@ static VAAPIEncodeType vaapi_encode_type_h264 = {

     .slice_header_type     = VAEncPackedHeaderH264_Slice,
     .write_slice_header    = &vaapi_encode_h264_write_slice_header,
+
+    .write_extra_header    = &vaapi_encode_h264_write_extra_header,
 };

 static av_cold int vaapi_encode_h264_init(AVCodecContext *avctx)
-- 
2.8.0.rc3
