On 26/04/18 06:34, Xiang, Haihao wrote:
> On Sun, 2018-04-22 at 16:29 +0100, Mark Thompson wrote:
>> This was previously accessible only via AVCodecContext.compression_level
>> for all encoders except H.264 where it was also a private option.  This
>> makes it an explicit option everywhere.
>> ---
>>  libavcodec/vaapi_encode.c      | 87 ++++++++++++++++++++++++++----------------
>>  libavcodec/vaapi_encode.h      |  8 ++++
>>  libavcodec/vaapi_encode_h264.c |  6 ---
>>  3 files changed, 62 insertions(+), 39 deletions(-)
>>
>> diff --git a/libavcodec/vaapi_encode.c b/libavcodec/vaapi_encode.c
>> index 9c39bcdeff..a5eb072843 100644
>> --- a/libavcodec/vaapi_encode.c
>> +++ b/libavcodec/vaapi_encode.c
>> @@ -1381,6 +1381,52 @@ static av_cold int vaapi_encode_init_rate_control(AVCodecContext *avctx)
>>      return 0;
>>  }
>>
>> +static av_cold int vaapi_encode_init_quality(AVCodecContext *avctx)
>> +{
>> +#if VA_CHECK_VERSION(0, 36, 0)
>> +    VAAPIEncodeContext *ctx = avctx->priv_data;
>> +    VAStatus vas;
>> +    VAConfigAttrib attr = { VAConfigAttribEncQualityRange };
>> +
>> +    vas = vaGetConfigAttributes(ctx->hwctx->display,
>> +                                ctx->va_profile,
>> +                                ctx->va_entrypoint,
>> +                                &attr, 1);
>> +    if (vas != VA_STATUS_SUCCESS) {
>> +        av_log(avctx, AV_LOG_ERROR, "Failed to query quality "
>> +               "config attribute: %d (%s).\n", vas, vaErrorStr(vas));
>> +        return AVERROR_EXTERNAL;
>> +    }
>> +
>> +    if (attr.value == VA_ATTRIB_NOT_SUPPORTED) {
>> +        if (ctx->quality != 0) {
>> +            av_log(avctx, AV_LOG_WARNING, "Quality attribute is not "
>> +                   "supported: will use default quality level.\n");
>> +        }
>> +    } else {
>> +        if (ctx->quality > attr.value) {
>> +            av_log(avctx, AV_LOG_WARNING, "Invalid quality level: "
>> +                   "valid range is 0-%d, using %d.\n",
>> +                   attr.value, attr.value);
>> +            ctx->quality = attr.value;
>> +        }
>> +
>> +        ctx->quality_params.misc.type =
>> +            VAEncMiscParameterTypeQualityLevel;
>> +        ctx->quality_params.quality.quality_level =
>> +            ctx->quality;
>> +
>> +        vaapi_encode_add_global_param(avctx, &ctx->quality_params.misc,
>> +                                      sizeof(ctx->quality_params));
>> +    }
>> +#else
>> +    av_log(avctx, AV_LOG_WARNING, "The encode quality option is "
>> +           "not supported with this VAAPI version.\n");
>> +#endif
>> +
>> +    return 0;
>> +}
>> +
>>  static void vaapi_encode_free_output_buffer(void *opaque,
>>                                              uint8_t *data)
>>  {
>> @@ -1562,6 +1608,14 @@ av_cold int ff_vaapi_encode_init(AVCodecContext *avctx)
>>      if (err < 0)
>>          goto fail;
>>
>> +    if (!ctx->quality)
>> +        ctx->quality = avctx->compression_level;
>
> 0 means the default quality level in use in VAAPI, so does it make sense to
> assign avctx->compression_level which is not FF_COMPRESSION_DEFAULT to
> ctx->quality?

Right, the default for the quality option should also be -1 (meaning unset),
because otherwise it will always try to set it (including on drivers which
don't support the option).
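To make that concrete, here is a rough sketch of the change I have in mind
(untested fragments only, following the names in the patch above - not final
code):

    /* In VAAPI_ENCODE_COMMON_OPTIONS: default the option to -1 = unset. */
    { "quality", \
      "Set encode quality (trades off against speed, higher is faster)", \
      OFFSET(common.quality), AV_OPT_TYPE_INT, \
      { .i64 = -1 }, -1, INT_MAX, FLAGS }, \

    /* In ff_vaapi_encode_init(): fall back to compression_level only when
     * the private option was left unset, and skip the quality setup
     * entirely when nothing was requested (FF_COMPRESSION_DEFAULT is -1,
     * so an untouched compression_level leaves quality unset too). */
    if (ctx->quality < 0)
        ctx->quality = avctx->compression_level;
    if (ctx->quality >= 0) {
        err = vaapi_encode_init_quality(avctx);
        if (err < 0)
            goto fail;
    }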
>> +    if (ctx->quality >= 0) {
>> +        err = vaapi_encode_init_quality(avctx);
>> +        if (err < 0)
>> +            goto fail;
>> +    }
>> +
>>      vas = vaCreateConfig(ctx->hwctx->display,
>>                           ctx->va_profile, ctx->va_entrypoint,
>>                           ctx->config_attributes, ctx->nb_config_attributes,
>> @@ -1611,39 +1665,6 @@ av_cold int ff_vaapi_encode_init(AVCodecContext *avctx)
>>          goto fail;
>>      }
>>
>> -    if (avctx->compression_level >= 0) {
>> -#if VA_CHECK_VERSION(0, 36, 0)
>> -        VAConfigAttrib attr = { VAConfigAttribEncQualityRange };
>> -
>> -        vas = vaGetConfigAttributes(ctx->hwctx->display,
>> -                                    ctx->va_profile,
>> -                                    ctx->va_entrypoint,
>> -                                    &attr, 1);
>> -        if (vas != VA_STATUS_SUCCESS) {
>> -            av_log(avctx, AV_LOG_WARNING, "Failed to query quality "
>> -                   "attribute: will use default compression level.\n");
>> -        } else {
>> -            if (avctx->compression_level > attr.value) {
>> -                av_log(avctx, AV_LOG_WARNING, "Invalid compression "
>> -                       "level: valid range is 0-%d, using %d.\n",
>> -                       attr.value, attr.value);
>> -                avctx->compression_level = attr.value;
>> -            }
>> -
>> -            ctx->quality_params.misc.type =
>> -                VAEncMiscParameterTypeQualityLevel;
>> -            ctx->quality_params.quality.quality_level =
>> -                avctx->compression_level;
>> -
>> -            vaapi_encode_add_global_param(avctx, &ctx->quality_params.misc,
>> -                                          sizeof(ctx->quality_params));
>> -        }
>> -#else
>> -        av_log(avctx, AV_LOG_WARNING, "The encode compression level "
>> -               "option is not supported with this VAAPI version.\n");
>> -#endif
>> -    }
>> -
>>      ctx->input_order  = 0;
>>      ctx->output_delay = avctx->max_b_frames;
>>      ctx->decode_delay = 1;
>> diff --git a/libavcodec/vaapi_encode.h b/libavcodec/vaapi_encode.h
>> index 9e0826b30e..35d48eff2f 100644
>> --- a/libavcodec/vaapi_encode.h
>> +++ b/libavcodec/vaapi_encode.h
>> @@ -111,6 +111,10 @@ typedef struct VAAPIEncodeContext {
>>
>>      // Global options.
>>
>> +    // Encoding quality level.
>> +    // (Also exposed via AVCodecContext.compression_level.)
>> +    int quality;
>> +
>
> AVCodecContext.compression_level only takes effect when quality is 0 in the
> current implementation; it would be better to mention this limitation in the
> comment.

"is 0" here means "was not explicitly set" - if it's -1 instead, as noted
above, then this is more obvious.

Setting both doesn't make sense because they mean the same thing - quality is
clearer for the ffmpeg utility, while compression_level is simpler for
libavcodec users.

If you still think this is confusing then it may be better to drop the
"quality" name entirely and only use the standard option?
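If we do keep both, the struct comment could spell the interaction out - one
possible wording (a suggestion only, assuming the -1 default described above):

    // Encoding quality level.  A negative value means unset, in which case
    // AVCodecContext.compression_level is used instead when it has been
    // set.  Both routes feed the same VAEncMiscParameterTypeQualityLevel
    // parameter.
    int quality;

With that scheme, "-quality 4" and "-compression_level 4" on the ffmpeg
command line should end up behaving identically.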
>>      // Use low power encoding mode.
>>      int low_power;
>>
>> @@ -302,6 +306,10 @@ int ff_vaapi_encode_close(AVCodecContext *avctx);
>>
>>
>>  #define VAAPI_ENCODE_COMMON_OPTIONS \
>> +    { "quality", \
>> +      "Set encode quality (trades off against speed, higher is faster)", \
>> +      OFFSET(common.quality), AV_OPT_TYPE_INT, \
>> +      { .i64 = 0 }, 0, INT_MAX, FLAGS }, \
>>      { "low_power", \
>>        "Use low-power encoding mode (only available on some platforms; " \
>>        "may not support all encoding features)", \
>> diff --git a/libavcodec/vaapi_encode_h264.c b/libavcodec/vaapi_encode_h264.c
>> index 49dc5229b5..a20cf15c1e 100644
>> --- a/libavcodec/vaapi_encode_h264.c
>> +++ b/libavcodec/vaapi_encode_h264.c
>> @@ -51,7 +51,6 @@ typedef struct VAAPIEncodeH264Context {
>>
>>      // User options.
>>      int qp;
>> -    int quality;
>>      int coder;
>>      int aud;
>>      int sei;
>> @@ -828,9 +827,6 @@ static av_cold int vaapi_encode_h264_configure(AVCodecContext *avctx)
>>          av_assert0(0 && "Invalid RC mode.");
>>      }
>>
>> -    if (avctx->compression_level == FF_COMPRESSION_DEFAULT)
>> -        avctx->compression_level = priv->quality;
>> -
>>      if (priv->sei & SEI_IDENTIFIER) {
>>          const char *lavc  = LIBAVCODEC_IDENT;
>>          const char *vaapi = VA_VERSION_S;
>> @@ -970,8 +966,6 @@ static const AVOption vaapi_encode_h264_options[] = {
>>
>>      { "qp", "Constant QP (for P-frames; scaled by qfactor/qoffset for I/B)",
>>        OFFSET(qp), AV_OPT_TYPE_INT, { .i64 = 20 }, 0, 52, FLAGS },
>> -    { "quality", "Set encode quality (trades off against speed, higher is faster)",
>> -      OFFSET(quality), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 8, FLAGS },
>>      { "coder", "Entropy coder type",
>>        OFFSET(coder), AV_OPT_TYPE_INT, { .i64 = 1 }, 0, 1, FLAGS, "coder" },
>>          { "cavlc", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = 0 }, INT_MIN, INT_MAX, FLAGS, "coder" },

Thanks,

- Mark
_______________________________________________
ffmpeg-devel mailing list
ffmpeg-devel@ffmpeg.org
http://ffmpeg.org/mailman/listinfo/ffmpeg-devel