From: wm4 <nfx...@googlemail.com> This adds a new API, which allows the API user to query the required AVHWFramesContext parameters. This also reduces code duplication across the hwaccels by introducing ff_decode_get_hw_frames_ctx(), which uses the new API function.
TODO: - reindent that block of code in vaapi_decode.c - fix all hwaccels other than h264 - cuda support? - add APIchanges and version bump --- libavcodec/avcodec.h | 77 +++++++++++++++++++++ libavcodec/decode.c | 67 ++++++++++++++++++ libavcodec/decode.h | 9 +++ libavcodec/dxva2.c | 51 ++++++-------- libavcodec/dxva2_h264.c | 3 + libavcodec/dxva2_hevc.c | 3 + libavcodec/dxva2_internal.h | 3 + libavcodec/vaapi_decode.c | 162 ++++++++++++++++++++------------------------ libavcodec/vaapi_decode.h | 5 +- libavcodec/vaapi_h264.c | 1 + libavcodec/vaapi_hevc.c | 1 + libavcodec/vdpau.c | 58 +++++++--------- libavcodec/vdpau_h264.c | 1 + libavcodec/vdpau_internal.h | 2 + 14 files changed, 293 insertions(+), 150 deletions(-) diff --git a/libavcodec/avcodec.h b/libavcodec/avcodec.h index 162f1abe4b..9ccac05ec3 100644 --- a/libavcodec/avcodec.h +++ b/libavcodec/avcodec.h @@ -2990,6 +2990,16 @@ typedef struct AVHWAccel { * Internal hwaccel capabilities. */ int caps_internal; + + /** + * Fill the given hw_frames context with current codec parameters. Called + * from get_format. Refer to avcodec_fill_hw_frames_parameters() for + * details. + * + * This CAN be called before AVHWAccel.init is called, and you must assume + * that avctx->hwaccel_priv_data is invalid. + */ + int (*frame_params)(AVCodecContext *avctx, AVBufferRef *hw_frames_ctx); } AVHWAccel; /** @@ -3436,6 +3446,73 @@ int avcodec_open2(AVCodecContext *avctx, const AVCodec *codec, AVDictionary **op */ int avcodec_close(AVCodecContext *avctx); +/** + * Fill the given hw_frames struct with values adequate for hardware decoding. + * This is meant to get called from the get_format callback, and as a helper for + * preparing a AVHWFramesContext for AVCodecContext.hw_frames_ctx. This API is + * for decoding with certain hardware acceleration modes/APIs only. Calling this + * function is not a requirement. 
An API user can set up
 + * AVCodecContext.hw_frames_ctx fully manually, or set
 + * AVCodecContext.hw_device_ctx (which will make the hwaccel implementation
 + * set up hw_frames_ctx fully automatically). Using fully manual setup is
 + * discouraged.
 + *
 + * There are a number of requirements for calling this function:
 + *
 + * - It must be called from get_format with the same avctx parameter if you
 + * expect it to fill correct parameters. Calling it outside of get_format is
 + * not allowed, and can trigger undefined behavior.
 + * - If the decoder does not support this functionality, AVERROR(ENOENT) will
 + * be returned. This happens only if the format is a software format, or if
 + * the decoder does not support custom allocated AVHWFramesContext properly.
 + * - The hw_pix_fmt must be one of the choices suggested by get_format. If the
 + * user decides to use a hw_frames context prepared with this API function,
 + * the user must return the same hw_pix_fmt from get_format.
 + * - The hw_frames context must be allocated from a device type that supports
 + * the given hw_pix_fmt.
 + * - The passed hw_frames context must not have been initialized yet.
 + * - The API function may overwrite any fields in the hw_frames context. It will
 + * not actually initialize the context. A user calls this API function to get
 + * basic parameters, and can afterwards modify the parameters as needed.
 + * - After calling this API function, it is the user's responsibility to
 + * initialize the hw_frames context, and to set AVCodecContext.hw_frames_ctx
 + * to it. If done, this must be done before returning from get_format (this
 + * is implied by the normal AVCodecContext.hw_frames_ctx API rules).
 + * - The hw_frames parameters may change every time get_format is called.
 + * Also, AVCodecContext.hw_frames_ctx is reset before get_format. So you are
 + * inherently required to go through this process again on every get_format
 + * call. 
+ * - It is perfectly possible to call this function without actually using
 + * the resulting hw_frames context. One use-case might be trying to reuse a
 + * previously initialized hw_frames_ctx, and calling this API function only
 + * to test whether the required frame parameters have changed.
 + *
 + * The function will set at least the following fields on hw_frames (potentially
 + * more, depending on hwaccel API):
 + *
 + * - Set the format field to hw_pix_fmt.
 + * - Set the sw_format field to the most suited and most versatile format. (An
 + * implication is that this will prefer generic formats over opaque formats
 + * with arbitrary restrictions, if possible.)
 + * - Set the width/height fields to the coded frame size, rounded up to the
 + * API-specific minimum alignment.
 + * - Only _if_ the hwaccel requires a pre-allocated pool: set the initial_pool_size
 + * field to the number of maximum reference surfaces possible with the codec,
 + * plus 1 surface for the user to work with (meaning the user can safely reference
 + * at most 1 decoded surface at a time), plus additional buffering introduced
 + * by frame threading. If the hwaccel does not require pre-allocation, the
 + * field is left to 0, and the codec will allocate new surfaces on demand
 + * during decoding.
 + *
 + * @param avctx The context which is currently calling get_format, and which
 + * implicitly contains all state needed for filling hw_frames_ctx
 + * properly.
 + * @param hw_pix_fmt The hwaccel format you are going to return from get_format.
 + * @param hw_frames_ctx A reference to an _uninitialized_ AVHWFramesContext.
 + * Fields will be set to values required for decoding.
 + */
 +int avcodec_fill_hw_frames_parameters(AVCodecContext *avctx, enum AVPixelFormat hw_pix_fmt, AVBufferRef *hw_frames_ctx); + /** * Free all allocated data in the given subtitle struct. 
* diff --git a/libavcodec/decode.c b/libavcodec/decode.c index c76ee6696a..bfd84ef11a 100644 --- a/libavcodec/decode.c +++ b/libavcodec/decode.c @@ -669,6 +669,73 @@ static AVHWAccel *find_hwaccel(enum AVCodecID codec_id, return NULL; } +int ff_decode_get_hw_frames_ctx(AVCodecContext *avctx, + enum AVHWDeviceType dev_type) +{ + AVHWDeviceContext *device_ctx; + AVHWFramesContext *frames_ctx; + int ret; + + if (!avctx->hwaccel) + return AVERROR(ENOSYS); + + if (avctx->hw_frames_ctx) + return 0; + if (!avctx->hw_device_ctx) { + av_log(avctx, AV_LOG_ERROR, "A hardware frames or device context is " + "required for hardware accelerated decoding.\n"); + return AVERROR(EINVAL); + } + + device_ctx = (AVHWDeviceContext *)avctx->hw_device_ctx->data; + if (device_ctx->type != dev_type) { + av_log(avctx, AV_LOG_ERROR, "Device type %s expected for hardware " + "decoding, but got %s.\n", av_hwdevice_get_type_name(dev_type), + av_hwdevice_get_type_name(device_ctx->type)); + return AVERROR(EINVAL); + } + + avctx->hw_frames_ctx = av_hwframe_ctx_alloc(avctx->hw_device_ctx); + if (!avctx->hw_frames_ctx) + return AVERROR(ENOMEM); + + ret = avcodec_fill_hw_frames_parameters(avctx, avctx->hwaccel->pix_fmt, avctx->hw_frames_ctx); + if (ret < 0) { + av_buffer_unref(&avctx->hw_frames_ctx); + return ret; + } + + frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data; + + + if (frames_ctx->initial_pool_size) { + // We guarantee 4 base work surfaces. The function above guarantees 1 + // (the absolute minimum), so add the missing count. + frames_ctx->initial_pool_size += 3; + + // Add an additional surface per thread is frame threading is enabled. 
+ if (avctx->active_thread_type & FF_THREAD_FRAME) + frames_ctx->initial_pool_size += avctx->thread_count; + } + + ret = av_hwframe_ctx_init(avctx->hw_frames_ctx); + if (ret < 0) { + av_buffer_unref(&avctx->hw_frames_ctx); + return ret; + } + + return 0; +} + +int avcodec_fill_hw_frames_parameters(AVCodecContext *avctx, enum AVPixelFormat hw_pix_fmt, AVBufferRef *hw_frames_ctx) +{ + AVHWAccel *hwa = find_hwaccel(avctx->codec_id, hw_pix_fmt); + if (!hwa || !hwa->frame_params) + return AVERROR(ENOENT); + + return hwa->frame_params(avctx, hw_frames_ctx); +} + static int setup_hwaccel(AVCodecContext *avctx, const enum AVPixelFormat fmt, const char *name) diff --git a/libavcodec/decode.h b/libavcodec/decode.h index 235f355f82..37b2e45c63 100644 --- a/libavcodec/decode.h +++ b/libavcodec/decode.h @@ -23,6 +23,7 @@ #include "libavutil/buffer.h" #include "libavutil/frame.h" +#include "libavutil/hwcontext.h" #include "avcodec.h" @@ -70,4 +71,12 @@ int ff_decode_get_packet(AVCodecContext *avctx, AVPacket *pkt); void ff_decode_bsfs_uninit(AVCodecContext *avctx); +/** + * Make sure avctx.hw_frames_ctx is set. If it's not set, the function will + * try to allocate it from hw_device_ctx. If that is not possible, an error + * message is printed, and an error code is returned. + */ +int ff_decode_get_hw_frames_ctx(AVCodecContext *avctx, + enum AVHWDeviceType dev_type); + #endif /* AVCODEC_DECODE_H */ diff --git a/libavcodec/dxva2.c b/libavcodec/dxva2.c index 9ceb6236d4..a09c9683c3 100644 --- a/libavcodec/dxva2.c +++ b/libavcodec/dxva2.c @@ -29,6 +29,7 @@ #include "libavutil/time.h" #include "avcodec.h" +#include "decode.h" #include "dxva2_internal.h" /* define all the GUIDs used directly here, @@ -572,14 +573,20 @@ static void ff_dxva2_unlock(AVCodecContext *avctx) #endif } -// This must work before the decoder is created. -// This somehow needs to be exported to the user. 
-static void dxva_adjust_hwframes(AVCodecContext *avctx, AVHWFramesContext *frames_ctx) +int ff_dxva2_common_frame_params(AVCodecContext *avctx, + AVBufferRef *hw_frames_ctx) { - FFDXVASharedContext *sctx = DXVA_SHARED_CONTEXT(avctx); + AVHWFramesContext *frames_ctx = (AVHWFramesContext *)hw_frames_ctx->data; + AVHWDeviceContext *device_ctx = frames_ctx->device_ctx; int surface_alignment, num_surfaces; - frames_ctx->format = sctx->pix_fmt; + if (device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) { + frames_ctx->format = AV_PIX_FMT_DXVA2_VLD; + } else if (device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) { + frames_ctx->format = AV_PIX_FMT_D3D11; + } else { + return AVERROR(EINVAL); + } /* decoding MPEG-2 requires additional alignment on some Intel GPUs, but it causes issues for H.264 on certain AMD GPUs..... */ @@ -627,12 +634,16 @@ static void dxva_adjust_hwframes(AVCodecContext *avctx, AVHWFramesContext *frame frames_hwctx->BindFlags |= D3D11_BIND_DECODER; } #endif + + return 0; } int ff_dxva2_decode_init(AVCodecContext *avctx) { FFDXVASharedContext *sctx = DXVA_SHARED_CONTEXT(avctx); - AVHWFramesContext *frames_ctx = NULL; + AVHWFramesContext *frames_ctx; + enum AVHWDeviceType dev_type = avctx->hwaccel->pix_fmt == AV_PIX_FMT_DXVA2_VLD + ? AV_HWDEVICE_TYPE_DXVA2 : AV_HWDEVICE_TYPE_D3D11VA; int ret = 0; // Old API. 
@@ -642,32 +653,14 @@ int ff_dxva2_decode_init(AVCodecContext *avctx) // (avctx->pix_fmt is not updated yet at this point) sctx->pix_fmt = avctx->hwaccel->pix_fmt; - if (!avctx->hw_frames_ctx && !avctx->hw_device_ctx) { - av_log(avctx, AV_LOG_ERROR, "Either a hw_frames_ctx or a hw_device_ctx needs to be set for hardware decoding.\n"); - return AVERROR(EINVAL); - } - - if (avctx->hw_frames_ctx) { - frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data; - } else { - avctx->hw_frames_ctx = av_hwframe_ctx_alloc(avctx->hw_device_ctx); - if (!avctx->hw_frames_ctx) - return AVERROR(ENOMEM); - - frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data; - - dxva_adjust_hwframes(avctx, frames_ctx); - - ret = av_hwframe_ctx_init(avctx->hw_frames_ctx); - if (ret < 0) - goto fail; - } + ret = ff_decode_get_hw_frames_ctx(avctx, dev_type); + if (ret < 0) + return ret; + frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data; sctx->device_ctx = frames_ctx->device_ctx; - if (frames_ctx->format != sctx->pix_fmt || - !((sctx->pix_fmt == AV_PIX_FMT_D3D11 && CONFIG_D3D11VA) || - (sctx->pix_fmt == AV_PIX_FMT_DXVA2_VLD && CONFIG_DXVA2))) { + if (frames_ctx->format != sctx->pix_fmt) { av_log(avctx, AV_LOG_ERROR, "Invalid pixfmt for hwaccel!\n"); ret = AVERROR(EINVAL); goto fail; diff --git a/libavcodec/dxva2_h264.c b/libavcodec/dxva2_h264.c index de0885058a..8ce8c358c5 100644 --- a/libavcodec/dxva2_h264.c +++ b/libavcodec/dxva2_h264.c @@ -523,6 +523,7 @@ AVHWAccel ff_h264_dxva2_hwaccel = { .start_frame = dxva2_h264_start_frame, .decode_slice = dxva2_h264_decode_slice, .end_frame = dxva2_h264_end_frame, + .frame_params = ff_dxva2_common_frame_params, .frame_priv_data_size = sizeof(struct dxva2_picture_context), .priv_data_size = sizeof(FFDXVASharedContext), }; @@ -539,6 +540,7 @@ AVHWAccel ff_h264_d3d11va_hwaccel = { .start_frame = dxva2_h264_start_frame, .decode_slice = dxva2_h264_decode_slice, .end_frame = dxva2_h264_end_frame, + .frame_params = 
ff_dxva2_common_frame_params, .frame_priv_data_size = sizeof(struct dxva2_picture_context), .priv_data_size = sizeof(FFDXVASharedContext), }; @@ -555,6 +557,7 @@ AVHWAccel ff_h264_d3d11va2_hwaccel = { .start_frame = dxva2_h264_start_frame, .decode_slice = dxva2_h264_decode_slice, .end_frame = dxva2_h264_end_frame, + .frame_params = ff_dxva2_common_frame_params, .frame_priv_data_size = sizeof(struct dxva2_picture_context), .priv_data_size = sizeof(FFDXVASharedContext), }; diff --git a/libavcodec/dxva2_hevc.c b/libavcodec/dxva2_hevc.c index 4bff26d6a8..1d665f07d1 100644 --- a/libavcodec/dxva2_hevc.c +++ b/libavcodec/dxva2_hevc.c @@ -432,6 +432,7 @@ AVHWAccel ff_hevc_dxva2_hwaccel = { .start_frame = dxva2_hevc_start_frame, .decode_slice = dxva2_hevc_decode_slice, .end_frame = dxva2_hevc_end_frame, + .frame_params = ff_dxva2_common_frame_params, .frame_priv_data_size = sizeof(struct hevc_dxva2_picture_context), .priv_data_size = sizeof(FFDXVASharedContext), }; @@ -448,6 +449,7 @@ AVHWAccel ff_hevc_d3d11va_hwaccel = { .start_frame = dxva2_hevc_start_frame, .decode_slice = dxva2_hevc_decode_slice, .end_frame = dxva2_hevc_end_frame, + .frame_params = ff_dxva2_common_frame_params, .frame_priv_data_size = sizeof(struct hevc_dxva2_picture_context), .priv_data_size = sizeof(FFDXVASharedContext), }; @@ -464,6 +466,7 @@ AVHWAccel ff_hevc_d3d11va2_hwaccel = { .start_frame = dxva2_hevc_start_frame, .decode_slice = dxva2_hevc_decode_slice, .end_frame = dxva2_hevc_end_frame, + .frame_params = ff_dxva2_common_frame_params, .frame_priv_data_size = sizeof(struct hevc_dxva2_picture_context), .priv_data_size = sizeof(FFDXVASharedContext), }; diff --git a/libavcodec/dxva2_internal.h b/libavcodec/dxva2_internal.h index 901cc11144..42ff346226 100644 --- a/libavcodec/dxva2_internal.h +++ b/libavcodec/dxva2_internal.h @@ -156,6 +156,9 @@ int ff_dxva2_decode_init(AVCodecContext *avctx); int ff_dxva2_decode_uninit(AVCodecContext *avctx); +int ff_dxva2_common_frame_params(AVCodecContext *avctx, 
+ AVBufferRef *hw_frames_ctx); + int ff_dxva2_is_d3d11(const AVCodecContext *avctx); #endif /* AVCODEC_DXVA2_INTERNAL_H */ diff --git a/libavcodec/vaapi_decode.c b/libavcodec/vaapi_decode.c index a63c4c62ec..5e4eef6b35 100644 --- a/libavcodec/vaapi_decode.c +++ b/libavcodec/vaapi_decode.c @@ -21,6 +21,7 @@ #include "libavutil/pixdesc.h" #include "avcodec.h" +#include "decode.h" #include "internal.h" #include "vaapi_decode.h" @@ -270,10 +271,15 @@ static const struct { #undef MAP }; -static int vaapi_decode_make_config(AVCodecContext *avctx) +/* + * Set *va_config and the frames_ref fields from the current codec parameters + * in avctx. + */ +static int vaapi_decode_make_config(AVCodecContext *avctx, + AVBufferRef *device_ref, + VAConfigID *va_config, + AVBufferRef *frames_ref) { - VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data; - AVVAAPIHWConfig *hwconfig = NULL; AVHWFramesConstraints *constraints = NULL; VAStatus vas; @@ -283,13 +289,16 @@ static int vaapi_decode_make_config(AVCodecContext *avctx) int profile_count, exact_match, alt_profile; const AVPixFmtDescriptor *sw_desc, *desc; + AVHWDeviceContext *device = (AVHWDeviceContext*)device_ref->data; + AVVAAPIDeviceContext *hwctx = device->hwctx; + codec_desc = avcodec_descriptor_get(avctx->codec_id); if (!codec_desc) { err = AVERROR(EINVAL); goto fail; } - profile_count = vaMaxNumProfiles(ctx->hwctx->display); + profile_count = vaMaxNumProfiles(hwctx->display); profile_list = av_malloc_array(profile_count, sizeof(VAProfile)); if (!profile_list) { @@ -297,7 +306,7 @@ static int vaapi_decode_make_config(AVCodecContext *avctx) goto fail; } - vas = vaQueryConfigProfiles(ctx->hwctx->display, + vas = vaQueryConfigProfiles(hwctx->display, profile_list, &profile_count); if (vas != VA_STATUS_SUCCESS) { av_log(avctx, AV_LOG_ERROR, "Failed to query profiles: " @@ -355,12 +364,9 @@ static int vaapi_decode_make_config(AVCodecContext *avctx) } } - ctx->va_profile = profile; - ctx->va_entrypoint = VAEntrypointVLD; 
- - vas = vaCreateConfig(ctx->hwctx->display, ctx->va_profile, - ctx->va_entrypoint, NULL, 0, - &ctx->va_config); + vas = vaCreateConfig(hwctx->display, profile, + VAEntrypointVLD, NULL, 0, + va_config); if (vas != VA_STATUS_SUCCESS) { av_log(avctx, AV_LOG_ERROR, "Failed to create decode " "configuration: %d (%s).\n", vas, vaErrorStr(vas)); @@ -368,20 +374,15 @@ static int vaapi_decode_make_config(AVCodecContext *avctx) goto fail; } - hwconfig = av_hwdevice_hwconfig_alloc(avctx->hw_device_ctx ? - avctx->hw_device_ctx : - ctx->frames->device_ref); + hwconfig = av_hwdevice_hwconfig_alloc(device_ref); if (!hwconfig) { err = AVERROR(ENOMEM); goto fail; } - hwconfig->config_id = ctx->va_config; + hwconfig->config_id = *va_config; constraints = - av_hwdevice_get_hwframe_constraints(avctx->hw_device_ctx ? - avctx->hw_device_ctx : - ctx->frames->device_ref, - hwconfig); + av_hwdevice_get_hwframe_constraints(device_ref, hwconfig); if (!constraints) { err = AVERROR(ENOMEM); goto fail; @@ -407,11 +408,20 @@ static int vaapi_decode_make_config(AVCodecContext *avctx) goto fail; } + if (frames_ref) { + AVHWFramesContext *frames = (AVHWFramesContext *)frames_ref->data; + + frames->width = avctx->coded_width; + frames->height = avctx->coded_height; + frames->format = AV_PIX_FMT_VAAPI; + + // TODO: reindent below + // Find the first format in the list which matches the expected // bit depth and subsampling. If none are found (this can happen // when 10-bit streams are decoded to 8-bit surfaces, for example) // then just take the first format on the list. 
- ctx->surface_format = constraints->valid_sw_formats[0]; + frames->sw_format = constraints->valid_sw_formats[0]; sw_desc = av_pix_fmt_desc_get(avctx->sw_pix_fmt); for (i = 0; constraints->valid_sw_formats[i] != AV_PIX_FMT_NONE; i++) { desc = av_pix_fmt_desc_get(constraints->valid_sw_formats[i]); @@ -425,30 +435,27 @@ static int vaapi_decode_make_config(AVCodecContext *avctx) } if (j < desc->nb_components) continue; - ctx->surface_format = constraints->valid_sw_formats[i]; + frames->sw_format = constraints->valid_sw_formats[i]; break; } - // Start with at least four surfaces. - ctx->surface_count = 4; + frames->initial_pool_size = 1; // Add per-codec number of surfaces used for storing reference frames. switch (avctx->codec_id) { case AV_CODEC_ID_H264: case AV_CODEC_ID_HEVC: - ctx->surface_count += 16; + frames->initial_pool_size += 16; break; case AV_CODEC_ID_VP9: - ctx->surface_count += 8; + frames->initial_pool_size += 8; break; case AV_CODEC_ID_VP8: - ctx->surface_count += 3; + frames->initial_pool_size += 3; break; default: - ctx->surface_count += 2; + frames->initial_pool_size += 2; + } } - // Add an additional surface per thread is frame threading is enabled. 
- if (avctx->active_thread_type & FF_THREAD_FRAME) - ctx->surface_count += avctx->thread_count; av_hwframe_constraints_free(&constraints); av_freep(&hwconfig); @@ -458,14 +465,38 @@ static int vaapi_decode_make_config(AVCodecContext *avctx) fail: av_hwframe_constraints_free(&constraints); av_freep(&hwconfig); - if (ctx->va_config != VA_INVALID_ID) { - vaDestroyConfig(ctx->hwctx->display, ctx->va_config); - ctx->va_config = VA_INVALID_ID; + if (*va_config != VA_INVALID_ID) { + vaDestroyConfig(hwctx->display, *va_config); + *va_config = VA_INVALID_ID; } av_freep(&profile_list); return err; } +int ff_vaapi_common_frame_params(AVCodecContext *avctx, + AVBufferRef *hw_frames_ctx) +{ + AVHWFramesContext *hw_frames = (AVHWFramesContext *)hw_frames_ctx->data; + AVHWDeviceContext *device_ctx = hw_frames->device_ctx; + AVVAAPIDeviceContext *hwctx; + VAConfigID va_config = VA_INVALID_ID; + int err; + + if (device_ctx->type != AV_HWDEVICE_TYPE_VAAPI) + return AVERROR(EINVAL); + hwctx = device_ctx->hwctx; + + err = vaapi_decode_make_config(avctx, hw_frames->device_ref, &va_config, + hw_frames_ctx); + if (err) + return err; + + if (va_config != VA_INVALID_ID) + vaDestroyConfig(hwctx->display, va_config); + + return 0; +} + int ff_vaapi_decode_init(AVCodecContext *avctx) { VAAPIDecodeContext *ctx = avctx->internal->hwaccel_priv_data; @@ -502,36 +533,8 @@ int ff_vaapi_decode_init(AVCodecContext *avctx) ctx->hwctx->driver_quirks = AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS; - } else -#endif - if (avctx->hw_frames_ctx) { - // This structure has a shorter lifetime than the enclosing - // AVCodecContext, so we inherit the references from there - // and do not need to make separate ones. 
- - ctx->frames = (AVHWFramesContext*)avctx->hw_frames_ctx->data; - ctx->hwfc = ctx->frames->hwctx; - ctx->device = ctx->frames->device_ctx; - ctx->hwctx = ctx->device->hwctx; - - } else if (avctx->hw_device_ctx) { - ctx->device = (AVHWDeviceContext*)avctx->hw_device_ctx->data; - ctx->hwctx = ctx->device->hwctx; - - if (ctx->device->type != AV_HWDEVICE_TYPE_VAAPI) { - av_log(avctx, AV_LOG_ERROR, "Device supplied for VAAPI " - "decoding must be a VAAPI device (not %d).\n", - ctx->device->type); - err = AVERROR(EINVAL); - goto fail; - } - - } else { - av_log(avctx, AV_LOG_ERROR, "A hardware device or frames context " - "is required for VAAPI decoding.\n"); - err = AVERROR(EINVAL); - goto fail; } +#endif #if FF_API_VAAPI_CONTEXT if (ctx->have_old_context) { @@ -543,34 +546,19 @@ int ff_vaapi_decode_init(AVCodecContext *avctx) } else { #endif - err = vaapi_decode_make_config(avctx); - if (err) + err = ff_decode_get_hw_frames_ctx(avctx, AV_HWDEVICE_TYPE_VAAPI); + if (err < 0) goto fail; - if (!avctx->hw_frames_ctx) { - avctx->hw_frames_ctx = av_hwframe_ctx_alloc(avctx->hw_device_ctx); - if (!avctx->hw_frames_ctx) { - err = AVERROR(ENOMEM); - goto fail; - } - ctx->frames = (AVHWFramesContext*)avctx->hw_frames_ctx->data; + ctx->frames = (AVHWFramesContext*)avctx->hw_frames_ctx->data; + ctx->hwfc = ctx->frames->hwctx; + ctx->device = ctx->frames->device_ctx; + ctx->hwctx = ctx->device->hwctx; - ctx->frames->format = AV_PIX_FMT_VAAPI; - ctx->frames->width = avctx->coded_width; - ctx->frames->height = avctx->coded_height; - - ctx->frames->sw_format = ctx->surface_format; - ctx->frames->initial_pool_size = ctx->surface_count; - - err = av_hwframe_ctx_init(avctx->hw_frames_ctx); - if (err < 0) { - av_log(avctx, AV_LOG_ERROR, "Failed to initialise internal " - "frames context: %d.\n", err); - goto fail; - } - - ctx->hwfc = ctx->frames->hwctx; - } + err = vaapi_decode_make_config(avctx, ctx->frames->device_ref, + &ctx->va_config, avctx->hw_frames_ctx); + if (err) + goto fail; 
vas = vaCreateContext(ctx->hwctx->display, ctx->va_config, avctx->coded_width, avctx->coded_height, diff --git a/libavcodec/vaapi_decode.h b/libavcodec/vaapi_decode.h index 0ff400e34c..e195e863a0 100644 --- a/libavcodec/vaapi_decode.h +++ b/libavcodec/vaapi_decode.h @@ -53,8 +53,6 @@ typedef struct VAAPIDecodePicture { } VAAPIDecodePicture; typedef struct VAAPIDecodeContext { - VAProfile va_profile; - VAEntrypoint va_entrypoint; VAConfigID va_config; VAContextID va_context; @@ -96,4 +94,7 @@ int ff_vaapi_decode_cancel(AVCodecContext *avctx, int ff_vaapi_decode_init(AVCodecContext *avctx); int ff_vaapi_decode_uninit(AVCodecContext *avctx); +int ff_vaapi_common_frame_params(AVCodecContext *avctx, + AVBufferRef *hw_frames_ctx); + #endif /* AVCODEC_VAAPI_DECODE_H */ diff --git a/libavcodec/vaapi_h264.c b/libavcodec/vaapi_h264.c index f765523005..0a5c0dfc76 100644 --- a/libavcodec/vaapi_h264.c +++ b/libavcodec/vaapi_h264.c @@ -399,6 +399,7 @@ AVHWAccel ff_h264_vaapi_hwaccel = { .frame_priv_data_size = sizeof(VAAPIDecodePicture), .init = &ff_vaapi_decode_init, .uninit = &ff_vaapi_decode_uninit, + .frame_params = &ff_vaapi_common_frame_params, .priv_data_size = sizeof(VAAPIDecodeContext), .caps_internal = HWACCEL_CAP_ASYNC_SAFE, }; diff --git a/libavcodec/vaapi_hevc.c b/libavcodec/vaapi_hevc.c index 4b4d8782ac..085d84142d 100644 --- a/libavcodec/vaapi_hevc.c +++ b/libavcodec/vaapi_hevc.c @@ -434,6 +434,7 @@ AVHWAccel ff_hevc_vaapi_hwaccel = { .frame_priv_data_size = sizeof(VAAPIDecodePictureHEVC), .init = ff_vaapi_decode_init, .uninit = ff_vaapi_decode_uninit, + .frame_params = ff_vaapi_common_frame_params, .priv_data_size = sizeof(VAAPIDecodeContext), .caps_internal = HWACCEL_CAP_ASYNC_SAFE, }; diff --git a/libavcodec/vdpau.c b/libavcodec/vdpau.c index 68d0813f65..fd346aec4b 100644 --- a/libavcodec/vdpau.c +++ b/libavcodec/vdpau.c @@ -24,6 +24,7 @@ #include <limits.h> #include "avcodec.h" +#include "decode.h" #include "internal.h" #include "h264dec.h" #include "vc1.h" @@ 
-100,6 +101,25 @@ int av_vdpau_get_surface_parameters(AVCodecContext *avctx, return 0; } +int ff_vdpau_common_frame_params(AVCodecContext *avctx, + AVBufferRef *hw_frames_ctx) +{ + AVHWFramesContext *hw_frames = (AVHWFramesContext*)hw_frames_ctx->data; + VdpChromaType type; + uint32_t width; + uint32_t height; + + if (av_vdpau_get_surface_parameters(avctx, &type, &width, &height)) + return AVERROR(ENOSYS); + + hw_frames->format = AV_PIX_FMT_VDPAU; + hw_frames->sw_format = avctx->sw_pix_fmt; + hw_frames->width = width; + hw_frames->height = height; + + return 0; +} + int ff_vdpau_common_init(AVCodecContext *avctx, VdpDecoderProfile profile, int level) { @@ -115,6 +135,7 @@ int ff_vdpau_common_init(AVCodecContext *avctx, VdpDecoderProfile profile, VdpChromaType type; uint32_t width; uint32_t height; + int ret; vdctx->width = UINT32_MAX; vdctx->height = UINT32_MAX; @@ -142,41 +163,14 @@ int ff_vdpau_common_init(AVCodecContext *avctx, VdpDecoderProfile profile, type != VDP_CHROMA_TYPE_420) return AVERROR(ENOSYS); } else { - AVHWFramesContext *frames_ctx = NULL; + AVHWFramesContext *frames_ctx; AVVDPAUDeviceContext *dev_ctx; - // We assume the hw_frames_ctx always survives until ff_vdpau_common_uninit - // is called. This holds true as the user is not allowed to touch - // hw_device_ctx, or hw_frames_ctx after get_format (and ff_get_format - // itself also uninits before unreffing hw_frames_ctx). 
- if (avctx->hw_frames_ctx) { - frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data; - } else if (avctx->hw_device_ctx) { - int ret; - - avctx->hw_frames_ctx = av_hwframe_ctx_alloc(avctx->hw_device_ctx); - if (!avctx->hw_frames_ctx) - return AVERROR(ENOMEM); - - frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data; - frames_ctx->format = AV_PIX_FMT_VDPAU; - frames_ctx->sw_format = avctx->sw_pix_fmt; - frames_ctx->width = avctx->coded_width; - frames_ctx->height = avctx->coded_height; - - ret = av_hwframe_ctx_init(avctx->hw_frames_ctx); - if (ret < 0) { - av_buffer_unref(&avctx->hw_frames_ctx); - return ret; - } - } - - if (!frames_ctx) { - av_log(avctx, AV_LOG_ERROR, "A hardware frames context is " - "required for VDPAU decoding.\n"); - return AVERROR(EINVAL); - } + ret = ff_decode_get_hw_frames_ctx(avctx, AV_HWDEVICE_TYPE_VDPAU); + if (ret < 0) + return ret; + frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data; dev_ctx = frames_ctx->device_ctx->hwctx; vdctx->device = dev_ctx->device; diff --git a/libavcodec/vdpau_h264.c b/libavcodec/vdpau_h264.c index a18941848a..8edbc44021 100644 --- a/libavcodec/vdpau_h264.c +++ b/libavcodec/vdpau_h264.c @@ -273,6 +273,7 @@ AVHWAccel ff_h264_vdpau_hwaccel = { .frame_priv_data_size = sizeof(struct vdpau_picture_context), .init = vdpau_h264_init, .uninit = ff_vdpau_common_uninit, + .frame_params = ff_vdpau_common_frame_params, .priv_data_size = sizeof(VDPAUContext), .caps_internal = HWACCEL_CAP_ASYNC_SAFE, }; diff --git a/libavcodec/vdpau_internal.h b/libavcodec/vdpau_internal.h index a0eb46c48e..8194a9ce36 100644 --- a/libavcodec/vdpau_internal.h +++ b/libavcodec/vdpau_internal.h @@ -119,5 +119,7 @@ int ff_vdpau_common_end_frame(AVCodecContext *avctx, AVFrame *frame, int ff_vdpau_mpeg_end_frame(AVCodecContext *avctx); int ff_vdpau_add_buffer(struct vdpau_picture_context *pic, const uint8_t *buf, uint32_t buf_size); +int ff_vdpau_common_frame_params(AVCodecContext *avctx, + AVBufferRef *hw_frames_ctx); 
#endif /* AVCODEC_VDPAU_INTERNAL_H */ -- 2.14.1 _______________________________________________ libav-devel mailing list libav-devel@libav.org https://lists.libav.org/mailman/listinfo/libav-devel