vaapi: Always free parameter buffers after vaEndPicture() with libva2
This is an ABI change in libva2: previously the Intel driver had this behaviour and it was implemented as a driver quirk, but now it is part of the specification so all drivers must do it.
commit bfc83acfd6
parent f0a978a519

4 changed files with 6 additions and 6 deletions
@@ -392,14 +392,14 @@ static int vaapi_encode_issue(AVCodecContext *avctx,
         err = AVERROR(EIO);
         // vaRenderPicture() has been called here, so we should not destroy
         // the parameter buffers unless separate destruction is required.
-        if (ctx->hwctx->driver_quirks &
+        if (HAVE_VAAPI_1 || ctx->hwctx->driver_quirks &
             AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS)
             goto fail;
         else
             goto fail_at_end;
     }
 
-    if (ctx->hwctx->driver_quirks &
+    if (HAVE_VAAPI_1 || ctx->hwctx->driver_quirks &
         AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
         for (i = 0; i < pic->nb_param_buffers; i++) {
             vas = vaDestroyBuffer(ctx->hwctx->display,
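Note (not part of the commit): below is a minimal standalone sketch of the buffer-lifetime rule this change encodes. It assumes a VADisplay, VAContextID, target surface and a filled param_buffers[] array that are all set up elsewhere; the function and parameter names are illustrative, not FFmpeg's, and the libva-provided VA_CHECK_VERSION macro stands in for FFmpeg's HAVE_VAAPI_1 configure check.

#include <va/va.h>

/* Submit a picture's parameter buffers and end the picture.  With
 * libva 2 (VA-API >= 1.0.0), vaRenderPicture() never consumes the
 * parameter buffers, so the caller must destroy them itself after
 * vaEndPicture(); with libva 1 this was only required for drivers
 * carrying the render-param-buffers quirk (the Intel driver).
 * Error-path cleanup is omitted for brevity; the FFmpeg change also
 * routes the vaRenderPicture() failure path through buffer destruction
 * when building against libva 2. */
static VAStatus submit_and_free(VADisplay dpy, VAContextID ctx,
                                VASurfaceID surface,
                                VABufferID *param_buffers,
                                int nb_param_buffers)
{
    VAStatus vas;
    int i;

    vas = vaBeginPicture(dpy, ctx, surface);
    if (vas != VA_STATUS_SUCCESS)
        return vas;

    vas = vaRenderPicture(dpy, ctx, param_buffers, nb_param_buffers);
    if (vas != VA_STATUS_SUCCESS)
        return vas;

    vas = vaEndPicture(dpy, ctx);

#if VA_CHECK_VERSION(1, 0, 0)
    /* Building against libva 2: the parameter buffers are still alive
     * here and are now the caller's responsibility to free. */
    for (i = 0; i < nb_param_buffers; i++)
        vaDestroyBuffer(dpy, param_buffers[i]);
#endif

    return vas;
}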