Commit defbb8bc authored by Mark Thompson

vaapi_vp9: Convert to use the new VAAPI hwaccel code

parent adb54e59
@@ -21,135 +21,146 @@
  */
 
 #include "libavutil/pixdesc.h"
 
-#include "vaapi_internal.h"
+#include "vaapi_decode.h"
 #include "vp9.h"
 
-static void fill_picture_parameters(AVCodecContext *avctx,
-                                    const VP9SharedContext *h,
-                                    VADecPictureParameterBufferVP9 *pp)
+static VASurfaceID vaapi_vp9_surface_id(const VP9Frame *vf)
 {
+    if (vf)
+        return ff_vaapi_get_surface_id(vf->tf.f);
+    else
+        return VA_INVALID_SURFACE;
+}
+
+static int vaapi_vp9_start_frame(AVCodecContext *avctx,
+                                 av_unused const uint8_t *buffer,
+                                 av_unused uint32_t size)
+{
+    const VP9SharedContext *h = avctx->priv_data;
+    VAAPIDecodePicture *pic = h->frames[CUR_FRAME].hwaccel_picture_private;
+    VADecPictureParameterBufferVP9 pic_param;
     const AVPixFmtDescriptor *pixdesc = av_pix_fmt_desc_get(avctx->sw_pix_fmt);
-    int i;
+    int err, i;
 
-    pp->frame_width = avctx->width;
-    pp->frame_height = avctx->height;
-
-    pp->frame_header_length_in_bytes = h->h.uncompressed_header_size;
-    pp->first_partition_size = h->h.compressed_header_size;
-
-    pp->profile = h->h.profile;
-    pp->bit_depth = h->h.bpp;
-
-    pp->filter_level = h->h.filter.level;
-    pp->sharpness_level = h->h.filter.sharpness;
-    pp->log2_tile_rows = h->h.tiling.log2_tile_rows;
-    pp->log2_tile_columns = h->h.tiling.log2_tile_cols;
-
-    pp->pic_fields.bits.subsampling_x = pixdesc->log2_chroma_w;
-    pp->pic_fields.bits.subsampling_y = pixdesc->log2_chroma_h;
-    pp->pic_fields.bits.frame_type = !h->h.keyframe;
-    pp->pic_fields.bits.show_frame = !h->h.invisible;
-    pp->pic_fields.bits.error_resilient_mode = h->h.errorres;
-    pp->pic_fields.bits.intra_only = h->h.intraonly;
-    pp->pic_fields.bits.allow_high_precision_mv = h->h.keyframe ? 0 : h->h.highprecisionmvs;
-    pp->pic_fields.bits.mcomp_filter_type = h->h.filtermode ^ (h->h.filtermode <= 1);
-    pp->pic_fields.bits.frame_parallel_decoding_mode = h->h.parallelmode;
-    pp->pic_fields.bits.reset_frame_context = h->h.resetctx;
-    pp->pic_fields.bits.refresh_frame_context = h->h.refreshctx;
-    pp->pic_fields.bits.frame_context_idx = h->h.framectxid;
-
-    pp->pic_fields.bits.segmentation_enabled = h->h.segmentation.enabled;
-    pp->pic_fields.bits.segmentation_temporal_update = h->h.segmentation.temporal;
-    pp->pic_fields.bits.segmentation_update_map = h->h.segmentation.update_map;
-
-    pp->pic_fields.bits.last_ref_frame = h->h.refidx[0];
-    pp->pic_fields.bits.last_ref_frame_sign_bias = h->h.signbias[0];
-    pp->pic_fields.bits.golden_ref_frame = h->h.refidx[1];
-    pp->pic_fields.bits.golden_ref_frame_sign_bias = h->h.signbias[1];
-    pp->pic_fields.bits.alt_ref_frame = h->h.refidx[2];
-    pp->pic_fields.bits.alt_ref_frame_sign_bias = h->h.signbias[2];
-    pp->pic_fields.bits.lossless_flag = h->h.lossless;
+    pic->output_surface = vaapi_vp9_surface_id(&h->frames[CUR_FRAME]);
+
+    pic_param = (VADecPictureParameterBufferVP9) {
+        .frame_width                      = avctx->width,
+        .frame_height                     = avctx->height,
+
+        .pic_fields.bits = {
+            .subsampling_x                = pixdesc->log2_chroma_w,
+            .subsampling_y                = pixdesc->log2_chroma_h,
+            .frame_type                   = !h->h.keyframe,
+            .show_frame                   = !h->h.invisible,
+            .error_resilient_mode         = h->h.errorres,
+            .intra_only                   = h->h.intraonly,
+            .allow_high_precision_mv      = h->h.keyframe ? 0 : h->h.highprecisionmvs,
+            .mcomp_filter_type            = h->h.filtermode ^ (h->h.filtermode <= 1),
+            .frame_parallel_decoding_mode = h->h.parallelmode,
+            .reset_frame_context          = h->h.resetctx,
+            .refresh_frame_context        = h->h.refreshctx,
+            .frame_context_idx            = h->h.framectxid,
+
+            .segmentation_enabled         = h->h.segmentation.enabled,
+            .segmentation_temporal_update = h->h.segmentation.temporal,
+            .segmentation_update_map      = h->h.segmentation.update_map,
+
+            .last_ref_frame               = h->h.refidx[0],
+            .last_ref_frame_sign_bias     = h->h.signbias[0],
+            .golden_ref_frame             = h->h.refidx[1],
+            .golden_ref_frame_sign_bias   = h->h.signbias[1],
+            .alt_ref_frame                = h->h.refidx[2],
+            .alt_ref_frame_sign_bias      = h->h.signbias[2],
+            .lossless_flag                = h->h.lossless,
+        },
+
+        .filter_level                     = h->h.filter.level,
+        .sharpness_level                  = h->h.filter.sharpness,
+        .log2_tile_rows                   = h->h.tiling.log2_tile_rows,
+        .log2_tile_columns                = h->h.tiling.log2_tile_cols,
+
+        .frame_header_length_in_bytes     = h->h.uncompressed_header_size,
+        .first_partition_size             = h->h.compressed_header_size,
+
+        .profile                          = h->h.profile,
+        .bit_depth                        = h->h.bpp,
+    };
 
     for (i = 0; i < 7; i++)
-        pp->mb_segment_tree_probs[i] = h->h.segmentation.prob[i];
+        pic_param.mb_segment_tree_probs[i] = h->h.segmentation.prob[i];
 
     if (h->h.segmentation.temporal) {
         for (i = 0; i < 3; i++)
-            pp->segment_pred_probs[i] = h->h.segmentation.pred_prob[i];
+            pic_param.segment_pred_probs[i] = h->h.segmentation.pred_prob[i];
     } else {
-        memset(pp->segment_pred_probs, 255, sizeof(pp->segment_pred_probs));
+        memset(pic_param.segment_pred_probs, 255, sizeof(pic_param.segment_pred_probs));
     }
 
     for (i = 0; i < 8; i++) {
-        if (h->refs[i].f->buf[0]) {
-            pp->reference_frames[i] = ff_vaapi_get_surface_id(h->refs[i].f);
-        } else {
-            pp->reference_frames[i] = VA_INVALID_ID;
-        }
+        if (h->refs[i].f->buf[0])
+            pic_param.reference_frames[i] = ff_vaapi_get_surface_id(h->refs[i].f);
+        else
+            pic_param.reference_frames[i] = VA_INVALID_ID;
     }
-}
-
-static int vaapi_vp9_start_frame(AVCodecContext *avctx,
-                                 av_unused const uint8_t *buffer,
-                                 av_unused uint32_t size)
-{
-    const VP9SharedContext *h = avctx->priv_data;
-    FFVAContext * const vactx = ff_vaapi_get_context(avctx);
-    VADecPictureParameterBufferVP9 *pic_param;
-
-    vactx->slice_param_size = sizeof(VASliceParameterBufferVP9);
-
-    pic_param = ff_vaapi_alloc_pic_param(vactx, sizeof(VADecPictureParameterBufferVP9));
-    if (!pic_param)
-        return -1;
-    fill_picture_parameters(avctx, h, pic_param);
+
+    err = ff_vaapi_decode_make_param_buffer(avctx, pic,
+                                            VAPictureParameterBufferType,
+                                            &pic_param, sizeof(pic_param));
+    if (err < 0) {
+        ff_vaapi_decode_cancel(avctx, pic);
+        return err;
+    }
 
     return 0;
 }
 
 static int vaapi_vp9_end_frame(AVCodecContext *avctx)
 {
-    FFVAContext * const vactx = ff_vaapi_get_context(avctx);
     const VP9SharedContext *h = avctx->priv_data;
-    int ret;
-
-    ret = ff_vaapi_commit_slices(vactx);
-    if (ret < 0)
-        goto finish;
-
-    ret = ff_vaapi_render_picture(vactx, ff_vaapi_get_surface_id(h->frames[CUR_FRAME].tf.f));
-    if (ret < 0)
-        goto finish;
-
-finish:
-    ff_vaapi_common_end_frame(avctx);
-    return ret;
+    VAAPIDecodePicture *pic = h->frames[CUR_FRAME].hwaccel_picture_private;
+
+    return ff_vaapi_decode_issue(avctx, pic);
 }
 
 static int vaapi_vp9_decode_slice(AVCodecContext *avctx,
                                   const uint8_t *buffer,
                                   uint32_t size)
 {
-    FFVAContext * const vactx = ff_vaapi_get_context(avctx);
     const VP9SharedContext *h = avctx->priv_data;
-    VASliceParameterBufferVP9 *slice_param;
-    int i;
-
-    slice_param = (VASliceParameterBufferVP9*)ff_vaapi_alloc_slice(vactx, buffer, size);
-    if (!slice_param)
-        return -1;
+    VAAPIDecodePicture *pic = h->frames[CUR_FRAME].hwaccel_picture_private;
+    VASliceParameterBufferVP9 slice_param;
+    int err, i;
+
+    slice_param = (VASliceParameterBufferVP9) {
+        .slice_data_size   = size,
+        .slice_data_offset = 0,
+        .slice_data_flag   = VA_SLICE_DATA_FLAG_ALL,
+    };
 
     for (i = 0; i < 8; i++) {
-        slice_param->seg_param[i].segment_flags.fields.segment_reference_enabled = h->h.segmentation.feat[i].ref_enabled;
-        slice_param->seg_param[i].segment_flags.fields.segment_reference = h->h.segmentation.feat[i].ref_val;
-        slice_param->seg_param[i].segment_flags.fields.segment_reference_skipped = h->h.segmentation.feat[i].skip_enabled;
-
-        memcpy(slice_param->seg_param[i].filter_level, h->h.segmentation.feat[i].lflvl, sizeof(slice_param->seg_param[i].filter_level));
-
-        slice_param->seg_param[i].luma_dc_quant_scale = h->h.segmentation.feat[i].qmul[0][0];
-        slice_param->seg_param[i].luma_ac_quant_scale = h->h.segmentation.feat[i].qmul[0][1];
-        slice_param->seg_param[i].chroma_dc_quant_scale = h->h.segmentation.feat[i].qmul[1][0];
-        slice_param->seg_param[i].chroma_ac_quant_scale = h->h.segmentation.feat[i].qmul[1][1];
+        slice_param.seg_param[i] = (VASegmentParameterVP9) {
+            .segment_flags.fields = {
+                .segment_reference_enabled = h->h.segmentation.feat[i].ref_enabled,
+                .segment_reference         = h->h.segmentation.feat[i].ref_val,
+                .segment_reference_skipped = h->h.segmentation.feat[i].skip_enabled,
+            },
+
+            .luma_dc_quant_scale   = h->h.segmentation.feat[i].qmul[0][0],
+            .luma_ac_quant_scale   = h->h.segmentation.feat[i].qmul[0][1],
+            .chroma_dc_quant_scale = h->h.segmentation.feat[i].qmul[1][0],
+            .chroma_ac_quant_scale = h->h.segmentation.feat[i].qmul[1][1],
+        };
+
+        memcpy(slice_param.seg_param[i].filter_level, h->h.segmentation.feat[i].lflvl, sizeof(slice_param.seg_param[i].filter_level));
+    }
+
+    err = ff_vaapi_decode_make_slice_buffer(avctx, pic,
+                                            &slice_param, sizeof(slice_param),
+                                            buffer, size);
+    if (err) {
+        ff_vaapi_decode_cancel(avctx, pic);
+        return err;
     }
 
     return 0;
@@ -163,7 +174,8 @@ AVHWAccel ff_vp9_vaapi_hwaccel = {
     .start_frame          = vaapi_vp9_start_frame,
     .end_frame            = vaapi_vp9_end_frame,
    .decode_slice         = vaapi_vp9_decode_slice,
-    .init                 = ff_vaapi_context_init,
-    .uninit               = ff_vaapi_context_fini,
-    .priv_data_size       = sizeof(FFVAContext),
+    .frame_priv_data_size = sizeof(VAAPIDecodePicture),
+    .init                 = ff_vaapi_decode_init,
+    .uninit               = ff_vaapi_decode_uninit,
+    .priv_data_size       = sizeof(VAAPIDecodeContext),
 };
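
Note (not part of the commit): the conversion replaces the per-hwaccel FFVAContext calls (ff_vaapi_alloc_pic_param(), ff_vaapi_alloc_slice(), ff_vaapi_commit_slices(), ff_vaapi_render_picture()) with the shared helpers from vaapi_decode.h, which collect the buffers for one frame on its VAAPIDecodePicture and hand them to the driver together at end_frame. A minimal sketch of that per-frame sequence, using only the helpers visible in the diff above; sketch_submit_frame() and its parameters are hypothetical placeholders, not code from the commit:

    #include "vaapi_decode.h"

    /* Hypothetical outline of the flow the new helpers impose on a hwaccel:
     * queue parameter/slice buffers on the per-frame picture, then either
     * issue them all at end_frame or cancel them on any error. */
    static int sketch_submit_frame(AVCodecContext *avctx, VAAPIDecodePicture *pic,
                                   const void *pic_param,   size_t pic_param_size,
                                   const void *slice_param, size_t slice_param_size,
                                   const uint8_t *slice_data, uint32_t slice_size)
    {
        int err;

        /* start_frame(): attach the codec-specific picture parameters to "pic". */
        err = ff_vaapi_decode_make_param_buffer(avctx, pic,
                                                VAPictureParameterBufferType,
                                                pic_param, pic_param_size);
        if (err < 0)
            goto fail;

        /* decode_slice(): attach the slice parameters and the slice data. */
        err = ff_vaapi_decode_make_slice_buffer(avctx, pic,
                                                slice_param, slice_param_size,
                                                slice_data, slice_size);
        if (err < 0)
            goto fail;

        /* end_frame(): submit everything queued on "pic" to the driver. */
        return ff_vaapi_decode_issue(avctx, pic);

    fail:
        /* On any failure, drop whatever has been queued for this frame. */
        ff_vaapi_decode_cancel(avctx, pic);
        return err;
    }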