diff --git a/mpp/hal/rkdec/h265d/hal_h265d_com.c b/mpp/hal/rkdec/h265d/hal_h265d_com.c index e197734f..1a12ff6d 100644 --- a/mpp/hal/rkdec/h265d/hal_h265d_com.c +++ b/mpp/hal/rkdec/h265d/hal_h265d_com.c @@ -695,11 +695,11 @@ void hal_h265d_output_scalinglist_packet(void *hal, void *ptr, void *dxva) scalingList_t sl; RK_U32 i, j, pos; h265d_dxva2_picture_context_t *dxva_cxt = (h265d_dxva2_picture_context_t*)dxva; - HalH265dCtx *reg_cxt = ( HalH265dCtx *)hal; + HalH265dCtx *reg_ctx = ( HalH265dCtx *)hal; if (!dxva_cxt->pp.scaling_list_enabled_flag) { return; } - if (memcmp((void*)&dxva_cxt->qm, reg_cxt->scaling_qm, sizeof(DXVA_Qmatrix_HEVC))) { + if (memcmp((void*)&dxva_cxt->qm, reg_ctx->scaling_qm, sizeof(DXVA_Qmatrix_HEVC))) { memset(&sl, 0, sizeof(scalingList_t)); for (i = 0; i < 6; i++) { for (j = 0; j < 16; j++) { @@ -720,9 +720,9 @@ void hal_h265d_output_scalinglist_packet(void *hal, void *ptr, void *dxva) if (i < 2) sl.sl_dc[1][i] = dxva_cxt->qm.ucScalingListDCCoefSizeID3[i]; } - hal_record_scaling_list((scalingFactor_t *)reg_cxt->scaling_rk, &sl); + hal_record_scaling_list((scalingFactor_t *)reg_ctx->scaling_rk, &sl); } - memcpy(ptr, reg_cxt->scaling_rk, sizeof(scalingFactor_t)); + memcpy(ptr, reg_ctx->scaling_rk, sizeof(scalingFactor_t)); } RK_U8 cabac_table[27456] = { diff --git a/mpp/hal/rkdec/h265d/hal_h265d_rkv.c b/mpp/hal/rkdec/h265d/hal_h265d_rkv.c index d8ecdec9..b81e7d7a 100644 --- a/mpp/hal/rkdec/h265d/hal_h265d_rkv.c +++ b/mpp/hal/rkdec/h265d/hal_h265d_rkv.c @@ -42,27 +42,27 @@ static MPP_RET hal_h265d_alloc_res(void *hal) { RK_S32 i = 0; RK_S32 ret = 0; - HalH265dCtx *reg_cxt = (HalH265dCtx *)hal; - if (reg_cxt->fast_mode) { + HalH265dCtx *reg_ctx = (HalH265dCtx *)hal; + if (reg_ctx->fast_mode) { for (i = 0; i < MAX_GEN_REG; i++) { - reg_cxt->g_buf[i].hw_regs = + reg_ctx->g_buf[i].hw_regs = mpp_calloc_size(void, sizeof(H265d_REGS_t)); - ret = mpp_buffer_get(reg_cxt->group, - &reg_cxt->g_buf[i].scaling_list_data, + ret = mpp_buffer_get(reg_ctx->group, + &reg_ctx->g_buf[i].scaling_list_data, SCALING_LIST_SIZE); if (ret) { mpp_err("h265d scaling_list_data get buffer failed\n"); return ret; } - ret = mpp_buffer_get(reg_cxt->group, &reg_cxt->g_buf[i].pps_data, + ret = mpp_buffer_get(reg_ctx->group, &reg_ctx->g_buf[i].pps_data, PPS_SIZE); if (ret) { mpp_err("h265d pps_data get buffer failed\n"); return ret; } - ret = mpp_buffer_get(reg_cxt->group, &reg_cxt->g_buf[i].rps_data, + ret = mpp_buffer_get(reg_ctx->group, &reg_ctx->g_buf[i].rps_data, RPS_SIZE); if (ret) { mpp_err("h265d rps_data get buffer failed\n"); @@ -70,21 +70,21 @@ static MPP_RET hal_h265d_alloc_res(void *hal) } } } else { - reg_cxt->hw_regs = mpp_calloc_size(void, sizeof(H265d_REGS_t)); - ret = mpp_buffer_get(reg_cxt->group, &reg_cxt->scaling_list_data, + reg_ctx->hw_regs = mpp_calloc_size(void, sizeof(H265d_REGS_t)); + ret = mpp_buffer_get(reg_ctx->group, &reg_ctx->scaling_list_data, SCALING_LIST_SIZE); if (ret) { mpp_err("h265d scaling_list_data get buffer failed\n"); return ret; } - ret = mpp_buffer_get(reg_cxt->group, &reg_cxt->pps_data, PPS_SIZE); + ret = mpp_buffer_get(reg_ctx->group, &reg_ctx->pps_data, PPS_SIZE); if (ret) { mpp_err("h265d pps_data get buffer failed\n"); return ret; } - ret = mpp_buffer_get(reg_cxt->group, &reg_cxt->rps_data, RPS_SIZE); + ret = mpp_buffer_get(reg_ctx->group, &reg_ctx->rps_data, RPS_SIZE); if (ret) { mpp_err("h265d rps_data get buffer failed\n"); return ret; @@ -97,65 +97,65 @@ static MPP_RET hal_h265d_alloc_res(void *hal) static MPP_RET hal_h265d_release_res(void *hal) { RK_S32 ret = 0; - HalH265dCtx
*reg_cxt = ( HalH265dCtx *)hal; + HalH265dCtx *reg_ctx = ( HalH265dCtx *)hal; RK_S32 i = 0; - if (reg_cxt->fast_mode) { + if (reg_ctx->fast_mode) { for (i = 0; i < MAX_GEN_REG; i++) { - if (reg_cxt->g_buf[i].scaling_list_data) { - ret = mpp_buffer_put(reg_cxt->g_buf[i].scaling_list_data); + if (reg_ctx->g_buf[i].scaling_list_data) { + ret = mpp_buffer_put(reg_ctx->g_buf[i].scaling_list_data); if (ret) { mpp_err("h265d scaling_list_data free buffer failed\n"); return ret; } } - if (reg_cxt->g_buf[i].pps_data) { - ret = mpp_buffer_put(reg_cxt->g_buf[i].pps_data); + if (reg_ctx->g_buf[i].pps_data) { + ret = mpp_buffer_put(reg_ctx->g_buf[i].pps_data); if (ret) { mpp_err("h265d pps_data free buffer failed\n"); return ret; } } - if (reg_cxt->g_buf[i].rps_data) { - ret = mpp_buffer_put(reg_cxt->g_buf[i].rps_data); + if (reg_ctx->g_buf[i].rps_data) { + ret = mpp_buffer_put(reg_ctx->g_buf[i].rps_data); if (ret) { mpp_err("h265d rps_data free buffer failed\n"); return ret; } } - if (reg_cxt->g_buf[i].hw_regs) { - mpp_free(reg_cxt->g_buf[i].hw_regs); - reg_cxt->g_buf[i].hw_regs = NULL; + if (reg_ctx->g_buf[i].hw_regs) { + mpp_free(reg_ctx->g_buf[i].hw_regs); + reg_ctx->g_buf[i].hw_regs = NULL; } } } else { - if (reg_cxt->scaling_list_data) { - ret = mpp_buffer_put(reg_cxt->scaling_list_data); + if (reg_ctx->scaling_list_data) { + ret = mpp_buffer_put(reg_ctx->scaling_list_data); if (ret) { mpp_err("h265d scaling_list_data free buffer failed\n"); return ret; } } - if (reg_cxt->pps_data) { - ret = mpp_buffer_put(reg_cxt->pps_data); + if (reg_ctx->pps_data) { + ret = mpp_buffer_put(reg_ctx->pps_data); if (ret) { mpp_err("h265d pps_data free buffer failed\n"); return ret; } } - if (reg_cxt->rps_data) { - ret = mpp_buffer_put(reg_cxt->rps_data); + if (reg_ctx->rps_data) { + ret = mpp_buffer_put(reg_ctx->rps_data); if (ret) { mpp_err("h265d rps_data free buffer failed\n"); return ret; } } - if (reg_cxt->hw_regs) { - mpp_free(reg_cxt->hw_regs); - reg_cxt->hw_regs = NULL; + if (reg_ctx->hw_regs) { + mpp_free(reg_ctx->hw_regs); + reg_ctx->hw_regs = NULL; } } return MPP_OK; @@ -164,40 +164,40 @@ static MPP_RET hal_h265d_release_res(void *hal) MPP_RET hal_h265d_rkv_init(void *hal, MppHalCfg *cfg) { MPP_RET ret = MPP_NOK; - HalH265dCtx *reg_cxt = (HalH265dCtx *)hal; + HalH265dCtx *reg_ctx = (HalH265dCtx *)hal; - mpp_slots_set_prop(reg_cxt->slots, SLOTS_HOR_ALIGN, hevc_hor_align); - mpp_slots_set_prop(reg_cxt->slots, SLOTS_VER_ALIGN, hevc_ver_align); + mpp_slots_set_prop(reg_ctx->slots, SLOTS_HOR_ALIGN, hevc_hor_align); + mpp_slots_set_prop(reg_ctx->slots, SLOTS_VER_ALIGN, hevc_ver_align); - reg_cxt->scaling_qm = mpp_calloc(DXVA_Qmatrix_HEVC, 1); - reg_cxt->sw_rps_buf = mpp_calloc(RK_U64, 400); + reg_ctx->scaling_qm = mpp_calloc(DXVA_Qmatrix_HEVC, 1); + reg_ctx->sw_rps_buf = mpp_calloc(RK_U64, 400); - if (reg_cxt->scaling_qm == NULL) { + if (reg_ctx->scaling_qm == NULL) { mpp_err("scaling_org alloc fail"); return MPP_ERR_MALLOC; } - reg_cxt->scaling_rk = mpp_calloc(scalingFactor_t, 1); - if (reg_cxt->scaling_rk == NULL) { + reg_ctx->scaling_rk = mpp_calloc(scalingFactor_t, 1); + if (reg_ctx->scaling_rk == NULL) { mpp_err("scaling_rk alloc fail"); return MPP_ERR_MALLOC; } - if (reg_cxt->group == NULL) { - ret = mpp_buffer_group_get_internal(&reg_cxt->group, MPP_BUFFER_TYPE_ION); + if (reg_ctx->group == NULL) { + ret = mpp_buffer_group_get_internal(&reg_ctx->group, MPP_BUFFER_TYPE_ION); if (ret) { mpp_err("h265d mpp_buffer_group_get failed\n"); return ret; } } - ret = mpp_buffer_get(reg_cxt->group,
&reg_cxt->cabac_table_data, sizeof(cabac_table)); + ret = mpp_buffer_get(reg_ctx->group, &reg_ctx->cabac_table_data, sizeof(cabac_table)); if (ret) { mpp_err("h265d cabac_table get buffer failed\n"); return ret; } - ret = mpp_buffer_write(reg_cxt->cabac_table_data, 0, (void*)cabac_table, sizeof(cabac_table)); + ret = mpp_buffer_write(reg_ctx->cabac_table_data, 0, (void*)cabac_table, sizeof(cabac_table)); if (ret) { mpp_err("h265d write cabac_table data failed\n"); return ret; @@ -238,30 +238,30 @@ MPP_RET hal_h265d_rkv_init(void *hal, MppHalCfg *cfg) MPP_RET hal_h265d_rkv_deinit(void *hal) { RK_S32 ret = 0; - HalH265dCtx *reg_cxt = (HalH265dCtx *)hal; + HalH265dCtx *reg_ctx = (HalH265dCtx *)hal; - ret = mpp_buffer_put(reg_cxt->cabac_table_data); + ret = mpp_buffer_put(reg_ctx->cabac_table_data); if (ret) { mpp_err("h265d cabac_table free buffer failed\n"); return ret; } - if (reg_cxt->scaling_qm) { - mpp_free(reg_cxt->scaling_qm); + if (reg_ctx->scaling_qm) { + mpp_free(reg_ctx->scaling_qm); } - if (reg_cxt->sw_rps_buf) { - mpp_free(reg_cxt->sw_rps_buf); + if (reg_ctx->sw_rps_buf) { + mpp_free(reg_ctx->sw_rps_buf); } - if (reg_cxt->scaling_rk) { - mpp_free(reg_cxt->scaling_rk); + if (reg_ctx->scaling_rk) { + mpp_free(reg_ctx->scaling_rk); } hal_h265d_release_res(hal); - if (reg_cxt->group) { - ret = mpp_buffer_group_put(reg_cxt->group); + if (reg_ctx->group) { + ret = mpp_buffer_group_put(reg_ctx->group); if (ret) { mpp_err("h265d group free buffer failed\n"); return ret; @@ -276,19 +276,19 @@ static RK_S32 hal_h265d_v345_output_pps_packet(void *hal, void *dxva) RK_S32 i, j; RK_U32 log2_min_cb_size; RK_S32 width, height; - HalH265dCtx *reg_cxt = ( HalH265dCtx *)hal; + HalH265dCtx *reg_ctx = ( HalH265dCtx *)hal; h265d_dxva2_picture_context_t *dxva_cxt = (h265d_dxva2_picture_context_t*)dxva; BitputCtx_t bp; RK_U64 *pps_packet = mpp_calloc(RK_U64, fifo_len + 1); - if (NULL == reg_cxt || dxva_cxt == NULL) { - mpp_err("%s:%s:%d reg_cxt or dxva_cxt is NULL", + if (NULL == reg_ctx || dxva_cxt == NULL) { + mpp_err("%s:%s:%d reg_ctx or dxva_cxt is NULL", __FILE__, __FUNCTION__, __LINE__); MPP_FREE(pps_packet); return MPP_ERR_NULL_PTR; } - void *pps_ptr = mpp_buffer_get_ptr(reg_cxt->pps_data); + void *pps_ptr = mpp_buffer_get_ptr(reg_ctx->pps_data); if (NULL == pps_ptr) { mpp_err("pps_data get ptr error"); return MPP_ERR_NOMEM; @@ -461,8 +461,8 @@ static RK_S32 hal_h265d_v345_output_pps_packet(void *hal, void *dxva) } { - RK_U8 *ptr_scaling = (RK_U8 *)mpp_buffer_get_ptr(reg_cxt->scaling_list_data); - RK_U32 fd = mpp_buffer_get_fd(reg_cxt->scaling_list_data); + RK_U8 *ptr_scaling = (RK_U8 *)mpp_buffer_get_ptr(reg_ctx->scaling_list_data); + RK_U32 fd = mpp_buffer_get_fd(reg_ctx->scaling_list_data); hal_h265d_output_scalinglist_packet(hal, ptr_scaling, dxva); mpp_put_bits(&bp, fd, 32); @@ -490,19 +490,19 @@ static RK_S32 hal_h265d_output_pps_packet(void *hal, void *dxva) RK_S32 i, j; RK_U32 log2_min_cb_size; RK_S32 width, height; - HalH265dCtx *reg_cxt = ( HalH265dCtx *)hal; + HalH265dCtx *reg_ctx = ( HalH265dCtx *)hal; h265d_dxva2_picture_context_t *dxva_cxt = (h265d_dxva2_picture_context_t*)dxva; BitputCtx_t bp; RK_U64 *pps_packet = mpp_calloc(RK_U64, fifo_len + 1); - if (NULL == reg_cxt || dxva_cxt == NULL) { - mpp_err("%s:%s:%d reg_cxt or dxva_cxt is NULL", + if (NULL == reg_ctx || dxva_cxt == NULL) { - mpp_err("%s:%s:%d reg_ctx or dxva_cxt is NULL", __FILE__, __FUNCTION__, __LINE__); MPP_FREE(pps_packet); return MPP_ERR_NULL_PTR; } - void *pps_ptr = mpp_buffer_get_ptr(reg_cxt->pps_data); + void *pps_ptr
= mpp_buffer_get_ptr(reg_ctx->pps_data); if (NULL == pps_ptr) { mpp_err("pps_data get ptr error"); return MPP_ERR_NOMEM; @@ -674,8 +674,8 @@ static RK_S32 hal_h265d_output_pps_packet(void *hal, void *dxva) } { - RK_U8 *ptr_scaling = (RK_U8 *)mpp_buffer_get_ptr(reg_cxt->scaling_list_data); - RK_U32 fd = mpp_buffer_get_fd(reg_cxt->scaling_list_data); + RK_U8 *ptr_scaling = (RK_U8 *)mpp_buffer_get_ptr(reg_ctx->scaling_list_data); + RK_U32 fd = mpp_buffer_get_fd(reg_ctx->scaling_list_data); hal_h265d_output_scalinglist_packet(hal, ptr_scaling, dxva); mpp_put_bits(&bp, fd, 32); @@ -765,19 +765,19 @@ MPP_RET hal_h265d_rkv_gen_regs(void *hal, HalTaskInfo *syn) h265d_dxva2_picture_context_t *dxva_cxt = (h265d_dxva2_picture_context_t *)syn->dec.syntax.data; - HalH265dCtx *reg_cxt = ( HalH265dCtx *)hal; + HalH265dCtx *reg_ctx = ( HalH265dCtx *)hal; void *rps_ptr = NULL; - if (reg_cxt ->fast_mode) { + if (reg_ctx ->fast_mode) { for (i = 0; i < MAX_GEN_REG; i++) { - if (!reg_cxt->g_buf[i].use_flag) { + if (!reg_ctx->g_buf[i].use_flag) { syn->dec.reg_index = i; - reg_cxt->rps_data = reg_cxt->g_buf[i].rps_data; - reg_cxt->scaling_list_data = - reg_cxt->g_buf[i].scaling_list_data; - reg_cxt->pps_data = reg_cxt->g_buf[i].pps_data; - reg_cxt->hw_regs = reg_cxt->g_buf[i].hw_regs; - reg_cxt->g_buf[i].use_flag = 1; + reg_ctx->rps_data = reg_ctx->g_buf[i].rps_data; + reg_ctx->scaling_list_data = + reg_ctx->g_buf[i].scaling_list_data; + reg_ctx->pps_data = reg_ctx->g_buf[i].pps_data; + reg_ctx->hw_regs = reg_ctx->g_buf[i].hw_regs; + reg_ctx->g_buf[i].use_flag = 1; break; } } @@ -786,7 +786,7 @@ MPP_RET hal_h265d_rkv_gen_regs(void *hal, HalTaskInfo *syn) return MPP_ERR_NOMEM; } } - rps_ptr = mpp_buffer_get_ptr(reg_cxt->rps_data); + rps_ptr = mpp_buffer_get_ptr(reg_ctx->rps_data); if (NULL == rps_ptr) { mpp_err("rps_data get ptr error"); @@ -800,17 +800,17 @@ MPP_RET hal_h265d_rkv_gen_regs(void *hal, HalTaskInfo *syn) } /* output pps */ - if (reg_cxt->is_v345) { + if (reg_ctx->is_v345) { hal_h265d_v345_output_pps_packet(hal, syn->dec.syntax.data); } else { hal_h265d_output_pps_packet(hal, syn->dec.syntax.data); } - if (NULL == reg_cxt->hw_regs) { + if (NULL == reg_ctx->hw_regs) { return MPP_ERR_NULL_PTR; } - hw_regs = (H265d_REGS_t*)reg_cxt->hw_regs; + hw_regs = (H265d_REGS_t*)reg_ctx->hw_regs; memset(hw_regs, 0, sizeof(H265d_REGS_t)); log2_min_cb_size = dxva_cxt->pp.log2_min_luma_coding_block_size_minus3 + 3; @@ -834,7 +834,7 @@ MPP_RET hal_h265d_rkv_gen_regs(void *hal, HalTaskInfo *syn) hw_regs->sw_y_virstride = virstrid_y >> 4; hw_regs->sw_yuv_virstride = virstrid_yuv >> 4; hw_regs->sw_sysctrl.sw_h26x_rps_mode = 0; - mpp_buf_slot_get_prop(reg_cxt->slots, dxva_cxt->pp.CurrPic.Index7Bits, + mpp_buf_slot_get_prop(reg_ctx->slots, dxva_cxt->pp.CurrPic.Index7Bits, SLOT_BUFFER, &framebuf); hw_regs->sw_decout_base = mpp_buffer_get_fd(framebuf); //just index need map @@ -847,17 +847,17 @@ MPP_RET hal_h265d_rkv_gen_regs(void *hal, HalTaskInfo *syn) hw_regs->sw_cur_poc = dxva_cxt->pp.CurrPicOrderCntVal; - mpp_buf_slot_get_prop(reg_cxt->packet_slots, syn->dec.input, SLOT_BUFFER, + mpp_buf_slot_get_prop(reg_ctx->packet_slots, syn->dec.input, SLOT_BUFFER, &streambuf); if ( dxva_cxt->bitstream == NULL) { dxva_cxt->bitstream = mpp_buffer_get_ptr(streambuf); } - if (reg_cxt->is_v345) { + if (reg_ctx->is_v345) { #ifdef HW_RPS hw_regs->sw_sysctrl.sw_wait_reset_en = 1; hw_regs->v345_reg_ends.reg064_mvc0.refp_layer_same_with_cur = 0xffff; - hal_h265d_slice_hw_rps(syn->dec.syntax.data, rps_ptr, reg_cxt->sw_rps_buf, 
reg_cxt->fast_mode); + hal_h265d_slice_hw_rps(syn->dec.syntax.data, rps_ptr, reg_ctx->sw_rps_buf, reg_ctx->fast_mode); #else hw_regs->sw_sysctrl.sw_h26x_rps_mode = 1; hal_h265d_slice_output_rps(syn->dec.syntax.data, rps_ptr); @@ -866,13 +866,13 @@ MPP_RET hal_h265d_rkv_gen_regs(void *hal, HalTaskInfo *syn) hal_h265d_slice_output_rps(syn->dec.syntax.data, rps_ptr); } - if (dxva_cxt->pp.slice_segment_header_extension_present_flag && !reg_cxt->is_v345) { + if (dxva_cxt->pp.slice_segment_header_extension_present_flag && !reg_ctx->is_v345) { update_stream_buffer(streambuf, syn); } - hw_regs->sw_cabactbl_base = mpp_buffer_get_fd(reg_cxt->cabac_table_data); - hw_regs->sw_pps_base = mpp_buffer_get_fd(reg_cxt->pps_data); - hw_regs->sw_rps_base = mpp_buffer_get_fd(reg_cxt->rps_data); + hw_regs->sw_cabactbl_base = mpp_buffer_get_fd(reg_ctx->cabac_table_data); + hw_regs->sw_pps_base = mpp_buffer_get_fd(reg_ctx->pps_data); + hw_regs->sw_rps_base = mpp_buffer_get_fd(reg_ctx->rps_data); hw_regs->sw_strm_rlc_base = mpp_buffer_get_fd(streambuf); stream_buf_size = mpp_buffer_get_size(streambuf); @@ -900,7 +900,7 @@ MPP_RET hal_h265d_rkv_gen_regs(void *hal, HalTaskInfo *syn) if (dxva_cxt->pp.RefPicList[i].bPicEntry != 0xff && dxva_cxt->pp.RefPicList[i].bPicEntry != 0x7f) { hw_regs->sw_refer_poc[i] = dxva_cxt->pp.PicOrderCntValList[i]; - mpp_buf_slot_get_prop(reg_cxt->slots, + mpp_buf_slot_get_prop(reg_ctx->slots, dxva_cxt->pp.RefPicList[i].Index7Bits, SLOT_BUFFER, &framebuf); if (framebuf != NULL) { @@ -916,10 +916,10 @@ MPP_RET hal_h265d_rkv_gen_regs(void *hal, HalTaskInfo *syn) } if (sw_ref_valid) { - mpp_dev_set_reg_offset(reg_cxt->dev, 10, sw_ref_valid & 0xf); - mpp_dev_set_reg_offset(reg_cxt->dev, 11, ((sw_ref_valid >> 4) & 0xf)); - mpp_dev_set_reg_offset(reg_cxt->dev, 12, ((sw_ref_valid >> 8) & 0xf)); - mpp_dev_set_reg_offset(reg_cxt->dev, 13, ((sw_ref_valid >> 12) & 0xf)); + mpp_dev_set_reg_offset(reg_ctx->dev, 10, sw_ref_valid & 0xf); + mpp_dev_set_reg_offset(reg_ctx->dev, 11, ((sw_ref_valid >> 4) & 0xf)); + mpp_dev_set_reg_offset(reg_ctx->dev, 12, ((sw_ref_valid >> 8) & 0xf)); + mpp_dev_set_reg_offset(reg_ctx->dev, 13, ((sw_ref_valid >> 12) & 0xf)); } return ret; @@ -929,7 +929,7 @@ MPP_RET hal_h265d_rkv_start(void *hal, HalTaskInfo *task) { MPP_RET ret = MPP_OK; H265d_REGS_t *hw_regs = NULL; - HalH265dCtx *reg_cxt = (HalH265dCtx *)hal; + HalH265dCtx *reg_ctx = (HalH265dCtx *)hal; RK_S32 index = task->dec.reg_index; RK_U32 i; @@ -940,10 +940,10 @@ MPP_RET hal_h265d_rkv_start(void *hal, HalTaskInfo *task) return MPP_OK; } - if (reg_cxt->fast_mode) { - hw_regs = ( H265d_REGS_t *)reg_cxt->g_buf[index].hw_regs; + if (reg_ctx->fast_mode) { + hw_regs = ( H265d_REGS_t *)reg_ctx->g_buf[index].hw_regs; } else { - hw_regs = ( H265d_REGS_t *)reg_cxt->hw_regs; + hw_regs = ( H265d_REGS_t *)reg_ctx->hw_regs; } if (hw_regs == NULL) { @@ -954,8 +954,8 @@ MPP_RET hal_h265d_rkv_start(void *hal, HalTaskInfo *task) do { MppDevRegWrCfg wr_cfg; MppDevRegRdCfg rd_cfg; - RK_U32 reg_size = (reg_cxt->is_v345) ? V345_HEVC_REGISTERS : - (reg_cxt->client_type == VPU_CLIENT_RKVDEC) ? + RK_U32 reg_size = (reg_ctx->is_v345) ? V345_HEVC_REGISTERS : + (reg_ctx->client_type == VPU_CLIENT_RKVDEC) ? 
RKVDEC_V1_REGISTERS : RKVDEC_HEVC_REGISTERS; reg_size *= sizeof(RK_U32); @@ -969,7 +969,7 @@ MPP_RET hal_h265d_rkv_start(void *hal, HalTaskInfo *task) h265h_dbg(H265H_DBG_REG, "RK_HEVC_DEC: regs[%02d]=%08X\n", i, ((RK_U32 *)hw_regs)[i]); } - ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_WR, &wr_cfg); + ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_WR, &wr_cfg); if (ret) { mpp_err_f("set register write failed %d\n", ret); break; @@ -979,13 +979,13 @@ MPP_RET hal_h265d_rkv_start(void *hal, HalTaskInfo *task) rd_cfg.size = reg_size; rd_cfg.offset = 0; - ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_RD, &rd_cfg); + ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_RD, &rd_cfg); if (ret) { mpp_err_f("set register read failed %d\n", ret); break; } - ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_CMD_SEND, NULL); + ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_CMD_SEND, NULL); if (ret) { mpp_err_f("send cmd failed %d\n", ret); break; @@ -999,7 +999,7 @@ MPP_RET hal_h265d_rkv_wait(void *hal, HalTaskInfo *task) { MPP_RET ret = MPP_OK; RK_S32 index = task->dec.reg_index; - HalH265dCtx *reg_cxt = (HalH265dCtx *)hal; + HalH265dCtx *reg_ctx = (HalH265dCtx *)hal; H265d_REGS_t *hw_regs = NULL; RK_S32 i; @@ -1009,13 +1009,13 @@ MPP_RET hal_h265d_rkv_wait(void *hal, HalTaskInfo *task) goto ERR_PROC; } - if (reg_cxt->fast_mode) { - hw_regs = ( H265d_REGS_t *)reg_cxt->g_buf[index].hw_regs; + if (reg_ctx->fast_mode) { + hw_regs = ( H265d_REGS_t *)reg_ctx->g_buf[index].hw_regs; } else { - hw_regs = ( H265d_REGS_t *)reg_cxt->hw_regs; + hw_regs = ( H265d_REGS_t *)reg_ctx->hw_regs; } - ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_CMD_POLL, NULL); + ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_CMD_POLL, NULL); if (ret) mpp_err_f("poll cmd failed %d\n", ret); @@ -1025,31 +1025,31 @@ ERR_PROC: hw_regs->sw_interrupt.sw_dec_error_sta || hw_regs->sw_interrupt.sw_dec_timeout_sta || hw_regs->sw_interrupt.sw_dec_empty_sta) { - if (!reg_cxt->fast_mode) { - if (reg_cxt->dec_cb) - mpp_callback(reg_cxt->dec_cb, &task->dec); + if (!reg_ctx->fast_mode) { + if (reg_ctx->dec_cb) + mpp_callback(reg_ctx->dec_cb, &task->dec); } else { MppFrame mframe = NULL; - mpp_buf_slot_get_prop(reg_cxt->slots, task->dec.output, + mpp_buf_slot_get_prop(reg_ctx->slots, task->dec.output, SLOT_FRAME_PTR, &mframe); if (mframe) { - reg_cxt->fast_mode_err_found = 1; + reg_ctx->fast_mode_err_found = 1; mpp_frame_set_errinfo(mframe, 1); } } } else { - if (reg_cxt->fast_mode && reg_cxt->fast_mode_err_found) { + if (reg_ctx->fast_mode && reg_ctx->fast_mode_err_found) { for (i = 0; i < (RK_S32)MPP_ARRAY_ELEMS(task->dec.refer); i++) { if (task->dec.refer[i] >= 0) { MppFrame frame_ref = NULL; - mpp_buf_slot_get_prop(reg_cxt->slots, task->dec.refer[i], + mpp_buf_slot_get_prop(reg_ctx->slots, task->dec.refer[i], SLOT_FRAME_PTR, &frame_ref); h265h_dbg(H265H_DBG_FAST_ERR, "refer[%d] %d frame %p\n", i, task->dec.refer[i], frame_ref); if (frame_ref && mpp_frame_get_errinfo(frame_ref)) { MppFrame frame_out = NULL; - mpp_buf_slot_get_prop(reg_cxt->slots, task->dec.output, + mpp_buf_slot_get_prop(reg_ctx->slots, task->dec.output, SLOT_FRAME_PTR, &frame_out); mpp_frame_set_errinfo(frame_out, 1); break; @@ -1063,8 +1063,8 @@ ERR_PROC: h265h_dbg(H265H_DBG_REG, "RK_HEVC_DEC: regs[1]=0x%08X, regs[45]=0x%08x\n", ((RK_U32 *)hw_regs)[1], ((RK_U32 *)hw_regs)[45]); } - if (reg_cxt->fast_mode) { - reg_cxt->g_buf[index].use_flag = 0; + if (reg_ctx->fast_mode) { + reg_ctx->g_buf[index].use_flag = 0; } return ret; diff --git a/mpp/hal/rkdec/h265d/hal_h265d_vdpu34x.c 
b/mpp/hal/rkdec/h265d/hal_h265d_vdpu34x.c index 5a97d8fe..26f5a3b6 100644 --- a/mpp/hal/rkdec/h265d/hal_h265d_vdpu34x.c +++ b/mpp/hal/rkdec/h265d/hal_h265d_vdpu34x.c @@ -98,28 +98,28 @@ static const FilterdColBufRatio filterd_fbc_off[CTU][FMT] = { static MPP_RET hal_h265d_vdpu34x_init(void *hal, MppHalCfg *cfg) { RK_S32 ret = 0; - HalH265dCtx *reg_cxt = (HalH265dCtx *)hal; + HalH265dCtx *reg_ctx = (HalH265dCtx *)hal; - mpp_slots_set_prop(reg_cxt->slots, SLOTS_HOR_ALIGN, hevc_hor_align); - mpp_slots_set_prop(reg_cxt->slots, SLOTS_VER_ALIGN, hevc_ver_align); + mpp_slots_set_prop(reg_ctx->slots, SLOTS_HOR_ALIGN, hevc_hor_align); + mpp_slots_set_prop(reg_ctx->slots, SLOTS_VER_ALIGN, hevc_ver_align); - reg_cxt->scaling_qm = mpp_calloc(DXVA_Qmatrix_HEVC, 1); - if (reg_cxt->scaling_qm == NULL) { + reg_ctx->scaling_qm = mpp_calloc(DXVA_Qmatrix_HEVC, 1); + if (reg_ctx->scaling_qm == NULL) { mpp_err("scaling_org alloc fail"); return MPP_ERR_MALLOC; } - reg_cxt->scaling_rk = mpp_calloc(scalingFactor_t, 1); - reg_cxt->pps_buf = mpp_calloc(RK_U64, 15); - reg_cxt->sw_rps_buf = mpp_calloc(RK_U64, 400); + reg_ctx->scaling_rk = mpp_calloc(scalingFactor_t, 1); + reg_ctx->pps_buf = mpp_calloc(RK_U64, 15); + reg_ctx->sw_rps_buf = mpp_calloc(RK_U64, 400); - if (reg_cxt->scaling_rk == NULL) { + if (reg_ctx->scaling_rk == NULL) { mpp_err("scaling_rk alloc fail"); return MPP_ERR_MALLOC; } - if (reg_cxt->group == NULL) { - ret = mpp_buffer_group_get_internal(&reg_cxt->group, MPP_BUFFER_TYPE_ION); + if (reg_ctx->group == NULL) { + ret = mpp_buffer_group_get_internal(&reg_ctx->group, MPP_BUFFER_TYPE_ION); if (ret) { mpp_err("h265d mpp_buffer_group_get failed\n"); return ret; @@ -128,33 +128,33 @@ static MPP_RET hal_h265d_vdpu34x_init(void *hal, MppHalCfg *cfg) { RK_U32 i = 0; - RK_U32 max_cnt = reg_cxt->fast_mode ? MAX_GEN_REG : 1; + RK_U32 max_cnt = reg_ctx->fast_mode ?
MAX_GEN_REG : 1; //!< malloc buffers - ret = mpp_buffer_get(reg_cxt->group, &reg_cxt->bufs, ALL_BUFFER_SIZE(max_cnt)); + ret = mpp_buffer_get(reg_ctx->group, &reg_ctx->bufs, ALL_BUFFER_SIZE(max_cnt)); if (ret) { mpp_err("h265d mpp_buffer_get failed\n"); return ret; } - reg_cxt->bufs_fd = mpp_buffer_get_fd(reg_cxt->bufs); - reg_cxt->offset_cabac = CABAC_TAB_OFFSET; + reg_ctx->bufs_fd = mpp_buffer_get_fd(reg_ctx->bufs); + reg_ctx->offset_cabac = CABAC_TAB_OFFSET; for (i = 0; i < max_cnt; i++) { - reg_cxt->g_buf[i].hw_regs = mpp_calloc_size(void, sizeof(Vdpu34xH265dRegSet)); - reg_cxt->offset_spspps[i] = SPSPPS_OFFSET(i); - reg_cxt->offset_rps[i] = RPS_OFFSET(i); - reg_cxt->offset_sclst[i] = SCALIST_OFFSET(i); + reg_ctx->g_buf[i].hw_regs = mpp_calloc_size(void, sizeof(Vdpu34xH265dRegSet)); + reg_ctx->offset_spspps[i] = SPSPPS_OFFSET(i); + reg_ctx->offset_rps[i] = RPS_OFFSET(i); + reg_ctx->offset_sclst[i] = SCALIST_OFFSET(i); } } - if (!reg_cxt->fast_mode) { - reg_cxt->hw_regs = reg_cxt->g_buf[0].hw_regs; - reg_cxt->spspps_offset = reg_cxt->offset_spspps[0]; - reg_cxt->rps_offset = reg_cxt->offset_rps[0]; - reg_cxt->sclst_offset = reg_cxt->offset_sclst[0]; + if (!reg_ctx->fast_mode) { + reg_ctx->hw_regs = reg_ctx->g_buf[0].hw_regs; + reg_ctx->spspps_offset = reg_ctx->offset_spspps[0]; + reg_ctx->rps_offset = reg_ctx->offset_rps[0]; + reg_ctx->sclst_offset = reg_ctx->offset_sclst[0]; } - ret = mpp_buffer_write(reg_cxt->bufs, 0, (void*)cabac_table, sizeof(cabac_table)); + ret = mpp_buffer_write(reg_ctx->bufs, 0, (void*)cabac_table, sizeof(cabac_table)); if (ret) { mpp_err("h265d write cabac_table data failed\n"); return ret; @@ -185,39 +185,39 @@ static MPP_RET hal_h265d_vdpu34x_init(void *hal, MppHalCfg *cfg) static MPP_RET hal_h265d_vdpu34x_deinit(void *hal) { - HalH265dCtx *reg_cxt = (HalH265dCtx *)hal; - RK_U32 loop = reg_cxt->fast_mode ? MPP_ARRAY_ELEMS(reg_cxt->g_buf) : 1; + HalH265dCtx *reg_ctx = (HalH265dCtx *)hal; + RK_U32 loop = reg_ctx->fast_mode ? MPP_ARRAY_ELEMS(reg_ctx->g_buf) : 1; RK_U32 i; - if (reg_cxt->bufs) { - mpp_buffer_put(reg_cxt->bufs); - reg_cxt->bufs = NULL; + if (reg_ctx->bufs) { + mpp_buffer_put(reg_ctx->bufs); + reg_ctx->bufs = NULL; } - loop = reg_cxt->fast_mode ? MPP_ARRAY_ELEMS(reg_cxt->rcb_buf) : 1; + loop = reg_ctx->fast_mode ?
MPP_ARRAY_ELEMS(reg_ctx->rcb_buf) : 1; for (i = 0; i < loop; i++) { - if (reg_cxt->rcb_buf[i]) { - mpp_buffer_put(reg_cxt->rcb_buf[i]); - reg_cxt->rcb_buf[i] = NULL; + if (reg_ctx->rcb_buf[i]) { + mpp_buffer_put(reg_ctx->rcb_buf[i]); + reg_ctx->rcb_buf[i] = NULL; } } - if (reg_cxt->group) { - mpp_buffer_group_put(reg_cxt->group); - reg_cxt->group = NULL; + if (reg_ctx->group) { + mpp_buffer_group_put(reg_ctx->group); + reg_ctx->group = NULL; } for (i = 0; i < loop; i++) - MPP_FREE(reg_cxt->g_buf[i].hw_regs); + MPP_FREE(reg_ctx->g_buf[i].hw_regs); - MPP_FREE(reg_cxt->scaling_qm); - MPP_FREE(reg_cxt->scaling_rk); - MPP_FREE(reg_cxt->pps_buf); - MPP_FREE(reg_cxt->sw_rps_buf); + MPP_FREE(reg_ctx->scaling_qm); + MPP_FREE(reg_ctx->scaling_rk); + MPP_FREE(reg_ctx->pps_buf); + MPP_FREE(reg_ctx->sw_rps_buf); - if (reg_cxt->cmv_bufs) { - hal_bufs_deinit(reg_cxt->cmv_bufs); - reg_cxt->cmv_bufs = NULL; + if (reg_ctx->cmv_bufs) { + hal_bufs_deinit(reg_ctx->cmv_bufs); + reg_ctx->cmv_bufs = NULL; } return MPP_OK; @@ -230,19 +230,19 @@ static RK_S32 hal_h265d_v345_output_pps_packet(void *hal, void *dxva) RK_U32 addr; RK_U32 log2_min_cb_size; RK_S32 width, height; - HalH265dCtx *reg_cxt = ( HalH265dCtx *)hal; - Vdpu34xH265dRegSet *hw_reg = (Vdpu34xH265dRegSet*)(reg_cxt->hw_regs); + HalH265dCtx *reg_ctx = ( HalH265dCtx *)hal; + Vdpu34xH265dRegSet *hw_reg = (Vdpu34xH265dRegSet*)(reg_ctx->hw_regs); h265d_dxva2_picture_context_t *dxva_cxt = (h265d_dxva2_picture_context_t*)dxva; BitputCtx_t bp; - if (NULL == reg_cxt || dxva_cxt == NULL) { - mpp_err("%s:%s:%d reg_cxt or dxva_cxt is NULL", + if (NULL == reg_ctx || dxva_cxt == NULL) { + mpp_err("%s:%s:%d reg_ctx or dxva_cxt is NULL", __FILE__, __FUNCTION__, __LINE__); return MPP_ERR_NULL_PTR; } - void *pps_ptr = mpp_buffer_get_ptr(reg_cxt->bufs) + reg_cxt->spspps_offset; + void *pps_ptr = mpp_buffer_get_ptr(reg_ctx->bufs) + reg_ctx->spspps_offset; if (dxva_cxt->pp.ps_update_flag) { - RK_U64 *pps_packet = reg_cxt->pps_buf; + RK_U64 *pps_packet = reg_ctx->pps_buf; if (NULL == pps_ptr) { mpp_err("pps_data get ptr error"); return MPP_ERR_NOMEM; @@ -417,7 +417,7 @@ static RK_S32 hal_h265d_v345_output_pps_packet(void *hal, void *dxva) if (dxva_cxt->pp.scaling_list_enabled_flag) { MppDevRegOffsetCfg trans_cfg; - RK_U8 *ptr_scaling = (RK_U8 *)mpp_buffer_get_ptr(reg_cxt->bufs) + reg_cxt->sclst_offset; + RK_U8 *ptr_scaling = (RK_U8 *)mpp_buffer_get_ptr(reg_ctx->bufs) + reg_ctx->sclst_offset; if (dxva_cxt->pp.scaling_list_data_present_flag) { addr = (dxva_cxt->pp.pps_id + 16) * 1360; @@ -429,17 +429,17 @@ static RK_S32 hal_h265d_v345_output_pps_packet(void *hal, void *dxva) hal_h265d_output_scalinglist_packet(hal, ptr_scaling + addr, dxva); - hw_reg->h265d_addr.reg180_scanlist_addr = reg_cxt->bufs_fd; + hw_reg->h265d_addr.reg180_scanlist_addr = reg_ctx->bufs_fd; hw_reg->common.reg012.scanlist_addr_valid_en = 1; /* need to config addr */ trans_cfg.reg_idx = 180; - trans_cfg.offset = addr + reg_cxt->sclst_offset; - mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_OFFSET, &trans_cfg); + trans_cfg.offset = addr + reg_ctx->sclst_offset; + mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_OFFSET, &trans_cfg); } for (i = 0; i < 64; i++) - memcpy(pps_ptr + i * 112, reg_cxt->pps_buf, 112); + memcpy(pps_ptr + i * 112, reg_ctx->pps_buf, 112); #ifdef dump fwrite(pps_ptr, 1, 80 * 64, fp); RK_U32 *tmp = (RK_U32 *)pps_ptr; @@ -457,20 +457,20 @@ static RK_S32 hal_h265d_output_pps_packet(void *hal, void *dxva) RK_U32 addr; RK_U32 log2_min_cb_size; RK_S32 width, height; - HalH265dCtx *reg_cxt = ( HalH265dCtx 
*)hal; + HalH265dCtx *reg_ctx = ( HalH265dCtx *)hal; h265d_dxva2_picture_context_t *dxva_cxt = (h265d_dxva2_picture_context_t*)dxva; BitputCtx_t bp; - if (NULL == reg_cxt || dxva_cxt == NULL) { - mpp_err("%s:%s:%d reg_cxt or dxva_cxt is NULL", + if (NULL == reg_ctx || dxva_cxt == NULL) { + mpp_err("%s:%s:%d reg_ctx or dxva_cxt is NULL", __FILE__, __FUNCTION__, __LINE__); return MPP_ERR_NULL_PTR; } - void *pps_ptr = mpp_buffer_get_ptr(reg_cxt->bufs) + reg_cxt->spspps_offset; + void *pps_ptr = mpp_buffer_get_ptr(reg_ctx->bufs) + reg_ctx->spspps_offset; if (dxva_cxt->pp.ps_update_flag || dxva_cxt->pp.scaling_list_enabled_flag) { - RK_U64 *pps_packet = reg_cxt->pps_buf; + RK_U64 *pps_packet = reg_ctx->pps_buf; if (NULL == pps_ptr) { mpp_err("pps_data get ptr error"); @@ -640,7 +640,7 @@ static RK_S32 hal_h265d_output_pps_packet(void *hal, void *dxva) } { - RK_U8 *ptr_scaling = (RK_U8 *)mpp_buffer_get_ptr(reg_cxt->scaling_list_data); + RK_U8 *ptr_scaling = (RK_U8 *)mpp_buffer_get_ptr(reg_ctx->scaling_list_data); if (dxva_cxt->pp.scaling_list_data_present_flag) { addr = (dxva_cxt->pp.pps_id + 16) * 1360; } else if (dxva_cxt->pp.scaling_list_enabled_flag) { @@ -651,7 +651,7 @@ static RK_S32 hal_h265d_output_pps_packet(void *hal, void *dxva) hal_h265d_output_scalinglist_packet(hal, ptr_scaling + addr, dxva); - RK_U32 fd = mpp_buffer_get_fd(reg_cxt->scaling_list_data); + RK_U32 fd = mpp_buffer_get_fd(reg_ctx->scaling_list_data); /* need to config addr */ addr = fd | (addr << 10); @@ -659,10 +659,10 @@ static RK_S32 hal_h265d_output_pps_packet(void *hal, void *dxva) mpp_put_align(&bp, 64, 0xf); } for (i = 0; i < 64; i++) - memcpy(pps_ptr + i * 80, reg_cxt->pps_buf, 80); - } else if (reg_cxt->fast_mode) { + memcpy(pps_ptr + i * 80, reg_ctx->pps_buf, 80); + } else if (reg_ctx->fast_mode) { for (i = 0; i < 64; i++) - memcpy(pps_ptr + i * 80, reg_cxt->pps_buf, 80); + memcpy(pps_ptr + i * 80, reg_ctx->pps_buf, 80); } #ifdef dump @@ -766,7 +766,7 @@ static void hal_h265d_rcb_info_update(void *hal, void *dxva, Vdpu34xH265dRegSet *hw_regs, RK_S32 width, RK_S32 height) { - HalH265dCtx *reg_cxt = ( HalH265dCtx *)hal; + HalH265dCtx *reg_ctx = ( HalH265dCtx *)hal; h265d_dxva2_picture_context_t *dxva_cxt = (h265d_dxva2_picture_context_t*)dxva; DXVA_PicParams_HEVC *pp = &dxva_cxt->pp; RK_U32 chroma_fmt_idc = pp->chroma_format_idc;//0 400,1 4202 ,422,3 444 @@ -774,35 +774,35 @@ static void hal_h265d_rcb_info_update(void *hal, void *dxva, RK_U8 ctu_size = 1 << (pp->log2_diff_max_min_luma_coding_block_size + pp->log2_min_luma_coding_block_size_minus3 + 3); RK_U32 num_tiles = pp->num_tile_rows_minus1 + 1; - if (reg_cxt->num_row_tiles != num_tiles || - reg_cxt->bit_depth != bit_depth || - reg_cxt->chroma_fmt_idc != chroma_fmt_idc || - reg_cxt->ctu_size != ctu_size || - reg_cxt->width != width || - reg_cxt->height != height) { + if (reg_ctx->num_row_tiles != num_tiles || + reg_ctx->bit_depth != bit_depth || + reg_ctx->chroma_fmt_idc != chroma_fmt_idc || + reg_ctx->ctu_size != ctu_size || + reg_ctx->width != width || + reg_ctx->height != height) { RK_U32 i = 0; - RK_U32 loop = reg_cxt->fast_mode ? MPP_ARRAY_ELEMS(reg_cxt->g_buf) : 1; + RK_U32 loop = reg_ctx->fast_mode ? 
MPP_ARRAY_ELEMS(reg_ctx->g_buf) : 1; - reg_cxt->rcb_buf_size = vdpu34x_get_rcb_buf_size((Vdpu34xRcbInfo*)reg_cxt->rcb_info, width, height); - h265d_refine_rcb_size((Vdpu34xRcbInfo*)reg_cxt->rcb_info, hw_regs, width, height, dxva_cxt); + reg_ctx->rcb_buf_size = vdpu34x_get_rcb_buf_size((Vdpu34xRcbInfo*)reg_ctx->rcb_info, width, height); + h265d_refine_rcb_size((Vdpu34xRcbInfo*)reg_ctx->rcb_info, hw_regs, width, height, dxva_cxt); for (i = 0; i < loop; i++) { MppBuffer rcb_buf; - if (reg_cxt->rcb_buf[i]) { - mpp_buffer_put(reg_cxt->rcb_buf[i]); - reg_cxt->rcb_buf[i] = NULL; + if (reg_ctx->rcb_buf[i]) { + mpp_buffer_put(reg_ctx->rcb_buf[i]); + reg_ctx->rcb_buf[i] = NULL; } - mpp_buffer_get(reg_cxt->group, &rcb_buf, reg_cxt->rcb_buf_size); - reg_cxt->rcb_buf[i] = rcb_buf; + mpp_buffer_get(reg_ctx->group, &rcb_buf, reg_ctx->rcb_buf_size); + reg_ctx->rcb_buf[i] = rcb_buf; } - reg_cxt->num_row_tiles = num_tiles; - reg_cxt->bit_depth = bit_depth; - reg_cxt->chroma_fmt_idc = chroma_fmt_idc; - reg_cxt->ctu_size = ctu_size; - reg_cxt->width = width; - reg_cxt->height = height; + reg_ctx->num_row_tiles = num_tiles; + reg_ctx->bit_depth = bit_depth; + reg_ctx->chroma_fmt_idc = chroma_fmt_idc; + reg_ctx->ctu_size = ctu_size; + reg_ctx->width = width; + reg_ctx->height = height; } } @@ -849,7 +849,7 @@ static MPP_RET hal_h265d_vdpu34x_gen_regs(void *hal, HalTaskInfo *syn) RK_S32 distance = MAX_INT; h265d_dxva2_picture_context_t *dxva_cxt = (h265d_dxva2_picture_context_t *)syn->dec.syntax.data; - HalH265dCtx *reg_cxt = ( HalH265dCtx *)hal; + HalH265dCtx *reg_ctx = ( HalH265dCtx *)hal; void *rps_ptr = NULL; RK_U32 stream_buf_size = 0; @@ -859,17 +859,17 @@ static MPP_RET hal_h265d_vdpu34x_gen_regs(void *hal, HalTaskInfo *syn) return MPP_OK; } - if (reg_cxt ->fast_mode) { + if (reg_ctx ->fast_mode) { for (i = 0; i < MAX_GEN_REG; i++) { - if (!reg_cxt->g_buf[i].use_flag) { + if (!reg_ctx->g_buf[i].use_flag) { syn->dec.reg_index = i; - reg_cxt->spspps_offset = reg_cxt->offset_spspps[i]; - reg_cxt->rps_offset = reg_cxt->offset_rps[i]; - reg_cxt->sclst_offset = reg_cxt->offset_sclst[i]; + reg_ctx->spspps_offset = reg_ctx->offset_spspps[i]; + reg_ctx->rps_offset = reg_ctx->offset_rps[i]; + reg_ctx->sclst_offset = reg_ctx->offset_sclst[i]; - reg_cxt->hw_regs = reg_cxt->g_buf[i].hw_regs; - reg_cxt->g_buf[i].use_flag = 1; + reg_ctx->hw_regs = reg_ctx->g_buf[i].hw_regs; + reg_ctx->g_buf[i].use_flag = 1; break; } } @@ -878,7 +878,7 @@ static MPP_RET hal_h265d_vdpu34x_gen_regs(void *hal, HalTaskInfo *syn) return MPP_ERR_NOMEM; } } - rps_ptr = mpp_buffer_get_ptr(reg_cxt->bufs) + reg_cxt->rps_offset; + rps_ptr = mpp_buffer_get_ptr(reg_ctx->bufs) + reg_ctx->rps_offset; if (NULL == rps_ptr) { mpp_err("rps_data get ptr error"); @@ -892,16 +892,16 @@ static MPP_RET hal_h265d_vdpu34x_gen_regs(void *hal, HalTaskInfo *syn) } /* output pps */ - hw_regs = (Vdpu34xH265dRegSet*)reg_cxt->hw_regs; + hw_regs = (Vdpu34xH265dRegSet*)reg_ctx->hw_regs; memset(hw_regs, 0, sizeof(Vdpu34xH265dRegSet)); - if (reg_cxt->is_v34x) { + if (reg_ctx->is_v34x) { hal_h265d_v345_output_pps_packet(hal, syn->dec.syntax.data); } else { hal_h265d_output_pps_packet(hal, syn->dec.syntax.data); } - if (NULL == reg_cxt->hw_regs) { + if (NULL == reg_ctx->hw_regs) { return MPP_ERR_NULL_PTR; } @@ -911,30 +911,30 @@ static MPP_RET hal_h265d_vdpu34x_gen_regs(void *hal, HalTaskInfo *syn) width = (dxva_cxt->pp.PicWidthInMinCbsY << log2_min_cb_size); height = (dxva_cxt->pp.PicHeightInMinCbsY << log2_min_cb_size); mv_size = (MPP_ALIGN(width, 64) * MPP_ALIGN(height, 
64)) >> 3; - if (reg_cxt->cmv_bufs == NULL || reg_cxt->mv_size < mv_size) { + if (reg_ctx->cmv_bufs == NULL || reg_ctx->mv_size < mv_size) { size_t size = mv_size; - if (reg_cxt->cmv_bufs) { - hal_bufs_deinit(reg_cxt->cmv_bufs); - reg_cxt->cmv_bufs = NULL; + if (reg_ctx->cmv_bufs) { + hal_bufs_deinit(reg_ctx->cmv_bufs); + reg_ctx->cmv_bufs = NULL; } - hal_bufs_init(&reg_cxt->cmv_bufs); - if (reg_cxt->cmv_bufs == NULL) { + hal_bufs_init(&reg_ctx->cmv_bufs); + if (reg_ctx->cmv_bufs == NULL) { mpp_err_f("colmv bufs init fail"); return MPP_ERR_NULL_PTR; } - reg_cxt->mv_size = mv_size; - reg_cxt->mv_count = mpp_buf_slot_get_count(reg_cxt->slots); - hal_bufs_setup(reg_cxt->cmv_bufs, reg_cxt->mv_count, 1, &size); + reg_ctx->mv_size = mv_size; + reg_ctx->mv_count = mpp_buf_slot_get_count(reg_ctx->slots); + hal_bufs_setup(reg_ctx->cmv_bufs, reg_ctx->mv_count, 1, &size); } { MppFrame mframe = NULL; RK_U32 ver_virstride; - mpp_buf_slot_get_prop(reg_cxt->slots, dxva_cxt->pp.CurrPic.Index7Bits, + mpp_buf_slot_get_prop(reg_ctx->slots, dxva_cxt->pp.CurrPic.Index7Bits, SLOT_FRAME_PTR, &mframe); stride_y = mpp_frame_get_hor_stride(mframe); ver_virstride = mpp_frame_get_ver_stride(mframe); @@ -967,10 +967,10 @@ static MPP_RET hal_h265d_vdpu34x_gen_regs(void *hal, HalTaskInfo *syn) hw_regs->common.reg020_y_virstride.y_virstride = virstrid_y >> 4; } - if (MPP_FRAME_FMT_IS_HDR(mpp_frame_get_fmt(mframe)) && reg_cxt->cfg->base.enable_hdr_meta) + if (MPP_FRAME_FMT_IS_HDR(mpp_frame_get_fmt(mframe)) && reg_ctx->cfg->base.enable_hdr_meta) fill_hdr_meta_to_frame(mframe, HDR_HEVC); } - mpp_buf_slot_get_prop(reg_cxt->slots, dxva_cxt->pp.CurrPic.Index7Bits, + mpp_buf_slot_get_prop(reg_ctx->slots, dxva_cxt->pp.CurrPic.Index7Bits, SLOT_BUFFER, &framebuf); hw_regs->common_addr.reg130_decout_base = mpp_buffer_get_fd(framebuf); //just index need map /*if out_base is equal to zero it means this frame may error @@ -981,21 +981,21 @@ static MPP_RET hal_h265d_vdpu34x_gen_regs(void *hal, HalTaskInfo *syn) } fd = mpp_buffer_get_fd(framebuf); hw_regs->common_addr.reg130_decout_base = fd; - mv_buf = hal_bufs_get_buf(reg_cxt->cmv_bufs, dxva_cxt->pp.CurrPic.Index7Bits); + mv_buf = hal_bufs_get_buf(reg_ctx->cmv_bufs, dxva_cxt->pp.CurrPic.Index7Bits); hw_regs->common_addr.reg131_colmv_cur_base = mpp_buffer_get_fd(mv_buf->buf[0]); hw_regs->h265d_param.reg65.cur_top_poc = dxva_cxt->pp.CurrPicOrderCntVal; - mpp_buf_slot_get_prop(reg_cxt->packet_slots, syn->dec.input, SLOT_BUFFER, + mpp_buf_slot_get_prop(reg_ctx->packet_slots, syn->dec.input, SLOT_BUFFER, &streambuf); if ( dxva_cxt->bitstream == NULL) { dxva_cxt->bitstream = mpp_buffer_get_ptr(streambuf); } - if (reg_cxt->is_v34x) { + if (reg_ctx->is_v34x) { #ifdef HW_RPS hw_regs->common.reg012.wait_reset_en = 1; hw_regs->h265d_param.reg103.ref_pic_layer_same_with_cur = 0xffff; - hal_h265d_slice_hw_rps(syn->dec.syntax.data, rps_ptr, reg_cxt->sw_rps_buf, reg_cxt->fast_mode); + hal_h265d_slice_hw_rps(syn->dec.syntax.data, rps_ptr, reg_ctx->sw_rps_buf, reg_ctx->fast_mode); #else hw_regs->sw_sysctrl.sw_h26x_rps_mode = 1; hal_h265d_slice_output_rps(syn->dec.syntax.data, rps_ptr); @@ -1006,10 +1006,10 @@ static MPP_RET hal_h265d_vdpu34x_gen_regs(void *hal, HalTaskInfo *syn) MppDevRegOffsetCfg trans_cfg; /* cabac table */ - hw_regs->h265d_addr.reg197_cabactbl_base = reg_cxt->bufs_fd; + hw_regs->h265d_addr.reg197_cabactbl_base = reg_ctx->bufs_fd; /* pps */ - hw_regs->h265d_addr.reg161_pps_base = reg_cxt->bufs_fd; - hw_regs->h265d_addr.reg163_rps_base = reg_cxt->bufs_fd; +
hw_regs->h265d_addr.reg161_pps_base = reg_ctx->bufs_fd; + hw_regs->h265d_addr.reg163_rps_base = reg_ctx->bufs_fd; hw_regs->common_addr.reg128_rlc_base = mpp_buffer_get_fd(streambuf); hw_regs->common_addr.reg129_rlcwrite_base = mpp_buffer_get_fd(streambuf); @@ -1041,7 +1041,7 @@ static MPP_RET hal_h265d_vdpu34x_gen_regs(void *hal, HalTaskInfo *syn) hw_regs->common.reg032_timeout_threshold = 0x3ffff; valid_ref = hw_regs->common_addr.reg130_decout_base; - reg_cxt->error_index = dxva_cxt->pp.CurrPic.Index7Bits; + reg_ctx->error_index = dxva_cxt->pp.CurrPic.Index7Bits; hw_regs->common_addr.reg132_error_ref_base = valid_ref; for (i = 0; i < (RK_S32)MPP_ARRAY_ELEMS(dxva_cxt->pp.RefPicList); i++) { @@ -1050,10 +1050,10 @@ static MPP_RET hal_h265d_vdpu34x_gen_regs(void *hal, HalTaskInfo *syn) MppFrame mframe = NULL; hw_regs->h265d_param.reg67_82_ref_poc[i] = dxva_cxt->pp.PicOrderCntValList[i]; - mpp_buf_slot_get_prop(reg_cxt->slots, + mpp_buf_slot_get_prop(reg_ctx->slots, dxva_cxt->pp.RefPicList[i].Index7Bits, SLOT_BUFFER, &framebuf); - mpp_buf_slot_get_prop(reg_cxt->slots, dxva_cxt->pp.RefPicList[i].Index7Bits, + mpp_buf_slot_get_prop(reg_ctx->slots, dxva_cxt->pp.RefPicList[i].Index7Bits, SLOT_FRAME_PTR, &mframe); if (framebuf != NULL) { hw_regs->h265d_addr.reg164_179_ref_base[i] = mpp_buffer_get_fd(framebuf); @@ -1063,21 +1063,21 @@ static MPP_RET hal_h265d_vdpu34x_gen_regs(void *hal, HalTaskInfo *syn) && (!mpp_frame_get_errinfo(mframe))) { distance = pocdistance(dxva_cxt->pp.PicOrderCntValList[i], dxva_cxt->pp.current_poc); hw_regs->common_addr.reg132_error_ref_base = hw_regs->h265d_addr.reg164_179_ref_base[i]; - reg_cxt->error_index = dxva_cxt->pp.RefPicList[i].Index7Bits; + reg_ctx->error_index = dxva_cxt->pp.RefPicList[i].Index7Bits; hw_regs->common.reg021.error_intra_mode = 0; } } else { hw_regs->h265d_addr.reg164_179_ref_base[i] = valid_ref; } - mv_buf = hal_bufs_get_buf(reg_cxt->cmv_bufs, dxva_cxt->pp.RefPicList[i].Index7Bits); + mv_buf = hal_bufs_get_buf(reg_ctx->cmv_bufs, dxva_cxt->pp.RefPicList[i].Index7Bits); hw_regs->h265d_addr.reg181_196_colmv_base[i] = mpp_buffer_get_fd(mv_buf->buf[0]); SET_REF_VALID(hw_regs->h265d_param, i, 1); } } - if ((reg_cxt->error_index == dxva_cxt->pp.CurrPic.Index7Bits) && !dxva_cxt->pp.IntraPicFlag) { + if ((reg_ctx->error_index == dxva_cxt->pp.CurrPic.Index7Bits) && !dxva_cxt->pp.IntraPicFlag) { // mpp_err("current frm may be err, should skip process"); syn->dec.flags.ref_err = 1; return MPP_OK; @@ -1090,36 +1090,36 @@ static MPP_RET hal_h265d_vdpu34x_gen_regs(void *hal, HalTaskInfo *syn) if (!hw_regs->common.reg021.error_intra_mode) { MppFrame mframe = NULL; - mpp_buf_slot_get_prop(reg_cxt->slots, + mpp_buf_slot_get_prop(reg_ctx->slots, dxva_cxt->pp.RefPicList[i].Index7Bits, SLOT_BUFFER, &framebuf); - mpp_buf_slot_get_prop(reg_cxt->slots, dxva_cxt->pp.RefPicList[i].Index7Bits, + mpp_buf_slot_get_prop(reg_ctx->slots, dxva_cxt->pp.RefPicList[i].Index7Bits, SLOT_FRAME_PTR, &mframe); if (framebuf == NULL || mpp_frame_get_errinfo(mframe)) { - mv_buf = hal_bufs_get_buf(reg_cxt->cmv_bufs, reg_cxt->error_index); + mv_buf = hal_bufs_get_buf(reg_ctx->cmv_bufs, reg_ctx->error_index); hw_regs->h265d_addr.reg164_179_ref_base[i] = hw_regs->common_addr.reg132_error_ref_base; hw_regs->h265d_addr.reg181_196_colmv_base[i] = mpp_buffer_get_fd(mv_buf->buf[0]); } } } else { - mv_buf = hal_bufs_get_buf(reg_cxt->cmv_bufs, reg_cxt->error_index); + mv_buf = hal_bufs_get_buf(reg_ctx->cmv_bufs, reg_ctx->error_index); hw_regs->h265d_addr.reg164_179_ref_base[i] = 
hw_regs->common_addr.reg132_error_ref_base; hw_regs->h265d_addr.reg181_196_colmv_base[i] = mpp_buffer_get_fd(mv_buf->buf[0]); /* mark 3 to differ from current frame */ - if (reg_cxt->error_index == dxva_cxt->pp.CurrPic.Index7Bits) + if (reg_ctx->error_index == dxva_cxt->pp.CurrPic.Index7Bits) SET_POC_HIGNBIT_INFO(hw_regs->highpoc, i, poc_highbit, 3); } } trans_cfg.reg_idx = 161; - trans_cfg.offset = reg_cxt->spspps_offset; - mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_OFFSET, &trans_cfg); + trans_cfg.offset = reg_ctx->spspps_offset; + mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_OFFSET, &trans_cfg); /* rps */ trans_cfg.reg_idx = 163; - trans_cfg.offset = reg_cxt->rps_offset; - mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_OFFSET, &trans_cfg); + trans_cfg.offset = reg_ctx->rps_offset; + mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_OFFSET, &trans_cfg); hw_regs->common.reg013.timeout_mode = 1; hw_regs->common.reg013.cur_pic_is_idr = dxva_cxt->pp.IdrPicFlag;//p_hal->slice_long->idr_flag; @@ -1127,9 +1127,9 @@ static MPP_RET hal_h265d_vdpu34x_gen_regs(void *hal, HalTaskInfo *syn) hw_regs->common.reg011.buf_empty_en = 1; hal_h265d_rcb_info_update(hal, dxva_cxt, hw_regs, width, height); - vdpu34x_setup_rcb(&hw_regs->common_addr, reg_cxt->dev, reg_cxt->fast_mode ? - reg_cxt->rcb_buf[syn->dec.reg_index] : reg_cxt->rcb_buf[0], - (Vdpu34xRcbInfo*)reg_cxt->rcb_info); + vdpu34x_setup_rcb(&hw_regs->common_addr, reg_ctx->dev, reg_ctx->fast_mode ? + reg_ctx->rcb_buf[syn->dec.reg_index] : reg_ctx->rcb_buf[0], + (Vdpu34xRcbInfo*)reg_ctx->rcb_info); vdpu34x_setup_statistic(&hw_regs->common, &hw_regs->statistic); return ret; @@ -1140,7 +1140,7 @@ static MPP_RET hal_h265d_vdpu34x_start(void *hal, HalTaskInfo *task) MPP_RET ret = MPP_OK; RK_U8* p = NULL; Vdpu34xH265dRegSet *hw_regs = NULL; - HalH265dCtx *reg_cxt = (HalH265dCtx *)hal; + HalH265dCtx *reg_ctx = (HalH265dCtx *)hal; RK_S32 index = task->dec.reg_index; RK_U32 i; @@ -1151,12 +1151,12 @@ static MPP_RET hal_h265d_vdpu34x_start(void *hal, HalTaskInfo *task) return MPP_OK; } - if (reg_cxt->fast_mode) { - p = (RK_U8*)reg_cxt->g_buf[index].hw_regs; - hw_regs = ( Vdpu34xH265dRegSet *)reg_cxt->g_buf[index].hw_regs; + if (reg_ctx->fast_mode) { + p = (RK_U8*)reg_ctx->g_buf[index].hw_regs; + hw_regs = ( Vdpu34xH265dRegSet *)reg_ctx->g_buf[index].hw_regs; } else { - p = (RK_U8*)reg_cxt->hw_regs; - hw_regs = ( Vdpu34xH265dRegSet *)reg_cxt->hw_regs; + p = (RK_U8*)reg_ctx->hw_regs; + hw_regs = ( Vdpu34xH265dRegSet *)reg_ctx->hw_regs; } if (hw_regs == NULL) { @@ -1178,7 +1178,7 @@ static MPP_RET hal_h265d_vdpu34x_start(void *hal, HalTaskInfo *task) wr_cfg.size = sizeof(hw_regs->common); wr_cfg.offset = OFFSET_COMMON_REGS; - ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_WR, &wr_cfg); + ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_WR, &wr_cfg); if (ret) { mpp_err_f("set register write failed %d\n", ret); break; @@ -1188,7 +1188,7 @@ static MPP_RET hal_h265d_vdpu34x_start(void *hal, HalTaskInfo *task) wr_cfg.size = sizeof(hw_regs->h265d_param); wr_cfg.offset = OFFSET_CODEC_PARAMS_REGS; - ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_WR, &wr_cfg); + ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_WR, &wr_cfg); if (ret) { mpp_err_f("set register write failed %d\n", ret); break; @@ -1198,7 +1198,7 @@ static MPP_RET hal_h265d_vdpu34x_start(void *hal, HalTaskInfo *task) wr_cfg.size = sizeof(hw_regs->common_addr); wr_cfg.offset = OFFSET_COMMON_ADDR_REGS; - ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_WR, &wr_cfg); + ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_WR, &wr_cfg); if (ret) { 
mpp_err_f("set register write failed %d\n", ret); break; @@ -1208,7 +1208,7 @@ static MPP_RET hal_h265d_vdpu34x_start(void *hal, HalTaskInfo *task) wr_cfg.size = sizeof(hw_regs->h265d_addr); wr_cfg.offset = OFFSET_CODEC_ADDR_REGS; - ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_WR, &wr_cfg); + ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_WR, &wr_cfg); if (ret) { mpp_err_f("set register write failed %d\n", ret); break; @@ -1218,7 +1218,7 @@ static MPP_RET hal_h265d_vdpu34x_start(void *hal, HalTaskInfo *task) wr_cfg.size = sizeof(hw_regs->statistic); wr_cfg.offset = OFFSET_STATISTIC_REGS; - ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_WR, &wr_cfg); + ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_WR, &wr_cfg); if (ret) { mpp_err_f("set register write failed %d\n", ret); break; @@ -1229,7 +1229,7 @@ static MPP_RET hal_h265d_vdpu34x_start(void *hal, HalTaskInfo *task) wr_cfg.size = sizeof(hw_regs->highpoc); wr_cfg.offset = OFFSET_POC_HIGHBIT_REGS; - ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_WR, &wr_cfg); + ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_WR, &wr_cfg); if (ret) { mpp_err_f("set register write failed %d\n", ret); break; @@ -1240,16 +1240,16 @@ static MPP_RET hal_h265d_vdpu34x_start(void *hal, HalTaskInfo *task) rd_cfg.size = sizeof(hw_regs->irq_status); rd_cfg.offset = OFFSET_INTERRUPT_REGS; - ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_RD, &rd_cfg); + ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_RD, &rd_cfg); if (ret) { mpp_err_f("set register read failed %d\n", ret); break; } /* rcb info for sram */ - vdpu34x_set_rcbinfo(reg_cxt->dev, (Vdpu34xRcbInfo*)reg_cxt->rcb_info); + vdpu34x_set_rcbinfo(reg_ctx->dev, (Vdpu34xRcbInfo*)reg_ctx->rcb_info); - ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_CMD_SEND, NULL); + ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_CMD_SEND, NULL); if (ret) { mpp_err_f("send cmd failed %d\n", ret); break; @@ -1264,7 +1264,7 @@ static MPP_RET hal_h265d_vdpu34x_wait(void *hal, HalTaskInfo *task) { MPP_RET ret = MPP_OK; RK_S32 index = task->dec.reg_index; - HalH265dCtx *reg_cxt = (HalH265dCtx *)hal; + HalH265dCtx *reg_ctx = (HalH265dCtx *)hal; RK_U8* p = NULL; Vdpu34xH265dRegSet *hw_regs = NULL; RK_S32 i; @@ -1275,15 +1275,15 @@ static MPP_RET hal_h265d_vdpu34x_wait(void *hal, HalTaskInfo *task) goto ERR_PROC; } - if (reg_cxt->fast_mode) { - hw_regs = ( Vdpu34xH265dRegSet *)reg_cxt->g_buf[index].hw_regs; + if (reg_ctx->fast_mode) { + hw_regs = ( Vdpu34xH265dRegSet *)reg_ctx->g_buf[index].hw_regs; } else { - hw_regs = ( Vdpu34xH265dRegSet *)reg_cxt->hw_regs; + hw_regs = ( Vdpu34xH265dRegSet *)reg_ctx->hw_regs; } p = (RK_U8*)hw_regs; - ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_CMD_POLL, NULL); + ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_CMD_POLL, NULL); if (ret) mpp_err_f("poll cmd failed %d\n", ret); @@ -1294,31 +1294,31 @@ ERR_PROC: hw_regs->irq_status.reg224.buf_empty_sta || hw_regs->irq_status.reg224.dec_bus_sta || !hw_regs->irq_status.reg224.dec_rdy_sta) { - if (!reg_cxt->fast_mode) { - if (reg_cxt->dec_cb) - mpp_callback(reg_cxt->dec_cb, &task->dec); + if (!reg_ctx->fast_mode) { + if (reg_ctx->dec_cb) + mpp_callback(reg_ctx->dec_cb, &task->dec); } else { MppFrame mframe = NULL; - mpp_buf_slot_get_prop(reg_cxt->slots, task->dec.output, + mpp_buf_slot_get_prop(reg_ctx->slots, task->dec.output, SLOT_FRAME_PTR, &mframe); if (mframe) { - reg_cxt->fast_mode_err_found = 1; + reg_ctx->fast_mode_err_found = 1; mpp_frame_set_errinfo(mframe, 1); } } } else { - if (reg_cxt->fast_mode && reg_cxt->fast_mode_err_found) { + if (reg_ctx->fast_mode && 
reg_ctx->fast_mode_err_found) { for (i = 0; i < (RK_S32)MPP_ARRAY_ELEMS(task->dec.refer); i++) { if (task->dec.refer[i] >= 0) { MppFrame frame_ref = NULL; - mpp_buf_slot_get_prop(reg_cxt->slots, task->dec.refer[i], + mpp_buf_slot_get_prop(reg_ctx->slots, task->dec.refer[i], SLOT_FRAME_PTR, &frame_ref); h265h_dbg(H265H_DBG_FAST_ERR, "refer[%d] %d frame %p\n", i, task->dec.refer[i], frame_ref); if (frame_ref && mpp_frame_get_errinfo(frame_ref)) { MppFrame frame_out = NULL; - mpp_buf_slot_get_prop(reg_cxt->slots, task->dec.output, + mpp_buf_slot_get_prop(reg_ctx->slots, task->dec.output, SLOT_FRAME_PTR, &frame_out); mpp_frame_set_errinfo(frame_out, 1); break; @@ -1341,8 +1341,8 @@ ERR_PROC: p += 4; } - if (reg_cxt->fast_mode) { - reg_cxt->g_buf[index].use_flag = 0; + if (reg_ctx->fast_mode) { + reg_ctx->g_buf[index].use_flag = 0; } return ret; diff --git a/mpp/hal/rkdec/h265d/hal_h265d_vdpu382.c b/mpp/hal/rkdec/h265d/hal_h265d_vdpu382.c index 7aed342f..0e243b16 100644 --- a/mpp/hal/rkdec/h265d/hal_h265d_vdpu382.c +++ b/mpp/hal/rkdec/h265d/hal_h265d_vdpu382.c @@ -98,28 +98,28 @@ static const FilterdColBufRatio filterd_fbc_off[CTU][FMT] = { static MPP_RET hal_h265d_vdpu382_init(void *hal, MppHalCfg *cfg) { RK_S32 ret = 0; - HalH265dCtx *reg_cxt = (HalH265dCtx *)hal; + HalH265dCtx *reg_ctx = (HalH265dCtx *)hal; - mpp_slots_set_prop(reg_cxt->slots, SLOTS_HOR_ALIGN, hevc_hor_align); - mpp_slots_set_prop(reg_cxt->slots, SLOTS_VER_ALIGN, hevc_ver_align); + mpp_slots_set_prop(reg_ctx->slots, SLOTS_HOR_ALIGN, hevc_hor_align); + mpp_slots_set_prop(reg_ctx->slots, SLOTS_VER_ALIGN, hevc_ver_align); - reg_cxt->scaling_qm = mpp_calloc(DXVA_Qmatrix_HEVC, 1); - if (reg_cxt->scaling_qm == NULL) { + reg_ctx->scaling_qm = mpp_calloc(DXVA_Qmatrix_HEVC, 1); + if (reg_ctx->scaling_qm == NULL) { mpp_err("scaling_org alloc fail"); return MPP_ERR_MALLOC; } - reg_cxt->scaling_rk = mpp_calloc(scalingFactor_t, 1); - reg_cxt->pps_buf = mpp_calloc(RK_U64, 15); - reg_cxt->sw_rps_buf = mpp_calloc(RK_U64, 400); + reg_ctx->scaling_rk = mpp_calloc(scalingFactor_t, 1); + reg_ctx->pps_buf = mpp_calloc(RK_U64, 15); + reg_ctx->sw_rps_buf = mpp_calloc(RK_U64, 400); - if (reg_cxt->scaling_rk == NULL) { + if (reg_ctx->scaling_rk == NULL) { mpp_err("scaling_rk alloc fail"); return MPP_ERR_MALLOC; } - if (reg_cxt->group == NULL) { - ret = mpp_buffer_group_get_internal(&reg_cxt->group, MPP_BUFFER_TYPE_ION); + if (reg_ctx->group == NULL) { + ret = mpp_buffer_group_get_internal(&reg_ctx->group, MPP_BUFFER_TYPE_ION); if (ret) { mpp_err("h265d mpp_buffer_group_get failed\n"); return ret; @@ -128,33 +128,33 @@ static MPP_RET hal_h265d_vdpu382_init(void *hal, MppHalCfg *cfg) { RK_U32 i = 0; - RK_U32 max_cnt = reg_cxt->fast_mode ? MAX_GEN_REG : 1; + RK_U32 max_cnt = reg_ctx->fast_mode ?
MAX_GEN_REG : 1; //!< malloc buffers - ret = mpp_buffer_get(reg_cxt->group, &reg_cxt->bufs, ALL_BUFFER_SIZE(max_cnt)); + ret = mpp_buffer_get(reg_ctx->group, &reg_ctx->bufs, ALL_BUFFER_SIZE(max_cnt)); if (ret) { mpp_err("h265d mpp_buffer_get failed\n"); return ret; } - reg_cxt->bufs_fd = mpp_buffer_get_fd(reg_cxt->bufs); - reg_cxt->offset_cabac = CABAC_TAB_OFFSET; + reg_ctx->bufs_fd = mpp_buffer_get_fd(reg_ctx->bufs); + reg_ctx->offset_cabac = CABAC_TAB_OFFSET; for (i = 0; i < max_cnt; i++) { - reg_cxt->g_buf[i].hw_regs = mpp_calloc_size(void, sizeof(Vdpu382H265dRegSet)); - reg_cxt->offset_spspps[i] = SPSPPS_OFFSET(i); - reg_cxt->offset_rps[i] = RPS_OFFSET(i); - reg_cxt->offset_sclst[i] = SCALIST_OFFSET(i); + reg_ctx->g_buf[i].hw_regs = mpp_calloc_size(void, sizeof(Vdpu382H265dRegSet)); + reg_ctx->offset_spspps[i] = SPSPPS_OFFSET(i); + reg_ctx->offset_rps[i] = RPS_OFFSET(i); + reg_ctx->offset_sclst[i] = SCALIST_OFFSET(i); } } - if (!reg_cxt->fast_mode) { - reg_cxt->hw_regs = reg_cxt->g_buf[0].hw_regs; - reg_cxt->spspps_offset = reg_cxt->offset_spspps[0]; - reg_cxt->rps_offset = reg_cxt->offset_rps[0]; - reg_cxt->sclst_offset = reg_cxt->offset_sclst[0]; + if (!reg_ctx->fast_mode) { + reg_ctx->hw_regs = reg_ctx->g_buf[0].hw_regs; + reg_ctx->spspps_offset = reg_ctx->offset_spspps[0]; + reg_ctx->rps_offset = reg_ctx->offset_rps[0]; + reg_ctx->sclst_offset = reg_ctx->offset_sclst[0]; } - ret = mpp_buffer_write(reg_cxt->bufs, 0, (void*)cabac_table, sizeof(cabac_table)); + ret = mpp_buffer_write(reg_ctx->bufs, 0, (void*)cabac_table, sizeof(cabac_table)); if (ret) { mpp_err("h265d write cabac_table data failed\n"); return ret; @@ -177,7 +177,7 @@ static MPP_RET hal_h265d_vdpu382_init(void *hal, MppHalCfg *cfg) cfg->hw_info = hw_info; //save hw_info to context - reg_cxt->hw_info = hw_info; + reg_ctx->hw_info = hw_info; } #ifdef dump @@ -189,39 +189,39 @@ static MPP_RET hal_h265d_vdpu382_init(void *hal, MppHalCfg *cfg) static MPP_RET hal_h265d_vdpu382_deinit(void *hal) { - HalH265dCtx *reg_cxt = (HalH265dCtx *)hal; - RK_U32 loop = reg_cxt->fast_mode ? MPP_ARRAY_ELEMS(reg_cxt->g_buf) : 1; + HalH265dCtx *reg_ctx = (HalH265dCtx *)hal; + RK_U32 loop = reg_ctx->fast_mode ? MPP_ARRAY_ELEMS(reg_ctx->g_buf) : 1; RK_U32 i; - if (reg_cxt->bufs) { - mpp_buffer_put(reg_cxt->bufs); - reg_cxt->bufs = NULL; + if (reg_ctx->bufs) { + mpp_buffer_put(reg_ctx->bufs); + reg_ctx->bufs = NULL; } - loop = reg_cxt->fast_mode ? MPP_ARRAY_ELEMS(reg_cxt->rcb_buf) : 1; + loop = reg_ctx->fast_mode ?
                                     MPP_ARRAY_ELEMS(reg_ctx->rcb_buf) : 1;
     for (i = 0; i < loop; i++) {
-        if (reg_cxt->rcb_buf[i]) {
-            mpp_buffer_put(reg_cxt->rcb_buf[i]);
-            reg_cxt->rcb_buf[i] = NULL;
+        if (reg_ctx->rcb_buf[i]) {
+            mpp_buffer_put(reg_ctx->rcb_buf[i]);
+            reg_ctx->rcb_buf[i] = NULL;
         }
     }
 
-    if (reg_cxt->group) {
-        mpp_buffer_group_put(reg_cxt->group);
-        reg_cxt->group = NULL;
+    if (reg_ctx->group) {
+        mpp_buffer_group_put(reg_ctx->group);
+        reg_ctx->group = NULL;
     }
 
     for (i = 0; i < loop; i++)
-        MPP_FREE(reg_cxt->g_buf[i].hw_regs);
+        MPP_FREE(reg_ctx->g_buf[i].hw_regs);
 
-    MPP_FREE(reg_cxt->scaling_qm);
-    MPP_FREE(reg_cxt->scaling_rk);
-    MPP_FREE(reg_cxt->pps_buf);
-    MPP_FREE(reg_cxt->sw_rps_buf);
+    MPP_FREE(reg_ctx->scaling_qm);
+    MPP_FREE(reg_ctx->scaling_rk);
+    MPP_FREE(reg_ctx->pps_buf);
+    MPP_FREE(reg_ctx->sw_rps_buf);
 
-    if (reg_cxt->cmv_bufs) {
-        hal_bufs_deinit(reg_cxt->cmv_bufs);
-        reg_cxt->cmv_bufs = NULL;
+    if (reg_ctx->cmv_bufs) {
+        hal_bufs_deinit(reg_ctx->cmv_bufs);
+        reg_ctx->cmv_bufs = NULL;
     }
 
     return MPP_OK;
@@ -234,19 +234,19 @@ static RK_S32 hal_h265d_v382_output_pps_packet(void *hal, void *dxva)
     RK_U32 addr;
     RK_U32 log2_min_cb_size;
     RK_S32 width, height;
-    HalH265dCtx *reg_cxt = ( HalH265dCtx *)hal;
-    Vdpu382H265dRegSet *hw_reg = (Vdpu382H265dRegSet*)(reg_cxt->hw_regs);
+    HalH265dCtx *reg_ctx = ( HalH265dCtx *)hal;
+    Vdpu382H265dRegSet *hw_reg = (Vdpu382H265dRegSet*)(reg_ctx->hw_regs);
     h265d_dxva2_picture_context_t *dxva_cxt = (h265d_dxva2_picture_context_t*)dxva;
     BitputCtx_t bp;
 
-    if (NULL == reg_cxt || dxva_cxt == NULL) {
-        mpp_err("%s:%s:%d reg_cxt or dxva_cxt is NULL",
+    if (NULL == reg_ctx || dxva_cxt == NULL) {
+        mpp_err("%s:%s:%d reg_ctx or dxva_cxt is NULL",
                 __FILE__, __FUNCTION__, __LINE__);
         return MPP_ERR_NULL_PTR;
     }
-    void *pps_ptr = mpp_buffer_get_ptr(reg_cxt->bufs) + reg_cxt->spspps_offset;
+    void *pps_ptr = mpp_buffer_get_ptr(reg_ctx->bufs) + reg_ctx->spspps_offset;
     if (dxva_cxt->pp.ps_update_flag) {
-        RK_U64 *pps_packet = reg_cxt->pps_buf;
+        RK_U64 *pps_packet = reg_ctx->pps_buf;
         if (NULL == pps_ptr) {
             mpp_err("pps_data get ptr error");
             return MPP_ERR_NOMEM;
@@ -421,7 +421,7 @@ static RK_S32 hal_h265d_v382_output_pps_packet(void *hal, void *dxva)
 
     if (dxva_cxt->pp.scaling_list_enabled_flag) {
         MppDevRegOffsetCfg trans_cfg;
-        RK_U8 *ptr_scaling = (RK_U8 *)mpp_buffer_get_ptr(reg_cxt->bufs) + reg_cxt->sclst_offset;
+        RK_U8 *ptr_scaling = (RK_U8 *)mpp_buffer_get_ptr(reg_ctx->bufs) + reg_ctx->sclst_offset;
 
         if (dxva_cxt->pp.scaling_list_data_present_flag) {
             addr = (dxva_cxt->pp.pps_id + 16) * 1360;
@@ -433,17 +433,17 @@ static RK_S32 hal_h265d_v382_output_pps_packet(void *hal, void *dxva)
         hal_h265d_output_scalinglist_packet(hal, ptr_scaling + addr, dxva);
 
-        hw_reg->h265d_addr.reg180_scanlist_addr = reg_cxt->bufs_fd;
+        hw_reg->h265d_addr.reg180_scanlist_addr = reg_ctx->bufs_fd;
         hw_reg->common.reg012.scanlist_addr_valid_en = 1;
 
         /* need to config addr */
         trans_cfg.reg_idx = 180;
-        trans_cfg.offset = addr + reg_cxt->sclst_offset;
-        mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_OFFSET, &trans_cfg);
+        trans_cfg.offset = addr + reg_ctx->sclst_offset;
+        mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_OFFSET, &trans_cfg);
     }
 
     for (i = 0; i < 64; i++)
-        memcpy(pps_ptr + i * 112, reg_cxt->pps_buf, 112);
+        memcpy(pps_ptr + i * 112, reg_ctx->pps_buf, 112);
 
 #ifdef dump
     fwrite(pps_ptr, 1, 80 * 64, fp);
     RK_U32 *tmp = (RK_U32 *)pps_ptr;
@@ -561,7 +561,7 @@ static void hal_h265d_rcb_info_update(void *hal, void *dxva,
                                       Vdpu382H265dRegSet *hw_regs,
                                       RK_S32 width, RK_S32 height)
 {
-    HalH265dCtx *reg_cxt = ( HalH265dCtx *)hal;
+    HalH265dCtx *reg_ctx = ( HalH265dCtx *)hal;
     h265d_dxva2_picture_context_t *dxva_cxt = (h265d_dxva2_picture_context_t*)dxva;
     DXVA_PicParams_HEVC *pp = &dxva_cxt->pp;
     RK_U32 chroma_fmt_idc = pp->chroma_format_idc;//0 400,1 4202 ,422,3 444
@@ -569,35 +569,35 @@ static void hal_h265d_rcb_info_update(void *hal, void *dxva,
     RK_U8 ctu_size = 1 << (pp->log2_diff_max_min_luma_coding_block_size +
                            pp->log2_min_luma_coding_block_size_minus3 + 3);
     RK_U32 num_tiles = pp->num_tile_rows_minus1 + 1;
 
-    if (reg_cxt->num_row_tiles != num_tiles ||
-        reg_cxt->bit_depth != bit_depth ||
-        reg_cxt->chroma_fmt_idc != chroma_fmt_idc ||
-        reg_cxt->ctu_size != ctu_size ||
-        reg_cxt->width != width ||
-        reg_cxt->height != height) {
+    if (reg_ctx->num_row_tiles != num_tiles ||
+        reg_ctx->bit_depth != bit_depth ||
+        reg_ctx->chroma_fmt_idc != chroma_fmt_idc ||
+        reg_ctx->ctu_size != ctu_size ||
+        reg_ctx->width != width ||
+        reg_ctx->height != height) {
         RK_U32 i = 0;
-        RK_U32 loop = reg_cxt->fast_mode ? MPP_ARRAY_ELEMS(reg_cxt->g_buf) : 1;
+        RK_U32 loop = reg_ctx->fast_mode ? MPP_ARRAY_ELEMS(reg_ctx->g_buf) : 1;
 
-        reg_cxt->rcb_buf_size = vdpu382_get_rcb_buf_size((Vdpu382RcbInfo*)reg_cxt->rcb_info, width, height);
-        h265d_refine_rcb_size((Vdpu382RcbInfo*)reg_cxt->rcb_info, hw_regs, width, height, dxva_cxt);
+        reg_ctx->rcb_buf_size = vdpu382_get_rcb_buf_size((Vdpu382RcbInfo*)reg_ctx->rcb_info, width, height);
+        h265d_refine_rcb_size((Vdpu382RcbInfo*)reg_ctx->rcb_info, hw_regs, width, height, dxva_cxt);
 
         for (i = 0; i < loop; i++) {
             MppBuffer rcb_buf;
 
-            if (reg_cxt->rcb_buf[i]) {
-                mpp_buffer_put(reg_cxt->rcb_buf[i]);
-                reg_cxt->rcb_buf[i] = NULL;
+            if (reg_ctx->rcb_buf[i]) {
+                mpp_buffer_put(reg_ctx->rcb_buf[i]);
+                reg_ctx->rcb_buf[i] = NULL;
             }
-            mpp_buffer_get(reg_cxt->group, &rcb_buf, reg_cxt->rcb_buf_size);
-            reg_cxt->rcb_buf[i] = rcb_buf;
+            mpp_buffer_get(reg_ctx->group, &rcb_buf, reg_ctx->rcb_buf_size);
+            reg_ctx->rcb_buf[i] = rcb_buf;
         }
 
-        reg_cxt->num_row_tiles = num_tiles;
-        reg_cxt->bit_depth = bit_depth;
-        reg_cxt->chroma_fmt_idc = chroma_fmt_idc;
-        reg_cxt->ctu_size = ctu_size;
-        reg_cxt->width = width;
-        reg_cxt->height = height;
+        reg_ctx->num_row_tiles = num_tiles;
+        reg_ctx->bit_depth = bit_depth;
+        reg_ctx->chroma_fmt_idc = chroma_fmt_idc;
+        reg_ctx->ctu_size = ctu_size;
+        reg_ctx->width = width;
+        reg_ctx->height = height;
     }
 }
@@ -644,7 +644,7 @@ static MPP_RET hal_h265d_vdpu382_gen_regs(void *hal, HalTaskInfo *syn)
     RK_S32 distance = MAX_INT;
     h265d_dxva2_picture_context_t *dxva_cxt =
         (h265d_dxva2_picture_context_t *)syn->dec.syntax.data;
-    HalH265dCtx *reg_cxt = ( HalH265dCtx *)hal;
+    HalH265dCtx *reg_ctx = ( HalH265dCtx *)hal;
     void *rps_ptr = NULL;
     RK_U32 stream_buf_size = 0;
@@ -654,17 +654,17 @@ static MPP_RET hal_h265d_vdpu382_gen_regs(void *hal, HalTaskInfo *syn)
         return MPP_OK;
     }
 
-    if (reg_cxt ->fast_mode) {
+    if (reg_ctx ->fast_mode) {
         for (i = 0; i < MAX_GEN_REG; i++) {
-            if (!reg_cxt->g_buf[i].use_flag) {
+            if (!reg_ctx->g_buf[i].use_flag) {
                 syn->dec.reg_index = i;
-                reg_cxt->spspps_offset = reg_cxt->offset_spspps[i];
-                reg_cxt->rps_offset = reg_cxt->offset_rps[i];
-                reg_cxt->sclst_offset = reg_cxt->offset_sclst[i];
+                reg_ctx->spspps_offset = reg_ctx->offset_spspps[i];
+                reg_ctx->rps_offset = reg_ctx->offset_rps[i];
+                reg_ctx->sclst_offset = reg_ctx->offset_sclst[i];
 
-                reg_cxt->hw_regs = reg_cxt->g_buf[i].hw_regs;
-                reg_cxt->g_buf[i].use_flag = 1;
+                reg_ctx->hw_regs = reg_ctx->g_buf[i].hw_regs;
+                reg_ctx->g_buf[i].use_flag = 1;
                 break;
             }
         }
@@ -673,7 +673,7 @@ static MPP_RET hal_h265d_vdpu382_gen_regs(void *hal, HalTaskInfo *syn)
             return MPP_ERR_NOMEM;
         }
     }
-    rps_ptr = mpp_buffer_get_ptr(reg_cxt->bufs) + reg_cxt->rps_offset;
+    rps_ptr = mpp_buffer_get_ptr(reg_ctx->bufs) + reg_ctx->rps_offset;
     if (NULL == rps_ptr) {
         mpp_err("rps_data get ptr error");
@@ -687,12 +687,12 @@ static MPP_RET hal_h265d_vdpu382_gen_regs(void *hal, HalTaskInfo *syn)
     }
 
     /* output pps */
-    hw_regs = (Vdpu382H265dRegSet*)reg_cxt->hw_regs;
+    hw_regs = (Vdpu382H265dRegSet*)reg_ctx->hw_regs;
     memset(hw_regs, 0, sizeof(Vdpu382H265dRegSet));
     hal_h265d_v382_output_pps_packet(hal, syn->dec.syntax.data);
 
-    if (NULL == reg_cxt->hw_regs) {
+    if (NULL == reg_ctx->hw_regs) {
         return MPP_ERR_NULL_PTR;
     }
@@ -702,30 +702,30 @@ static MPP_RET hal_h265d_vdpu382_gen_regs(void *hal, HalTaskInfo *syn)
     width = (dxva_cxt->pp.PicWidthInMinCbsY << log2_min_cb_size);
     height = (dxva_cxt->pp.PicHeightInMinCbsY << log2_min_cb_size);
     mv_size = (MPP_ALIGN(width, 64) * MPP_ALIGN(height, 64)) >> 3;
 
-    if (reg_cxt->cmv_bufs == NULL || reg_cxt->mv_size < mv_size) {
+    if (reg_ctx->cmv_bufs == NULL || reg_ctx->mv_size < mv_size) {
         size_t size = mv_size;
 
-        if (reg_cxt->cmv_bufs) {
-            hal_bufs_deinit(reg_cxt->cmv_bufs);
-            reg_cxt->cmv_bufs = NULL;
+        if (reg_ctx->cmv_bufs) {
+            hal_bufs_deinit(reg_ctx->cmv_bufs);
+            reg_ctx->cmv_bufs = NULL;
         }
 
-        hal_bufs_init(&reg_cxt->cmv_bufs);
-        if (reg_cxt->cmv_bufs == NULL) {
+        hal_bufs_init(&reg_ctx->cmv_bufs);
+        if (reg_ctx->cmv_bufs == NULL) {
             mpp_err_f("colmv bufs init fail");
             return MPP_ERR_NULL_PTR;
         }
-        reg_cxt->mv_size = mv_size;
-        reg_cxt->mv_count = mpp_buf_slot_get_count(reg_cxt->slots);
-        hal_bufs_setup(reg_cxt->cmv_bufs, reg_cxt->mv_count, 1, &size);
+        reg_ctx->mv_size = mv_size;
+        reg_ctx->mv_count = mpp_buf_slot_get_count(reg_ctx->slots);
+        hal_bufs_setup(reg_ctx->cmv_bufs, reg_ctx->mv_count, 1, &size);
     }
 
     {
         MppFrame mframe = NULL;
         RK_U32 ver_virstride;
 
-        mpp_buf_slot_get_prop(reg_cxt->slots, dxva_cxt->pp.CurrPic.Index7Bits,
+        mpp_buf_slot_get_prop(reg_ctx->slots, dxva_cxt->pp.CurrPic.Index7Bits,
                               SLOT_FRAME_PTR, &mframe);
         stride_y = mpp_frame_get_hor_stride(mframe);
         ver_virstride = mpp_frame_get_ver_stride(mframe);
@@ -756,10 +756,10 @@ static MPP_RET hal_h265d_vdpu382_gen_regs(void *hal, HalTaskInfo *syn)
             hw_regs->common.reg020_y_virstride.y_virstride = virstrid_y >> 4;
         }
 
-        if (MPP_FRAME_FMT_IS_HDR(mpp_frame_get_fmt(mframe)) && reg_cxt->cfg->base.enable_hdr_meta)
+        if (MPP_FRAME_FMT_IS_HDR(mpp_frame_get_fmt(mframe)) && reg_ctx->cfg->base.enable_hdr_meta)
             fill_hdr_meta_to_frame(mframe, HDR_HEVC);
     }
-    mpp_buf_slot_get_prop(reg_cxt->slots, dxva_cxt->pp.CurrPic.Index7Bits,
+    mpp_buf_slot_get_prop(reg_ctx->slots, dxva_cxt->pp.CurrPic.Index7Bits,
                           SLOT_BUFFER, &framebuf);
     hw_regs->common_addr.reg130_decout_base = mpp_buffer_get_fd(framebuf); //just index need map
     /*if out_base is equal to zero it means this frame may error
@@ -770,19 +770,19 @@ static MPP_RET hal_h265d_vdpu382_gen_regs(void *hal, HalTaskInfo *syn)
     }
     fd = mpp_buffer_get_fd(framebuf);
     hw_regs->common_addr.reg130_decout_base = fd;
-    mv_buf = hal_bufs_get_buf(reg_cxt->cmv_bufs, dxva_cxt->pp.CurrPic.Index7Bits);
+    mv_buf = hal_bufs_get_buf(reg_ctx->cmv_bufs, dxva_cxt->pp.CurrPic.Index7Bits);
     hw_regs->common_addr.reg131_colmv_cur_base = mpp_buffer_get_fd(mv_buf->buf[0]);
 
     hw_regs->h265d_param.reg65.cur_top_poc = dxva_cxt->pp.CurrPicOrderCntVal;
 
-    mpp_buf_slot_get_prop(reg_cxt->packet_slots, syn->dec.input, SLOT_BUFFER,
+    mpp_buf_slot_get_prop(reg_ctx->packet_slots, syn->dec.input, SLOT_BUFFER,
                           &streambuf);
     if ( dxva_cxt->bitstream == NULL) {
         dxva_cxt->bitstream = mpp_buffer_get_ptr(streambuf);
     }
 
 #ifdef HW_RPS
     hw_regs->h265d_param.reg103.ref_pic_layer_same_with_cur = 0xffff;
-    hal_h265d_slice_hw_rps(syn->dec.syntax.data, rps_ptr, reg_cxt->sw_rps_buf, reg_cxt->fast_mode);
+    hal_h265d_slice_hw_rps(syn->dec.syntax.data, rps_ptr, reg_ctx->sw_rps_buf, reg_ctx->fast_mode);
 #else
     hw_regs->sw_sysctrl.sw_h26x_rps_mode = 1;
     hal_h265d_slice_output_rps(syn->dec.syntax.data, rps_ptr);
@@ -790,10 +790,10 @@ static MPP_RET hal_h265d_vdpu382_gen_regs(void *hal, HalTaskInfo *syn)
         MppDevRegOffsetCfg trans_cfg;
 
         /* cabac table */
-        hw_regs->h265d_addr.reg197_cabactbl_base = reg_cxt->bufs_fd;
+        hw_regs->h265d_addr.reg197_cabactbl_base = reg_ctx->bufs_fd;
 
         /* pps */
-        hw_regs->h265d_addr.reg161_pps_base = reg_cxt->bufs_fd;
-        hw_regs->h265d_addr.reg163_rps_base = reg_cxt->bufs_fd;
+        hw_regs->h265d_addr.reg161_pps_base = reg_ctx->bufs_fd;
+        hw_regs->h265d_addr.reg163_rps_base = reg_ctx->bufs_fd;
         hw_regs->common_addr.reg128_rlc_base = mpp_buffer_get_fd(streambuf);
         hw_regs->common_addr.reg129_rlcwrite_base = mpp_buffer_get_fd(streambuf);
@@ -809,8 +809,8 @@ static MPP_RET hal_h265d_vdpu382_gen_regs(void *hal, HalTaskInfo *syn)
                       aglin_offset);
         }
         hw_regs->common.reg010.dec_e = 1;
-        hw_regs->common.reg012.colmv_compress_en = reg_cxt->hw_info ?
-                                                   reg_cxt->hw_info->cap_colmv_compress : 0;
+        hw_regs->common.reg012.colmv_compress_en = reg_ctx->hw_info ?
+                                                   reg_ctx->hw_info->cap_colmv_compress : 0;
 
         hw_regs->common.reg024.cabac_err_en_lowbits = 0xffffdfff;
         hw_regs->common.reg025.cabac_err_en_highbits = 0x3ffbf9ff;
@@ -834,7 +834,7 @@ static MPP_RET hal_h265d_vdpu382_gen_regs(void *hal, HalTaskInfo *syn)
     hw_regs->common.reg032_timeout_threshold = 0x3ffff;
 
     valid_ref = hw_regs->common_addr.reg130_decout_base;
-    reg_cxt->error_index = dxva_cxt->pp.CurrPic.Index7Bits;
+    reg_ctx->error_index = dxva_cxt->pp.CurrPic.Index7Bits;
     hw_regs->common_addr.reg132_error_ref_base = valid_ref;
     memset(&hw_regs->highpoc.reg205, 0, sizeof(RK_U32));
@@ -845,10 +845,10 @@ static MPP_RET hal_h265d_vdpu382_gen_regs(void *hal, HalTaskInfo *syn)
             MppFrame mframe = NULL;
 
             hw_regs->h265d_param.reg67_82_ref_poc[i] = dxva_cxt->pp.PicOrderCntValList[i];
-            mpp_buf_slot_get_prop(reg_cxt->slots,
+            mpp_buf_slot_get_prop(reg_ctx->slots,
                                   dxva_cxt->pp.RefPicList[i].Index7Bits,
                                   SLOT_BUFFER, &framebuf);
-            mpp_buf_slot_get_prop(reg_cxt->slots, dxva_cxt->pp.RefPicList[i].Index7Bits,
+            mpp_buf_slot_get_prop(reg_ctx->slots, dxva_cxt->pp.RefPicList[i].Index7Bits,
                                   SLOT_FRAME_PTR, &mframe);
             if (framebuf != NULL) {
                 hw_regs->h265d_addr.reg164_179_ref_base[i] = mpp_buffer_get_fd(framebuf);
@@ -858,7 +858,7 @@ static MPP_RET hal_h265d_vdpu382_gen_regs(void *hal, HalTaskInfo *syn)
                 && (!mpp_frame_get_errinfo(mframe))) {
                 distance = pocdistance(dxva_cxt->pp.PicOrderCntValList[i], dxva_cxt->pp.current_poc);
                 hw_regs->common_addr.reg132_error_ref_base = hw_regs->h265d_addr.reg164_179_ref_base[i];
-                reg_cxt->error_index = dxva_cxt->pp.RefPicList[i].Index7Bits;
+                reg_ctx->error_index = dxva_cxt->pp.RefPicList[i].Index7Bits;
                 hw_regs->common.reg021.error_intra_mode = 0;
             }
@@ -866,14 +866,14 @@ static MPP_RET hal_h265d_vdpu382_gen_regs(void *hal, HalTaskInfo *syn)
                 hw_regs->h265d_addr.reg164_179_ref_base[i] = valid_ref;
             }
 
-            mv_buf = hal_bufs_get_buf(reg_cxt->cmv_bufs, dxva_cxt->pp.RefPicList[i].Index7Bits);
+            mv_buf = hal_bufs_get_buf(reg_ctx->cmv_bufs, dxva_cxt->pp.RefPicList[i].Index7Bits);
             hw_regs->h265d_addr.reg181_196_colmv_base[i] = mpp_buffer_get_fd(mv_buf->buf[0]);
             SET_REF_VALID(hw_regs->h265d_param, i, 1);
         }
     }
 
-    if ((reg_cxt->error_index == dxva_cxt->pp.CurrPic.Index7Bits) && !dxva_cxt->pp.IntraPicFlag) {
+    if ((reg_ctx->error_index == dxva_cxt->pp.CurrPic.Index7Bits) && !dxva_cxt->pp.IntraPicFlag) {
         // mpp_err("current frm may be err, should skip process");
         syn->dec.flags.ref_err = 1;
         return MPP_OK;
@@ -885,48 +885,48 @@ static MPP_RET hal_h265d_vdpu382_gen_regs(void *hal, HalTaskInfo *syn)
             dxva_cxt->pp.RefPicList[i].bPicEntry != 0x7f) {
             MppFrame mframe = NULL;
 
-            mpp_buf_slot_get_prop(reg_cxt->slots,
+            mpp_buf_slot_get_prop(reg_ctx->slots,
                                   dxva_cxt->pp.RefPicList[i].Index7Bits,
                                   SLOT_BUFFER, &framebuf);
-            mpp_buf_slot_get_prop(reg_cxt->slots, dxva_cxt->pp.RefPicList[i].Index7Bits,
+            mpp_buf_slot_get_prop(reg_ctx->slots, dxva_cxt->pp.RefPicList[i].Index7Bits,
                                   SLOT_FRAME_PTR, &mframe);
             if (framebuf == NULL || mpp_frame_get_errinfo(mframe)) {
-                mv_buf = hal_bufs_get_buf(reg_cxt->cmv_bufs, reg_cxt->error_index);
+                mv_buf = hal_bufs_get_buf(reg_ctx->cmv_bufs, reg_ctx->error_index);
                 hw_regs->h265d_addr.reg164_179_ref_base[i] = hw_regs->common_addr.reg132_error_ref_base;
                 hw_regs->h265d_addr.reg181_196_colmv_base[i] = mpp_buffer_get_fd(mv_buf->buf[0]);
             }
         } else {
-            mv_buf = hal_bufs_get_buf(reg_cxt->cmv_bufs, reg_cxt->error_index);
+            mv_buf = hal_bufs_get_buf(reg_ctx->cmv_bufs, reg_ctx->error_index);
             hw_regs->h265d_addr.reg164_179_ref_base[i] = hw_regs->common_addr.reg132_error_ref_base;
             hw_regs->h265d_addr.reg181_196_colmv_base[i] = mpp_buffer_get_fd(mv_buf->buf[0]);
             /* mark 3 to differ from current frame */
-            if (reg_cxt->error_index == dxva_cxt->pp.CurrPic.Index7Bits)
+            if (reg_ctx->error_index == dxva_cxt->pp.CurrPic.Index7Bits)
                 SET_POC_HIGNBIT_INFO(hw_regs->highpoc, i, poc_highbit, 3);
         }
     }
 
     trans_cfg.reg_idx = 161;
-    trans_cfg.offset = reg_cxt->spspps_offset;
-    mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_OFFSET, &trans_cfg);
+    trans_cfg.offset = reg_ctx->spspps_offset;
+    mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_OFFSET, &trans_cfg);
 
     /* rps */
     trans_cfg.reg_idx = 163;
-    trans_cfg.offset = reg_cxt->rps_offset;
-    mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_OFFSET, &trans_cfg);
+    trans_cfg.offset = reg_ctx->rps_offset;
+    mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_OFFSET, &trans_cfg);
 
     hw_regs->common.reg013.cur_pic_is_idr = dxva_cxt->pp.IdrPicFlag;//p_hal->slice_long->idr_flag;
 
     hw_regs->common.reg011.buf_empty_en = 1;
 
     hal_h265d_rcb_info_update(hal, dxva_cxt, hw_regs, width, height);
-    vdpu382_setup_rcb(&hw_regs->common_addr, reg_cxt->dev, reg_cxt->fast_mode ?
-                      reg_cxt->rcb_buf[syn->dec.reg_index] : reg_cxt->rcb_buf[0],
-                      (Vdpu382RcbInfo*)reg_cxt->rcb_info);
+    vdpu382_setup_rcb(&hw_regs->common_addr, reg_ctx->dev, reg_ctx->fast_mode ?
+                      reg_ctx->rcb_buf[syn->dec.reg_index] : reg_ctx->rcb_buf[0],
+                      (Vdpu382RcbInfo*)reg_ctx->rcb_info);
 
     {
         MppFrame mframe = NULL;
 
-        mpp_buf_slot_get_prop(reg_cxt->slots, dxva_cxt->pp.CurrPic.Index7Bits,
+        mpp_buf_slot_get_prop(reg_ctx->slots, dxva_cxt->pp.CurrPic.Index7Bits,
                               SLOT_FRAME_PTR, &mframe);
 
        if (mpp_frame_get_thumbnail_en(mframe)) {
@@ -934,7 +934,7 @@ static MPP_RET hal_h265d_vdpu382_gen_regs(void *hal, HalTaskInfo *syn)
                 hw_regs->common_addr.reg130_decout_base;
             hw_regs->h265d_addr.reg199_scale_down_chorme_base =
                 hw_regs->common_addr.reg130_decout_base;
-            vdpu382_setup_down_scale(mframe, reg_cxt->dev, &hw_regs->common);
+            vdpu382_setup_down_scale(mframe, reg_ctx->dev, &hw_regs->common);
         } else {
             hw_regs->h265d_addr.reg198_scale_down_luma_base = 0;
             hw_regs->h265d_addr.reg199_scale_down_chorme_base = 0;
@@ -951,7 +951,7 @@ static MPP_RET hal_h265d_vdpu382_start(void *hal, HalTaskInfo *task)
     MPP_RET ret = MPP_OK;
     RK_U8* p = NULL;
     Vdpu382H265dRegSet *hw_regs = NULL;
-    HalH265dCtx *reg_cxt = (HalH265dCtx *)hal;
+    HalH265dCtx *reg_ctx = (HalH265dCtx *)hal;
     RK_S32 index = task->dec.reg_index;
     RK_U32 i;
@@ -962,12 +962,12 @@ static MPP_RET hal_h265d_vdpu382_start(void *hal, HalTaskInfo *task)
         return MPP_OK;
     }
 
-    if (reg_cxt->fast_mode) {
-        p = (RK_U8*)reg_cxt->g_buf[index].hw_regs;
-        hw_regs = ( Vdpu382H265dRegSet *)reg_cxt->g_buf[index].hw_regs;
+    if (reg_ctx->fast_mode) {
+        p = (RK_U8*)reg_ctx->g_buf[index].hw_regs;
+        hw_regs = ( Vdpu382H265dRegSet *)reg_ctx->g_buf[index].hw_regs;
     } else {
-        p = (RK_U8*)reg_cxt->hw_regs;
-        hw_regs = ( Vdpu382H265dRegSet *)reg_cxt->hw_regs;
+        p = (RK_U8*)reg_ctx->hw_regs;
+        hw_regs = ( Vdpu382H265dRegSet *)reg_ctx->hw_regs;
     }
 
     if (hw_regs == NULL) {
@@ -989,7 +989,7 @@ static MPP_RET hal_h265d_vdpu382_start(void *hal, HalTaskInfo *task)
         wr_cfg.size = sizeof(hw_regs->common);
         wr_cfg.offset = OFFSET_COMMON_REGS;
 
-        ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_WR, &wr_cfg);
+        ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_WR, &wr_cfg);
         if (ret) {
             mpp_err_f("set register write failed %d\n", ret);
             break;
@@ -999,7 +999,7 @@ static MPP_RET hal_h265d_vdpu382_start(void *hal, HalTaskInfo *task)
         wr_cfg.size = sizeof(hw_regs->h265d_param);
         wr_cfg.offset = OFFSET_CODEC_PARAMS_REGS;
 
-        ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_WR, &wr_cfg);
+        ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_WR, &wr_cfg);
         if (ret) {
             mpp_err_f("set register write failed %d\n", ret);
            break;
@@ -1009,7 +1009,7 @@ static MPP_RET hal_h265d_vdpu382_start(void *hal, HalTaskInfo *task)
         wr_cfg.size = sizeof(hw_regs->common_addr);
         wr_cfg.offset = OFFSET_COMMON_ADDR_REGS;
 
-        ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_WR, &wr_cfg);
+        ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_WR, &wr_cfg);
         if (ret) {
             mpp_err_f("set register write failed %d\n", ret);
             break;
@@ -1019,7 +1019,7 @@ static MPP_RET hal_h265d_vdpu382_start(void *hal, HalTaskInfo *task)
         wr_cfg.size = sizeof(hw_regs->h265d_addr);
         wr_cfg.offset = OFFSET_CODEC_ADDR_REGS;
 
-        ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_WR, &wr_cfg);
+        ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_WR, &wr_cfg);
         if (ret) {
             mpp_err_f("set register write failed %d\n", ret);
             break;
@@ -1029,7 +1029,7 @@ static MPP_RET hal_h265d_vdpu382_start(void *hal, HalTaskInfo *task)
         wr_cfg.size = sizeof(hw_regs->statistic);
         wr_cfg.offset = OFFSET_STATISTIC_REGS;
 
-        ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_WR, &wr_cfg);
+        ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_WR, &wr_cfg);
         if (ret) {
             mpp_err_f("set register write failed %d\n", ret);
             break;
@@ -1039,7 +1039,7 @@ static MPP_RET hal_h265d_vdpu382_start(void *hal, HalTaskInfo *task)
         wr_cfg.size = sizeof(hw_regs->highpoc);
         wr_cfg.offset = OFFSET_POC_HIGHBIT_REGS;
 
-        ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_WR, &wr_cfg);
+        ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_WR, &wr_cfg);
         if (ret) {
             mpp_err_f("set register write failed %d\n", ret);
             break;
@@ -1049,15 +1049,15 @@ static MPP_RET hal_h265d_vdpu382_start(void *hal, HalTaskInfo *task)
         rd_cfg.size = sizeof(hw_regs->irq_status);
         rd_cfg.offset = OFFSET_INTERRUPT_REGS;
 
-        ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_REG_RD, &rd_cfg);
+        ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_REG_RD, &rd_cfg);
         if (ret) {
             mpp_err_f("set register read failed %d\n", ret);
             break;
        }
 
        /* rcb info for sram */
-        vdpu382_set_rcbinfo(reg_cxt->dev, (Vdpu382RcbInfo*)reg_cxt->rcb_info);
+        vdpu382_set_rcbinfo(reg_ctx->dev, (Vdpu382RcbInfo*)reg_ctx->rcb_info);
 
-        ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_CMD_SEND, NULL);
+        ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_CMD_SEND, NULL);
         if (ret) {
             mpp_err_f("send cmd failed %d\n", ret);
             break;
@@ -1072,7 +1072,7 @@ static MPP_RET hal_h265d_vdpu382_wait(void *hal, HalTaskInfo *task)
 {
     MPP_RET ret = MPP_OK;
     RK_S32 index = task->dec.reg_index;
-    HalH265dCtx *reg_cxt = (HalH265dCtx *)hal;
+    HalH265dCtx *reg_ctx = (HalH265dCtx *)hal;
     RK_U8* p = NULL;
     Vdpu382H265dRegSet *hw_regs = NULL;
     RK_S32 i;
@@ -1083,15 +1083,15 @@ static MPP_RET hal_h265d_vdpu382_wait(void *hal, HalTaskInfo *task)
         goto ERR_PROC;
     }
 
-    if (reg_cxt->fast_mode) {
-        hw_regs = ( Vdpu382H265dRegSet *)reg_cxt->g_buf[index].hw_regs;
+    if (reg_ctx->fast_mode) {
+        hw_regs = ( Vdpu382H265dRegSet *)reg_ctx->g_buf[index].hw_regs;
     } else {
-        hw_regs = ( Vdpu382H265dRegSet *)reg_cxt->hw_regs;
+        hw_regs = ( Vdpu382H265dRegSet *)reg_ctx->hw_regs;
     }
     p = (RK_U8*)hw_regs;
 
-    ret = mpp_dev_ioctl(reg_cxt->dev, MPP_DEV_CMD_POLL, NULL);
+    ret = mpp_dev_ioctl(reg_ctx->dev, MPP_DEV_CMD_POLL, NULL);
     if (ret)
         mpp_err_f("poll cmd failed %d\n", ret);
@@ -1102,31 +1102,31 @@ ERR_PROC:
         hw_regs->irq_status.reg224.buf_empty_sta ||
         hw_regs->irq_status.reg224.dec_bus_sta ||
         !hw_regs->irq_status.reg224.dec_rdy_sta) {
-        if (!reg_cxt->fast_mode) {
-            if (reg_cxt->dec_cb)
-                mpp_callback(reg_cxt->dec_cb, &task->dec);
+        if (!reg_ctx->fast_mode) {
+            if (reg_ctx->dec_cb)
+                mpp_callback(reg_ctx->dec_cb, &task->dec);
         } else {
             MppFrame mframe = NULL;
-            mpp_buf_slot_get_prop(reg_cxt->slots, task->dec.output,
+            mpp_buf_slot_get_prop(reg_ctx->slots, task->dec.output,
                                   SLOT_FRAME_PTR, &mframe);
             if (mframe) {
-                reg_cxt->fast_mode_err_found = 1;
+                reg_ctx->fast_mode_err_found = 1;
                 mpp_frame_set_errinfo(mframe, 1);
             }
         }
     } else {
-        if (reg_cxt->fast_mode && reg_cxt->fast_mode_err_found) {
+        if (reg_ctx->fast_mode && reg_ctx->fast_mode_err_found) {
            for (i = 0; i < (RK_S32)MPP_ARRAY_ELEMS(task->dec.refer); i++) {
                if (task->dec.refer[i] >= 0) {
                    MppFrame frame_ref = NULL;
 
-                    mpp_buf_slot_get_prop(reg_cxt->slots, task->dec.refer[i],
+                    mpp_buf_slot_get_prop(reg_ctx->slots, task->dec.refer[i],
                                           SLOT_FRAME_PTR, &frame_ref);
                    h265h_dbg(H265H_DBG_FAST_ERR, "refer[%d] %d frame %p\n",
                              i, task->dec.refer[i], frame_ref);
                    if (frame_ref && mpp_frame_get_errinfo(frame_ref)) {
                        MppFrame frame_out = NULL;
-                        mpp_buf_slot_get_prop(reg_cxt->slots, task->dec.output,
+                        mpp_buf_slot_get_prop(reg_ctx->slots, task->dec.output,
                                               SLOT_FRAME_PTR, &frame_out);
                        mpp_frame_set_errinfo(frame_out, 1);
                        break;
@@ -1149,8 +1149,8 @@ ERR_PROC:
         p += 4;
     }
 
-    if (reg_cxt->fast_mode) {
-        reg_cxt->g_buf[index].use_flag = 0;
+    if (reg_ctx->fast_mode) {
+        reg_ctx->g_buf[index].use_flag = 0;
     }
 
     return ret;