/*
 INTEL CONFIDENTIAL
 Copyright 2009 Intel Corporation All Rights Reserved.
 The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel's prior express written permission.

 No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
 */


#include <glib.h>
#include <dlfcn.h>

#include "h264.h"
#include "vbp_loader.h"
#include "vbp_utils.h"
#include "vbp_h264_parser.h"


/* number of bytes used to encode the length of a NAL payload; the default is 4 bytes. */
static int NAL_length_size = 4;

/* default scaling list tables defined by the H.264 specification */
unsigned char Default_4x4_Intra[16] =
{
    6,13,20,28,
    13,20,28,32,
    20,28,32,37,
    28,32,37,42
};

unsigned char Default_4x4_Inter[16] =
{
    10,14,20,24,
    14,20,24,27,
    20,24,27,30,
    24,27,30,34
};

unsigned char Default_8x8_Intra[64] =
{
    6,10,13,16,18,23,25,27,
    10,11,16,18,23,25,27,29,
    13,16,18,23,25,27,29,31,
    16,18,23,25,27,29,31,33,
    18,23,25,27,29,31,33,36,
    23,25,27,29,31,33,36,38,
    25,27,29,31,33,36,38,40,
    27,29,31,33,36,38,40,42
};

unsigned char Default_8x8_Inter[64] =
{
    9,13,15,17,19,21,22,24,
    13,13,17,19,21,22,24,25,
    15,17,19,21,22,24,25,27,
    17,19,21,22,24,25,27,28,
    19,21,22,24,25,27,28,30,
    21,22,24,25,27,28,30,32,
    22,24,25,27,28,30,32,33,
    24,25,27,28,30,32,33,35
};

unsigned char quant_flat[16] =
{
    16,16,16,16,
    16,16,16,16,
    16,16,16,16,
    16,16,16,16
};

unsigned char quant8_flat[64] =
{
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16,
    16,16,16,16,16,16,16,16
};

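/*
 * Index mapping follows the H.264 scaling list ordering: indices 0-2 are the
 * 4x4 intra lists (Y, Cb, Cr), 3-5 are the 4x4 inter lists (Y, Cb, Cr),
 * 6 is the 8x8 intra luma list and 7 is the 8x8 inter luma list.
 */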
unsigned char* UseDefaultList[8] =
{
    Default_4x4_Intra, Default_4x4_Intra, Default_4x4_Intra,
    Default_4x4_Inter, Default_4x4_Inter, Default_4x4_Inter,
    Default_8x8_Intra,
    Default_8x8_Inter
};

/**
 * Resolve the H.264 parser entry points from the dynamically loaded parser library.
 */
uint32 vbp_init_parser_entries_h264(vbp_context *pcontext)
{
    if (NULL == pcontext->parser_ops)
    {
        return VBP_PARM;
    }
    pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_h264_init");
    if (NULL == pcontext->parser_ops->init)
    {
        ETRACE("Failed to set entry point.");
        return VBP_LOAD;
    }

    pcontext->parser_ops->parse_sc = viddec_parse_sc;

    pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_h264_parse");
    if (NULL == pcontext->parser_ops->parse_syntax)
    {
        ETRACE("Failed to set entry point.");
        return VBP_LOAD;
    }

    pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_h264_get_context_size");
    if (NULL == pcontext->parser_ops->get_cxt_size)
    {
        ETRACE("Failed to set entry point.");
        return VBP_LOAD;
    }

    pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_h264_wkld_done");
    if (NULL == pcontext->parser_ops->is_wkld_done)
    {
        ETRACE("Failed to set entry point.");
        return VBP_LOAD;
    }

    /* entry point not needed */
    pcontext->parser_ops->is_frame_start = NULL;
    return VBP_OK;
}


/**
 * Allocate the query data structures that hold the parsing results.
 */
uint32 vbp_allocate_query_data_h264(vbp_context *pcontext)
{
    if (NULL != pcontext->query_data)
    {
        return VBP_PARM;
    }

    pcontext->query_data = NULL;
    vbp_data_h264 *query_data = NULL;

    query_data = g_try_new0(vbp_data_h264, 1);
    if (NULL == query_data)
    {
        goto cleanup;
    }

    /* assign the pointer */
    pcontext->query_data = (void *)query_data;

    query_data->pic_data = g_try_new0(vbp_picture_data_h264, MAX_NUM_PICTURES);
    if (NULL == query_data->pic_data)
    {
        goto cleanup;
    }

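    /* Pre-allocate picture parameter and slice data arrays for the maximum
       number of pictures and slices a single sample buffer may carry. */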
    int i;
    for (i = 0; i < MAX_NUM_PICTURES; i++)
    {
        query_data->pic_data[i].pic_parms = g_try_new0(VAPictureParameterBufferH264, 1);
        if (NULL == query_data->pic_data[i].pic_parms)
        {
            goto cleanup;
        }
        query_data->pic_data[i].num_slices = 0;
        query_data->pic_data[i].slc_data = g_try_new0(vbp_slice_data_h264, MAX_NUM_SLICES);
        if (NULL == query_data->pic_data[i].slc_data)
        {
            goto cleanup;
        }
    }


    query_data->IQ_matrix_buf = g_try_new0(VAIQMatrixBufferH264, 1);
    if (NULL == query_data->IQ_matrix_buf)
    {
        goto cleanup;
    }

    query_data->codec_data = g_try_new0(vbp_codec_data_h264, 1);
    if (NULL == query_data->codec_data)
    {
        goto cleanup;
    }

    return VBP_OK;

cleanup:
    vbp_free_query_data_h264(pcontext);

    return VBP_MEM;
}

uint32 vbp_free_query_data_h264(vbp_context *pcontext)
{
    if (NULL == pcontext->query_data)
    {
        return VBP_OK;
    }

    int i;
    vbp_data_h264 *query_data;
    query_data = (vbp_data_h264 *)pcontext->query_data;

    if (query_data->pic_data)
    {
        for (i = 0; i < MAX_NUM_PICTURES; i++)
        {
            g_free(query_data->pic_data[i].slc_data);
            g_free(query_data->pic_data[i].pic_parms);
        }
        g_free(query_data->pic_data);
    }

    g_free(query_data->IQ_matrix_buf);
    g_free(query_data->codec_data);
    g_free(query_data);

    pcontext->query_data = NULL;

    return VBP_OK;
}


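/*
 * Byte-order helpers: read big-endian (network order) 16- and 32-bit values
 * from a byte buffer. They are used for the length fields of the
 * AVCDecoderConfigurationRecord and of length-prefixed NAL units.
 */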
static inline uint16_t vbp_utils_ntohs(uint8_t* p)
{
    uint16_t i = ((*p) << 8) + ((*(p+1)));
    return i;
}

static inline uint32_t vbp_utils_ntohl(uint8_t* p)
{
    uint32_t i = ((*p) << 24) + ((*(p+1)) << 16) + ((*(p+2)) << 8) + ((*(p+3)));
    return i;
}


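/*
 * Translate a DPB frame store entry into a VAPictureH264: for frame pictures
 * both field order counts come from the frame POC; for field pictures the
 * top/bottom field POCs are used and the corresponding field flag is set.
 */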
static inline void vbp_set_VAPicture_h264(
    int curr_picture_structure,
    int bottom_field,
    frame_store* store,
    VAPictureH264* pic)
{
    if (FRAME == curr_picture_structure)
    {
        if (FRAME != viddec_h264_get_dec_structure(store))
        {
            WTRACE("Reference picture structure is not frame for current frame picture!");
        }
        pic->flags = 0;
        pic->TopFieldOrderCnt = store->frame.poc;
        pic->BottomFieldOrderCnt = store->frame.poc;
    }
    else
    {
        if (FRAME == viddec_h264_get_dec_structure(store))
        {
            WTRACE("Reference picture structure is frame for current field picture!");
        }
        if (bottom_field)
        {
            pic->flags = VA_PICTURE_H264_BOTTOM_FIELD;
            pic->TopFieldOrderCnt = store->top_field.poc;
            pic->BottomFieldOrderCnt = store->bottom_field.poc;
        }
        else
        {
            pic->flags = VA_PICTURE_H264_TOP_FIELD;
            pic->TopFieldOrderCnt = store->top_field.poc;
            pic->BottomFieldOrderCnt = store->bottom_field.poc;
        }
    }
}

static inline void vbp_set_slice_ref_list_h264(
    struct h264_viddec_parser* h264_parser,
    VASliceParameterBufferH264 *slc_parms)
{
    int i, j;
    int num_ref_idx_active = 0;
    h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader);
    uint8_t* p_list = NULL;
    VAPictureH264* refPicListX = NULL;
    frame_store* fs = NULL;

    /* initialize ref picture lists, set picture id and flags to invalid. */

    for (i = 0; i < 2; i++)
    {
        refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]);
        for (j = 0; j < 32; j++)
        {
            refPicListX->picture_id = VA_INVALID_SURFACE;
            refPicListX->frame_idx = 0;
            refPicListX->flags = VA_PICTURE_H264_INVALID;
            refPicListX->TopFieldOrderCnt = 0;
            refPicListX->BottomFieldOrderCnt = 0;
            refPicListX++;
        }
    }

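    /* Fill reference list 0 for P and B slices, and list 1 for B slices only.
       Entries come either from the reordered per-slice lists or, when no
       reordering was signalled, from the DPB default lists. */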
    for (i = 0; i < 2; i++)
    {
        refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]);

        if ((i == 0) &&
            ((h264_PtypeB == slice_header->slice_type) ||
            (h264_PtypeP == slice_header->slice_type)))
        {
            num_ref_idx_active = slice_header->num_ref_idx_l0_active;
            if (slice_header->sh_refpic_l0.ref_pic_list_reordering_flag)
            {
                p_list = h264_parser->info.slice_ref_list0;
            }
            else
            {
                p_list = h264_parser->info.dpb.listX_0;
            }
        }
        else if ((i == 1) && (h264_PtypeB == slice_header->slice_type))
        {
            num_ref_idx_active = slice_header->num_ref_idx_l1_active;
            if (slice_header->sh_refpic_l1.ref_pic_list_reordering_flag)
            {
                p_list = h264_parser->info.slice_ref_list1;
            }
            else
            {
                p_list = h264_parser->info.dpb.listX_1;
            }
        }
        else
        {
            num_ref_idx_active = 0;
            p_list = NULL;
        }


        for (j = 0; j < num_ref_idx_active; j++)
        {
            fs = &(h264_parser->info.dpb.fs[(p_list[j] & 0x1f)]);

            /* bit 5 indicates if reference picture is bottom field */
            vbp_set_VAPicture_h264(
                h264_parser->info.img.structure,
                (p_list[j] & 0x20) >> 5,
                fs,
                refPicListX);

            refPicListX->frame_idx = fs->frame_num;
            refPicListX->flags |= viddec_h264_get_is_long_term(fs) ? VA_PICTURE_H264_LONG_TERM_REFERENCE : VA_PICTURE_H264_SHORT_TERM_REFERENCE;
            refPicListX++;
        }
    }
}

static inline void vbp_set_pre_weight_table_h264(
    struct h264_viddec_parser* h264_parser,
    VASliceParameterBufferH264 *slc_parms)
{
    h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader);
    int i, j;

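    /* Explicit weight tables are copied only when the slice actually uses them:
       weighted_pred_flag for P/B slices, or weighted_bipred_idc == 1 for B
       slices. Otherwise a default table (log2 denominators of 5, all weights
       and offsets zero) is filled in. */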
    if ((((h264_PtypeP == slice_header->slice_type) ||
        (h264_PtypeB == slice_header->slice_type)) &&
        h264_parser->info.active_PPS.weighted_pred_flag) ||
        ((h264_PtypeB == slice_header->slice_type) &&
        (1 == h264_parser->info.active_PPS.weighted_bipred_idc)))
    {
        slc_parms->luma_log2_weight_denom = slice_header->sh_predwttbl.luma_log2_weight_denom;
        slc_parms->chroma_log2_weight_denom = slice_header->sh_predwttbl.chroma_log2_weight_denom;
        slc_parms->luma_weight_l0_flag = slice_header->sh_predwttbl.luma_weight_l0_flag;
        slc_parms->chroma_weight_l0_flag = slice_header->sh_predwttbl.chroma_weight_l0_flag;
        slc_parms->luma_weight_l1_flag = slice_header->sh_predwttbl.luma_weight_l1_flag;
        slc_parms->chroma_weight_l1_flag = slice_header->sh_predwttbl.chroma_weight_l1_flag;

        for (i = 0; i < 32; i++)
        {
            slc_parms->luma_weight_l0[i] = slice_header->sh_predwttbl.luma_weight_l0[i];
            slc_parms->luma_offset_l0[i] = slice_header->sh_predwttbl.luma_offset_l0[i];
            slc_parms->luma_weight_l1[i] = slice_header->sh_predwttbl.luma_weight_l1[i];
            slc_parms->luma_offset_l1[i] = slice_header->sh_predwttbl.luma_offset_l1[i];

            for (j = 0; j < 2; j++)
            {
                slc_parms->chroma_weight_l0[i][j] = slice_header->sh_predwttbl.chroma_weight_l0[i][j];
                slc_parms->chroma_offset_l0[i][j] = slice_header->sh_predwttbl.chroma_offset_l0[i][j];
                slc_parms->chroma_weight_l1[i][j] = slice_header->sh_predwttbl.chroma_weight_l1[i][j];
                slc_parms->chroma_offset_l1[i][j] = slice_header->sh_predwttbl.chroma_offset_l1[i][j];
            }
        }
    }
    else
    {
        /* default weight table */
        slc_parms->luma_log2_weight_denom = 5;
        slc_parms->chroma_log2_weight_denom = 5;
        slc_parms->luma_weight_l0_flag = 0;
        slc_parms->luma_weight_l1_flag = 0;
        slc_parms->chroma_weight_l0_flag = 0;
        slc_parms->chroma_weight_l1_flag = 0;
        for (i = 0; i < 32; i++)
        {
            slc_parms->luma_weight_l0[i] = 0;
            slc_parms->luma_offset_l0[i] = 0;
            slc_parms->luma_weight_l1[i] = 0;
            slc_parms->luma_offset_l1[i] = 0;

            for (j = 0; j < 2; j++)
            {
                slc_parms->chroma_weight_l0[i][j] = 0;
                slc_parms->chroma_offset_l0[i][j] = 0;
                slc_parms->chroma_weight_l1[i][j] = 0;
                slc_parms->chroma_offset_l1[i][j] = 0;
            }
        }
    }
}


static inline void vbp_set_reference_frames_h264(
    struct h264_viddec_parser *parser,
    VAPictureParameterBufferH264* pic_parms)
{
    int buffer_idx;
    int frame_idx;
    frame_store* store = NULL;
    h264_DecodedPictureBuffer* dpb = &(parser->info.dpb);
    /* initialize reference frames */
    for (frame_idx = 0; frame_idx < 16; frame_idx++)
    {
        pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE;
        pic_parms->ReferenceFrames[frame_idx].frame_idx = 0;
        pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID;
        pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0;
        pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0;
    }
    pic_parms->num_ref_frames = 0;

    frame_idx = 0;

    /* ITRACE("short term frame in dpb %d", dpb->ref_frames_in_buffer); */
    /* set short term reference frames */
    for (buffer_idx = 0; buffer_idx < dpb->ref_frames_in_buffer; buffer_idx++)
    {
        if (frame_idx >= 16)
        {
            WTRACE("Frame index is out of bounds.");
            break;
        }

        store = &dpb->fs[dpb->fs_ref_idc[buffer_idx]];
        /* if (store->is_used == 3 && store->frame.used_for_reference == 3) */
        if (viddec_h264_get_is_used(store))
        {
            pic_parms->ReferenceFrames[frame_idx].frame_idx = store->frame_num;
            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
            if (FRAME == parser->info.img.structure)
            {
                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc;
                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc;
            }
            else
            {
                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc;
                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc;
                if (store->top_field.used_for_reference && store->bottom_field.used_for_reference)
                {
                    /* if both fields are used for reference, just set flag to be frame (0) */
                }
                else
                {
                    if (store->top_field.used_for_reference)
                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD;
                    if (store->bottom_field.used_for_reference)
                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD;
                }
            }
        }
        frame_idx++;
    }

    /* set long term reference frames */
    for (buffer_idx = 0; buffer_idx < dpb->ltref_frames_in_buffer; buffer_idx++)
    {
        if (frame_idx >= 16)
        {
            WTRACE("Frame index is out of bounds.");
            break;
        }
        store = &dpb->fs[dpb->fs_ltref_idc[buffer_idx]];
        if (!viddec_h264_get_is_long_term(store))
        {
            WTRACE("Long term frame is not marked as long term.");
        }
        /* if (store->is_used == 3 && store->is_long_term && store->frame.used_for_reference == 3) */
        if (viddec_h264_get_is_used(store))
        {
            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_LONG_TERM_REFERENCE;
            if (FRAME == parser->info.img.structure)
            {
                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc;
                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc;
            }
            else
            {
                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc;
                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc;
                if (store->top_field.used_for_reference && store->bottom_field.used_for_reference)
                {
                    /* if both fields are used for reference, just set flag to be frame (0) */
                }
                else
                {
                    if (store->top_field.used_for_reference)
                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD;
                    if (store->bottom_field.used_for_reference)
                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD;
                }
            }
        }
        frame_idx++;
    }

    pic_parms->num_ref_frames = frame_idx;

    if (frame_idx > parser->info.active_SPS.num_ref_frames)
    {
        WTRACE("actual num_ref_frames (%d) exceeds the value in the sequence header (%d).",
            frame_idx, parser->info.active_SPS.num_ref_frames);
    }
}


static inline void vbp_set_scaling_list_h264(
    struct h264_viddec_parser *parser,
    VAIQMatrixBufferH264* IQ_matrix_buf)
{
    int i;
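
    /*
     * Scaling list selection follows the H.264 scaling-list semantics: when a
     * list is not transmitted, fallback rule A substitutes the default list
     * (or the previously derived list of the same size), while fallback rule B
     * falls back to the corresponding sequence-level (SPS) list instead.
     */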
    if (parser->info.active_PPS.pic_scaling_matrix_present_flag)
    {
        for (i = 0; i < 6 + 2 * parser->info.active_PPS.transform_8x8_mode_flag; i++)
        {
            if (parser->info.active_PPS.pic_scaling_list_present_flag[i])
            {
                if (((i < 6) && parser->info.active_PPS.UseDefaultScalingMatrix4x4Flag[i]) ||
                    ((i >= 6) && parser->info.active_PPS.UseDefaultScalingMatrix8x8Flag[i-6]))
                {
                    /* use default scaling list */
                    if (i < 6)
                    {
                        memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
                    }
                    else
                    {
                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
                    }
                }
                else
                {
                    /* use PPS list */
                    if (i < 6)
                    {
                        memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_PPS.ScalingList4x4[i], 16);
                    }
                    else
                    {
                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_PPS.ScalingList8x8[i - 6], 64);
                    }
                }
            }
            else /* pic_scaling_list not present */
            {
                if (parser->info.active_SPS.seq_scaling_matrix_present_flag)
                {
                    /* SPS matrix present - use fallback rule B */
                    switch (i)
                    {
                        case 0:
                        case 3:
                            memcpy(IQ_matrix_buf->ScalingList4x4[i],
                                parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList4x4[i] : UseDefaultList[i],
                                16);
                            break;

                        case 6:
                        case 7:
                            memcpy(IQ_matrix_buf->ScalingList8x8[i - 6],
                                parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList8x8[i - 6] : UseDefaultList[i],
                                64);
                            break;

                        case 1:
                        case 2:
                        case 4:
                        case 5:
                            memcpy(IQ_matrix_buf->ScalingList4x4[i],
                                IQ_matrix_buf->ScalingList4x4[i - 1],
                                16);
                            break;

                        default:
                            g_warning("invalid scaling list index.");
                            break;
                    }
                }
                else /* seq_scaling_matrix not present */
                {
                    /* SPS matrix not present - use fallback rule A */
                    switch (i)
                    {
                        case 0:
                        case 3:
                            memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
                            break;

                        case 6:
                        case 7:
                            memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
                            break;

                        case 1:
                        case 2:
                        case 4:
                        case 5:
                            memcpy(IQ_matrix_buf->ScalingList4x4[i],
                                IQ_matrix_buf->ScalingList4x4[i - 1],
                                16);
                            break;

                        default:
                            WTRACE("invalid scaling list index.");
                            break;
                    }
                } /* end of seq_scaling_matrix not present */
            } /* end of pic_scaling_list not present */
        } /* for loop for each index from 0 to 7 */
    } /* end of pic_scaling_matrix present */
    else
    {
        /* PPS matrix not present, use SPS information */
        if (parser->info.active_SPS.seq_scaling_matrix_present_flag)
        {
            for (i = 0; i < 6 + 2 * parser->info.active_PPS.transform_8x8_mode_flag; i++)
            {
                if (parser->info.active_SPS.seq_scaling_list_present_flag[i])
                {
                    if (((i < 6) && parser->info.active_SPS.UseDefaultScalingMatrix4x4Flag[i]) ||
                        ((i >= 6) && parser->info.active_SPS.UseDefaultScalingMatrix8x8Flag[i - 6]))
                    {
                        /* use default scaling list */
                        if (i < 6)
                        {
                            memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
                        }
                        else
                        {
                            memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
                        }
                    }
                    else
                    {
                        /* use SPS list */
                        if (i < 6)
                        {
                            memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_SPS.ScalingList4x4[i], 16);
                        }
                        else
                        {
                            memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_SPS.ScalingList8x8[i - 6], 64);
                        }
                    }
                }
                else
                {
                    /* SPS list not present - use fallback rule A */
                    switch (i)
                    {
                        case 0:
                        case 3:
                            memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
                            break;

                        case 6:
                        case 7:
                            memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
                            break;

                        case 1:
                        case 2:
                        case 4:
                        case 5:
                            memcpy(IQ_matrix_buf->ScalingList4x4[i],
                                IQ_matrix_buf->ScalingList4x4[i - 1],
                                16);
                            break;

                        default:
                            WTRACE("invalid scaling list index.");
                            break;
                    }
                }
            }
        }
        else
        {
            /* SPS matrix not present - use flat lists */
            for (i = 0; i < 6; i++)
            {
                memcpy(IQ_matrix_buf->ScalingList4x4[i], quant_flat, 16);
            }
            for (i = 0; i < 2; i++)
            {
                memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64);
            }
        }
    }

    if ((0 == parser->info.active_PPS.transform_8x8_mode_flag) &&
        (parser->info.active_PPS.pic_scaling_matrix_present_flag ||
        parser->info.active_SPS.seq_scaling_matrix_present_flag))
    {
        for (i = 0; i < 2; i++)
        {
            memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64);
        }
    }
}

static void vbp_set_codec_data_h264(
    struct h264_viddec_parser *parser,
    vbp_codec_data_h264* codec_data)
{
    /* parameter set ids */
    codec_data->seq_parameter_set_id = parser->info.active_SPS.seq_parameter_set_id;
    codec_data->pic_parameter_set_id = parser->info.active_PPS.pic_parameter_set_id;

    /* profile and level */
    codec_data->profile_idc = parser->info.active_SPS.profile_idc;
    codec_data->level_idc = parser->info.active_SPS.level_idc;


    codec_data->constraint_set1_flag = (parser->info.active_SPS.constraint_set_flags & 0x4) >> 2;


    /* reference frames */
    codec_data->num_ref_frames = parser->info.active_SPS.num_ref_frames;

    if (!parser->info.active_SPS.sps_disp.frame_mbs_only_flag &&
        !parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag)
    {
        /* no longer necessary: two fields share the same interlaced surface */
        /* codec_data->num_ref_frames *= 2; */
    }

    codec_data->gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag;

    /* frame coding */
    codec_data->frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag;
    codec_data->mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag;

    /* frame dimensions */
    codec_data->frame_width = (parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1) * 16;

    codec_data->frame_height = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) *
            (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) * 16;

    /* frame cropping */
    codec_data->frame_cropping_flag =
        parser->info.active_SPS.sps_disp.frame_cropping_flag;

    codec_data->frame_crop_rect_left_offset =
        parser->info.active_SPS.sps_disp.frame_crop_rect_left_offset;

    codec_data->frame_crop_rect_right_offset =
        parser->info.active_SPS.sps_disp.frame_crop_rect_right_offset;

    codec_data->frame_crop_rect_top_offset =
        parser->info.active_SPS.sps_disp.frame_crop_rect_top_offset;

    codec_data->frame_crop_rect_bottom_offset =
        parser->info.active_SPS.sps_disp.frame_crop_rect_bottom_offset;

    /* aspect ratio */
    codec_data->aspect_ratio_info_present_flag =
        parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag;

    codec_data->aspect_ratio_idc =
        parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc;

    codec_data->sar_width =
        parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_width;

    codec_data->sar_height =
        parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_height;

    /* video format */
    codec_data->video_format =
        parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format;

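    /* NOTE: the assignment below overwrites video_format with the value of
       video_signal_type_present_flag; it presumably was meant to populate a
       separate video_signal_type_present_flag member of vbp_codec_data_h264,
       if one exists. Behavior is left unchanged here. */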
    codec_data->video_format =
        parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag;
}


static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index)
{
    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;

    vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data;
    struct h264_viddec_parser* parser = NULL;
    vbp_picture_data_h264* pic_data = NULL;
    VAPictureParameterBufferH264* pic_parms = NULL;

    parser = (struct h264_viddec_parser *)cxt->codec_data;

    if (0 == parser->info.SliceHeader.first_mb_in_slice)
    {
        /* a new picture is parsed */
        query_data->num_pictures++;
    }

    if (query_data->num_pictures > MAX_NUM_PICTURES)
    {
        ETRACE("num of pictures exceeds the limit (%d).", MAX_NUM_PICTURES);
        return VBP_DATA;
    }

    int pic_data_index = query_data->num_pictures - 1;
    if (pic_data_index < 0)
    {
        WTRACE("MB address does not start from 0!");
        return VBP_DATA;
    }

    pic_data = &(query_data->pic_data[pic_data_index]);
    pic_parms = pic_data->pic_parms;

    if (parser->info.SliceHeader.first_mb_in_slice == 0)
    {
        /**
        * picture parameters only need to be set once,
        * even if multiple slices are encoded
        */

        /* VAPictureParameterBufferH264 */
        pic_parms->CurrPic.picture_id = VA_INVALID_SURFACE;
        pic_parms->CurrPic.frame_idx = 0;
        if (parser->info.img.field_pic_flag == 1)
        {
            if (parser->info.img.bottom_field_flag)
            {
                pic_parms->CurrPic.flags = VA_PICTURE_H264_BOTTOM_FIELD;
            }
            else
            {
                /* also OK to set to 0 (from test suite) */
                pic_parms->CurrPic.flags = VA_PICTURE_H264_TOP_FIELD;
            }
        }
        else
        {
            pic_parms->CurrPic.flags = 0; /* frame picture */
        }
        pic_parms->CurrPic.TopFieldOrderCnt = parser->info.img.toppoc;
        pic_parms->CurrPic.BottomFieldOrderCnt = parser->info.img.bottompoc;
        pic_parms->CurrPic.frame_idx = parser->info.SliceHeader.frame_num;

        /* don't care if current frame is used as long term reference */
        if (parser->info.SliceHeader.nal_ref_idc != 0)
        {
            pic_parms->CurrPic.flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
        }

        pic_parms->picture_width_in_mbs_minus1 = parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1;

        /* frame height in MBs */
        pic_parms->picture_height_in_mbs_minus1 = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) *
            (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) - 1;

        pic_parms->bit_depth_luma_minus8 = parser->info.active_SPS.bit_depth_luma_minus8;
        pic_parms->bit_depth_chroma_minus8 = parser->info.active_SPS.bit_depth_chroma_minus8;


        pic_parms->seq_fields.value = 0;
        pic_parms->seq_fields.bits.chroma_format_idc = parser->info.active_SPS.sps_disp.chroma_format_idc;
        pic_parms->seq_fields.bits.residual_colour_transform_flag = parser->info.active_SPS.residual_colour_transform_flag;
        pic_parms->seq_fields.bits.frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag;
        pic_parms->seq_fields.bits.mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag;
        pic_parms->seq_fields.bits.direct_8x8_inference_flag = parser->info.active_SPS.sps_disp.direct_8x8_inference_flag;

        /* new fields in libva 0.31 */
        pic_parms->seq_fields.bits.gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag;
        pic_parms->seq_fields.bits.log2_max_frame_num_minus4 = parser->info.active_SPS.log2_max_frame_num_minus4;
        pic_parms->seq_fields.bits.pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type;
        pic_parms->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4;
        pic_parms->seq_fields.bits.delta_pic_order_always_zero_flag = parser->info.active_SPS.delta_pic_order_always_zero_flag;


        /* referenced from UMG_Moorstown_TestSuites */
        pic_parms->seq_fields.bits.MinLumaBiPredSize8x8 = (parser->info.active_SPS.level_idc > 30) ? 1 : 0;

        pic_parms->num_slice_groups_minus1 = parser->info.active_PPS.num_slice_groups_minus1;
        pic_parms->slice_group_map_type = parser->info.active_PPS.slice_group_map_type;
        pic_parms->slice_group_change_rate_minus1 = 0;
        pic_parms->pic_init_qp_minus26 = parser->info.active_PPS.pic_init_qp_minus26;
        pic_parms->pic_init_qs_minus26 = 0;
        pic_parms->chroma_qp_index_offset = parser->info.active_PPS.chroma_qp_index_offset;
        pic_parms->second_chroma_qp_index_offset = parser->info.active_PPS.second_chroma_qp_index_offset;

        pic_parms->pic_fields.value = 0;
        pic_parms->pic_fields.bits.entropy_coding_mode_flag = parser->info.active_PPS.entropy_coding_mode_flag;
        pic_parms->pic_fields.bits.weighted_pred_flag = parser->info.active_PPS.weighted_pred_flag;
        pic_parms->pic_fields.bits.weighted_bipred_idc = parser->info.active_PPS.weighted_bipred_idc;
        pic_parms->pic_fields.bits.transform_8x8_mode_flag = parser->info.active_PPS.transform_8x8_mode_flag;

        /* new LibVA fields in v0.31 */
        pic_parms->pic_fields.bits.pic_order_present_flag = parser->info.active_PPS.pic_order_present_flag;
        pic_parms->pic_fields.bits.deblocking_filter_control_present_flag = parser->info.active_PPS.deblocking_filter_control_present_flag;
        pic_parms->pic_fields.bits.redundant_pic_cnt_present_flag = parser->info.active_PPS.redundant_pic_cnt_present_flag;
        pic_parms->pic_fields.bits.reference_pic_flag = parser->info.SliceHeader.nal_ref_idc != 0;

        /* all slices in the picture have the same field_pic_flag */
        pic_parms->pic_fields.bits.field_pic_flag = parser->info.SliceHeader.field_pic_flag;
        pic_parms->pic_fields.bits.constrained_intra_pred_flag = parser->info.active_PPS.constrained_intra_pred_flag;

        pic_parms->frame_num = parser->info.SliceHeader.frame_num;
    }


    /* set reference frames, and num_ref_frames */
    vbp_set_reference_frames_h264(parser, pic_parms);
    if (parser->info.nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
    {
        /* num of reference frames is 0 if the current picture is an IDR */
        pic_parms->num_ref_frames = 0;
    }
    else
    {
        /* actual num_ref_frames is set in vbp_set_reference_frames_h264 */
    }

    return VBP_OK;
}

#if 0
static inline void vbp_update_reference_frames_h264_methodA(vbp_picture_data_h264* pic_data)
{
    VAPictureParameterBufferH264* pic_parms = pic_data->pic_parms;

    char is_used[16];
    memset(is_used, 0, sizeof(is_used));

    int ref_list;
    int slice_index;
    int i, j;
    VAPictureH264* pRefList = NULL;

    for (slice_index = 0; slice_index < pic_data->num_slices; slice_index++)
    {
        VASliceParameterBufferH264* slice_parms =
            &(pic_data->slc_data[slice_index].slc_parms);

        for (ref_list = 0; ref_list < 2; ref_list++)
        {
            if (0 == ref_list)
                pRefList = slice_parms->RefPicList0;
            else
                pRefList = slice_parms->RefPicList1;

            for (i = 0; i < 32; i++, pRefList++)
            {
                if (VA_PICTURE_H264_INVALID == pRefList->flags)
                    break;

                for (j = 0; j < 16; j++)
                {
                    if (pic_parms->ReferenceFrames[j].TopFieldOrderCnt ==
                        pRefList->TopFieldOrderCnt)
                    {
                        is_used[j] = 1;
                        break;
                    }
                }
            }
        }
    }

    int frame_idx = 0;
    VAPictureH264* pRefFrame = pic_parms->ReferenceFrames;
    for (i = 0; i < 16; i++)
    {
        if (is_used[i])
        {
            memcpy(pRefFrame,
                &(pic_parms->ReferenceFrames[i]),
                sizeof(VAPictureH264));

            pRefFrame++;
            frame_idx++;
        }
    }
    pic_parms->num_ref_frames = frame_idx;

    for (; frame_idx < 16; frame_idx++)
    {
        pRefFrame->picture_id = VA_INVALID_SURFACE;
        pRefFrame->frame_idx = -1;
        pRefFrame->flags = VA_PICTURE_H264_INVALID;
        pRefFrame->TopFieldOrderCnt = -1;
        pRefFrame->BottomFieldOrderCnt = -1;
        pRefFrame++;
    }
}
#endif

#if 0
static inline void vbp_update_reference_frames_h264_methodB(vbp_picture_data_h264* pic_data)
{
    VAPictureParameterBufferH264* pic_parms = pic_data->pic_parms;
    int i;
    VAPictureH264* pRefFrame = pic_parms->ReferenceFrames;
    for (i = 0; i < 16; i++)
    {
        pRefFrame->picture_id = VA_INVALID_SURFACE;
        pRefFrame->frame_idx = -1;
        pRefFrame->flags = VA_PICTURE_H264_INVALID;
        pRefFrame->TopFieldOrderCnt = -1;
        pRefFrame->BottomFieldOrderCnt = -1;
        pRefFrame++;
    }

    pic_parms->num_ref_frames = 0;


    int ref_list;
    int slice_index;
    int j;
    VAPictureH264* pRefList = NULL;

    for (slice_index = 0; slice_index < pic_data->num_slices; slice_index++)
    {
        VASliceParameterBufferH264* slice_parms =
            &(pic_data->slc_data[slice_index].slc_parms);

        for (ref_list = 0; ref_list < 2; ref_list++)
        {
            if (0 == ref_list)
                pRefList = slice_parms->RefPicList0;
            else
                pRefList = slice_parms->RefPicList1;

            for (i = 0; i < 32; i++, pRefList++)
            {
                if (VA_PICTURE_H264_INVALID == pRefList->flags)
                    break;

                for (j = 0; j < 16; j++)
                {
                    if (pic_parms->ReferenceFrames[j].TopFieldOrderCnt ==
                        pRefList->TopFieldOrderCnt)
                    {
                        pic_parms->ReferenceFrames[j].flags |=
                            pRefList->flags;

                        if ((pic_parms->ReferenceFrames[j].flags & VA_PICTURE_H264_TOP_FIELD) &&
                            (pic_parms->ReferenceFrames[j].flags & VA_PICTURE_H264_BOTTOM_FIELD))
                        {
                            pic_parms->ReferenceFrames[j].flags = 0;
                        }
                        break;
                    }
                }
                if (j == 16)
                {
                    memcpy(&(pic_parms->ReferenceFrames[pic_parms->num_ref_frames++]),
                        pRefList,
                        sizeof(VAPictureH264));
                }

            }
        }
    }
}
#endif


static uint32_t vbp_add_slice_data_h264(vbp_context *pcontext, int index)
{
    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
    uint32 bit, byte;
    uint8 is_emul;

    vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data;
    VASliceParameterBufferH264 *slc_parms = NULL;
    vbp_slice_data_h264 *slc_data = NULL;
    struct h264_viddec_parser* h264_parser = NULL;
    h264_Slice_Header_t* slice_header = NULL;
    vbp_picture_data_h264* pic_data = NULL;


    h264_parser = (struct h264_viddec_parser *)cxt->codec_data;
    int pic_data_index = query_data->num_pictures - 1;
    if (pic_data_index < 0)
    {
        ETRACE("invalid picture data index.");
        return VBP_DATA;
    }

    pic_data = &(query_data->pic_data[pic_data_index]);

    slc_data = &(pic_data->slc_data[pic_data->num_slices]);
    slc_data->buffer_addr = cxt->parse_cubby.buf;
    slc_parms = &(slc_data->slc_parms);

    /* byte: how many bytes have been parsed */
    /* bit: bits parsed within the current parsing position */
    viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul);


#if 0
    /* add 4 bytes of start code prefix */
    slc_parms->slice_data_size = slc_data->slice_size =
          pcontext->parser_cxt->list.data[index].edpos -
          pcontext->parser_cxt->list.data[index].stpos + 4;

    slc_data->slice_offset = pcontext->parser_cxt->list.data[index].stpos - 4;

    /* overwrite the "length" bytes with a start code (0x00000001) */
    *(slc_data->buffer_addr + slc_data->slice_offset) = 0;
    *(slc_data->buffer_addr + slc_data->slice_offset + 1) = 0;
    *(slc_data->buffer_addr + slc_data->slice_offset + 2) = 0;
    *(slc_data->buffer_addr + slc_data->slice_offset + 3) = 1;


    /* the offset to the NAL start code for this slice */
    slc_parms->slice_data_offset = 0;

    /* whole slice is in this buffer */
    slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;

    /* bit offset from NAL start code to the beginning of slice data */
    /* slc_parms->slice_data_bit_offset = bit; */
    slc_parms->slice_data_bit_offset = (byte + 4) * 8 + bit;

#else
    slc_parms->slice_data_size = slc_data->slice_size =
          pcontext->parser_cxt->list.data[index].edpos -
          pcontext->parser_cxt->list.data[index].stpos;

    /* the offset to the NAL start code for this slice */
    slc_data->slice_offset = cxt->list.data[index].stpos;
    slc_parms->slice_data_offset = 0;

    /* whole slice is in this buffer */
    slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;

    /* bit offset from NAL start code to the beginning of slice data */
    slc_parms->slice_data_bit_offset = bit + byte * 8;
#endif

    if (is_emul)
    {
        WTRACE("next byte is emulation prevention byte.");
        /* slc_parms->slice_data_bit_offset += 8; */
    }

    if (cxt->getbits.emulation_byte_counter != 0)
    {
        slc_parms->slice_data_bit_offset -= cxt->getbits.emulation_byte_counter * 8;
    }

    slice_header = &(h264_parser->info.SliceHeader);
    slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice;

    if (h264_parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag &&
        (!(h264_parser->info.SliceHeader.field_pic_flag)))
    {
        slc_parms->first_mb_in_slice /= 2;
    }

    slc_parms->slice_type = slice_header->slice_type;

    slc_parms->direct_spatial_mv_pred_flag = slice_header->direct_spatial_mv_pred_flag;

    slc_parms->num_ref_idx_l0_active_minus1 = 0;
    slc_parms->num_ref_idx_l1_active_minus1 = 0;
    if (slice_header->slice_type == h264_PtypeI)
    {
        /* I slices use no reference lists */
    }
    else if (slice_header->slice_type == h264_PtypeP)
    {
        slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1;
    }
    else if (slice_header->slice_type == h264_PtypeB)
    {
        slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1;
        slc_parms->num_ref_idx_l1_active_minus1 = slice_header->num_ref_idx_l1_active - 1;
    }
    else
    {
        WTRACE("slice type %d is not supported.", slice_header->slice_type);
    }

    slc_parms->cabac_init_idc = slice_header->cabac_init_idc;
    slc_parms->slice_qp_delta = slice_header->slice_qp_delta;
    slc_parms->disable_deblocking_filter_idc = slice_header->disable_deblocking_filter_idc;
    slc_parms->slice_alpha_c0_offset_div2 = slice_header->slice_alpha_c0_offset_div2;
    slc_parms->slice_beta_offset_div2 = slice_header->slice_beta_offset_div2;


    vbp_set_pre_weight_table_h264(h264_parser, slc_parms);
    vbp_set_slice_ref_list_h264(h264_parser, slc_parms);


    pic_data->num_slices++;

    /* vbp_update_reference_frames_h264_methodB(pic_data); */
    if (pic_data->num_slices > MAX_NUM_SLICES)
    {
        ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES);
        return VBP_DATA;
    }
    return VBP_OK;
}

/**
 * parse decoder configuration data
 */
uint32 vbp_parse_init_data_h264(vbp_context* pcontext)
{
    /* parsing the AVCDecoderConfigurationRecord structure (see MPEG-4 part 15 spec) */

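    /*
     * Record layout (ISO/IEC 14496-15):
     *   configurationVersion (1 byte), AVCProfileIndication (1 byte),
     *   profile_compatibility (1 byte), AVCLevelIndication (1 byte),
     *   6 reserved bits + 2-bit lengthSizeMinusOne,
     *   3 reserved bits + 5-bit numOfSequenceParameterSets,
     *   then for each SPS a 16-bit length followed by the SPS NAL unit,
     *   then an 8-bit numOfPictureParameterSets and the PPS entries in the
     *   same length-prefixed form. SPS/PPS payloads are queued in cxt->list
     *   so the parser can process them like regular NAL units.
     */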
    uint8 configuration_version = 0;
    uint8 AVC_profile_indication = 0;
    uint8 profile_compatibility = 0;
    uint8 AVC_level_indication = 0;
    uint8 length_size_minus_one = 0;
    uint8 num_of_sequence_parameter_sets = 0;
    uint8 num_of_picture_parameter_sets = 0;
    uint16 sequence_parameter_set_length = 0;
    uint16 picture_parameter_set_length = 0;

    int i = 0;
    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
    uint8* cur_data = cxt->parse_cubby.buf;


    if (cxt->parse_cubby.size < 6)
    {
        /* need at least 6 bytes to start parsing the structure, see MPEG-4 part 15 */
        return VBP_DATA;
    }

    configuration_version = *cur_data++;
    AVC_profile_indication = *cur_data++;

    /* ITRACE("Profile indication: %d", AVC_profile_indication); */

    profile_compatibility = *cur_data++;
    AVC_level_indication = *cur_data++;

    /* ITRACE("Level indication: %d", AVC_level_indication); */
    /* 2 bits of length_size_minus_one, 6 bits of reserved (111111) */
    length_size_minus_one = (*cur_data) & 0x3;

    if (length_size_minus_one != 3)
    {
        WTRACE("length size (%d) is not equal to 4.", length_size_minus_one + 1);
    }

    NAL_length_size = length_size_minus_one + 1;

    cur_data++;

    /* 3 bits of reserved (111) and 5 bits of num_of_sequence_parameter_sets */
    num_of_sequence_parameter_sets = (*cur_data) & 0x1f;
    if (num_of_sequence_parameter_sets > 1)
    {
        WTRACE("num_of_sequence_parameter_sets is %d.", num_of_sequence_parameter_sets);
    }
    if (num_of_sequence_parameter_sets > MAX_NUM_SPS)
    {
        /* this would never happen as MAX_NUM_SPS = 32 */
        WTRACE("num_of_sequence_parameter_sets (%d) exceeds the limit (%d).", num_of_sequence_parameter_sets, MAX_NUM_SPS);
    }
    cur_data++;

    cxt->list.num_items = 0;
    for (i = 0; i < num_of_sequence_parameter_sets; i++)
    {
        if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
        {
            /* need at least 2 bytes to parse sequence_parameter_set_length */
            return VBP_DATA;
        }

        /* 16 bits */
        sequence_parameter_set_length = vbp_utils_ntohs(cur_data);


        cur_data += 2;

        if (cur_data - cxt->parse_cubby.buf + sequence_parameter_set_length > cxt->parse_cubby.size)
        {
            /* need at least sequence_parameter_set_length bytes for SPS */
            return VBP_DATA;
        }

        cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf;

        /* end pos is exclusive */
        cxt->list.data[cxt->list.num_items].edpos =
            cxt->list.data[cxt->list.num_items].stpos + sequence_parameter_set_length;

        cxt->list.num_items++;

        cur_data += sequence_parameter_set_length;
    }

    if (cur_data - cxt->parse_cubby.buf + 1 > cxt->parse_cubby.size)
    {
        /* need at least one more byte to parse num_of_picture_parameter_sets */
        return VBP_DATA;
    }

    num_of_picture_parameter_sets = *cur_data++;
    if (num_of_picture_parameter_sets > 1)
    {
        /* g_warning("num_of_picture_parameter_sets is %d.", num_of_picture_parameter_sets); */
    }

    for (i = 0; i < num_of_picture_parameter_sets; i++)
    {
        if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
        {
            /* need at least 2 bytes to parse picture_parameter_set_length */
            return VBP_DATA;
        }

        /* 16 bits */
        picture_parameter_set_length = vbp_utils_ntohs(cur_data);

        cur_data += 2;

        if (cur_data - cxt->parse_cubby.buf + picture_parameter_set_length > cxt->parse_cubby.size)
        {
            /* need at least picture_parameter_set_length bytes for PPS */
            return VBP_DATA;
        }

        cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf;

        /* end pos is exclusive */
        cxt->list.data[cxt->list.num_items].edpos =
            cxt->list.data[cxt->list.num_items].stpos + picture_parameter_set_length;

        cxt->list.num_items++;

        cur_data += picture_parameter_set_length;
    }

    if ((cur_data - cxt->parse_cubby.buf) != cxt->parse_cubby.size)
    {
        WTRACE("Not all initialization data is parsed. Size = %d, parsed = %d.",
            cxt->parse_cubby.size, (cur_data - cxt->parse_cubby.buf));
    }

    return VBP_OK;
}

static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p)
{
    switch (NAL_length_size)
    {
        case 4:
            return vbp_utils_ntohl(p);

        case 3:
        {
            uint32_t i = ((*p) << 16) + ((*(p+1)) << 8) + ((*(p+2)));
            return i;
        }

        case 2:
            return vbp_utils_ntohs(p);

        case 1:
            return *p;

        default:
            WTRACE("invalid NAL_length_size: %d.", NAL_length_size);
            /* default to 4 bytes for length */
            NAL_length_size = 4;
            return vbp_utils_ntohl(p);
    }
}

/**
 * An H.264 elementary stream in this sample format does not contain start codes;
 * instead, each NAL unit is preceded by its size in bytes. See MPEG-4 part 15
 * (sample format).
 */
uint32 vbp_parse_start_code_h264(vbp_context *pcontext)
{
    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
    int32_t size_left = 0;
    int32_t size_parsed = 0;
    int32_t NAL_length = 0;
    viddec_sc_parse_cubby_cxt_t* cubby = NULL;

    /* reset query data for the new sample buffer */
    vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data;
    int i;

    for (i = 0; i < MAX_NUM_PICTURES; i++)
    {
        query_data->pic_data[i].num_slices = 0;
    }
    query_data->num_pictures = 0;


    cubby = &(cxt->parse_cubby);

    cxt->list.num_items = 0;

    /* start code emulation prevention bytes are present in the NAL units */
    cxt->getbits.is_emul_reqd = 1;

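    /* Walk the length-prefixed buffer: each iteration reads a NAL_length_size-byte
       big-endian length and records the following NAL payload as one item in
       cxt->list (stpos/edpos are byte offsets into the sample buffer). */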
    size_left = cubby->size;

    while (size_left >= NAL_length_size)
    {
        NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed);

        size_parsed += NAL_length_size;
        cxt->list.data[cxt->list.num_items].stpos = size_parsed;
        size_parsed += NAL_length; /* skip NAL bytes */
        /* end position is exclusive */
        cxt->list.data[cxt->list.num_items].edpos = size_parsed;
        cxt->list.num_items++;
        if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
        {
            ETRACE("num of list items exceeds the limit (%d).", MAX_IBUFS_PER_SC);
            break;
        }

        size_left = cubby->size - size_parsed;
    }

    if (size_left != 0)
    {
        WTRACE("Elementary stream is not aligned (%d).", size_left);
    }
    return VBP_OK;
}

/**
 *
 * process parsing result after a NAL unit is parsed
 *
 */
uint32 vbp_process_parsing_result_h264(vbp_context *pcontext, int i)
{
    if (i >= MAX_NUM_SLICES)
    {
        return VBP_PARM;
    }

    uint32 error = VBP_OK;

    struct h264_viddec_parser* parser = NULL;
    parser = (struct h264_viddec_parser *)&(pcontext->parser_cxt->codec_data[0]);
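
    /* Only slice and IDR NAL units generate picture and slice parameter buffers;
       SPS, PPS, SEI and delimiter NAL units only update the parser's internal state. */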
    switch (parser->info.nal_unit_type)
    {
        case h264_NAL_UNIT_TYPE_SLICE:
            /* ITRACE("slice header is parsed."); */
            error = vbp_add_pic_data_h264(pcontext, i);
            if (VBP_OK == error)
            {
                error = vbp_add_slice_data_h264(pcontext, i);
            }
            break;

        case h264_NAL_UNIT_TYPE_IDR:
            /* ITRACE("IDR header is parsed."); */
            error = vbp_add_pic_data_h264(pcontext, i);
            if (VBP_OK == error)
            {
                error = vbp_add_slice_data_h264(pcontext, i);
            }
            break;

        case h264_NAL_UNIT_TYPE_SEI:
            /* ITRACE("SEI header is parsed."); */
            break;

        case h264_NAL_UNIT_TYPE_SPS:
            /* ITRACE("SPS header is parsed."); */
            break;

        case h264_NAL_UNIT_TYPE_PPS:
            /* ITRACE("PPS header is parsed."); */
            break;

        case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
            /* ITRACE("Access unit delimiter is parsed."); */
            break;

        case h264_NAL_UNIT_TYPE_EOSeq:
            /* ITRACE("EOSeq is parsed."); */
            break;

        case h264_NAL_UNIT_TYPE_EOstream:
            /* ITRACE("EOStream is parsed."); */
            break;

        default:
            WTRACE("unknown header %d is parsed.", parser->info.nal_unit_type);
            break;
    }
    return error;
}

/**
 *
 * fill query data structure after a sample buffer is parsed
 *
 */
uint32 vbp_populate_query_data_h264(vbp_context *pcontext)
{
    vbp_data_h264 *query_data = NULL;
    struct h264_viddec_parser *parser = NULL;

    parser = (struct h264_viddec_parser *)pcontext->parser_cxt->codec_data;
    query_data = (vbp_data_h264 *)pcontext->query_data;

    vbp_set_codec_data_h264(parser, query_data->codec_data);

    /* buffer number */
    query_data->buf_number = buffer_counter;

    /* VAIQMatrixBufferH264 */
    vbp_set_scaling_list_h264(parser, query_data->IQ_matrix_buf);

    if (query_data->num_pictures > 0)
    {
        /*
        * picture parameter buffer and slice parameter buffer have been populated
        */
    }
    else
    {
        /**
        * add a dummy picture that contains picture parameters parsed
        * from SPS and PPS.
        */
        vbp_add_pic_data_h264(pcontext, 0);
    }
    return VBP_OK;
}
   1569