/*
 * Copyright (c) 2017, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>

#include "config/aom_config.h"
#include "config/aom_scale_rtcd.h"

#include "aom/aom_codec.h"
#include "aom_dsp/bitreader_buffer.h"
#include "aom_ports/mem_ops.h"

#include "av1/common/common.h"
#include "av1/common/obu_util.h"
#include "av1/common/timing.h"
#include "av1/decoder/decoder.h"
#include "av1/decoder/decodeframe.h"
#include "av1/decoder/obu.h"

// Picture prediction structures (0-13 are predefined) in scalability metadata.
29 enum { 30 SCALABILITY_L1T2 = 0, 31 SCALABILITY_L1T3 = 1, 32 SCALABILITY_L2T1 = 2, 33 SCALABILITY_L2T2 = 3, 34 SCALABILITY_L2T3 = 4, 35 SCALABILITY_S2T1 = 5, 36 SCALABILITY_S2T2 = 6, 37 SCALABILITY_S2T3 = 7, 38 SCALABILITY_L2T1h = 8, 39 SCALABILITY_L2T2h = 9, 40 SCALABILITY_L2T3h = 10, 41 SCALABILITY_S2T1h = 11, 42 SCALABILITY_S2T2h = 12, 43 SCALABILITY_S2T3h = 13, 44 SCALABILITY_SS = 14 45 } UENUM1BYTE(SCALABILITY_STRUCTURES); 46 47 aom_codec_err_t aom_get_num_layers_from_operating_point_idc( 48 int operating_point_idc, unsigned int *number_spatial_layers, 49 unsigned int *number_temporal_layers) { 50 // derive number of spatial/temporal layers from operating_point_idc 51 52 if (!number_spatial_layers || !number_temporal_layers) 53 return AOM_CODEC_INVALID_PARAM; 54 55 if (operating_point_idc == 0) { 56 *number_temporal_layers = 1; 57 *number_spatial_layers = 1; 58 } else { 59 *number_spatial_layers = 0; 60 *number_temporal_layers = 0; 61 for (int j = 0; j < MAX_NUM_SPATIAL_LAYERS; j++) { 62 *number_spatial_layers += 63 (operating_point_idc >> (j + MAX_NUM_TEMPORAL_LAYERS)) & 0x1; 64 } 65 for (int j = 0; j < MAX_NUM_TEMPORAL_LAYERS; j++) { 66 *number_temporal_layers += (operating_point_idc >> j) & 0x1; 67 } 68 } 69 70 return AOM_CODEC_OK; 71 } 72 73 static int is_obu_in_current_operating_point(AV1Decoder *pbi, 74 ObuHeader obu_header) { 75 if (!pbi->current_operating_point) { 76 return 1; 77 } 78 79 if ((pbi->current_operating_point >> obu_header.temporal_layer_id) & 0x1 && 80 (pbi->current_operating_point >> (obu_header.spatial_layer_id + 8)) & 81 0x1) { 82 return 1; 83 } 84 return 0; 85 } 86 87 static int byte_alignment(AV1_COMMON *const cm, 88 struct aom_read_bit_buffer *const rb) { 89 while (rb->bit_offset & 7) { 90 if (aom_rb_read_bit(rb)) { 91 cm->error.error_code = AOM_CODEC_CORRUPT_FRAME; 92 return -1; 93 } 94 } 95 return 0; 96 } 97 98 static uint32_t read_temporal_delimiter_obu() { return 0; } 99 100 // Returns a boolean that indicates success. 
// Reads seq_level_idx from 'rb' into *seq_level_idx and validates it.
static int read_bitstream_level(AV1_LEVEL *seq_level_idx,
                                struct aom_read_bit_buffer *rb) {
  *seq_level_idx = aom_rb_read_literal(rb, LEVEL_BITS);
  if (!is_valid_seq_level_idx(*seq_level_idx)) return 0;
  return 1;
}

// Returns whether two sequence headers are consistent with each other.
// Note: the byte-wise memcmp assumes any padding inside SequenceHeader is in
// a consistent state across the two copies being compared.
// TODO(huisu,wtc@google.com): make sure the code matches the spec exactly.
static int are_seq_headers_consistent(const SequenceHeader *seq_params_old,
                                      const SequenceHeader *seq_params_new) {
  return !memcmp(seq_params_old, seq_params_new, sizeof(SequenceHeader));
}

// On success, sets pbi->sequence_header_ready to 1 and returns the number of
// bytes read from 'rb'.
// On failure, sets pbi->common.error.error_code and returns 0.
static uint32_t read_sequence_header_obu(AV1Decoder *pbi,
                                         struct aom_read_bit_buffer *rb) {
  AV1_COMMON *const cm = &pbi->common;
  const uint32_t saved_bit_offset = rb->bit_offset;

  // Verify rb has been configured to report errors.
  assert(rb->error_handler);

  // Use a local variable to store the information as we decode. At the end,
  // if no errors have occurred, cm->seq_params is updated.
  SequenceHeader sh = cm->seq_params;
  SequenceHeader *const seq_params = &sh;

  seq_params->profile = av1_read_profile(rb);
  if (seq_params->profile > CONFIG_MAX_DECODE_PROFILE) {
    cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
    return 0;
  }

  // Still picture or not
  seq_params->still_picture = aom_rb_read_bit(rb);
  seq_params->reduced_still_picture_hdr = aom_rb_read_bit(rb);
  // Video must have reduced_still_picture_hdr = 0
  if (!seq_params->still_picture && seq_params->reduced_still_picture_hdr) {
    cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
    return 0;
  }

  if (seq_params->reduced_still_picture_hdr) {
    // Reduced header: a single operating point with defaulted model flags.
    cm->timing_info_present = 0;
    seq_params->decoder_model_info_present_flag = 0;
    seq_params->display_model_info_present_flag = 0;
    seq_params->operating_points_cnt_minus_1 = 0;
    seq_params->operating_point_idc[0] = 0;
    if (!read_bitstream_level(&seq_params->seq_level_idx[0], rb)) {
      cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
      return 0;
    }
    seq_params->tier[0] = 0;
    cm->op_params[0].decoder_model_param_present_flag = 0;
    cm->op_params[0].display_model_param_present_flag = 0;
  } else {
    cm->timing_info_present = aom_rb_read_bit(rb);  // timing_info_present_flag
    if (cm->timing_info_present) {
      av1_read_timing_info_header(cm, rb);

      seq_params->decoder_model_info_present_flag = aom_rb_read_bit(rb);
      if (seq_params->decoder_model_info_present_flag)
        av1_read_decoder_model_info(cm, rb);
    } else {
      seq_params->decoder_model_info_present_flag = 0;
    }
    seq_params->display_model_info_present_flag = aom_rb_read_bit(rb);
    seq_params->operating_points_cnt_minus_1 =
        aom_rb_read_literal(rb, OP_POINTS_CNT_MINUS_1_BITS);
    // Read per-operating-point idc, level, tier and model parameters.
    for (int i = 0; i < seq_params->operating_points_cnt_minus_1 + 1; i++) {
      seq_params->operating_point_idc[i] =
          aom_rb_read_literal(rb, OP_POINTS_IDC_BITS);
      if (!read_bitstream_level(&seq_params->seq_level_idx[i], rb)) {
        cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
        return 0;
      }
      // This is the seq_level_idx[i] > 7 check in the spec. seq_level_idx 7
      // is equivalent to level 3.3.
      if (seq_params->seq_level_idx[i] >= SEQ_LEVEL_4_0)
        seq_params->tier[i] = aom_rb_read_bit(rb);
      else
        seq_params->tier[i] = 0;
      if (seq_params->decoder_model_info_present_flag) {
        cm->op_params[i].decoder_model_param_present_flag = aom_rb_read_bit(rb);
        if (cm->op_params[i].decoder_model_param_present_flag)
          av1_read_op_parameters_info(cm, rb, i);
      } else {
        cm->op_params[i].decoder_model_param_present_flag = 0;
      }
      if (cm->timing_info_present &&
          (cm->timing_info.equal_picture_interval ||
           cm->op_params[i].decoder_model_param_present_flag)) {
        cm->op_params[i].bitrate =
            max_level_bitrate(seq_params->profile, seq_params->seq_level_idx[i],
                              seq_params->tier[i]);
        // Level with seq_level_idx = 31 returns a high "dummy" bitrate to pass
        // the check
        if (cm->op_params[i].bitrate == 0)
          aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                             "AV1 does not support this combination of "
                             "profile, level, and tier.");
        // Buffer size in bits/s is bitrate in bits/s * 1 s
        cm->op_params[i].buffer_size = cm->op_params[i].bitrate;
      }
      if (cm->timing_info_present && cm->timing_info.equal_picture_interval &&
          !cm->op_params[i].decoder_model_param_present_flag) {
        // When the decoder_model_parameters are not sent for this op, set
        // the default ones that can be used with the resource availability mode
        cm->op_params[i].decoder_buffer_delay = 70000;
        cm->op_params[i].encoder_buffer_delay = 20000;
        cm->op_params[i].low_delay_mode_flag = 0;
      }

      if (seq_params->display_model_info_present_flag) {
        cm->op_params[i].display_model_param_present_flag = aom_rb_read_bit(rb);
        if (cm->op_params[i].display_model_param_present_flag) {
          cm->op_params[i].initial_display_delay =
              aom_rb_read_literal(rb, 4) + 1;
          if (cm->op_params[i].initial_display_delay > 10)
            aom_internal_error(
                &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                "AV1 does not support more than 10 decoded frames delay");
        } else {
          cm->op_params[i].initial_display_delay = 10;
        }
      } else {
        cm->op_params[i].display_model_param_present_flag = 0;
        cm->op_params[i].initial_display_delay = 10;
      }
    }
  }
  // This decoder supports all levels. Choose operating point provided by
  // external means
  int operating_point = pbi->operating_point;
  if (operating_point < 0 ||
      operating_point > seq_params->operating_points_cnt_minus_1)
    operating_point = 0;
  pbi->current_operating_point =
      seq_params->operating_point_idc[operating_point];
  if (aom_get_num_layers_from_operating_point_idc(
          pbi->current_operating_point, &cm->number_spatial_layers,
          &cm->number_temporal_layers) != AOM_CODEC_OK) {
    cm->error.error_code = AOM_CODEC_ERROR;
    return 0;
  }

  av1_read_sequence_header(cm, rb, seq_params);

  av1_read_color_config(rb, pbi->allow_lowbitdepth, seq_params, &cm->error);
  if (!(seq_params->subsampling_x == 0 && seq_params->subsampling_y == 0) &&
      !(seq_params->subsampling_x == 1 && seq_params->subsampling_y == 1) &&
      !(seq_params->subsampling_x == 1 && seq_params->subsampling_y == 0)) {
    aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
                       "Only 4:4:4, 4:2:2 and 4:2:0 are currently supported, "
                       "%d %d subsampling is not supported.\n",
                       seq_params->subsampling_x, seq_params->subsampling_y);
  }

  seq_params->film_grain_params_present = aom_rb_read_bit(rb);

  if (av1_check_trailing_bits(pbi, rb) != 0) {
    // cm->error.error_code is already set.
    return 0;
  }

  // If a sequence header has been decoded before, we check if the new
  // one is consistent with the old one.
  if (pbi->sequence_header_ready) {
    if (!are_seq_headers_consistent(&cm->seq_params, seq_params))
      pbi->sequence_header_changed = 1;
  }

  cm->seq_params = *seq_params;
  pbi->sequence_header_ready = 1;

  // Number of whole bytes consumed, rounding any partial byte up.
  return ((rb->bit_offset - saved_bit_offset + 7) >> 3);
}

// On success, returns the frame header size. On failure, calls
// aom_internal_error and does not return.
static uint32_t read_frame_header_obu(AV1Decoder *pbi,
                                      struct aom_read_bit_buffer *rb,
                                      const uint8_t *data,
                                      const uint8_t **p_data_end,
                                      int trailing_bits_present) {
  return av1_decode_frame_headers_and_setup(pbi, rb, data, p_data_end,
                                            trailing_bits_present);
}

// On success, returns the tile group header size. On failure, calls
// aom_internal_error() and returns -1.
static int32_t read_tile_group_header(AV1Decoder *pbi,
                                      struct aom_read_bit_buffer *rb,
                                      int *start_tile, int *end_tile,
                                      int tile_start_implicit) {
  AV1_COMMON *const cm = &pbi->common;
  uint32_t saved_bit_offset = rb->bit_offset;
  int tile_start_and_end_present_flag = 0;
  const int num_tiles = pbi->common.tile_rows * pbi->common.tile_cols;

  if (!pbi->common.large_scale_tile && num_tiles > 1) {
    tile_start_and_end_present_flag = aom_rb_read_bit(rb);
    // An OBU_FRAME implicitly covers all tiles, so explicit start/end is
    // disallowed there.
    if (tile_start_implicit && tile_start_and_end_present_flag) {
      aom_internal_error(
          &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
          "For OBU_FRAME type obu tile_start_and_end_present_flag must be 0");
      return -1;
    }
  }
  if (pbi->common.large_scale_tile || num_tiles == 1 ||
      !tile_start_and_end_present_flag) {
    // The tile group covers the whole frame.
    *start_tile = 0;
    *end_tile = num_tiles - 1;
  } else {
    int tile_bits = cm->log2_tile_rows + cm->log2_tile_cols;
    *start_tile = aom_rb_read_literal(rb, tile_bits);
    *end_tile = aom_rb_read_literal(rb, tile_bits);
  }
  if (*start_tile > *end_tile) {
    aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
                       "tg_end must be greater than or equal to tg_start");
    return -1;
  }

  // Number of whole bytes consumed, rounding any partial byte up.
  return ((rb->bit_offset - saved_bit_offset + 7) >> 3);
}

// On success, returns the tile group OBU size. On failure, sets
// pbi->common.error.error_code and returns 0.
static uint32_t read_one_tile_group_obu(
    AV1Decoder *pbi, struct aom_read_bit_buffer *rb, int is_first_tg,
    const uint8_t *data, const uint8_t *data_end, const uint8_t **p_data_end,
    int *is_last_tg, int tile_start_implicit) {
  AV1_COMMON *const cm = &pbi->common;
  int start_tile, end_tile;
  int32_t header_size, tg_payload_size;

  // The caller must have byte-aligned the reader at the start of the tile
  // group payload.
  assert((rb->bit_offset & 7) == 0);
  assert(rb->bit_buffer + aom_rb_bytes_read(rb) == data);

  header_size = read_tile_group_header(pbi, rb, &start_tile, &end_tile,
                                       tile_start_implicit);
  if (header_size == -1 || byte_alignment(cm, rb)) return 0;
  data += header_size;
  av1_decode_tg_tiles_and_wrapup(pbi, data, data_end, p_data_end, start_tile,
                                 end_tile, is_first_tg);

  tg_payload_size = (uint32_t)(*p_data_end - data);

  // TODO(shan): For now, assume all tile groups received in order
  *is_last_tg = end_tile == cm->tile_rows * cm->tile_cols - 1;
  return header_size + tg_payload_size;
}

// Allocates pbi->tile_list_outbuf sized to hold the full grid of decoded
// tiles signalled by the tile list OBU.
static void alloc_tile_list_buffer(AV1Decoder *pbi) {
  // The resolution of the output frame is read out from the bitstream. The
  // data are stored in the order of Y plane, U plane and V plane. As an
  // example, for image format 4:2:0, the output frame of U plane and V plane
  // is 1/4 of the output frame.
  AV1_COMMON *const cm = &pbi->common;
  int tile_width, tile_height;
  av1_get_uniform_tile_size(cm, &tile_width, &tile_height);
  const int tile_width_in_pixels = tile_width * MI_SIZE;
  const int tile_height_in_pixels = tile_height * MI_SIZE;
  const int output_frame_width =
      (pbi->output_frame_width_in_tiles_minus_1 + 1) * tile_width_in_pixels;
  const int output_frame_height =
      (pbi->output_frame_height_in_tiles_minus_1 + 1) * tile_height_in_pixels;
  // The output frame is used to store the decoded tile list. The decoded tile
  // list has to fit into 1 output frame.
  assert((pbi->tile_count_minus_1 + 1) <=
         (pbi->output_frame_width_in_tiles_minus_1 + 1) *
             (pbi->output_frame_height_in_tiles_minus_1 + 1));

  // Allocate the tile list output buffer.
  // Note: if cm->seq_params.use_highbitdepth is 1 and cm->seq_params.bit_depth
  // is 8, we could allocate less memory, namely, 8 bits/pixel.
  if (aom_alloc_frame_buffer(&pbi->tile_list_outbuf, output_frame_width,
                             output_frame_height, cm->seq_params.subsampling_x,
                             cm->seq_params.subsampling_y,
                             (cm->seq_params.use_highbitdepth &&
                              (cm->seq_params.bit_depth > AOM_BITS_8)),
                             0, cm->byte_alignment))
    aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
                       "Failed to allocate the tile list output buffer");
}

// Copies one tile's pixels from a high-bitdepth source buffer (holding 8-bit
// samples) into an 8-bit destination buffer, narrowing each 16-bit sample to
// 8 bits.
static void yv12_tile_copy(const YV12_BUFFER_CONFIG *src, int hstart1,
                           int hend1, int vstart1, int vend1,
                           YV12_BUFFER_CONFIG *dst, int hstart2, int vstart2,
                           int plane) {
  const int src_stride = (plane > 0) ? src->strides[1] : src->strides[0];
  const int dst_stride = (plane > 0) ? dst->strides[1] : dst->strides[0];
  int row, col;

  // Only valid for a high-bitdepth source and an 8-bit destination.
  assert(src->flags & YV12_FLAG_HIGHBITDEPTH);
  assert(!(dst->flags & YV12_FLAG_HIGHBITDEPTH));

  const uint16_t *src16 =
      CONVERT_TO_SHORTPTR(src->buffers[plane] + vstart1 * src_stride + hstart1);
  uint8_t *dst8 = dst->buffers[plane] + vstart2 * dst_stride + hstart2;

  for (row = vstart1; row < vend1; ++row) {
    for (col = 0; col < (hend1 - hstart1); ++col) *dst8++ = (uint8_t)(*src16++);
    // Advance both pointers to the start of the next row.
    src16 += src_stride - (hend1 - hstart1);
    dst8 += dst_stride - (hend1 - hstart1);
  }
  return;
}

// Copies the just-decoded tile (pbi->dec_tile_row/col of cm->cur_frame) into
// slot 'tile_idx' of the tile list output buffer.
static void copy_decoded_tile_to_tile_list_buffer(AV1Decoder *pbi,
                                                  int tile_idx) {
  AV1_COMMON *const cm = &pbi->common;
  int tile_width, tile_height;
  av1_get_uniform_tile_size(cm, &tile_width, &tile_height);
  const int tile_width_in_pixels = tile_width * MI_SIZE;
  const int tile_height_in_pixels = tile_height * MI_SIZE;
  const int ssy = cm->seq_params.subsampling_y;
  const int ssx = cm->seq_params.subsampling_x;
  const int num_planes = av1_num_planes(cm);

  YV12_BUFFER_CONFIG *cur_frame = &cm->cur_frame->buf;
  // Destination tile row/column within the output tile grid.
  const int tr = tile_idx / (pbi->output_frame_width_in_tiles_minus_1 + 1);
  const int tc = tile_idx % (pbi->output_frame_width_in_tiles_minus_1 + 1);
  int plane;

  // Copy decoded tile to the tile list output buffer.
  for (plane = 0; plane < num_planes; ++plane) {
    const int shift_x = plane > 0 ? ssx : 0;
    const int shift_y = plane > 0 ? ssy : 0;
    const int h = tile_height_in_pixels >> shift_y;
    const int w = tile_width_in_pixels >> shift_x;

    // src offset
    int vstart1 = pbi->dec_tile_row * h;
    int vend1 = vstart1 + h;
    int hstart1 = pbi->dec_tile_col * w;
    int hend1 = hstart1 + w;
    // dst offset
    int vstart2 = tr * h;
    int hstart2 = tc * w;

    if (cm->seq_params.use_highbitdepth &&
        cm->seq_params.bit_depth == AOM_BITS_8) {
      // 8-bit content stored in a high-bitdepth buffer needs narrowing.
      yv12_tile_copy(cur_frame, hstart1, hend1, vstart1, vend1,
                     &pbi->tile_list_outbuf, hstart2, vstart2, plane);
    } else {
      switch (plane) {
        case 0:
          aom_yv12_partial_copy_y(cur_frame, hstart1, hend1, vstart1, vend1,
                                  &pbi->tile_list_outbuf, hstart2, vstart2);
          break;
        case 1:
          aom_yv12_partial_copy_u(cur_frame, hstart1, hend1, vstart1, vend1,
                                  &pbi->tile_list_outbuf, hstart2, vstart2);
          break;
        case 2:
          aom_yv12_partial_copy_v(cur_frame, hstart1, hend1, vstart1, vend1,
                                  &pbi->tile_list_outbuf, hstart2, vstart2);
          break;
        default: assert(0);
      }
    }
  }
}

// Only called while large_scale_tile = 1.
//
// On success, returns the tile list OBU size. On failure, sets
// pbi->common.error.error_code and returns 0.
static uint32_t read_and_decode_one_tile_list(AV1Decoder *pbi,
                                              struct aom_read_bit_buffer *rb,
                                              const uint8_t *data,
                                              const uint8_t *data_end,
                                              const uint8_t **p_data_end,
                                              int *frame_decoding_finished) {
  AV1_COMMON *const cm = &pbi->common;
  uint32_t tile_list_payload_size = 0;
  const int num_tiles = cm->tile_cols * cm->tile_rows;
  const int start_tile = 0;
  const int end_tile = num_tiles - 1;
  int i = 0;

  // Process the tile list info.
  pbi->output_frame_width_in_tiles_minus_1 = aom_rb_read_literal(rb, 8);
  pbi->output_frame_height_in_tiles_minus_1 = aom_rb_read_literal(rb, 8);
  pbi->tile_count_minus_1 = aom_rb_read_literal(rb, 16);
  if (pbi->tile_count_minus_1 > MAX_TILES - 1) {
    cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
    return 0;
  }

  // Allocate output frame buffer for the tile list.
  alloc_tile_list_buffer(pbi);

  // 8 + 8 + 16 bits of tile list info read above.
  uint32_t tile_list_info_bytes = 4;
  tile_list_payload_size += tile_list_info_bytes;
  data += tile_list_info_bytes;

  int tile_idx = 0;
  for (i = 0; i <= pbi->tile_count_minus_1; i++) {
    // Process 1 tile.
    // Reset the bit reader.
    rb->bit_offset = 0;
    rb->bit_buffer = data;

    // Read out the tile info: 8-bit ref idx, 8-bit row, 8-bit col, 16-bit
    // coded data size.
    uint32_t tile_info_bytes = 5;
    // Set reference for each tile.
    int ref_idx = aom_rb_read_literal(rb, 8);
    if (ref_idx >= MAX_EXTERNAL_REFERENCES) {
      cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
      return 0;
    }
    av1_set_reference_dec(cm, 0, 1, &pbi->ext_refs.refs[ref_idx]);

    pbi->dec_tile_row = aom_rb_read_literal(rb, 8);
    pbi->dec_tile_col = aom_rb_read_literal(rb, 8);
    if (pbi->dec_tile_row < 0 || pbi->dec_tile_col < 0 ||
        pbi->dec_tile_row >= cm->tile_rows ||
        pbi->dec_tile_col >= cm->tile_cols) {
      cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
      return 0;
    }

    pbi->coded_tile_data_size = aom_rb_read_literal(rb, 16) + 1;
    data += tile_info_bytes;
    if ((size_t)(data_end - data) < pbi->coded_tile_data_size) {
      cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
      return 0;
    }

    av1_decode_tg_tiles_and_wrapup(pbi, data, data + pbi->coded_tile_data_size,
                                   p_data_end, start_tile, end_tile, 0);
    uint32_t tile_payload_size = (uint32_t)(*p_data_end - data);

    tile_list_payload_size += tile_info_bytes + tile_payload_size;

    // Update data ptr for next tile decoding.
    data = *p_data_end;
    assert(data <= data_end);

    // Copy the decoded tile to the tile list output buffer.
    copy_decoded_tile_to_tile_list_buffer(pbi, tile_idx);
    tile_idx++;
  }

  *frame_decoding_finished = 1;
  return tile_list_payload_size;
}

// Consumes an ITU-T T.35 metadata payload byte by byte without interpreting
// it.
static void read_metadata_itut_t35(const uint8_t *data, size_t sz) {
  struct aom_read_bit_buffer rb = { data, data + sz, 0, NULL, NULL };
  for (size_t i = 0; i < sz; i++) {
    aom_rb_read_literal(&rb, 8);
  }
}

// Consumes (and discards) HDR content light level metadata.
static void read_metadata_hdr_cll(const uint8_t *data, size_t sz) {
  struct aom_read_bit_buffer rb = { data, data + sz, 0, NULL, NULL };
  aom_rb_read_literal(&rb, 16);  // max_cll
  aom_rb_read_literal(&rb, 16);  // max_fall
}

// Consumes (and discards) HDR mastering display color volume metadata.
static void read_metadata_hdr_mdcv(const uint8_t *data, size_t sz) {
  struct aom_read_bit_buffer rb = { data, data + sz, 0, NULL, NULL };
  for (int i = 0; i < 3; i++) {
    aom_rb_read_literal(&rb, 16);  // primary_i_chromaticity_x
    aom_rb_read_literal(&rb, 16);  // primary_i_chromaticity_y
  }

  aom_rb_read_literal(&rb, 16);  // white_point_chromaticity_x
  aom_rb_read_literal(&rb, 16);  // white_point_chromaticity_y

  aom_rb_read_unsigned_literal(&rb, 32);  // luminance_max
  aom_rb_read_unsigned_literal(&rb, 32);  // luminance_min
}

// Parses (and discards) a scalability structure, used when
// scalability_mode_idc == SCALABILITY_SS.
static void scalability_structure(struct aom_read_bit_buffer *rb) {
  int spatial_layers_cnt = aom_rb_read_literal(rb, 2);
  int spatial_layer_dimensions_present_flag = aom_rb_read_bit(rb);
  int spatial_layer_description_present_flag = aom_rb_read_bit(rb);
  int temporal_group_description_present_flag = aom_rb_read_bit(rb);
  aom_rb_read_literal(rb, 3);  // reserved

  if (spatial_layer_dimensions_present_flag) {
    int i;
    // Per-layer width and height (16 bits each).
    for (i = 0; i < spatial_layers_cnt + 1; i++) {
      aom_rb_read_literal(rb, 16);
      aom_rb_read_literal(rb, 16);
    }
  }
  if (spatial_layer_description_present_flag) {
    int i;
    // Per-layer 8-bit description field.
    for (i = 0; i < spatial_layers_cnt + 1; i++) {
      aom_rb_read_literal(rb, 8);
    }
  }
  if (temporal_group_description_present_flag) {
    int i, j, temporal_group_size;
    temporal_group_size = aom_rb_read_literal(rb, 8);
    for (i = 0; i < temporal_group_size; i++) {
      aom_rb_read_literal(rb, 3);
      aom_rb_read_bit(rb);
      aom_rb_read_bit(rb);
      int temporal_group_ref_cnt = aom_rb_read_literal(rb, 3);
      for (j = 0; j < temporal_group_ref_cnt; j++) {
        aom_rb_read_literal(rb, 8);
      }
    }
  }
}

// Parses (and discards) scalability metadata.
static void read_metadata_scalability(const uint8_t *data, size_t sz) {
  struct aom_read_bit_buffer rb = { data, data + sz, 0, NULL, NULL };
  int scalability_mode_idc = aom_rb_read_literal(&rb, 8);
  if (scalability_mode_idc == SCALABILITY_SS) {
    scalability_structure(&rb);
  }
}

// Parses (and discards) timecode metadata.
static void read_metadata_timecode(const uint8_t *data, size_t sz) {
  struct aom_read_bit_buffer rb = { data, data + sz, 0, NULL, NULL };
  aom_rb_read_literal(&rb, 5);                     // counting_type f(5)
  int full_timestamp_flag = aom_rb_read_bit(&rb);  // full_timestamp_flag f(1)
  aom_rb_read_bit(&rb);                            // discontinuity_flag (f1)
  aom_rb_read_bit(&rb);                            // cnt_dropped_flag f(1)
  aom_rb_read_literal(&rb, 9);                     // n_frames f(9)
  if (full_timestamp_flag) {
    aom_rb_read_literal(&rb, 6);  // seconds_value f(6)
    aom_rb_read_literal(&rb, 6);  // minutes_value f(6)
    aom_rb_read_literal(&rb, 5);  // hours_value f(5)
  } else {
    int seconds_flag = aom_rb_read_bit(&rb);  // seconds_flag f(1)
    if (seconds_flag) {
      aom_rb_read_literal(&rb, 6);              // seconds_value f(6)
      int minutes_flag = aom_rb_read_bit(&rb);  // minutes_flag f(1)
      if (minutes_flag) {
        aom_rb_read_literal(&rb, 6);            // minutes_value f(6)
        int hours_flag = aom_rb_read_bit(&rb);  // hours_flag f(1)
        if (hours_flag) {
          aom_rb_read_literal(&rb, 5);  // hours_value f(5)
        }
      }
    }
  }
  // time_offset_length f(5)
  int time_offset_length = aom_rb_read_literal(&rb, 5);
  if (time_offset_length) {
    aom_rb_read_literal(&rb, time_offset_length);  // f(time_offset_length)
  }
}

// Not fully implemented. Always succeeds and returns sz.
static size_t read_metadata(const uint8_t *data, size_t sz) {
  size_t type_length;
  uint64_t type_value;
  OBU_METADATA_TYPE metadata_type;
  // The metadata payload starts with a leb128-coded metadata_type.
  if (aom_uleb_decode(data, sz, &type_value, &type_length) < 0) {
    return sz;
  }
  metadata_type = (OBU_METADATA_TYPE)type_value;
  if (metadata_type == OBU_METADATA_TYPE_ITUT_T35) {
    read_metadata_itut_t35(data + type_length, sz - type_length);
  } else if (metadata_type == OBU_METADATA_TYPE_HDR_CLL) {
    read_metadata_hdr_cll(data + type_length, sz - type_length);
  } else if (metadata_type == OBU_METADATA_TYPE_HDR_MDCV) {
    read_metadata_hdr_mdcv(data + type_length, sz - type_length);
  } else if (metadata_type == OBU_METADATA_TYPE_SCALABILITY) {
    read_metadata_scalability(data + type_length, sz - type_length);
  } else if (metadata_type == OBU_METADATA_TYPE_TIMECODE) {
    read_metadata_timecode(data + type_length, sz - type_length);
  }

  return sz;
}

// On success, returns a boolean that indicates whether the decoding of the
// current frame is finished. On failure, sets cm->error.error_code and
// returns -1.
int aom_decode_frame_from_obus(struct AV1Decoder *pbi, const uint8_t *data,
                               const uint8_t *data_end,
                               const uint8_t **p_data_end) {
  AV1_COMMON *const cm = &pbi->common;
  int frame_decoding_finished = 0;
  int is_first_tg_obu_received = 1;
  uint32_t frame_header_size = 0;
  ObuHeader obu_header;
  memset(&obu_header, 0, sizeof(obu_header));
  pbi->seen_frame_header = 0;

  if (data_end < data) {
    cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
    return -1;
  }

  // Reset pbi->camera_frame_header_ready to 0 if cm->large_scale_tile = 0.
  if (!cm->large_scale_tile) pbi->camera_frame_header_ready = 0;

  // decode frame as a series of OBUs
  while (!frame_decoding_finished && cm->error.error_code == AOM_CODEC_OK) {
    struct aom_read_bit_buffer rb;
    size_t payload_size = 0;
    size_t decoded_payload_size = 0;
    size_t obu_payload_offset = 0;
    size_t bytes_read = 0;
    const size_t bytes_available = data_end - data;

    // Running out of data before any frame header is a clean end of input,
    // not an error.
    if (bytes_available == 0 && !pbi->seen_frame_header) {
      *p_data_end = data;
      cm->error.error_code = AOM_CODEC_OK;
      break;
    }

    aom_codec_err_t status =
        aom_read_obu_header_and_size(data, bytes_available, cm->is_annexb,
                                     &obu_header, &payload_size, &bytes_read);

    if (status != AOM_CODEC_OK) {
      cm->error.error_code = status;
      return -1;
    }

    // Record obu size header information.
    pbi->obu_size_hdr.data = data + obu_header.size;
    pbi->obu_size_hdr.size = bytes_read - obu_header.size;

    // Note: aom_read_obu_header_and_size() takes care of checking that this
    // doesn't cause 'data' to advance past 'data_end'.
    data += bytes_read;

    if ((size_t)(data_end - data) < payload_size) {
      cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
      return -1;
    }

    cm->temporal_layer_id = obu_header.temporal_layer_id;
    cm->spatial_layer_id = obu_header.spatial_layer_id;

    if (obu_header.type != OBU_TEMPORAL_DELIMITER &&
        obu_header.type != OBU_SEQUENCE_HEADER &&
        obu_header.type != OBU_PADDING) {
      // don't decode obu if it's not in current operating mode
      if (!is_obu_in_current_operating_point(pbi, obu_header)) {
        data += payload_size;
        continue;
      }
    }

    av1_init_read_bit_buffer(pbi, &rb, data, data + payload_size);

    switch (obu_header.type) {
      case OBU_TEMPORAL_DELIMITER:
        decoded_payload_size = read_temporal_delimiter_obu();
        pbi->seen_frame_header = 0;
        break;
      case OBU_SEQUENCE_HEADER:
        decoded_payload_size = read_sequence_header_obu(pbi, &rb);
        if (cm->error.error_code != AOM_CODEC_OK) return -1;
        break;
      case OBU_FRAME_HEADER:
      case OBU_REDUNDANT_FRAME_HEADER:
      case OBU_FRAME:
        // Only decode first frame header received
        if (!pbi->seen_frame_header ||
            (cm->large_scale_tile && !pbi->camera_frame_header_ready)) {
          frame_header_size = read_frame_header_obu(
              pbi, &rb, data, p_data_end, obu_header.type != OBU_FRAME);
          pbi->seen_frame_header = 1;
          if (!pbi->ext_tile_debug && cm->large_scale_tile)
            pbi->camera_frame_header_ready = 1;
        } else {
          // TODO(wtc): Verify that the frame_header_obu is identical to the
          // original frame_header_obu. For now just skip frame_header_size
          // bytes in the bit buffer.
          if (frame_header_size > payload_size) {
            cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
            return -1;
          }
          assert(rb.bit_offset == 0);
          rb.bit_offset = 8 * frame_header_size;
        }

        decoded_payload_size = frame_header_size;
        pbi->frame_header_size = frame_header_size;

        if (cm->show_existing_frame) {
          // show_existing_frame is not allowed in an OBU_FRAME (which must
          // carry tile data).
          if (obu_header.type == OBU_FRAME) {
            cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
            return -1;
          }
          frame_decoding_finished = 1;
          pbi->seen_frame_header = 0;
          break;
        }

        // In large scale tile coding, decode the common camera frame header
        // before any tile list OBU.
        if (!pbi->ext_tile_debug && pbi->camera_frame_header_ready) {
          frame_decoding_finished = 1;
          // Skip the rest of the frame data.
          decoded_payload_size = payload_size;
          // Update data_end.
          *p_data_end = data_end;
          break;
        }

        if (obu_header.type != OBU_FRAME) break;
        obu_payload_offset = frame_header_size;
        // Byte align the reader before reading the tile group.
        // byte_alignment() has set cm->error.error_code if it returns -1.
        if (byte_alignment(cm, &rb)) return -1;
        AOM_FALLTHROUGH_INTENDED;  // fall through to read tile group.
      case OBU_TILE_GROUP:
        if (!pbi->seen_frame_header) {
          cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
          return -1;
        }
        if (obu_payload_offset > payload_size) {
          cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
          return -1;
        }
        decoded_payload_size += read_one_tile_group_obu(
            pbi, &rb, is_first_tg_obu_received, data + obu_payload_offset,
            data + payload_size, p_data_end, &frame_decoding_finished,
            obu_header.type == OBU_FRAME);
        if (cm->error.error_code != AOM_CODEC_OK) return -1;
        is_first_tg_obu_received = 0;
        if (frame_decoding_finished) pbi->seen_frame_header = 0;
        break;
      case OBU_METADATA:
        decoded_payload_size = read_metadata(data, payload_size);
        break;
      case OBU_TILE_LIST:
        if (CONFIG_NORMAL_TILE_MODE) {
          cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
          return -1;
        }

        // This OBU type is purely for the large scale tile coding mode.
        // The common camera frame header has to be already decoded.
        if (!pbi->camera_frame_header_ready) {
          cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
          return -1;
        }

        cm->large_scale_tile = 1;
        av1_set_single_tile_decoding_mode(cm);
        decoded_payload_size =
            read_and_decode_one_tile_list(pbi, &rb, data, data + payload_size,
                                          p_data_end, &frame_decoding_finished);
        if (cm->error.error_code != AOM_CODEC_OK) return -1;
        break;
      case OBU_PADDING:
      default:
        // Skip unrecognized OBUs
        decoded_payload_size = payload_size;
        break;
    }

    // Check that the signalled OBU size matches the actual amount of data read
    if (decoded_payload_size > payload_size) {
      cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
      return -1;
    }

    // If there are extra padding bytes, they should all be zero
    while (decoded_payload_size < payload_size) {
      uint8_t padding_byte = data[decoded_payload_size++];
      if (padding_byte != 0) {
        cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
        return -1;
      }
    }

    data += payload_size;
  }

  if (cm->error.error_code != AOM_CODEC_OK) return -1;
  return frame_decoding_finished;
}