/*
 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */


#include "vpx_config.h"
#include "./vpx_scale_rtcd.h"
#include "vp8/common/onyxc_int.h"
#include "vp8/common/blockd.h"
#include "onyx_int.h"
#include "vp8/common/systemdependent.h"
#include "quantize.h"
#include "vp8/common/alloccommon.h"
#include "mcomp.h"
#include "firstpass.h"
#include "psnr.h"
#include "vpx_scale/vpx_scale.h"
#include "vp8/common/extend.h"
#include "ratectrl.h"
#include "vp8/common/quant_common.h"
#include "segmentation.h"
#if CONFIG_POSTPROC
#include "vp8/common/postproc.h"
#endif
#include "vpx_mem/vpx_mem.h"
#include "vp8/common/swapyv12buffer.h"
#include "vp8/common/threading.h"
#include "vpx_ports/vpx_timer.h"
#if ARCH_ARM
#include "vpx_ports/arm.h"
#endif
#if CONFIG_MULTI_RES_ENCODING
#include "mr_dissim.h"
#endif
#include "encodeframe.h"

#include <math.h>
#include <stdio.h>
#include <limits.h>

#if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
extern int vp8_update_coef_context(VP8_COMP *cpi);
extern void vp8_update_coef_probs(VP8_COMP *cpi);
#endif

extern void vp8cx_pick_filter_level_fast(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi);
extern void vp8cx_set_alt_lf_level(VP8_COMP *cpi, int filt_val);
extern void vp8cx_pick_filter_level(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi);

extern void vp8_deblock_frame(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *post, int filt_lvl, int low_var_thresh, int flag);
extern void print_parms(VP8_CONFIG *ocf, char *filenam);
extern unsigned int vp8_get_processor_freq();
extern void print_tree_update_probs();
extern int vp8cx_create_encoder_threads(VP8_COMP *cpi);
extern void vp8cx_remove_encoder_threads(VP8_COMP *cpi);

int vp8_estimate_entropy_savings(VP8_COMP *cpi);

int vp8_calc_ss_err(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *dest);

extern void vp8_temporal_filter_prepare_c(VP8_COMP *cpi, int distance);

static void set_default_lf_deltas(VP8_COMP *cpi);

extern const int vp8_gf_interval_table[101];

#if CONFIG_INTERNAL_STATS
#include "math.h"

extern double vp8_calc_ssim
(
    YV12_BUFFER_CONFIG *source,
    YV12_BUFFER_CONFIG *dest,
    int lumamask,
    double *weight
);


extern double vp8_calc_ssimg
(
    YV12_BUFFER_CONFIG *source,
    YV12_BUFFER_CONFIG *dest,
    double *ssim_y,
    double *ssim_u,
    double *ssim_v
);


#endif


#ifdef OUTPUT_YUV_SRC
FILE *yuv_file;
#endif

#if 0
FILE *framepsnr;
FILE *kf_list;
FILE *keyfile;
#endif

#if 0
extern int skip_true_count;
extern int skip_false_count;
#endif


#ifdef VP8_ENTROPY_STATS
extern int intra_mode_stats[10][10][10];
#endif

#ifdef SPEEDSTATS
unsigned int frames_at_speed[16] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
unsigned int tot_pm = 0;
unsigned int cnt_pm = 0;
unsigned int tot_ef = 0;
unsigned int cnt_ef = 0;
#endif

#ifdef MODE_STATS
extern unsigned __int64 Sectionbits[50];
extern int y_modes[5];
extern int uv_modes[4];
extern int b_modes[10];

extern int inter_y_modes[10];
extern int inter_uv_modes[4];
extern unsigned int inter_b_modes[15];
#endif

extern const int vp8_bits_per_mb[2][QINDEX_RANGE];

extern const int qrounding_factors[129];
extern const int qzbin_factors[129];
extern void vp8cx_init_quantizer(VP8_COMP *cpi);
extern const int vp8cx_base_skip_false_prob[128];

/* Tables relating active max Q to active min Q */
static const unsigned char kf_low_motion_minq[QINDEX_RANGE] =
{
    0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
    0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
    0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
    0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,
    3,3,3,3,3,3,4,4,4,5,5,5,5,5,6,6,
    6,6,7,7,8,8,8,8,9,9,10,10,10,10,11,11,
    11,11,12,12,13,13,13,13,14,14,15,15,15,15,16,16,
    16,16,17,17,18,18,18,18,19,20,20,21,21,22,23,23
};
static const unsigned char kf_high_motion_minq[QINDEX_RANGE] =
{
    0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
    0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
    1,1,1,1,1,1,1,1,2,2,2,2,3,3,3,3,
    3,3,3,3,4,4,4,4,5,5,5,5,5,5,6,6,
    6,6,7,7,8,8,8,8,9,9,10,10,10,10,11,11,
    11,11,12,12,13,13,13,13,14,14,15,15,15,15,16,16,
    16,16,17,17,18,18,18,18,19,19,20,20,20,20,21,21,
    21,21,22,22,23,23,24,25,25,26,26,27,28,28,29,30
};
static const unsigned char gf_low_motion_minq[QINDEX_RANGE] =
{
    0,0,0,0,1,1,1,1,1,1,1,1,2,2,2,2,
    3,3,3,3,4,4,4,4,5,5,5,5,6,6,6,6,
    7,7,7,7,8,8,8,8,9,9,9,9,10,10,10,10,
    11,11,12,12,13,13,14,14,15,15,16,16,17,17,18,18,
    19,19,20,20,21,21,22,22,23,23,24,24,25,25,26,26,
    27,27,28,28,29,29,30,30,31,31,32,32,33,33,34,34,
    35,35,36,36,37,37,38,38,39,39,40,40,41,41,42,42,
    43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58
};
static const unsigned char gf_mid_motion_minq[QINDEX_RANGE] =
{
    0,0,0,0,1,1,1,1,1,1,2,2,3,3,3,4,
    4,4,5,5,5,6,6,6,7,7,7,8,8,8,9,9,
    9,10,10,10,10,11,11,11,12,12,12,12,13,13,13,14,
    14,14,15,15,16,16,17,17,18,18,19,19,20,20,21,21,
    22,22,23,23,24,24,25,25,26,26,27,27,28,28,29,29,
    30,30,31,31,32,32,33,33,34,34,35,35,36,36,37,37,
    38,39,39,40,40,41,41,42,42,43,43,44,45,46,47,48,
    49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64
};
static const unsigned char gf_high_motion_minq[QINDEX_RANGE] =
{
    0,0,0,0,1,1,1,1,1,2,2,2,3,3,3,4,
    4,4,5,5,5,6,6,6,7,7,7,8,8,8,9,9,
    9,10,10,10,11,11,12,12,13,13,14,14,15,15,16,16,
    17,17,18,18,19,19,20,20,21,21,22,22,23,23,24,24,
    25,25,26,26,27,27,28,28,29,29,30,30,31,31,32,32,
    33,33,34,34,35,35,36,36,37,37,38,38,39,39,40,40,
    41,41,42,42,43,44,45,46,47,48,49,50,51,52,53,54,
    55,56,57,58,59,60,62,64,66,68,70,72,74,76,78,80
};
static const unsigned char inter_minq[QINDEX_RANGE] =
{
    0,0,1,1,2,3,3,4,4,5,6,6,7,8,8,9,
    9,10,11,11,12,13,13,14,15,15,16,17,17,18,19,20,
    20,21,22,22,23,24,24,25,26,27,27,28,29,30,30,31,
    32,33,33,34,35,36,36,37,38,39,39,40,41,42,42,43,
    44,45,46,46,47,48,49,50,50,51,52,53,54,55,55,56,
    57,58,59,60,60,61,62,63,64,65,66,67,67,68,69,70,
    71,72,73,74,75,75,76,77,78,79,80,81,82,83,84,85,
    86,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100
};

#ifdef PACKET_TESTING
extern FILE *vpxlogc;
#endif

static void save_layer_context(VP8_COMP *cpi)
{
    LAYER_CONTEXT *lc = &cpi->layer_context[cpi->current_layer];

    /* Save layer dependent coding state */
    lc->target_bandwidth = cpi->target_bandwidth;
    lc->starting_buffer_level = cpi->oxcf.starting_buffer_level;
    lc->optimal_buffer_level = cpi->oxcf.optimal_buffer_level;
    lc->maximum_buffer_size = cpi->oxcf.maximum_buffer_size;
    lc->starting_buffer_level_in_ms = cpi->oxcf.starting_buffer_level_in_ms;
    lc->optimal_buffer_level_in_ms = cpi->oxcf.optimal_buffer_level_in_ms;
    lc->maximum_buffer_size_in_ms = cpi->oxcf.maximum_buffer_size_in_ms;
    lc->buffer_level = cpi->buffer_level;
    lc->bits_off_target = cpi->bits_off_target;
    lc->total_actual_bits = cpi->total_actual_bits;
    lc->worst_quality = cpi->worst_quality;
    lc->active_worst_quality = cpi->active_worst_quality;
    lc->best_quality = cpi->best_quality;
    lc->active_best_quality = cpi->active_best_quality;
    lc->ni_av_qi = cpi->ni_av_qi;
    lc->ni_tot_qi = cpi->ni_tot_qi;
    lc->ni_frames = cpi->ni_frames;
    lc->avg_frame_qindex = cpi->avg_frame_qindex;
    lc->rate_correction_factor = cpi->rate_correction_factor;
    lc->key_frame_rate_correction_factor = cpi->key_frame_rate_correction_factor;
    lc->gf_rate_correction_factor = cpi->gf_rate_correction_factor;
    lc->zbin_over_quant = cpi->mb.zbin_over_quant;
    lc->inter_frame_target = cpi->inter_frame_target;
    lc->total_byte_count = cpi->total_byte_count;
    lc->filter_level = cpi->common.filter_level;

    lc->last_frame_percent_intra = cpi->last_frame_percent_intra;

    memcpy (lc->count_mb_ref_frame_usage,
            cpi->mb.count_mb_ref_frame_usage,
            sizeof(cpi->mb.count_mb_ref_frame_usage));
}

static void restore_layer_context(VP8_COMP *cpi, const int layer)
{
    LAYER_CONTEXT *lc = &cpi->layer_context[layer];

    /* Restore layer dependent coding state */
    cpi->current_layer = layer;
    cpi->target_bandwidth = lc->target_bandwidth;
    cpi->oxcf.target_bandwidth = lc->target_bandwidth;
    cpi->oxcf.starting_buffer_level = lc->starting_buffer_level;
    cpi->oxcf.optimal_buffer_level = lc->optimal_buffer_level;
    cpi->oxcf.maximum_buffer_size = lc->maximum_buffer_size;
    cpi->oxcf.starting_buffer_level_in_ms = lc->starting_buffer_level_in_ms;
    cpi->oxcf.optimal_buffer_level_in_ms = lc->optimal_buffer_level_in_ms;
    cpi->oxcf.maximum_buffer_size_in_ms = lc->maximum_buffer_size_in_ms;
    cpi->buffer_level = lc->buffer_level;
    cpi->bits_off_target = lc->bits_off_target;
    cpi->total_actual_bits = lc->total_actual_bits;
    cpi->active_worst_quality = lc->active_worst_quality;
    cpi->active_best_quality = lc->active_best_quality;
    cpi->ni_av_qi = lc->ni_av_qi;
    cpi->ni_tot_qi = lc->ni_tot_qi;
    cpi->ni_frames = lc->ni_frames;
    cpi->avg_frame_qindex = lc->avg_frame_qindex;
    cpi->rate_correction_factor = lc->rate_correction_factor;
    cpi->key_frame_rate_correction_factor = lc->key_frame_rate_correction_factor;
    cpi->gf_rate_correction_factor = lc->gf_rate_correction_factor;
    cpi->mb.zbin_over_quant = lc->zbin_over_quant;
    cpi->inter_frame_target = lc->inter_frame_target;
    cpi->total_byte_count = lc->total_byte_count;
    cpi->common.filter_level = lc->filter_level;

    cpi->last_frame_percent_intra = lc->last_frame_percent_intra;

    memcpy (cpi->mb.count_mb_ref_frame_usage,
            lc->count_mb_ref_frame_usage,
            sizeof(cpi->mb.count_mb_ref_frame_usage));
}

static int rescale(int val, int num, int denom)
{
    int64_t llnum = num;
    int64_t llden = denom;
    int64_t llval = val;

    return (int)(llval * llnum / llden);
}
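
/* Illustrative note (example numbers only, not from the original source):
 * rescale(val, num, denom) computes val * num / denom in 64-bit arithmetic
 * to avoid intermediate overflow. It is used below to convert buffer levels
 * expressed in milliseconds into bits, e.g. a 1000 ms level at a 256000
 * bits/s target maps to rescale(1000, 256000, 1000) = 256000 bits.
 */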

static void init_temporal_layer_context(VP8_COMP *cpi,
                                        VP8_CONFIG *oxcf,
                                        const int layer,
                                        double prev_layer_framerate)
{
    LAYER_CONTEXT *lc = &cpi->layer_context[layer];

    lc->framerate = cpi->output_framerate / cpi->oxcf.rate_decimator[layer];
    lc->target_bandwidth = cpi->oxcf.target_bitrate[layer] * 1000;

    lc->starting_buffer_level_in_ms = oxcf->starting_buffer_level;
    lc->optimal_buffer_level_in_ms = oxcf->optimal_buffer_level;
    lc->maximum_buffer_size_in_ms = oxcf->maximum_buffer_size;

    lc->starting_buffer_level =
        rescale((int)(oxcf->starting_buffer_level),
                lc->target_bandwidth, 1000);

    if (oxcf->optimal_buffer_level == 0)
        lc->optimal_buffer_level = lc->target_bandwidth / 8;
    else
        lc->optimal_buffer_level =
            rescale((int)(oxcf->optimal_buffer_level),
                    lc->target_bandwidth, 1000);

    if (oxcf->maximum_buffer_size == 0)
        lc->maximum_buffer_size = lc->target_bandwidth / 8;
    else
        lc->maximum_buffer_size =
            rescale((int)(oxcf->maximum_buffer_size),
                    lc->target_bandwidth, 1000);

    /* Work out the average size of a frame within this layer */
    if (layer > 0)
        lc->avg_frame_size_for_layer =
            (int)((cpi->oxcf.target_bitrate[layer] -
                   cpi->oxcf.target_bitrate[layer-1]) * 1000 /
                  (lc->framerate - prev_layer_framerate));

    lc->active_worst_quality = cpi->oxcf.worst_allowed_q;
    lc->active_best_quality = cpi->oxcf.best_allowed_q;
    lc->avg_frame_qindex = cpi->oxcf.worst_allowed_q;

    lc->buffer_level = lc->starting_buffer_level;
    lc->bits_off_target = lc->starting_buffer_level;

    lc->total_actual_bits = 0;
    lc->ni_av_qi = 0;
    lc->ni_tot_qi = 0;
    lc->ni_frames = 0;
    lc->rate_correction_factor = 1.0;
    lc->key_frame_rate_correction_factor = 1.0;
    lc->gf_rate_correction_factor = 1.0;
    lc->inter_frame_target = 0;
}

// Upon a run-time change in temporal layers, reset the layer context parameters
// for any "new" layers. For "existing" layers, let them inherit the parameters
// from the previous layer state (at the same layer #). In future we may want
// to better map the previous layer state(s) to the "new" ones.
static void reset_temporal_layer_change(VP8_COMP *cpi,
                                        VP8_CONFIG *oxcf,
                                        const int prev_num_layers)
{
    int i;
    double prev_layer_framerate = 0;
    const int curr_num_layers = cpi->oxcf.number_of_layers;
    // If the previous state was 1 layer, get current layer context from cpi.
    // We need this to set the layer context for the new layers below.
    if (prev_num_layers == 1)
    {
        cpi->current_layer = 0;
        save_layer_context(cpi);
    }
    for (i = 0; i < curr_num_layers; i++)
    {
        LAYER_CONTEXT *lc = &cpi->layer_context[i];
        if (i >= prev_num_layers)
        {
            init_temporal_layer_context(cpi, oxcf, i, prev_layer_framerate);
        }
        // The initial buffer levels are set based on their starting levels.
        // We could set the buffer levels based on the previous state (normalized
        // properly by the layer bandwidths) but we would need to keep track of
        // the previous set of layer bandwidths (i.e., target_bitrate[i])
        // before the layer change. For now, reset to the starting levels.
        lc->buffer_level = cpi->oxcf.starting_buffer_level_in_ms *
                           cpi->oxcf.target_bitrate[i];
        lc->bits_off_target = lc->buffer_level;
        // TODO(marpan): Should we set the rate_correction_factor and
        // active_worst/best_quality to values derived from the previous layer
        // state (to smooth-out quality dips/rate fluctuation at transition)?

        // We need to treat the 1 layer case separately: oxcf.target_bitrate[i]
        // is not set for 1 layer, and the restore_layer_context/save_context()
        // are not called in the encoding loop, so we need to call it here to
        // pass the layer context state to |cpi|.
        if (curr_num_layers == 1)
        {
            lc->target_bandwidth = cpi->oxcf.target_bandwidth;
            lc->buffer_level = cpi->oxcf.starting_buffer_level_in_ms *
                               lc->target_bandwidth / 1000;
            lc->bits_off_target = lc->buffer_level;
            restore_layer_context(cpi, 0);
        }
        prev_layer_framerate = cpi->output_framerate /
                               cpi->oxcf.rate_decimator[i];
    }
}

static void setup_features(VP8_COMP *cpi)
{
    // If segmentation enabled set the update flags
    if ( cpi->mb.e_mbd.segmentation_enabled )
    {
        cpi->mb.e_mbd.update_mb_segmentation_map = 1;
        cpi->mb.e_mbd.update_mb_segmentation_data = 1;
    }
    else
    {
        cpi->mb.e_mbd.update_mb_segmentation_map = 0;
        cpi->mb.e_mbd.update_mb_segmentation_data = 0;
    }

    cpi->mb.e_mbd.mode_ref_lf_delta_enabled = 0;
    cpi->mb.e_mbd.mode_ref_lf_delta_update = 0;
    vpx_memset(cpi->mb.e_mbd.ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
    vpx_memset(cpi->mb.e_mbd.mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
    vpx_memset(cpi->mb.e_mbd.last_ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
    vpx_memset(cpi->mb.e_mbd.last_mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));

    set_default_lf_deltas(cpi);

}


static void dealloc_raw_frame_buffers(VP8_COMP *cpi);


static void dealloc_compressor_data(VP8_COMP *cpi)
{
    vpx_free(cpi->tplist);
    cpi->tplist = NULL;

    /* Delete last frame MV storage buffers */
    vpx_free(cpi->lfmv);
    cpi->lfmv = 0;

    vpx_free(cpi->lf_ref_frame_sign_bias);
    cpi->lf_ref_frame_sign_bias = 0;

    vpx_free(cpi->lf_ref_frame);
    cpi->lf_ref_frame = 0;

    /* Delete segmentation map */
    vpx_free(cpi->segmentation_map);
    cpi->segmentation_map = 0;

    vpx_free(cpi->active_map);
    cpi->active_map = 0;

    vp8_de_alloc_frame_buffers(&cpi->common);

    vp8_yv12_de_alloc_frame_buffer(&cpi->pick_lf_lvl_frame);
    vp8_yv12_de_alloc_frame_buffer(&cpi->scaled_source);
    dealloc_raw_frame_buffers(cpi);

    vpx_free(cpi->tok);
    cpi->tok = 0;

    /* Structure used to monitor GF usage */
    vpx_free(cpi->gf_active_flags);
    cpi->gf_active_flags = 0;

    /* Activity mask based per mb zbin adjustments */
    vpx_free(cpi->mb_activity_map);
    cpi->mb_activity_map = 0;

    vpx_free(cpi->mb.pip);
    cpi->mb.pip = 0;

#if CONFIG_MULTITHREAD
    vpx_free(cpi->mt_current_mb_col);
    cpi->mt_current_mb_col = NULL;
#endif
}

static void enable_segmentation(VP8_COMP *cpi)
{
    /* Set the appropriate feature bit */
    cpi->mb.e_mbd.segmentation_enabled = 1;
    cpi->mb.e_mbd.update_mb_segmentation_map = 1;
    cpi->mb.e_mbd.update_mb_segmentation_data = 1;
}
static void disable_segmentation(VP8_COMP *cpi)
{
    /* Clear the appropriate feature bit */
    cpi->mb.e_mbd.segmentation_enabled = 0;
}

/* Valid values for a segment are 0 to 3
 * Segmentation map is arranged as [Rows][Columns]
 */
static void set_segmentation_map(VP8_COMP *cpi, unsigned char *segmentation_map)
{
    /* Copy in the new segmentation map */
    vpx_memcpy(cpi->segmentation_map, segmentation_map, (cpi->common.mb_rows * cpi->common.mb_cols));

    /* Signal that the map should be updated. */
    cpi->mb.e_mbd.update_mb_segmentation_map = 1;
    cpi->mb.e_mbd.update_mb_segmentation_data = 1;
}

/* The values given for each segment can be either deltas (from the default
 * value chosen for the frame) or absolute values.
 *
 * Valid range for abs values is:
 *     (0-127 for MB_LVL_ALT_Q), (0-63 for SEGMENT_ALT_LF)
 * Valid range for delta values are:
 *     (+/-127 for MB_LVL_ALT_Q), (+/-63 for SEGMENT_ALT_LF)
 *
 * abs_delta = SEGMENT_DELTADATA (deltas)
 * abs_delta = SEGMENT_ABSDATA (use the absolute values given).
 *
 */
static void set_segment_data(VP8_COMP *cpi, signed char *feature_data, unsigned char abs_delta)
{
    cpi->mb.e_mbd.mb_segement_abs_delta = abs_delta;
    vpx_memcpy(cpi->segment_feature_data, feature_data, sizeof(cpi->segment_feature_data));
}


static void segmentation_test_function(VP8_COMP *cpi)
{
    unsigned char *seg_map;
    signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];

    // Create a temporary map for segmentation data.
    CHECK_MEM_ERROR(seg_map, vpx_calloc(cpi->common.mb_rows * cpi->common.mb_cols, 1));

    // Set the segmentation Map
    set_segmentation_map(cpi, seg_map);

    // Activate segmentation.
    enable_segmentation(cpi);

    // Set up the quant segment data
    feature_data[MB_LVL_ALT_Q][0] = 0;
    feature_data[MB_LVL_ALT_Q][1] = 4;
    feature_data[MB_LVL_ALT_Q][2] = 0;
    feature_data[MB_LVL_ALT_Q][3] = 0;
    // Set up the loop segment data
    feature_data[MB_LVL_ALT_LF][0] = 0;
    feature_data[MB_LVL_ALT_LF][1] = 0;
    feature_data[MB_LVL_ALT_LF][2] = 0;
    feature_data[MB_LVL_ALT_LF][3] = 0;

    // Initialise the feature data structure
    // SEGMENT_DELTADATA 0, SEGMENT_ABSDATA 1
    set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);

    // Delete segmentation map
    vpx_free(seg_map);

    seg_map = 0;
}

/* A simple function to cyclically refresh the background at a lower Q */
static void cyclic_background_refresh(VP8_COMP *cpi, int Q, int lf_adjustment)
{
    unsigned char *seg_map = cpi->segmentation_map;
    signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
    int i;
    int block_count = cpi->cyclic_refresh_mode_max_mbs_perframe;
    int mbs_in_frame = cpi->common.mb_rows * cpi->common.mb_cols;

    cpi->cyclic_refresh_q = Q / 2;

    // Set every macroblock to be eligible for update.
    // For key frame this will reset seg map to 0.
    vpx_memset(cpi->segmentation_map, 0, mbs_in_frame);

    if (cpi->common.frame_type != KEY_FRAME)
    {
        /* Cycle through the macro_block rows */
        /* MB loop to set local segmentation map */
        i = cpi->cyclic_refresh_mode_index;
        assert(i < mbs_in_frame);
        do
        {
            /* If the MB is a candidate for clean up then mark it for
             * possible boost/refresh (segment 1). The segment id may get
             * reset to 0 later if the MB gets coded as anything other than
             * last frame 0,0, as only (last frame 0,0) MBs are eligible for
             * refresh : that is to say MBs likely to be background blocks.
             */
            if (cpi->cyclic_refresh_map[i] == 0)
            {
                seg_map[i] = 1;
                block_count --;
            }
            else if (cpi->cyclic_refresh_map[i] < 0)
                cpi->cyclic_refresh_map[i]++;

            i++;
            if (i == mbs_in_frame)
                i = 0;

        }
        while(block_count && i != cpi->cyclic_refresh_mode_index);

        cpi->cyclic_refresh_mode_index = i;
    }

    /* Activate segmentation. */
    cpi->mb.e_mbd.update_mb_segmentation_map = 1;
    cpi->mb.e_mbd.update_mb_segmentation_data = 1;
    enable_segmentation(cpi);

    /* Set up the quant segment data */
    feature_data[MB_LVL_ALT_Q][0] = 0;
    feature_data[MB_LVL_ALT_Q][1] = (cpi->cyclic_refresh_q - Q);
    feature_data[MB_LVL_ALT_Q][2] = 0;
    feature_data[MB_LVL_ALT_Q][3] = 0;

    /* Set up the loop segment data */
    feature_data[MB_LVL_ALT_LF][0] = 0;
    feature_data[MB_LVL_ALT_LF][1] = lf_adjustment;
    feature_data[MB_LVL_ALT_LF][2] = 0;
    feature_data[MB_LVL_ALT_LF][3] = 0;

    /* Initialise the feature data structure */
    set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);

}

static void set_default_lf_deltas(VP8_COMP *cpi)
{
    cpi->mb.e_mbd.mode_ref_lf_delta_enabled = 1;
    cpi->mb.e_mbd.mode_ref_lf_delta_update = 1;

    vpx_memset(cpi->mb.e_mbd.ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
    vpx_memset(cpi->mb.e_mbd.mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));

    /* Test of ref frame deltas */
    cpi->mb.e_mbd.ref_lf_deltas[INTRA_FRAME] = 2;
    cpi->mb.e_mbd.ref_lf_deltas[LAST_FRAME] = 0;
    cpi->mb.e_mbd.ref_lf_deltas[GOLDEN_FRAME] = -2;
    cpi->mb.e_mbd.ref_lf_deltas[ALTREF_FRAME] = -2;

    cpi->mb.e_mbd.mode_lf_deltas[0] = 4;    /* BPRED */

    if(cpi->oxcf.Mode == MODE_REALTIME)
        cpi->mb.e_mbd.mode_lf_deltas[1] = -12;  /* Zero */
    else
        cpi->mb.e_mbd.mode_lf_deltas[1] = -2;   /* Zero */

    cpi->mb.e_mbd.mode_lf_deltas[2] = 2;    /* New mv */
    cpi->mb.e_mbd.mode_lf_deltas[3] = 4;    /* Split mv */
}

/* Convenience macros for mapping speed and mode into a continuous
 * range
 */
#define GOOD(x) (x+1)
#define RT(x) (x+7)

static int speed_map(int speed, const int *map)
{
    int res;

    do
    {
        res = *map++;
    } while(speed >= *map++);
    return res;
}
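
/* How the tables below are read: each map is a sequence of alternating
 * {value, upper speed bound} pairs terminated by INT_MAX, and speed_map()
 * returns the value paired with the first bound that exceeds the requested
 * speed. GOOD(x) and RT(x) place good-quality and realtime speeds on the
 * same axis. As an illustration (worked out from the code, not part of the
 * original comments): with thresh_mult_map_znn, good-quality speeds below 2
 * yield 0, speeds 2 and above yield 1500, 3 and above yield 2000, while
 * realtime speeds 0-1 yield 1000 and 2 and above yield 2000.
 */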

static const int thresh_mult_map_znn[] = {
    /* map common to zero, nearest, and near */
    0, GOOD(2), 1500, GOOD(3), 2000, RT(0), 1000, RT(2), 2000, INT_MAX
};

static const int thresh_mult_map_vhpred[] = {
    1000, GOOD(2), 1500, GOOD(3), 2000, RT(0), 1000, RT(1), 2000,
    RT(7), INT_MAX, INT_MAX
};

static const int thresh_mult_map_bpred[] = {
    2000, GOOD(0), 2500, GOOD(2), 5000, GOOD(3), 7500, RT(0), 2500, RT(1), 5000,
    RT(6), INT_MAX, INT_MAX
};

static const int thresh_mult_map_tm[] = {
    1000, GOOD(2), 1500, GOOD(3), 2000, RT(0), 0, RT(1), 1000, RT(2), 2000,
    RT(7), INT_MAX, INT_MAX
};

static const int thresh_mult_map_new1[] = {
    1000, GOOD(2), 2000, RT(0), 2000, INT_MAX
};

static const int thresh_mult_map_new2[] = {
    1000, GOOD(2), 2000, GOOD(3), 2500, GOOD(5), 4000, RT(0), 2000, RT(2), 2500,
    RT(5), 4000, INT_MAX
};

static const int thresh_mult_map_split1[] = {
    2500, GOOD(0), 1700, GOOD(2), 10000, GOOD(3), 25000, GOOD(4), INT_MAX,
    RT(0), 5000, RT(1), 10000, RT(2), 25000, RT(3), INT_MAX, INT_MAX
};

static const int thresh_mult_map_split2[] = {
    5000, GOOD(0), 4500, GOOD(2), 20000, GOOD(3), 50000, GOOD(4), INT_MAX,
    RT(0), 10000, RT(1), 20000, RT(2), 50000, RT(3), INT_MAX, INT_MAX
};

static const int mode_check_freq_map_zn2[] = {
    /* {zero,nearest}{2,3} */
    0, RT(10), 1<<1, RT(11), 1<<2, RT(12), 1<<3, INT_MAX
};

static const int mode_check_freq_map_vhbpred[] = {
    0, GOOD(5), 2, RT(0), 0, RT(3), 2, RT(5), 4, INT_MAX
};

static const int mode_check_freq_map_near2[] = {
    0, GOOD(5), 2, RT(0), 0, RT(3), 2, RT(10), 1<<2, RT(11), 1<<3, RT(12), 1<<4,
    INT_MAX
};

static const int mode_check_freq_map_new1[] = {
    0, RT(10), 1<<1, RT(11), 1<<2, RT(12), 1<<3, INT_MAX
};

static const int mode_check_freq_map_new2[] = {
    0, GOOD(5), 4, RT(0), 0, RT(3), 4, RT(10), 1<<3, RT(11), 1<<4, RT(12), 1<<5,
    INT_MAX
};

static const int mode_check_freq_map_split1[] = {
    0, GOOD(2), 2, GOOD(3), 7, RT(1), 2, RT(2), 7, INT_MAX
};

static const int mode_check_freq_map_split2[] = {
    0, GOOD(1), 2, GOOD(2), 4, GOOD(3), 15, RT(1), 4, RT(2), 15, INT_MAX
};

void vp8_set_speed_features(VP8_COMP *cpi)
{
    SPEED_FEATURES *sf = &cpi->sf;
    int Mode = cpi->compressor_speed;
    int Speed = cpi->Speed;
    int i;
    VP8_COMMON *cm = &cpi->common;
    int last_improved_quant = sf->improved_quant;
    int ref_frames;

    /* Initialise default mode frequency sampling variables */
    for (i = 0; i < MAX_MODES; i ++)
    {
        cpi->mode_check_freq[i] = 0;
    }

    cpi->mb.mbs_tested_so_far = 0;

    /* best quality defaults */
    sf->RD = 1;
    sf->search_method = NSTEP;
    sf->improved_quant = 1;
    sf->improved_dct = 1;
    sf->auto_filter = 1;
    sf->recode_loop = 1;
    sf->quarter_pixel_search = 1;
    sf->half_pixel_search = 1;
    sf->iterative_sub_pixel = 1;
    sf->optimize_coefficients = 1;
    sf->use_fastquant_for_pick = 0;
    sf->no_skip_block4x4_search = 1;

    sf->first_step = 0;
    sf->max_step_search_steps = MAX_MVSEARCH_STEPS;
    sf->improved_mv_pred = 1;

    /* default thresholds to 0 */
    for (i = 0; i < MAX_MODES; i++)
        sf->thresh_mult[i] = 0;

    /* Count enabled references */
    ref_frames = 1;
    if (cpi->ref_frame_flags & VP8_LAST_FRAME)
        ref_frames++;
    if (cpi->ref_frame_flags & VP8_GOLD_FRAME)
        ref_frames++;
    if (cpi->ref_frame_flags & VP8_ALTR_FRAME)
        ref_frames++;

    /* Convert speed to continuous range, with clamping */
    if (Mode == 0)
        Speed = 0;
    else if (Mode == 2)
        Speed = RT(Speed);
    else
    {
        if (Speed > 5)
            Speed = 5;
        Speed = GOOD(Speed);
    }

    sf->thresh_mult[THR_ZERO1] =
    sf->thresh_mult[THR_NEAREST1] =
    sf->thresh_mult[THR_NEAR1] =
    sf->thresh_mult[THR_DC] = 0; /* always */

    sf->thresh_mult[THR_ZERO2] =
    sf->thresh_mult[THR_ZERO3] =
    sf->thresh_mult[THR_NEAREST2] =
    sf->thresh_mult[THR_NEAREST3] =
    sf->thresh_mult[THR_NEAR2] =
    sf->thresh_mult[THR_NEAR3] = speed_map(Speed, thresh_mult_map_znn);

    sf->thresh_mult[THR_V_PRED] =
    sf->thresh_mult[THR_H_PRED] = speed_map(Speed, thresh_mult_map_vhpred);
    sf->thresh_mult[THR_B_PRED] = speed_map(Speed, thresh_mult_map_bpred);
    sf->thresh_mult[THR_TM] = speed_map(Speed, thresh_mult_map_tm);
    sf->thresh_mult[THR_NEW1] = speed_map(Speed, thresh_mult_map_new1);
    sf->thresh_mult[THR_NEW2] =
    sf->thresh_mult[THR_NEW3] = speed_map(Speed, thresh_mult_map_new2);
    sf->thresh_mult[THR_SPLIT1] = speed_map(Speed, thresh_mult_map_split1);
    sf->thresh_mult[THR_SPLIT2] =
    sf->thresh_mult[THR_SPLIT3] = speed_map(Speed, thresh_mult_map_split2);

    cpi->mode_check_freq[THR_ZERO1] =
    cpi->mode_check_freq[THR_NEAREST1] =
    cpi->mode_check_freq[THR_NEAR1] =
    cpi->mode_check_freq[THR_TM] =
    cpi->mode_check_freq[THR_DC] = 0; /* always */

    cpi->mode_check_freq[THR_ZERO2] =
    cpi->mode_check_freq[THR_ZERO3] =
    cpi->mode_check_freq[THR_NEAREST2] =
    cpi->mode_check_freq[THR_NEAREST3] = speed_map(Speed,
                                                   mode_check_freq_map_zn2);

    cpi->mode_check_freq[THR_NEAR2] =
    cpi->mode_check_freq[THR_NEAR3] = speed_map(Speed,
                                                mode_check_freq_map_near2);

    cpi->mode_check_freq[THR_V_PRED] =
    cpi->mode_check_freq[THR_H_PRED] =
    cpi->mode_check_freq[THR_B_PRED] = speed_map(Speed,
                                                 mode_check_freq_map_vhbpred);
    cpi->mode_check_freq[THR_NEW1] = speed_map(Speed,
                                               mode_check_freq_map_new1);
    cpi->mode_check_freq[THR_NEW2] =
    cpi->mode_check_freq[THR_NEW3] = speed_map(Speed,
                                               mode_check_freq_map_new2);
    cpi->mode_check_freq[THR_SPLIT1] = speed_map(Speed,
                                                 mode_check_freq_map_split1);
    cpi->mode_check_freq[THR_SPLIT2] =
    cpi->mode_check_freq[THR_SPLIT3] = speed_map(Speed,
                                                 mode_check_freq_map_split2);
    Speed = cpi->Speed;
    switch (Mode)
    {
#if !(CONFIG_REALTIME_ONLY)
    case 0: /* best quality mode */
        sf->first_step = 0;
        sf->max_step_search_steps = MAX_MVSEARCH_STEPS;
        break;
    case 1:
    case 3:
        if (Speed > 0)
        {
            /* Disable coefficient optimization above speed 0 */
            sf->optimize_coefficients = 0;
            sf->use_fastquant_for_pick = 1;
            sf->no_skip_block4x4_search = 0;

            sf->first_step = 1;
        }

        if (Speed > 2)
        {
            sf->improved_quant = 0;
            sf->improved_dct = 0;

            /* Only do recode loop on key frames, golden frames and
             * alt ref frames
             */
            sf->recode_loop = 2;

        }

        if (Speed > 3)
        {
            sf->auto_filter = 1;
            sf->recode_loop = 0; /* recode loop off */
            sf->RD = 0;          /* Turn rd off */

        }

        if (Speed > 4)
        {
            sf->auto_filter = 0; /* Faster selection of loop filter */
        }

        break;
#endif
    case 2:
        sf->optimize_coefficients = 0;
        sf->recode_loop = 0;
        sf->auto_filter = 1;
        sf->iterative_sub_pixel = 1;
        sf->search_method = NSTEP;

        if (Speed > 0)
        {
            sf->improved_quant = 0;
            sf->improved_dct = 0;

            sf->use_fastquant_for_pick = 1;
            sf->no_skip_block4x4_search = 0;
            sf->first_step = 1;
        }

        if (Speed > 2)
            sf->auto_filter = 0; /* Faster selection of loop filter */

        if (Speed > 3)
        {
            sf->RD = 0;
            sf->auto_filter = 1;
        }

        if (Speed > 4)
        {
            sf->auto_filter = 0; /* Faster selection of loop filter */
            sf->search_method = HEX;
            sf->iterative_sub_pixel = 0;
        }

        if (Speed > 6)
        {
            unsigned int sum = 0;
            unsigned int total_mbs = cm->MBs;
            int thresh;
            unsigned int total_skip;

            int min = 2000;

            if (cpi->oxcf.encode_breakout > 2000)
                min = cpi->oxcf.encode_breakout;

            min >>= 7;

            for (i = 0; i < min; i++)
            {
                sum += cpi->mb.error_bins[i];
            }

            total_skip = sum;
            sum = 0;

            /* i starts from 2 to make sure thresh started from 2048 */
            for (; i < 1024; i++)
            {
                sum += cpi->mb.error_bins[i];

                if (10 * sum >= (unsigned int)(cpi->Speed - 6)*(total_mbs - total_skip))
                    break;
            }

            i--;
            thresh = (i << 7);

            if (thresh < 2000)
                thresh = 2000;

            if (ref_frames > 1)
            {
                sf->thresh_mult[THR_NEW1 ] = thresh;
                sf->thresh_mult[THR_NEAREST1 ] = thresh >> 1;
                sf->thresh_mult[THR_NEAR1 ] = thresh >> 1;
            }

            if (ref_frames > 2)
            {
                sf->thresh_mult[THR_NEW2] = thresh << 1;
                sf->thresh_mult[THR_NEAREST2 ] = thresh;
                sf->thresh_mult[THR_NEAR2 ] = thresh;
            }

            if (ref_frames > 3)
            {
                sf->thresh_mult[THR_NEW3] = thresh << 1;
                sf->thresh_mult[THR_NEAREST3 ] = thresh;
                sf->thresh_mult[THR_NEAR3 ] = thresh;
            }

            sf->improved_mv_pred = 0;
        }

        if (Speed > 8)
            sf->quarter_pixel_search = 0;

        if(cm->version == 0)
        {
            cm->filter_type = NORMAL_LOOPFILTER;

            if (Speed >= 14)
                cm->filter_type = SIMPLE_LOOPFILTER;
        }
        else
        {
            cm->filter_type = SIMPLE_LOOPFILTER;
        }

        /* This has a big hit on quality. Last resort */
        if (Speed >= 15)
            sf->half_pixel_search = 0;

        vpx_memset(cpi->mb.error_bins, 0, sizeof(cpi->mb.error_bins));

    }; /* switch */

    /* Slow quant, dct and trellis not worthwhile for first pass
     * so make sure they are always turned off.
     */
    if ( cpi->pass == 1 )
    {
        sf->improved_quant = 0;
        sf->optimize_coefficients = 0;
        sf->improved_dct = 0;
    }

    if (cpi->sf.search_method == NSTEP)
    {
        vp8_init3smotion_compensation(&cpi->mb, cm->yv12_fb[cm->lst_fb_idx].y_stride);
    }
    else if (cpi->sf.search_method == DIAMOND)
    {
        vp8_init_dsmotion_compensation(&cpi->mb, cm->yv12_fb[cm->lst_fb_idx].y_stride);
    }

    if (cpi->sf.improved_dct)
    {
        cpi->mb.short_fdct8x4 = vp8_short_fdct8x4;
        cpi->mb.short_fdct4x4 = vp8_short_fdct4x4;
    }
    else
    {
        /* No fast FDCT defined for any platform at this time. */
        cpi->mb.short_fdct8x4 = vp8_short_fdct8x4;
        cpi->mb.short_fdct4x4 = vp8_short_fdct4x4;
    }

    cpi->mb.short_walsh4x4 = vp8_short_walsh4x4;

    if (cpi->sf.improved_quant)
    {
        cpi->mb.quantize_b = vp8_regular_quantize_b;
        cpi->mb.quantize_b_pair = vp8_regular_quantize_b_pair;
    }
    else
    {
        cpi->mb.quantize_b = vp8_fast_quantize_b;
        cpi->mb.quantize_b_pair = vp8_fast_quantize_b_pair;
    }
    if (cpi->sf.improved_quant != last_improved_quant)
        vp8cx_init_quantizer(cpi);

    if (cpi->sf.iterative_sub_pixel == 1)
    {
        cpi->find_fractional_mv_step = vp8_find_best_sub_pixel_step_iteratively;
    }
    else if (cpi->sf.quarter_pixel_search)
    {
        cpi->find_fractional_mv_step = vp8_find_best_sub_pixel_step;
    }
    else if (cpi->sf.half_pixel_search)
    {
        cpi->find_fractional_mv_step = vp8_find_best_half_pixel_step;
    }
    else
    {
        cpi->find_fractional_mv_step = vp8_skip_fractional_mv_step;
    }

    if (cpi->sf.optimize_coefficients == 1 && cpi->pass!=1)
        cpi->mb.optimize = 1;
    else
        cpi->mb.optimize = 0;

    if (cpi->common.full_pixel)
        cpi->find_fractional_mv_step = vp8_skip_fractional_mv_step;

#ifdef SPEEDSTATS
    frames_at_speed[cpi->Speed]++;
#endif
}
#undef GOOD
#undef RT

static void alloc_raw_frame_buffers(VP8_COMP *cpi)
{
#if VP8_TEMPORAL_ALT_REF
    int width = (cpi->oxcf.Width + 15) & ~15;
    int height = (cpi->oxcf.Height + 15) & ~15;
#endif

    cpi->lookahead = vp8_lookahead_init(cpi->oxcf.Width, cpi->oxcf.Height,
                                        cpi->oxcf.lag_in_frames);
    if(!cpi->lookahead)
        vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate lag buffers");

#if VP8_TEMPORAL_ALT_REF

    if (vp8_yv12_alloc_frame_buffer(&cpi->alt_ref_buffer,
                                    width, height, VP8BORDERINPIXELS))
        vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate altref buffer");

#endif
}
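
/* Note on the (x + 15) & ~15 rounding used above: the temporal alt-ref buffer
 * is padded up to whole 16x16 macroblocks. For example (illustrative numbers
 * only), a 1920x1080 source keeps its width but has its height padded to 1088.
 */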

static void dealloc_raw_frame_buffers(VP8_COMP *cpi)
{
#if VP8_TEMPORAL_ALT_REF
    vp8_yv12_de_alloc_frame_buffer(&cpi->alt_ref_buffer);
#endif
    vp8_lookahead_destroy(cpi->lookahead);
}


static int vp8_alloc_partition_data(VP8_COMP *cpi)
{
    vpx_free(cpi->mb.pip);

    cpi->mb.pip = vpx_calloc((cpi->common.mb_cols + 1) *
                             (cpi->common.mb_rows + 1),
                             sizeof(PARTITION_INFO));
    if(!cpi->mb.pip)
        return 1;

    cpi->mb.pi = cpi->mb.pip + cpi->common.mode_info_stride + 1;

    return 0;
}

void vp8_alloc_compressor_data(VP8_COMP *cpi)
{
    VP8_COMMON *cm = & cpi->common;

    int width = cm->Width;
    int height = cm->Height;

    if (vp8_alloc_frame_buffers(cm, width, height))
        vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate frame buffers");

    if (vp8_alloc_partition_data(cpi))
        vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate partition data");


    if ((width & 0xf) != 0)
        width += 16 - (width & 0xf);

    if ((height & 0xf) != 0)
        height += 16 - (height & 0xf);


    if (vp8_yv12_alloc_frame_buffer(&cpi->pick_lf_lvl_frame,
                                    width, height, VP8BORDERINPIXELS))
        vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate last frame buffer");

    if (vp8_yv12_alloc_frame_buffer(&cpi->scaled_source,
                                    width, height, VP8BORDERINPIXELS))
        vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate scaled source buffer");

    vpx_free(cpi->tok);

    {
#if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
        unsigned int tokens = 8 * 24 * 16; /* one MB for each thread */
#else
        unsigned int tokens = cm->mb_rows * cm->mb_cols * 24 * 16;
#endif
        CHECK_MEM_ERROR(cpi->tok, vpx_calloc(tokens, sizeof(*cpi->tok)));
    }

    /* Data used for real time vc mode to see if gf needs refreshing */
    cpi->zeromv_count = 0;


    /* Structures used to monitor GF usage */
    vpx_free(cpi->gf_active_flags);
    CHECK_MEM_ERROR(cpi->gf_active_flags,
                    vpx_calloc(sizeof(*cpi->gf_active_flags),
                               cm->mb_rows * cm->mb_cols));
    cpi->gf_active_count = cm->mb_rows * cm->mb_cols;

    vpx_free(cpi->mb_activity_map);
    CHECK_MEM_ERROR(cpi->mb_activity_map,
                    vpx_calloc(sizeof(*cpi->mb_activity_map),
                               cm->mb_rows * cm->mb_cols));

    /* allocate memory for storing last frame's MVs for MV prediction. */
    vpx_free(cpi->lfmv);
    CHECK_MEM_ERROR(cpi->lfmv, vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
                                          sizeof(*cpi->lfmv)));
    vpx_free(cpi->lf_ref_frame_sign_bias);
    CHECK_MEM_ERROR(cpi->lf_ref_frame_sign_bias,
                    vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
                               sizeof(*cpi->lf_ref_frame_sign_bias)));
    vpx_free(cpi->lf_ref_frame);
    CHECK_MEM_ERROR(cpi->lf_ref_frame,
                    vpx_calloc((cm->mb_rows+2) * (cm->mb_cols+2),
                               sizeof(*cpi->lf_ref_frame)));

    /* Create the encoder segmentation map and set all entries to 0 */
    vpx_free(cpi->segmentation_map);
    CHECK_MEM_ERROR(cpi->segmentation_map,
                    vpx_calloc(cm->mb_rows * cm->mb_cols,
                               sizeof(*cpi->segmentation_map)));
    cpi->cyclic_refresh_mode_index = 0;
    vpx_free(cpi->active_map);
    CHECK_MEM_ERROR(cpi->active_map,
                    vpx_calloc(cm->mb_rows * cm->mb_cols,
                               sizeof(*cpi->active_map)));
    vpx_memset(cpi->active_map , 1, (cm->mb_rows * cm->mb_cols));

#if CONFIG_MULTITHREAD
    if (width < 640)
        cpi->mt_sync_range = 1;
    else if (width <= 1280)
        cpi->mt_sync_range = 4;
    else if (width <= 2560)
        cpi->mt_sync_range = 8;
    else
        cpi->mt_sync_range = 16;

    if (cpi->oxcf.multi_threaded > 1)
    {
        vpx_free(cpi->mt_current_mb_col);
        CHECK_MEM_ERROR(cpi->mt_current_mb_col,
                    vpx_malloc(sizeof(*cpi->mt_current_mb_col) * cm->mb_rows));
    }

#endif

    vpx_free(cpi->tplist);
    CHECK_MEM_ERROR(cpi->tplist, vpx_malloc(sizeof(TOKENLIST) * cm->mb_rows));
}


/* Quant MOD */
static const int q_trans[] =
{
    0,   1,   2,   3,   4,   5,   7,   8,
    9,   10,  12,  13,  15,  17,  18,  19,
    20,  21,  23,  24,  25,  26,  27,  28,
    29,  30,  31,  33,  35,  37,  39,  41,
    43,  45,  47,  49,  51,  53,  55,  57,
    59,  61,  64,  67,  70,  73,  76,  79,
    82,  85,  88,  91,  94,  97,  100, 103,
    106, 109, 112, 115, 118, 121, 124, 127,
};

int vp8_reverse_trans(int x)
{
    int i;

    for (i = 0; i < 64; i++)
        if (q_trans[i] >= x)
            return i;

    return 63;
}
void vp8_new_framerate(VP8_COMP *cpi, double framerate)
{
    if(framerate < .1)
        framerate = 30;

    cpi->framerate = framerate;
    cpi->output_framerate = framerate;
    cpi->per_frame_bandwidth = (int)(cpi->oxcf.target_bandwidth /
                                     cpi->output_framerate);
    cpi->av_per_frame_bandwidth = cpi->per_frame_bandwidth;
    cpi->min_frame_bandwidth = (int)(cpi->av_per_frame_bandwidth *
                                     cpi->oxcf.two_pass_vbrmin_section / 100);

    /* Set Maximum gf/arf interval */
    cpi->max_gf_interval = ((int)(cpi->output_framerate / 2.0) + 2);

    if(cpi->max_gf_interval < 12)
        cpi->max_gf_interval = 12;

    /* Extended interval for genuinely static scenes */
    cpi->twopass.static_scene_max_gf_interval = cpi->key_frame_frequency >> 1;

    /* Special conditions when altr ref frame enabled in lagged compress mode */
    if (cpi->oxcf.play_alternate && cpi->oxcf.lag_in_frames)
    {
        if (cpi->max_gf_interval > cpi->oxcf.lag_in_frames - 1)
            cpi->max_gf_interval = cpi->oxcf.lag_in_frames - 1;

        if (cpi->twopass.static_scene_max_gf_interval > cpi->oxcf.lag_in_frames - 1)
            cpi->twopass.static_scene_max_gf_interval = cpi->oxcf.lag_in_frames - 1;
    }

    if ( cpi->max_gf_interval > cpi->twopass.static_scene_max_gf_interval )
        cpi->max_gf_interval = cpi->twopass.static_scene_max_gf_interval;
}
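
/* Illustration of the rate bookkeeping above (example numbers only): with a
 * target_bandwidth of 800000 bits/s at 25 fps, per_frame_bandwidth is
 * 800000 / 25 = 32000 bits, and max_gf_interval starts at 25/2 + 2 = 14
 * frames before the lag and static-scene caps are applied.
 */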

static void init_config(VP8_COMP *cpi, VP8_CONFIG *oxcf)
{
    VP8_COMMON *cm = &cpi->common;

    cpi->oxcf = *oxcf;

    cpi->auto_gold = 1;
    cpi->auto_adjust_gold_quantizer = 1;

    cm->version = oxcf->Version;
    vp8_setup_version(cm);

    /* frame rate is not available on the first frame, as it's derived from
     * the observed timestamps. The actual value used here doesn't matter
     * too much, as it will adapt quickly. If the reciprocal of the timebase
     * seems like a reasonable framerate, then use that as a guess, otherwise
     * use 30.
     */
    cpi->framerate = (double)(oxcf->timebase.den) /
                     (double)(oxcf->timebase.num);

    if (cpi->framerate > 180)
        cpi->framerate = 30;

    cpi->ref_framerate = cpi->framerate;

    /* change includes all joint functionality */
    vp8_change_config(cpi, oxcf);

    /* Initialize active best and worst q and average q values. */
    cpi->active_worst_quality = cpi->oxcf.worst_allowed_q;
    cpi->active_best_quality = cpi->oxcf.best_allowed_q;
    cpi->avg_frame_qindex = cpi->oxcf.worst_allowed_q;

    /* Initialise the starting buffer levels */
    cpi->buffer_level = cpi->oxcf.starting_buffer_level;
    cpi->bits_off_target = cpi->oxcf.starting_buffer_level;

    cpi->rolling_target_bits = cpi->av_per_frame_bandwidth;
    cpi->rolling_actual_bits = cpi->av_per_frame_bandwidth;
    cpi->long_rolling_target_bits = cpi->av_per_frame_bandwidth;
    cpi->long_rolling_actual_bits = cpi->av_per_frame_bandwidth;

    cpi->total_actual_bits = 0;
    cpi->total_target_vs_actual = 0;

    /* Temporal scalability */
    if (cpi->oxcf.number_of_layers > 1)
    {
        unsigned int i;
        double prev_layer_framerate = 0;

        for (i = 0; i < cpi->oxcf.number_of_layers; i++)
        {
            init_temporal_layer_context(cpi, oxcf, i, prev_layer_framerate);
            prev_layer_framerate = cpi->output_framerate /
                                   cpi->oxcf.rate_decimator[i];
        }
    }

#if VP8_TEMPORAL_ALT_REF
    {
        int i;

        cpi->fixed_divide[0] = 0;

        for (i = 1; i < 512; i++)
            cpi->fixed_divide[i] = 0x80000 / i;
    }
#endif
}

static void update_layer_contexts (VP8_COMP *cpi)
{
    VP8_CONFIG *oxcf = &cpi->oxcf;

    /* Update snapshots of the layer contexts to reflect new parameters */
    if (oxcf->number_of_layers > 1)
    {
        unsigned int i;
        double prev_layer_framerate = 0;

        for (i = 0; i < oxcf->number_of_layers; i++)
        {
            LAYER_CONTEXT *lc = &cpi->layer_context[i];

            lc->framerate =
                cpi->ref_framerate / oxcf->rate_decimator[i];
            lc->target_bandwidth = oxcf->target_bitrate[i] * 1000;

            lc->starting_buffer_level = rescale(
                          (int)oxcf->starting_buffer_level_in_ms,
                          lc->target_bandwidth, 1000);

            if (oxcf->optimal_buffer_level == 0)
                lc->optimal_buffer_level = lc->target_bandwidth / 8;
            else
                lc->optimal_buffer_level = rescale(
                          (int)oxcf->optimal_buffer_level_in_ms,
                          lc->target_bandwidth, 1000);

            if (oxcf->maximum_buffer_size == 0)
                lc->maximum_buffer_size = lc->target_bandwidth / 8;
            else
                lc->maximum_buffer_size = rescale(
                          (int)oxcf->maximum_buffer_size_in_ms,
                          lc->target_bandwidth, 1000);

            /* Work out the average size of a frame within this layer */
            if (i > 0)
                lc->avg_frame_size_for_layer =
                    (int)((oxcf->target_bitrate[i] -
                           oxcf->target_bitrate[i-1]) * 1000 /
                          (lc->framerate - prev_layer_framerate));

            prev_layer_framerate = lc->framerate;
        }
    }
}

void vp8_change_config(VP8_COMP *cpi, VP8_CONFIG *oxcf)
{
    VP8_COMMON *cm = &cpi->common;
    int last_w, last_h, prev_number_of_layers;

    if (!cpi)
        return;

    if (!oxcf)
        return;

#if CONFIG_MULTITHREAD
    /* wait for the last picture loopfilter thread to finish */
    if (cpi->b_lpf_running)
    {
        sem_wait(&cpi->h_event_end_lpf);
        cpi->b_lpf_running = 0;
    }
#endif

    if (cm->version != oxcf->Version)
    {
        cm->version = oxcf->Version;
        vp8_setup_version(cm);
    }

    last_w = cpi->oxcf.Width;
    last_h = cpi->oxcf.Height;
    prev_number_of_layers = cpi->oxcf.number_of_layers;

    cpi->oxcf = *oxcf;

    switch (cpi->oxcf.Mode)
    {

    case MODE_REALTIME:
        cpi->pass = 0;
        cpi->compressor_speed = 2;

        if (cpi->oxcf.cpu_used < -16)
        {
            cpi->oxcf.cpu_used = -16;
        }

        if (cpi->oxcf.cpu_used > 16)
            cpi->oxcf.cpu_used = 16;

        break;

    case MODE_GOODQUALITY:
        cpi->pass = 0;
        cpi->compressor_speed = 1;

        if (cpi->oxcf.cpu_used < -5)
        {
            cpi->oxcf.cpu_used = -5;
        }

        if (cpi->oxcf.cpu_used > 5)
            cpi->oxcf.cpu_used = 5;

        break;

    case MODE_BESTQUALITY:
        cpi->pass = 0;
        cpi->compressor_speed = 0;
        break;

    case MODE_FIRSTPASS:
        cpi->pass = 1;
        cpi->compressor_speed = 1;
        break;
    case MODE_SECONDPASS:
        cpi->pass = 2;
        cpi->compressor_speed = 1;

        if (cpi->oxcf.cpu_used < -5)
        {
            cpi->oxcf.cpu_used = -5;
        }

        if (cpi->oxcf.cpu_used > 5)
            cpi->oxcf.cpu_used = 5;

        break;
    case MODE_SECONDPASS_BEST:
        cpi->pass = 2;
        cpi->compressor_speed = 0;
        break;
    }

    if (cpi->pass == 0)
        cpi->auto_worst_q = 1;

    cpi->oxcf.worst_allowed_q = q_trans[oxcf->worst_allowed_q];
    cpi->oxcf.best_allowed_q = q_trans[oxcf->best_allowed_q];
    cpi->oxcf.cq_level = q_trans[cpi->oxcf.cq_level];

    if (oxcf->fixed_q >= 0)
    {
        if (oxcf->worst_allowed_q < 0)
            cpi->oxcf.fixed_q = q_trans[0];
        else
            cpi->oxcf.fixed_q = q_trans[oxcf->worst_allowed_q];

        if (oxcf->alt_q < 0)
            cpi->oxcf.alt_q = q_trans[0];
        else
            cpi->oxcf.alt_q = q_trans[oxcf->alt_q];

        if (oxcf->key_q < 0)
            cpi->oxcf.key_q = q_trans[0];
        else
            cpi->oxcf.key_q = q_trans[oxcf->key_q];

        if (oxcf->gold_q < 0)
            cpi->oxcf.gold_q = q_trans[0];
        else
            cpi->oxcf.gold_q = q_trans[oxcf->gold_q];

    }

    cpi->baseline_gf_interval =
        cpi->oxcf.alt_freq ? cpi->oxcf.alt_freq : DEFAULT_GF_INTERVAL;

    cpi->ref_frame_flags = VP8_ALTR_FRAME | VP8_GOLD_FRAME | VP8_LAST_FRAME;

    cm->refresh_golden_frame = 0;
    cm->refresh_last_frame = 1;
    cm->refresh_entropy_probs = 1;

#if (CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
    cpi->oxcf.token_partitions = 3;
#endif

    if (cpi->oxcf.token_partitions >= 0 && cpi->oxcf.token_partitions <= 3)
        cm->multi_token_partition =
            (TOKEN_PARTITION) cpi->oxcf.token_partitions;

    setup_features(cpi);

    {
        int i;

        for (i = 0; i < MAX_MB_SEGMENTS; i++)
            cpi->segment_encode_breakout[i] = cpi->oxcf.encode_breakout;
    }

    /* At the moment the first order values may not be > MAXQ */
    if (cpi->oxcf.fixed_q > MAXQ)
        cpi->oxcf.fixed_q = MAXQ;

    /* local file playback mode == really big buffer */
    if (cpi->oxcf.end_usage == USAGE_LOCAL_FILE_PLAYBACK)
    {
        cpi->oxcf.starting_buffer_level       = 60000;
        cpi->oxcf.optimal_buffer_level        = 60000;
        cpi->oxcf.maximum_buffer_size         = 240000;
        cpi->oxcf.starting_buffer_level_in_ms = 60000;
        cpi->oxcf.optimal_buffer_level_in_ms  = 60000;
        cpi->oxcf.maximum_buffer_size_in_ms   = 240000;
    }

    /* Convert target bandwidth from Kbit/s to Bit/s */
    cpi->oxcf.target_bandwidth *= 1000;

    cpi->oxcf.starting_buffer_level =
        rescale((int)cpi->oxcf.starting_buffer_level,
                cpi->oxcf.target_bandwidth, 1000);

    /* Set or reset optimal and maximum buffer levels. */
    if (cpi->oxcf.optimal_buffer_level == 0)
        cpi->oxcf.optimal_buffer_level = cpi->oxcf.target_bandwidth / 8;
    else
        cpi->oxcf.optimal_buffer_level =
            rescale((int)cpi->oxcf.optimal_buffer_level,
                    cpi->oxcf.target_bandwidth, 1000);

    if (cpi->oxcf.maximum_buffer_size == 0)
        cpi->oxcf.maximum_buffer_size = cpi->oxcf.target_bandwidth / 8;
    else
        cpi->oxcf.maximum_buffer_size =
            rescale((int)cpi->oxcf.maximum_buffer_size,
                    cpi->oxcf.target_bandwidth, 1000);

    /* Set up frame rate and related rate control parameters. */
    vp8_new_framerate(cpi, cpi->framerate);

    /* Set absolute upper and lower quality limits */
    cpi->worst_quality = cpi->oxcf.worst_allowed_q;
    cpi->best_quality = cpi->oxcf.best_allowed_q;

    /* active values should only be modified if out of new range */
    if (cpi->active_worst_quality > cpi->oxcf.worst_allowed_q)
    {
        cpi->active_worst_quality = cpi->oxcf.worst_allowed_q;
    }
    /* less likely */
    else if (cpi->active_worst_quality < cpi->oxcf.best_allowed_q)
    {
        cpi->active_worst_quality = cpi->oxcf.best_allowed_q;
    }
    if (cpi->active_best_quality < cpi->oxcf.best_allowed_q)
    {
        cpi->active_best_quality = cpi->oxcf.best_allowed_q;
    }
    /* less likely */
    else if (cpi->active_best_quality > cpi->oxcf.worst_allowed_q)
    {
        cpi->active_best_quality = cpi->oxcf.worst_allowed_q;
    }

    cpi->buffered_mode = cpi->oxcf.optimal_buffer_level > 0;

    cpi->cq_target_quality = cpi->oxcf.cq_level;

    /* Only allow dropped frames in buffered mode */
    cpi->drop_frames_allowed = cpi->oxcf.allow_df && cpi->buffered_mode;

    cpi->target_bandwidth = cpi->oxcf.target_bandwidth;

    // Check if the number of temporal layers has changed, and if so reset the
    // pattern counter and set/initialize the temporal layer context for the
    // new layer configuration.
    if (cpi->oxcf.number_of_layers != prev_number_of_layers)
    {
        // If the number of temporal layers are changed we must start at the
        // base of the pattern cycle, so reset temporal_pattern_counter.
        cpi->temporal_pattern_counter = 0;
        reset_temporal_layer_change(cpi, oxcf, prev_number_of_layers);
    }

    cm->Width = cpi->oxcf.Width;
    cm->Height = cpi->oxcf.Height;

    /* TODO(jkoleszar): if an internal spatial resampling is active,
     * and we downsize the input image, maybe we should clear the
     * internal scale immediately rather than waiting for it to
     * correct.
     */

    /* VP8 sharpness level mapping 0-7 (vs 0-10 in general VPx dialogs) */
    if (cpi->oxcf.Sharpness > 7)
        cpi->oxcf.Sharpness = 7;

    cm->sharpness_level = cpi->oxcf.Sharpness;

    if (cm->horiz_scale != NORMAL || cm->vert_scale != NORMAL)
    {
        int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs);
        int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs);

        Scale2Ratio(cm->horiz_scale, &hr, &hs);
        Scale2Ratio(cm->vert_scale, &vr, &vs);

        /* always go to the next whole number */
        cm->Width = (hs - 1 + cpi->oxcf.Width * hr) / hs;
        cm->Height = (vs - 1 + cpi->oxcf.Height * vr) / vs;
    }

    if (last_w != cpi->oxcf.Width || last_h != cpi->oxcf.Height)
        cpi->force_next_frame_intra = 1;

    if (((cm->Width + 15) & 0xfffffff0) !=
          cm->yv12_fb[cm->lst_fb_idx].y_width ||
        ((cm->Height + 15) & 0xfffffff0) !=
          cm->yv12_fb[cm->lst_fb_idx].y_height ||
        cm->yv12_fb[cm->lst_fb_idx].y_width == 0)
    {
        dealloc_raw_frame_buffers(cpi);
        alloc_raw_frame_buffers(cpi);
        vp8_alloc_compressor_data(cpi);
    }

    if (cpi->oxcf.fixed_q >= 0)
    {
        cpi->last_q[0] = cpi->oxcf.fixed_q;
        cpi->last_q[1] = cpi->oxcf.fixed_q;
    }

    cpi->Speed = cpi->oxcf.cpu_used;

    /* force allow_lag to 0 if lag_in_frames is 0 */
    if (cpi->oxcf.lag_in_frames == 0)
    {
        cpi->oxcf.allow_lag = 0;
    }
    /* Limit on lag buffers as these are not currently dynamically allocated */
    else if (cpi->oxcf.lag_in_frames > MAX_LAG_BUFFERS)
        cpi->oxcf.lag_in_frames = MAX_LAG_BUFFERS;

    /* YX Temp */
    cpi->alt_ref_source = NULL;
    cpi->is_src_frame_alt_ref = 0;

#if CONFIG_TEMPORAL_DENOISING
    if (cpi->oxcf.noise_sensitivity)
    {
        if (!cpi->denoiser.yv12_mc_running_avg.buffer_alloc)
        {
            int width = (cpi->oxcf.Width + 15) & ~15;
            int height = (cpi->oxcf.Height + 15) & ~15;
            vp8_denoiser_allocate(&cpi->denoiser, width, height);
        }
    }
#endif

#if 0
    /* Experimental RD Code */
    cpi->frame_distortion = 0;
    cpi->last_frame_distortion = 0;
#endif

}

#define M_LOG2_E 0.693147180559945309417
#define log2f(x) (log (x) / (float) M_LOG2_E)
static void cal_mvsadcosts(int *mvsadcost[2])
{
    int i = 1;

    mvsadcost [0] [0] = 300;
    mvsadcost [1] [0] = 300;

    do
    {
        double z = 256 * (2 * (log2f(8 * i) + .6));
        mvsadcost [0][i] = (int) z;
        mvsadcost [1][i] = (int) z;
        mvsadcost [0][-i] = (int) z;
        mvsadcost [1][-i] = (int) z;
    }
    while (++i <= mvfp_max);
}
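
/* The SAD cost table built above grows logarithmically with motion vector
 * magnitude: cost(i) = 256 * 2 * (log2(8 * i) + 0.6). Worked example
 * (illustrative only): i = 1 gives 256 * 2 * 3.6 = 1843, and doubling i
 * always adds a constant 512 to the cost.
 */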

struct VP8_COMP* vp8_create_compressor(VP8_CONFIG *oxcf)
{
    int i;

    VP8_COMP *cpi;
    VP8_COMMON *cm;

    cpi = vpx_memalign(32, sizeof(VP8_COMP));
    /* Check that the CPI instance is valid */
    if (!cpi)
        return 0;

    cm = &cpi->common;

    vpx_memset(cpi, 0, sizeof(VP8_COMP));

    if (setjmp(cm->error.jmp))
    {
        cpi->common.error.setjmp = 0;
        vp8_remove_compressor(&cpi);
        return 0;
    }

    cpi->common.error.setjmp = 1;

    CHECK_MEM_ERROR(cpi->mb.ss, vpx_calloc(sizeof(search_site), (MAX_MVSEARCH_STEPS * 8) + 1));

    vp8_create_common(&cpi->common);

    init_config(cpi, oxcf);

    memcpy(cpi->base_skip_false_prob, vp8cx_base_skip_false_prob, sizeof(vp8cx_base_skip_false_prob));
    cpi->common.current_video_frame = 0;
    cpi->temporal_pattern_counter = 0;
    cpi->kf_overspend_bits = 0;
    cpi->kf_bitrate_adjustment = 0;
    cpi->frames_till_gf_update_due = 0;
    cpi->gf_overspend_bits = 0;
    cpi->non_gf_bitrate_adjustment = 0;
    cpi->prob_last_coded = 128;
    cpi->prob_gf_coded = 128;
    cpi->prob_intra_coded = 63;

    /* Prime the recent reference frame usage counters.
     * Hereafter they will be maintained as a sort of moving average
     */
    cpi->recent_ref_frame_usage[INTRA_FRAME] = 1;
    cpi->recent_ref_frame_usage[LAST_FRAME] = 1;
    cpi->recent_ref_frame_usage[GOLDEN_FRAME] = 1;
    cpi->recent_ref_frame_usage[ALTREF_FRAME] = 1;

    /* Set reference frame sign bias for ALTREF frame to 1 (for now) */
    cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 1;

    cpi->twopass.gf_decay_rate = 0;
    cpi->baseline_gf_interval = DEFAULT_GF_INTERVAL;

    cpi->gold_is_last = 0;
    cpi->alt_is_last = 0;
    cpi->gold_is_alt = 0;

    cpi->active_map_enabled = 0;

#if 0
    /* Experimental code for lagged and one pass */
    /* Initialise one_pass GF frames stats */
    /* Update stats used for GF selection */
    if (cpi->pass == 0)
    {
        cpi->one_pass_frame_index = 0;

        for (i = 0; i < MAX_LAG_BUFFERS; i++)
        {
            cpi->one_pass_frame_stats[i].frames_so_far = 0;
            cpi->one_pass_frame_stats[i].frame_intra_error = 0.0;
            cpi->one_pass_frame_stats[i].frame_coded_error = 0.0;
            cpi->one_pass_frame_stats[i].frame_pcnt_inter = 0.0;
            cpi->one_pass_frame_stats[i].frame_pcnt_motion = 0.0;
            cpi->one_pass_frame_stats[i].frame_mvr = 0.0;
            cpi->one_pass_frame_stats[i].frame_mvr_abs = 0.0;
            cpi->one_pass_frame_stats[i].frame_mvc = 0.0;
            cpi->one_pass_frame_stats[i].frame_mvc_abs = 0.0;
        }
    }
#endif

    /* Should we use the cyclic refresh method.
     * Currently this is tied to error resilient mode
     */
    cpi->cyclic_refresh_mode_enabled = cpi->oxcf.error_resilient_mode;
    cpi->cyclic_refresh_mode_max_mbs_perframe = (cpi->common.mb_rows * cpi->common.mb_cols) / 5;
    cpi->cyclic_refresh_mode_index = 0;
    cpi->cyclic_refresh_q = 32;

    if (cpi->cyclic_refresh_mode_enabled)
    {
        CHECK_MEM_ERROR(cpi->cyclic_refresh_map, vpx_calloc((cpi->common.mb_rows * cpi->common.mb_cols), 1));
    }
    else
        cpi->cyclic_refresh_map = (signed char *) NULL;

#ifdef VP8_ENTROPY_STATS
    init_context_counters();
#endif

    /*Initialize the feed-forward activity masking.*/
    cpi->activity_avg = 90<<12;

    /* Give a sensible default for the first frame. */
*/ 1886 cpi->frames_since_key = 8; 1887 cpi->key_frame_frequency = cpi->oxcf.key_freq; 1888 cpi->this_key_frame_forced = 0; 1889 cpi->next_key_frame_forced = 0; 1890 1891 cpi->source_alt_ref_pending = 0; 1892 cpi->source_alt_ref_active = 0; 1893 cpi->common.refresh_alt_ref_frame = 0; 1894 1895 cpi->b_calculate_psnr = CONFIG_INTERNAL_STATS; 1896 #if CONFIG_INTERNAL_STATS 1897 cpi->b_calculate_ssimg = 0; 1898 1899 cpi->count = 0; 1900 cpi->bytes = 0; 1901 1902 if (cpi->b_calculate_psnr) 1903 { 1904 cpi->total_sq_error = 0.0; 1905 cpi->total_sq_error2 = 0.0; 1906 cpi->total_y = 0.0; 1907 cpi->total_u = 0.0; 1908 cpi->total_v = 0.0; 1909 cpi->total = 0.0; 1910 cpi->totalp_y = 0.0; 1911 cpi->totalp_u = 0.0; 1912 cpi->totalp_v = 0.0; 1913 cpi->totalp = 0.0; 1914 cpi->tot_recode_hits = 0; 1915 cpi->summed_quality = 0; 1916 cpi->summed_weights = 0; 1917 } 1918 1919 if (cpi->b_calculate_ssimg) 1920 { 1921 cpi->total_ssimg_y = 0; 1922 cpi->total_ssimg_u = 0; 1923 cpi->total_ssimg_v = 0; 1924 cpi->total_ssimg_all = 0; 1925 } 1926 1927 #endif 1928 1929 cpi->first_time_stamp_ever = 0x7FFFFFFF; 1930 1931 cpi->frames_till_gf_update_due = 0; 1932 cpi->key_frame_count = 1; 1933 1934 cpi->ni_av_qi = cpi->oxcf.worst_allowed_q; 1935 cpi->ni_tot_qi = 0; 1936 cpi->ni_frames = 0; 1937 cpi->total_byte_count = 0; 1938 1939 cpi->drop_frame = 0; 1940 1941 cpi->rate_correction_factor = 1.0; 1942 cpi->key_frame_rate_correction_factor = 1.0; 1943 cpi->gf_rate_correction_factor = 1.0; 1944 cpi->twopass.est_max_qcorrection_factor = 1.0; 1945 1946 for (i = 0; i < KEY_FRAME_CONTEXT; i++) 1947 { 1948 cpi->prior_key_frame_distance[i] = (int)cpi->output_framerate; 1949 } 1950 1951 #ifdef OUTPUT_YUV_SRC 1952 yuv_file = fopen("bd.yuv", "ab"); 1953 #endif 1954 1955 #if 0 1956 framepsnr = fopen("framepsnr.stt", "a"); 1957 kf_list = fopen("kf_list.stt", "w"); 1958 #endif 1959 1960 cpi->output_pkt_list = oxcf->output_pkt_list; 1961 1962 #if !(CONFIG_REALTIME_ONLY) 1963 1964 if (cpi->pass == 1) 1965 { 1966 vp8_init_first_pass(cpi); 1967 } 1968 else if (cpi->pass == 2) 1969 { 1970 size_t packet_sz = sizeof(FIRSTPASS_STATS); 1971 int packets = (int)(oxcf->two_pass_stats_in.sz / packet_sz); 1972 1973 cpi->twopass.stats_in_start = oxcf->two_pass_stats_in.buf; 1974 cpi->twopass.stats_in = cpi->twopass.stats_in_start; 1975 cpi->twopass.stats_in_end = (void*)((char *)cpi->twopass.stats_in 1976 + (packets - 1) * packet_sz); 1977 vp8_init_second_pass(cpi); 1978 } 1979 1980 #endif 1981 1982 if (cpi->compressor_speed == 2) 1983 { 1984 cpi->avg_encode_time = 0; 1985 cpi->avg_pick_mode_time = 0; 1986 } 1987 1988 vp8_set_speed_features(cpi); 1989 1990 /* Set starting values of RD threshold multipliers (128 = *1) */ 1991 for (i = 0; i < MAX_MODES; i++) 1992 { 1993 cpi->mb.rd_thresh_mult[i] = 128; 1994 } 1995 1996 #ifdef VP8_ENTROPY_STATS 1997 init_mv_ref_counts(); 1998 #endif 1999 2000 #if CONFIG_MULTITHREAD 2001 if(vp8cx_create_encoder_threads(cpi)) 2002 { 2003 vp8_remove_compressor(&cpi); 2004 return 0; 2005 } 2006 #endif 2007 2008 cpi->fn_ptr[BLOCK_16X16].sdf = vp8_sad16x16; 2009 cpi->fn_ptr[BLOCK_16X16].vf = vp8_variance16x16; 2010 cpi->fn_ptr[BLOCK_16X16].svf = vp8_sub_pixel_variance16x16; 2011 cpi->fn_ptr[BLOCK_16X16].svf_halfpix_h = vp8_variance_halfpixvar16x16_h; 2012 cpi->fn_ptr[BLOCK_16X16].svf_halfpix_v = vp8_variance_halfpixvar16x16_v; 2013 cpi->fn_ptr[BLOCK_16X16].svf_halfpix_hv = vp8_variance_halfpixvar16x16_hv; 2014 cpi->fn_ptr[BLOCK_16X16].sdx3f = vp8_sad16x16x3; 2015 cpi->fn_ptr[BLOCK_16X16].sdx8f = vp8_sad16x16x8; 2016 
cpi->fn_ptr[BLOCK_16X16].sdx4df = vp8_sad16x16x4d; 2017 2018 cpi->fn_ptr[BLOCK_16X8].sdf = vp8_sad16x8; 2019 cpi->fn_ptr[BLOCK_16X8].vf = vp8_variance16x8; 2020 cpi->fn_ptr[BLOCK_16X8].svf = vp8_sub_pixel_variance16x8; 2021 cpi->fn_ptr[BLOCK_16X8].svf_halfpix_h = NULL; 2022 cpi->fn_ptr[BLOCK_16X8].svf_halfpix_v = NULL; 2023 cpi->fn_ptr[BLOCK_16X8].svf_halfpix_hv = NULL; 2024 cpi->fn_ptr[BLOCK_16X8].sdx3f = vp8_sad16x8x3; 2025 cpi->fn_ptr[BLOCK_16X8].sdx8f = vp8_sad16x8x8; 2026 cpi->fn_ptr[BLOCK_16X8].sdx4df = vp8_sad16x8x4d; 2027 2028 cpi->fn_ptr[BLOCK_8X16].sdf = vp8_sad8x16; 2029 cpi->fn_ptr[BLOCK_8X16].vf = vp8_variance8x16; 2030 cpi->fn_ptr[BLOCK_8X16].svf = vp8_sub_pixel_variance8x16; 2031 cpi->fn_ptr[BLOCK_8X16].svf_halfpix_h = NULL; 2032 cpi->fn_ptr[BLOCK_8X16].svf_halfpix_v = NULL; 2033 cpi->fn_ptr[BLOCK_8X16].svf_halfpix_hv = NULL; 2034 cpi->fn_ptr[BLOCK_8X16].sdx3f = vp8_sad8x16x3; 2035 cpi->fn_ptr[BLOCK_8X16].sdx8f = vp8_sad8x16x8; 2036 cpi->fn_ptr[BLOCK_8X16].sdx4df = vp8_sad8x16x4d; 2037 2038 cpi->fn_ptr[BLOCK_8X8].sdf = vp8_sad8x8; 2039 cpi->fn_ptr[BLOCK_8X8].vf = vp8_variance8x8; 2040 cpi->fn_ptr[BLOCK_8X8].svf = vp8_sub_pixel_variance8x8; 2041 cpi->fn_ptr[BLOCK_8X8].svf_halfpix_h = NULL; 2042 cpi->fn_ptr[BLOCK_8X8].svf_halfpix_v = NULL; 2043 cpi->fn_ptr[BLOCK_8X8].svf_halfpix_hv = NULL; 2044 cpi->fn_ptr[BLOCK_8X8].sdx3f = vp8_sad8x8x3; 2045 cpi->fn_ptr[BLOCK_8X8].sdx8f = vp8_sad8x8x8; 2046 cpi->fn_ptr[BLOCK_8X8].sdx4df = vp8_sad8x8x4d; 2047 2048 cpi->fn_ptr[BLOCK_4X4].sdf = vp8_sad4x4; 2049 cpi->fn_ptr[BLOCK_4X4].vf = vp8_variance4x4; 2050 cpi->fn_ptr[BLOCK_4X4].svf = vp8_sub_pixel_variance4x4; 2051 cpi->fn_ptr[BLOCK_4X4].svf_halfpix_h = NULL; 2052 cpi->fn_ptr[BLOCK_4X4].svf_halfpix_v = NULL; 2053 cpi->fn_ptr[BLOCK_4X4].svf_halfpix_hv = NULL; 2054 cpi->fn_ptr[BLOCK_4X4].sdx3f = vp8_sad4x4x3; 2055 cpi->fn_ptr[BLOCK_4X4].sdx8f = vp8_sad4x4x8; 2056 cpi->fn_ptr[BLOCK_4X4].sdx4df = vp8_sad4x4x4d; 2057 2058 #if ARCH_X86 || ARCH_X86_64 2059 cpi->fn_ptr[BLOCK_16X16].copymem = vp8_copy32xn; 2060 cpi->fn_ptr[BLOCK_16X8].copymem = vp8_copy32xn; 2061 cpi->fn_ptr[BLOCK_8X16].copymem = vp8_copy32xn; 2062 cpi->fn_ptr[BLOCK_8X8].copymem = vp8_copy32xn; 2063 cpi->fn_ptr[BLOCK_4X4].copymem = vp8_copy32xn; 2064 #endif 2065 2066 cpi->full_search_sad = vp8_full_search_sad; 2067 cpi->diamond_search_sad = vp8_diamond_search_sad; 2068 cpi->refining_search_sad = vp8_refining_search_sad; 2069 2070 /* make sure frame 1 is okay */ 2071 cpi->mb.error_bins[0] = cpi->common.MBs; 2072 2073 /* vp8cx_init_quantizer() is first called here. Add check in 2074 * vp8cx_frame_init_quantizer() so that vp8cx_init_quantizer is only 2075 * called later when needed. This will avoid unnecessary calls of 2076 * vp8cx_init_quantizer() for every frame. 2077 */ 2078 vp8cx_init_quantizer(cpi); 2079 2080 vp8_loop_filter_init(cm); 2081 2082 cpi->common.error.setjmp = 0; 2083 2084 #if CONFIG_MULTI_RES_ENCODING 2085 2086 /* Calculate # of MBs in a row in lower-resolution level image. 
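* (mr_encoder_id is non-zero for the encoders working above the lowest
* resolution in a multi-resolution set; they need the lower-resolution
* macroblock geometry so that co-located macroblock information can be
* looked up later.)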
*/ 2087 if (cpi->oxcf.mr_encoder_id > 0) 2088 vp8_cal_low_res_mb_cols(cpi); 2089 2090 #endif 2091 2092 /* setup RD costs to MACROBLOCK struct */ 2093 2094 cpi->mb.mvcost[0] = &cpi->rd_costs.mvcosts[0][mv_max+1]; 2095 cpi->mb.mvcost[1] = &cpi->rd_costs.mvcosts[1][mv_max+1]; 2096 cpi->mb.mvsadcost[0] = &cpi->rd_costs.mvsadcosts[0][mvfp_max+1]; 2097 cpi->mb.mvsadcost[1] = &cpi->rd_costs.mvsadcosts[1][mvfp_max+1]; 2098 2099 cal_mvsadcosts(cpi->mb.mvsadcost); 2100 2101 cpi->mb.mbmode_cost = cpi->rd_costs.mbmode_cost; 2102 cpi->mb.intra_uv_mode_cost = cpi->rd_costs.intra_uv_mode_cost; 2103 cpi->mb.bmode_costs = cpi->rd_costs.bmode_costs; 2104 cpi->mb.inter_bmode_costs = cpi->rd_costs.inter_bmode_costs; 2105 cpi->mb.token_costs = cpi->rd_costs.token_costs; 2106 2107 /* setup block ptrs & offsets */ 2108 vp8_setup_block_ptrs(&cpi->mb); 2109 vp8_setup_block_dptrs(&cpi->mb.e_mbd); 2110 2111 return cpi; 2112 } 2113 2114 2115 void vp8_remove_compressor(VP8_COMP **ptr) 2116 { 2117 VP8_COMP *cpi = *ptr; 2118 2119 if (!cpi) 2120 return; 2121 2122 if (cpi && (cpi->common.current_video_frame > 0)) 2123 { 2124 #if !(CONFIG_REALTIME_ONLY) 2125 2126 if (cpi->pass == 2) 2127 { 2128 vp8_end_second_pass(cpi); 2129 } 2130 2131 #endif 2132 2133 #ifdef VP8_ENTROPY_STATS 2134 print_context_counters(); 2135 print_tree_update_probs(); 2136 print_mode_context(); 2137 #endif 2138 2139 #if CONFIG_INTERNAL_STATS 2140 2141 if (cpi->pass != 1) 2142 { 2143 FILE *f = fopen("opsnr.stt", "a"); 2144 double time_encoded = (cpi->last_end_time_stamp_seen 2145 - cpi->first_time_stamp_ever) / 10000000.000; 2146 double total_encode_time = (cpi->time_receive_data + 2147 cpi->time_compress_data) / 1000.000; 2148 double dr = (double)cpi->bytes * 8.0 / 1000.0 / time_encoded; 2149 2150 if (cpi->b_calculate_psnr) 2151 { 2152 YV12_BUFFER_CONFIG *lst_yv12 = 2153 &cpi->common.yv12_fb[cpi->common.lst_fb_idx]; 2154 2155 if (cpi->oxcf.number_of_layers > 1) 2156 { 2157 int i; 2158 2159 fprintf(f, "Layer\tBitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\t" 2160 "GLPsnrP\tVPXSSIM\t\n"); 2161 for (i=0; i<(int)cpi->oxcf.number_of_layers; i++) 2162 { 2163 double dr = (double)cpi->bytes_in_layer[i] * 2164 8.0 / 1000.0 / time_encoded; 2165 double samples = 3.0 / 2 * cpi->frames_in_layer[i] * 2166 lst_yv12->y_width * lst_yv12->y_height; 2167 double total_psnr = vp8_mse2psnr(samples, 255.0, 2168 cpi->total_error2[i]); 2169 double total_psnr2 = vp8_mse2psnr(samples, 255.0, 2170 cpi->total_error2_p[i]); 2171 double total_ssim = 100 * pow(cpi->sum_ssim[i] / 2172 cpi->sum_weights[i], 8.0); 2173 2174 fprintf(f, "%5d\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t" 2175 "%7.3f\t%7.3f\n", 2176 i, dr, 2177 cpi->sum_psnr[i] / cpi->frames_in_layer[i], 2178 total_psnr, 2179 cpi->sum_psnr_p[i] / cpi->frames_in_layer[i], 2180 total_psnr2, total_ssim); 2181 } 2182 } 2183 else 2184 { 2185 double samples = 3.0 / 2 * cpi->count * 2186 lst_yv12->y_width * lst_yv12->y_height; 2187 double total_psnr = vp8_mse2psnr(samples, 255.0, 2188 cpi->total_sq_error); 2189 double total_psnr2 = vp8_mse2psnr(samples, 255.0, 2190 cpi->total_sq_error2); 2191 double total_ssim = 100 * pow(cpi->summed_quality / 2192 cpi->summed_weights, 8.0); 2193 2194 fprintf(f, "Bitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\t" 2195 "GLPsnrP\tVPXSSIM\t Time(us)\n"); 2196 fprintf(f, "%7.3f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t" 2197 "%7.3f\t%8.0f\n", 2198 dr, cpi->total / cpi->count, total_psnr, 2199 cpi->totalp / cpi->count, total_psnr2, 2200 total_ssim, total_encode_time); 2201 } 2202 } 2203 2204 if (cpi->b_calculate_ssimg) 2205 { 2206 if 
(cpi->oxcf.number_of_layers > 1) 2207 { 2208 int i; 2209 2210 fprintf(f, "Layer\tBitRate\tSSIM_Y\tSSIM_U\tSSIM_V\tSSIM_A\t" 2211 "Time(us)\n"); 2212 for (i=0; i<(int)cpi->oxcf.number_of_layers; i++) 2213 { 2214 double dr = (double)cpi->bytes_in_layer[i] * 2215 8.0 / 1000.0 / time_encoded; 2216 fprintf(f, "%5d\t%7.3f\t%6.4f\t" 2217 "%6.4f\t%6.4f\t%6.4f\t%8.0f\n", 2218 i, dr, 2219 cpi->total_ssimg_y_in_layer[i] / 2220 cpi->frames_in_layer[i], 2221 cpi->total_ssimg_u_in_layer[i] / 2222 cpi->frames_in_layer[i], 2223 cpi->total_ssimg_v_in_layer[i] / 2224 cpi->frames_in_layer[i], 2225 cpi->total_ssimg_all_in_layer[i] / 2226 cpi->frames_in_layer[i], 2227 total_encode_time); 2228 } 2229 } 2230 else 2231 { 2232 fprintf(f, "BitRate\tSSIM_Y\tSSIM_U\tSSIM_V\tSSIM_A\t" 2233 "Time(us)\n"); 2234 fprintf(f, "%7.3f\t%6.4f\t%6.4f\t%6.4f\t%6.4f\t%8.0f\n", dr, 2235 cpi->total_ssimg_y / cpi->count, 2236 cpi->total_ssimg_u / cpi->count, 2237 cpi->total_ssimg_v / cpi->count, 2238 cpi->total_ssimg_all / cpi->count, total_encode_time); 2239 } 2240 } 2241 2242 fclose(f); 2243 #if 0 2244 f = fopen("qskip.stt", "a"); 2245 fprintf(f, "minq:%d -maxq:%d skiptrue:skipfalse = %d:%d\n", cpi->oxcf.best_allowed_q, cpi->oxcf.worst_allowed_q, skiptruecount, skipfalsecount); 2246 fclose(f); 2247 #endif 2248 2249 } 2250 2251 #endif 2252 2253 2254 #ifdef SPEEDSTATS 2255 2256 if (cpi->compressor_speed == 2) 2257 { 2258 int i; 2259 FILE *f = fopen("cxspeed.stt", "a"); 2260 cnt_pm /= cpi->common.MBs; 2261 2262 for (i = 0; i < 16; i++) 2263 fprintf(f, "%5d", frames_at_speed[i]); 2264 2265 fprintf(f, "\n"); 2266 fclose(f); 2267 } 2268 2269 #endif 2270 2271 2272 #ifdef MODE_STATS 2273 { 2274 extern int count_mb_seg[4]; 2275 FILE *f = fopen("modes.stt", "a"); 2276 double dr = (double)cpi->framerate * (double)bytes * (double)8 / (double)count / (double)1000 ; 2277 fprintf(f, "intra_mode in Intra Frames:\n"); 2278 fprintf(f, "Y: %8d, %8d, %8d, %8d, %8d\n", y_modes[0], y_modes[1], y_modes[2], y_modes[3], y_modes[4]); 2279 fprintf(f, "UV:%8d, %8d, %8d, %8d\n", uv_modes[0], uv_modes[1], uv_modes[2], uv_modes[3]); 2280 fprintf(f, "B: "); 2281 { 2282 int i; 2283 2284 for (i = 0; i < 10; i++) 2285 fprintf(f, "%8d, ", b_modes[i]); 2286 2287 fprintf(f, "\n"); 2288 2289 } 2290 2291 fprintf(f, "Modes in Inter Frames:\n"); 2292 fprintf(f, "Y: %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d\n", 2293 inter_y_modes[0], inter_y_modes[1], inter_y_modes[2], inter_y_modes[3], inter_y_modes[4], 2294 inter_y_modes[5], inter_y_modes[6], inter_y_modes[7], inter_y_modes[8], inter_y_modes[9]); 2295 fprintf(f, "UV:%8d, %8d, %8d, %8d\n", inter_uv_modes[0], inter_uv_modes[1], inter_uv_modes[2], inter_uv_modes[3]); 2296 fprintf(f, "B: "); 2297 { 2298 int i; 2299 2300 for (i = 0; i < 15; i++) 2301 fprintf(f, "%8d, ", inter_b_modes[i]); 2302 2303 fprintf(f, "\n"); 2304 2305 } 2306 fprintf(f, "P:%8d, %8d, %8d, %8d\n", count_mb_seg[0], count_mb_seg[1], count_mb_seg[2], count_mb_seg[3]); 2307 fprintf(f, "PB:%8d, %8d, %8d, %8d\n", inter_b_modes[LEFT4X4], inter_b_modes[ABOVE4X4], inter_b_modes[ZERO4X4], inter_b_modes[NEW4X4]); 2308 2309 2310 2311 fclose(f); 2312 } 2313 #endif 2314 2315 #ifdef VP8_ENTROPY_STATS 2316 { 2317 int i, j, k; 2318 FILE *fmode = fopen("modecontext.c", "w"); 2319 2320 fprintf(fmode, "\n#include \"entropymode.h\"\n\n"); 2321 fprintf(fmode, "const unsigned int vp8_kf_default_bmode_counts "); 2322 fprintf(fmode, "[VP8_BINTRAMODES] [VP8_BINTRAMODES] [VP8_BINTRAMODES] =\n{\n"); 2323 2324 for (i = 0; i < 10; i++) 2325 { 2326 2327 fprintf(fmode, " { /* 
Above Mode : %d */\n", i); 2328 2329 for (j = 0; j < 10; j++) 2330 { 2331 2332 fprintf(fmode, " {"); 2333 2334 for (k = 0; k < 10; k++) 2335 { 2336 if (!intra_mode_stats[i][j][k]) 2337 fprintf(fmode, " %5d, ", 1); 2338 else 2339 fprintf(fmode, " %5d, ", intra_mode_stats[i][j][k]); 2340 } 2341 2342 fprintf(fmode, "}, /* left_mode %d */\n", j); 2343 2344 } 2345 2346 fprintf(fmode, " },\n"); 2347 2348 } 2349 2350 fprintf(fmode, "};\n"); 2351 fclose(fmode); 2352 } 2353 #endif 2354 2355 2356 #if defined(SECTIONBITS_OUTPUT) 2357 2358 if (0) 2359 { 2360 int i; 2361 FILE *f = fopen("tokenbits.stt", "a"); 2362 2363 for (i = 0; i < 28; i++) 2364 fprintf(f, "%8d", (int)(Sectionbits[i] / 256)); 2365 2366 fprintf(f, "\n"); 2367 fclose(f); 2368 } 2369 2370 #endif 2371 2372 #if 0 2373 { 2374 printf("\n_pick_loop_filter_level:%d\n", cpi->time_pick_lpf / 1000); 2375 printf("\n_frames recive_data encod_mb_row compress_frame Total\n"); 2376 printf("%6d %10ld %10ld %10ld %10ld\n", cpi->common.current_video_frame, cpi->time_receive_data / 1000, cpi->time_encode_mb_row / 1000, cpi->time_compress_data / 1000, (cpi->time_receive_data + cpi->time_compress_data) / 1000); 2377 } 2378 #endif 2379 2380 } 2381 2382 #if CONFIG_MULTITHREAD 2383 vp8cx_remove_encoder_threads(cpi); 2384 #endif 2385 2386 #if CONFIG_TEMPORAL_DENOISING 2387 vp8_denoiser_free(&cpi->denoiser); 2388 #endif 2389 dealloc_compressor_data(cpi); 2390 vpx_free(cpi->mb.ss); 2391 vpx_free(cpi->tok); 2392 vpx_free(cpi->cyclic_refresh_map); 2393 2394 vp8_remove_common(&cpi->common); 2395 vpx_free(cpi); 2396 *ptr = 0; 2397 2398 #ifdef OUTPUT_YUV_SRC 2399 fclose(yuv_file); 2400 #endif 2401 2402 #if 0 2403 2404 if (keyfile) 2405 fclose(keyfile); 2406 2407 if (framepsnr) 2408 fclose(framepsnr); 2409 2410 if (kf_list) 2411 fclose(kf_list); 2412 2413 #endif 2414 2415 } 2416 2417 2418 static uint64_t calc_plane_error(unsigned char *orig, int orig_stride, 2419 unsigned char *recon, int recon_stride, 2420 unsigned int cols, unsigned int rows) 2421 { 2422 unsigned int row, col; 2423 uint64_t total_sse = 0; 2424 int diff; 2425 2426 for (row = 0; row + 16 <= rows; row += 16) 2427 { 2428 for (col = 0; col + 16 <= cols; col += 16) 2429 { 2430 unsigned int sse; 2431 2432 vp8_mse16x16(orig + col, orig_stride, 2433 recon + col, recon_stride, 2434 &sse); 2435 total_sse += sse; 2436 } 2437 2438 /* Handle odd-sized width */ 2439 if (col < cols) 2440 { 2441 unsigned int border_row, border_col; 2442 unsigned char *border_orig = orig; 2443 unsigned char *border_recon = recon; 2444 2445 for (border_row = 0; border_row < 16; border_row++) 2446 { 2447 for (border_col = col; border_col < cols; border_col++) 2448 { 2449 diff = border_orig[border_col] - border_recon[border_col]; 2450 total_sse += diff * diff; 2451 } 2452 2453 border_orig += orig_stride; 2454 border_recon += recon_stride; 2455 } 2456 } 2457 2458 orig += orig_stride * 16; 2459 recon += recon_stride * 16; 2460 } 2461 2462 /* Handle odd-sized height */ 2463 for (; row < rows; row++) 2464 { 2465 for (col = 0; col < cols; col++) 2466 { 2467 diff = orig[col] - recon[col]; 2468 total_sse += diff * diff; 2469 } 2470 2471 orig += orig_stride; 2472 recon += recon_stride; 2473 } 2474 2475 vp8_clear_system_state(); 2476 return total_sse; 2477 } 2478 2479 2480 static void generate_psnr_packet(VP8_COMP *cpi) 2481 { 2482 YV12_BUFFER_CONFIG *orig = cpi->Source; 2483 YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show; 2484 struct vpx_codec_cx_pkt pkt; 2485 uint64_t sse; 2486 int i; 2487 unsigned int width = cpi->common.Width; 2488 
unsigned int height = cpi->common.Height; 2489 2490 pkt.kind = VPX_CODEC_PSNR_PKT; 2491 sse = calc_plane_error(orig->y_buffer, orig->y_stride, 2492 recon->y_buffer, recon->y_stride, 2493 width, height); 2494 pkt.data.psnr.sse[0] = sse; 2495 pkt.data.psnr.sse[1] = sse; 2496 pkt.data.psnr.samples[0] = width * height; 2497 pkt.data.psnr.samples[1] = width * height; 2498 2499 width = (width + 1) / 2; 2500 height = (height + 1) / 2; 2501 2502 sse = calc_plane_error(orig->u_buffer, orig->uv_stride, 2503 recon->u_buffer, recon->uv_stride, 2504 width, height); 2505 pkt.data.psnr.sse[0] += sse; 2506 pkt.data.psnr.sse[2] = sse; 2507 pkt.data.psnr.samples[0] += width * height; 2508 pkt.data.psnr.samples[2] = width * height; 2509 2510 sse = calc_plane_error(orig->v_buffer, orig->uv_stride, 2511 recon->v_buffer, recon->uv_stride, 2512 width, height); 2513 pkt.data.psnr.sse[0] += sse; 2514 pkt.data.psnr.sse[3] = sse; 2515 pkt.data.psnr.samples[0] += width * height; 2516 pkt.data.psnr.samples[3] = width * height; 2517 2518 for (i = 0; i < 4; i++) 2519 pkt.data.psnr.psnr[i] = vp8_mse2psnr(pkt.data.psnr.samples[i], 255.0, 2520 (double)(pkt.data.psnr.sse[i])); 2521 2522 vpx_codec_pkt_list_add(cpi->output_pkt_list, &pkt); 2523 } 2524 2525 2526 int vp8_use_as_reference(VP8_COMP *cpi, int ref_frame_flags) 2527 { 2528 if (ref_frame_flags > 7) 2529 return -1 ; 2530 2531 cpi->ref_frame_flags = ref_frame_flags; 2532 return 0; 2533 } 2534 int vp8_update_reference(VP8_COMP *cpi, int ref_frame_flags) 2535 { 2536 if (ref_frame_flags > 7) 2537 return -1 ; 2538 2539 cpi->common.refresh_golden_frame = 0; 2540 cpi->common.refresh_alt_ref_frame = 0; 2541 cpi->common.refresh_last_frame = 0; 2542 2543 if (ref_frame_flags & VP8_LAST_FRAME) 2544 cpi->common.refresh_last_frame = 1; 2545 2546 if (ref_frame_flags & VP8_GOLD_FRAME) 2547 cpi->common.refresh_golden_frame = 1; 2548 2549 if (ref_frame_flags & VP8_ALTR_FRAME) 2550 cpi->common.refresh_alt_ref_frame = 1; 2551 2552 return 0; 2553 } 2554 2555 int vp8_get_reference(VP8_COMP *cpi, enum vpx_ref_frame_type ref_frame_flag, YV12_BUFFER_CONFIG *sd) 2556 { 2557 VP8_COMMON *cm = &cpi->common; 2558 int ref_fb_idx; 2559 2560 if (ref_frame_flag == VP8_LAST_FRAME) 2561 ref_fb_idx = cm->lst_fb_idx; 2562 else if (ref_frame_flag == VP8_GOLD_FRAME) 2563 ref_fb_idx = cm->gld_fb_idx; 2564 else if (ref_frame_flag == VP8_ALTR_FRAME) 2565 ref_fb_idx = cm->alt_fb_idx; 2566 else 2567 return -1; 2568 2569 vp8_yv12_copy_frame(&cm->yv12_fb[ref_fb_idx], sd); 2570 2571 return 0; 2572 } 2573 int vp8_set_reference(VP8_COMP *cpi, enum vpx_ref_frame_type ref_frame_flag, YV12_BUFFER_CONFIG *sd) 2574 { 2575 VP8_COMMON *cm = &cpi->common; 2576 2577 int ref_fb_idx; 2578 2579 if (ref_frame_flag == VP8_LAST_FRAME) 2580 ref_fb_idx = cm->lst_fb_idx; 2581 else if (ref_frame_flag == VP8_GOLD_FRAME) 2582 ref_fb_idx = cm->gld_fb_idx; 2583 else if (ref_frame_flag == VP8_ALTR_FRAME) 2584 ref_fb_idx = cm->alt_fb_idx; 2585 else 2586 return -1; 2587 2588 vp8_yv12_copy_frame(sd, &cm->yv12_fb[ref_fb_idx]); 2589 2590 return 0; 2591 } 2592 int vp8_update_entropy(VP8_COMP *cpi, int update) 2593 { 2594 VP8_COMMON *cm = &cpi->common; 2595 cm->refresh_entropy_probs = update; 2596 2597 return 0; 2598 } 2599 2600 2601 #if OUTPUT_YUV_SRC 2602 void vp8_write_yuv_frame(const char *name, YV12_BUFFER_CONFIG *s) 2603 { 2604 FILE *yuv_file = fopen(name, "ab"); 2605 unsigned char *src = s->y_buffer; 2606 int h = s->y_height; 2607 2608 do 2609 { 2610 fwrite(src, s->y_width, 1, yuv_file); 2611 src += s->y_stride; 2612 } 2613 while (--h); 
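/* The luma plane has been written row by row above; the two half-resolution
 * chroma planes (4:2:0 layout) follow in the same raw planar order.
 */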
2614 2615 src = s->u_buffer; 2616 h = s->uv_height; 2617 2618 do 2619 { 2620 fwrite(src, s->uv_width, 1, yuv_file); 2621 src += s->uv_stride; 2622 } 2623 while (--h); 2624 2625 src = s->v_buffer; 2626 h = s->uv_height; 2627 2628 do 2629 { 2630 fwrite(src, s->uv_width, 1, yuv_file); 2631 src += s->uv_stride; 2632 } 2633 while (--h); 2634 2635 fclose(yuv_file); 2636 } 2637 #endif 2638 2639 2640 static void scale_and_extend_source(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi) 2641 { 2642 VP8_COMMON *cm = &cpi->common; 2643 2644 /* are we resizing the image */ 2645 if (cm->horiz_scale != 0 || cm->vert_scale != 0) 2646 { 2647 #if CONFIG_SPATIAL_RESAMPLING 2648 int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs); 2649 int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs); 2650 int tmp_height; 2651 2652 if (cm->vert_scale == 3) 2653 tmp_height = 9; 2654 else 2655 tmp_height = 11; 2656 2657 Scale2Ratio(cm->horiz_scale, &hr, &hs); 2658 Scale2Ratio(cm->vert_scale, &vr, &vs); 2659 2660 vpx_scale_frame(sd, &cpi->scaled_source, cm->temp_scale_frame.y_buffer, 2661 tmp_height, hs, hr, vs, vr, 0); 2662 2663 vp8_yv12_extend_frame_borders(&cpi->scaled_source); 2664 cpi->Source = &cpi->scaled_source; 2665 #endif 2666 } 2667 else 2668 cpi->Source = sd; 2669 } 2670 2671 2672 static int resize_key_frame(VP8_COMP *cpi) 2673 { 2674 #if CONFIG_SPATIAL_RESAMPLING 2675 VP8_COMMON *cm = &cpi->common; 2676 2677 /* Do we need to apply resampling for one pass cbr. 2678 * In one pass this is more limited than in two pass cbr 2679 * The test and any change is only made one per key frame sequence 2680 */ 2681 if (cpi->oxcf.allow_spatial_resampling && (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER)) 2682 { 2683 int UNINITIALIZED_IS_SAFE(hr), UNINITIALIZED_IS_SAFE(hs); 2684 int UNINITIALIZED_IS_SAFE(vr), UNINITIALIZED_IS_SAFE(vs); 2685 int new_width, new_height; 2686 2687 /* If we are below the resample DOWN watermark then scale down a 2688 * notch. 2689 */ 2690 if (cpi->buffer_level < (cpi->oxcf.resample_down_water_mark * cpi->oxcf.optimal_buffer_level / 100)) 2691 { 2692 cm->horiz_scale = (cm->horiz_scale < ONETWO) ? cm->horiz_scale + 1 : ONETWO; 2693 cm->vert_scale = (cm->vert_scale < ONETWO) ? cm->vert_scale + 1 : ONETWO; 2694 } 2695 /* Should we now start scaling back up */ 2696 else if (cpi->buffer_level > (cpi->oxcf.resample_up_water_mark * cpi->oxcf.optimal_buffer_level / 100)) 2697 { 2698 cm->horiz_scale = (cm->horiz_scale > NORMAL) ? cm->horiz_scale - 1 : NORMAL; 2699 cm->vert_scale = (cm->vert_scale > NORMAL) ? 
cm->vert_scale - 1 : NORMAL; 2700 } 2701 2702 /* Get the new height and width */ 2703 Scale2Ratio(cm->horiz_scale, &hr, &hs); 2704 Scale2Ratio(cm->vert_scale, &vr, &vs); 2705 new_width = ((hs - 1) + (cpi->oxcf.Width * hr)) / hs; 2706 new_height = ((vs - 1) + (cpi->oxcf.Height * vr)) / vs; 2707 2708 /* If the image size has changed we need to reallocate the buffers 2709 * and resample the source image 2710 */ 2711 if ((cm->Width != new_width) || (cm->Height != new_height)) 2712 { 2713 cm->Width = new_width; 2714 cm->Height = new_height; 2715 vp8_alloc_compressor_data(cpi); 2716 scale_and_extend_source(cpi->un_scaled_source, cpi); 2717 return 1; 2718 } 2719 } 2720 2721 #endif 2722 return 0; 2723 } 2724 2725 2726 static void update_alt_ref_frame_stats(VP8_COMP *cpi) 2727 { 2728 VP8_COMMON *cm = &cpi->common; 2729 2730 /* Select an interval before next GF or altref */ 2731 if (!cpi->auto_gold) 2732 cpi->frames_till_gf_update_due = DEFAULT_GF_INTERVAL; 2733 2734 if ((cpi->pass != 2) && cpi->frames_till_gf_update_due) 2735 { 2736 cpi->current_gf_interval = cpi->frames_till_gf_update_due; 2737 2738 /* Set the bits per frame that we should try and recover in 2739 * subsequent inter frames to account for the extra GF spend... 2740 * note that this does not apply for GF updates that occur 2741 * coincident with a key frame as the extra cost of key frames is 2742 * dealt with elsewhere. 2743 */ 2744 cpi->gf_overspend_bits += cpi->projected_frame_size; 2745 cpi->non_gf_bitrate_adjustment = cpi->gf_overspend_bits / cpi->frames_till_gf_update_due; 2746 } 2747 2748 /* Update data structure that monitors level of reference to last GF */ 2749 vpx_memset(cpi->gf_active_flags, 1, (cm->mb_rows * cm->mb_cols)); 2750 cpi->gf_active_count = cm->mb_rows * cm->mb_cols; 2751 2752 /* A refresh on this frame means following frames don't refresh unless specified by the user */ 2753 cpi->frames_since_golden = 0; 2754 2755 /* Clear the alternate reference update pending flag. */ 2756 cpi->source_alt_ref_pending = 0; 2757 2758 /* Set the alternate reference frame active flag */ 2759 cpi->source_alt_ref_active = 1; 2760 2761 2762 } 2763 static void update_golden_frame_stats(VP8_COMP *cpi) 2764 { 2765 VP8_COMMON *cm = &cpi->common; 2766 2767 /* Update the Golden frame usage counts. */ 2768 if (cm->refresh_golden_frame) 2769 { 2770 /* Select an interval before next GF */ 2771 if (!cpi->auto_gold) 2772 cpi->frames_till_gf_update_due = DEFAULT_GF_INTERVAL; 2773 2774 if ((cpi->pass != 2) && (cpi->frames_till_gf_update_due > 0)) 2775 { 2776 cpi->current_gf_interval = cpi->frames_till_gf_update_due; 2777 2778 /* Set the bits per frame that we should try and recover in 2779 * subsequent inter frames to account for the extra GF spend... 2780 * note that this does not apply for GF updates that occur 2781 * coincident with a key frame as the extra cost of key frames 2782 * is dealt with elsewhere.
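* For example, if the accumulated GF overspend is 12000 bits and 8 frames
* remain until the next GF update, each intervening inter frame's target
* is trimmed by 12000 / 8 = 1500 bits (illustrative numbers only).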
2783 */ 2784 if ((cm->frame_type != KEY_FRAME) && !cpi->source_alt_ref_active) 2785 { 2786 /* Calculate GF bits to be recovered: 2787 * Projected size minus the average per-frame bits available for inter 2788 * frames over the clip as a whole 2789 */ 2790 cpi->gf_overspend_bits += (cpi->projected_frame_size - cpi->inter_frame_target); 2791 } 2792 2793 cpi->non_gf_bitrate_adjustment = cpi->gf_overspend_bits / cpi->frames_till_gf_update_due; 2794 2795 } 2796 2797 /* Update data structure that monitors level of reference to last GF */ 2798 vpx_memset(cpi->gf_active_flags, 1, (cm->mb_rows * cm->mb_cols)); 2799 cpi->gf_active_count = cm->mb_rows * cm->mb_cols; 2800 2801 /* A refresh on this frame means following frames don't refresh unless 2802 * specified by the user 2803 */ 2804 cm->refresh_golden_frame = 0; 2805 cpi->frames_since_golden = 0; 2806 2807 cpi->recent_ref_frame_usage[INTRA_FRAME] = 1; 2808 cpi->recent_ref_frame_usage[LAST_FRAME] = 1; 2809 cpi->recent_ref_frame_usage[GOLDEN_FRAME] = 1; 2810 cpi->recent_ref_frame_usage[ALTREF_FRAME] = 1; 2811 2812 /* ******** Fixed Q test code only ************ */ 2813 /* If we are going to use the ALT reference for the next group of 2814 * frames set a flag to say so. 2815 */ 2816 if (cpi->oxcf.fixed_q >= 0 && 2817 cpi->oxcf.play_alternate && !cpi->common.refresh_alt_ref_frame) 2818 { 2819 cpi->source_alt_ref_pending = 1; 2820 cpi->frames_till_gf_update_due = cpi->baseline_gf_interval; 2821 } 2822 2823 if (!cpi->source_alt_ref_pending) 2824 cpi->source_alt_ref_active = 0; 2825 2826 /* Decrement count down till next gf */ 2827 if (cpi->frames_till_gf_update_due > 0) 2828 cpi->frames_till_gf_update_due--; 2829 2830 } 2831 else if (!cpi->common.refresh_alt_ref_frame) 2832 { 2833 /* Decrement count down till next gf */ 2834 if (cpi->frames_till_gf_update_due > 0) 2835 cpi->frames_till_gf_update_due--; 2836 2837 if (cpi->frames_till_alt_ref_frame) 2838 cpi->frames_till_alt_ref_frame --; 2839 2840 cpi->frames_since_golden ++; 2841 2842 if (cpi->frames_since_golden > 1) 2843 { 2844 cpi->recent_ref_frame_usage[INTRA_FRAME] += 2845 cpi->mb.count_mb_ref_frame_usage[INTRA_FRAME]; 2846 cpi->recent_ref_frame_usage[LAST_FRAME] += 2847 cpi->mb.count_mb_ref_frame_usage[LAST_FRAME]; 2848 cpi->recent_ref_frame_usage[GOLDEN_FRAME] += 2849 cpi->mb.count_mb_ref_frame_usage[GOLDEN_FRAME]; 2850 cpi->recent_ref_frame_usage[ALTREF_FRAME] += 2851 cpi->mb.count_mb_ref_frame_usage[ALTREF_FRAME]; 2852 } 2853 } 2854 } 2855 2856 /* This function updates the reference frame probability estimates that 2857 * will be used during mode selection 2858 */ 2859 static void update_rd_ref_frame_probs(VP8_COMP *cpi) 2860 { 2861 VP8_COMMON *cm = &cpi->common; 2862 2863 const int *const rfct = cpi->mb.count_mb_ref_frame_usage; 2864 const int rf_intra = rfct[INTRA_FRAME]; 2865 const int rf_inter = rfct[LAST_FRAME] + rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME]; 2866 2867 if (cm->frame_type == KEY_FRAME) 2868 { 2869 cpi->prob_intra_coded = 255; 2870 cpi->prob_last_coded = 128; 2871 cpi->prob_gf_coded = 128; 2872 } 2873 else if (!(rf_intra + rf_inter)) 2874 { 2875 cpi->prob_intra_coded = 63; 2876 cpi->prob_last_coded = 128; 2877 cpi->prob_gf_coded = 128; 2878 } 2879 2880 /* update reference frame costs since we can do better than what we got 2881 * last frame.
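* (The adjustments below bias the reference-frame signalling cost estimates
* toward the usage pattern expected after the current update, e.g. a fresh
* alt-ref or golden frame changes how likely each reference is to be chosen.)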
2882 */ 2883 if (cpi->oxcf.number_of_layers == 1) 2884 { 2885 if (cpi->common.refresh_alt_ref_frame) 2886 { 2887 cpi->prob_intra_coded += 40; 2888 if (cpi->prob_intra_coded > 255) 2889 cpi->prob_intra_coded = 255; 2890 cpi->prob_last_coded = 200; 2891 cpi->prob_gf_coded = 1; 2892 } 2893 else if (cpi->frames_since_golden == 0) 2894 { 2895 cpi->prob_last_coded = 214; 2896 } 2897 else if (cpi->frames_since_golden == 1) 2898 { 2899 cpi->prob_last_coded = 192; 2900 cpi->prob_gf_coded = 220; 2901 } 2902 else if (cpi->source_alt_ref_active) 2903 { 2904 cpi->prob_gf_coded -= 20; 2905 2906 if (cpi->prob_gf_coded < 10) 2907 cpi->prob_gf_coded = 10; 2908 } 2909 if (!cpi->source_alt_ref_active) 2910 cpi->prob_gf_coded = 255; 2911 } 2912 } 2913 2914 2915 /* 1 = key, 0 = inter */ 2916 static int decide_key_frame(VP8_COMP *cpi) 2917 { 2918 VP8_COMMON *cm = &cpi->common; 2919 2920 int code_key_frame = 0; 2921 2922 cpi->kf_boost = 0; 2923 2924 if (cpi->Speed > 11) 2925 return 0; 2926 2927 /* Clear down mmx registers */ 2928 vp8_clear_system_state(); 2929 2930 if ((cpi->compressor_speed == 2) && (cpi->Speed >= 5) && (cpi->sf.RD == 0)) 2931 { 2932 double change = 1.0 * abs((int)(cpi->mb.intra_error - 2933 cpi->last_intra_error)) / (1 + cpi->last_intra_error); 2934 double change2 = 1.0 * abs((int)(cpi->mb.prediction_error - 2935 cpi->last_prediction_error)) / (1 + cpi->last_prediction_error); 2936 double minerror = cm->MBs * 256; 2937 2938 cpi->last_intra_error = cpi->mb.intra_error; 2939 cpi->last_prediction_error = cpi->mb.prediction_error; 2940 2941 if (10 * cpi->mb.intra_error / (1 + cpi->mb.prediction_error) < 15 2942 && cpi->mb.prediction_error > minerror 2943 && (change > .25 || change2 > .25)) 2944 { 2945 /*(change > 1.4 || change < .75)&& cpi->this_frame_percent_intra > cpi->last_frame_percent_intra + 3*/ 2946 return 1; 2947 } 2948 2949 return 0; 2950 2951 } 2952 2953 /* If the following are true we might as well code a key frame */ 2954 if (((cpi->this_frame_percent_intra == 100) && 2955 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra + 2))) || 2956 ((cpi->this_frame_percent_intra > 95) && 2957 (cpi->this_frame_percent_intra >= (cpi->last_frame_percent_intra + 5)))) 2958 { 2959 code_key_frame = 1; 2960 } 2961 /* In addition, if the following are true and this is not a golden frame 2962 * then code a key frame. Note that on golden frames there often seems 2963 * to be a pop in intra usage anyway, hence this restriction is 2964 * designed to prevent spurious key frames. The Intra pop needs to be 2965 * investigated.
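* As a rough illustration of the thresholds below: the higher the intra
* percentage of the current frame, the smaller the jump over the previous
* frame that is required (double at >60%, one-and-a-half times at >75%,
* and only ten percentage points at >90%).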
2966 */ 2967 else if (((cpi->this_frame_percent_intra > 60) && 2968 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra * 2))) || 2969 ((cpi->this_frame_percent_intra > 75) && 2970 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra * 3 / 2))) || 2971 ((cpi->this_frame_percent_intra > 90) && 2972 (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra + 10)))) 2973 { 2974 if (!cm->refresh_golden_frame) 2975 code_key_frame = 1; 2976 } 2977 2978 return code_key_frame; 2979 2980 } 2981 2982 #if !(CONFIG_REALTIME_ONLY) 2983 static void Pass1Encode(VP8_COMP *cpi, unsigned long *size, unsigned char *dest, unsigned int *frame_flags) 2984 { 2985 (void) size; 2986 (void) dest; 2987 (void) frame_flags; 2988 vp8_set_quantizer(cpi, 26); 2989 2990 vp8_first_pass(cpi); 2991 } 2992 #endif 2993 2994 #if 0 2995 void write_cx_frame_to_file(YV12_BUFFER_CONFIG *frame, int this_frame) 2996 { 2997 2998 /* write the frame */ 2999 FILE *yframe; 3000 int i; 3001 char filename[255]; 3002 3003 sprintf(filename, "cx\\y%04d.raw", this_frame); 3004 yframe = fopen(filename, "wb"); 3005 3006 for (i = 0; i < frame->y_height; i++) 3007 fwrite(frame->y_buffer + i * frame->y_stride, frame->y_width, 1, yframe); 3008 3009 fclose(yframe); 3010 sprintf(filename, "cx\\u%04d.raw", this_frame); 3011 yframe = fopen(filename, "wb"); 3012 3013 for (i = 0; i < frame->uv_height; i++) 3014 fwrite(frame->u_buffer + i * frame->uv_stride, frame->uv_width, 1, yframe); 3015 3016 fclose(yframe); 3017 sprintf(filename, "cx\\v%04d.raw", this_frame); 3018 yframe = fopen(filename, "wb"); 3019 3020 for (i = 0; i < frame->uv_height; i++) 3021 fwrite(frame->v_buffer + i * frame->uv_stride, frame->uv_width, 1, yframe); 3022 3023 fclose(yframe); 3024 } 3025 #endif 3026 /* return of 0 means drop frame */ 3027 3028 /* Function to test for conditions that indicate we should loop 3029 * back and recode a frame. 3030 */ 3031 static int recode_loop_test( VP8_COMP *cpi, 3032 int high_limit, int low_limit, 3033 int q, int maxq, int minq ) 3034 { 3035 int force_recode = 0; 3036 VP8_COMMON *cm = &cpi->common; 3037 3038 /* Is frame recode allowed at all? 3039 * Yes if either recode mode 1 is selected, or mode 2 is selected 3040 * and the frame is a key frame,
golden frame or alt_ref_frame 3041 */ 3042 if ( (cpi->sf.recode_loop == 1) || 3043 ( (cpi->sf.recode_loop == 2) && 3044 ( (cm->frame_type == KEY_FRAME) || 3045 cm->refresh_golden_frame || 3046 cm->refresh_alt_ref_frame ) ) ) 3047 { 3048 /* General over and under shoot tests */ 3049 if ( ((cpi->projected_frame_size > high_limit) && (q < maxq)) || 3050 ((cpi->projected_frame_size < low_limit) && (q > minq)) ) 3051 { 3052 force_recode = 1; 3053 } 3054 /* Special Constrained quality tests */ 3055 else if (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) 3056 { 3057 /* Undershoot and below auto cq level */ 3058 if ( (q > cpi->cq_target_quality) && 3059 (cpi->projected_frame_size < 3060 ((cpi->this_frame_target * 7) >> 3))) 3061 { 3062 force_recode = 1; 3063 } 3064 /* Severe undershoot and between auto and user cq level */ 3065 else if ( (q > cpi->oxcf.cq_level) && 3066 (cpi->projected_frame_size < cpi->min_frame_bandwidth) && 3067 (cpi->active_best_quality > cpi->oxcf.cq_level)) 3068 { 3069 force_recode = 1; 3070 cpi->active_best_quality = cpi->oxcf.cq_level; 3071 } 3072 } 3073 } 3074 3075 return force_recode; 3076 } 3077 3078 static void update_reference_frames(VP8_COMP *cpi) 3079 { 3080 VP8_COMMON *cm = &cpi->common; 3081 YV12_BUFFER_CONFIG *yv12_fb = cm->yv12_fb; 3082 3083 /* At this point the new frame has been encoded. 3084 * If any buffer copy / swapping is signaled it should be done here. 3085 */ 3086 3087 if (cm->frame_type == KEY_FRAME) 3088 { 3089 yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FRAME | VP8_ALTR_FRAME ; 3090 3091 yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME; 3092 yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME; 3093 3094 cm->alt_fb_idx = cm->gld_fb_idx = cm->new_fb_idx; 3095 3096 #if CONFIG_MULTI_RES_ENCODING 3097 cpi->current_ref_frames[GOLDEN_FRAME] = cm->current_video_frame; 3098 cpi->current_ref_frames[ALTREF_FRAME] = cm->current_video_frame; 3099 #endif 3100 } 3101 else /* For non key frames */ 3102 { 3103 if (cm->refresh_alt_ref_frame) 3104 { 3105 assert(!cm->copy_buffer_to_arf); 3106 3107 cm->yv12_fb[cm->new_fb_idx].flags |= VP8_ALTR_FRAME; 3108 cm->yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME; 3109 cm->alt_fb_idx = cm->new_fb_idx; 3110 3111 #if CONFIG_MULTI_RES_ENCODING 3112 cpi->current_ref_frames[ALTREF_FRAME] = cm->current_video_frame; 3113 #endif 3114 } 3115 else if (cm->copy_buffer_to_arf) 3116 { 3117 assert(!(cm->copy_buffer_to_arf & ~0x3)); 3118 3119 if (cm->copy_buffer_to_arf == 1) 3120 { 3121 if(cm->alt_fb_idx != cm->lst_fb_idx) 3122 { 3123 yv12_fb[cm->lst_fb_idx].flags |= VP8_ALTR_FRAME; 3124 yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME; 3125 cm->alt_fb_idx = cm->lst_fb_idx; 3126 3127 #if CONFIG_MULTI_RES_ENCODING 3128 cpi->current_ref_frames[ALTREF_FRAME] = 3129 cpi->current_ref_frames[LAST_FRAME]; 3130 #endif 3131 } 3132 } 3133 else /* if (cm->copy_buffer_to_arf == 2) */ 3134 { 3135 if(cm->alt_fb_idx != cm->gld_fb_idx) 3136 { 3137 yv12_fb[cm->gld_fb_idx].flags |= VP8_ALTR_FRAME; 3138 yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME; 3139 cm->alt_fb_idx = cm->gld_fb_idx; 3140 3141 #if CONFIG_MULTI_RES_ENCODING 3142 cpi->current_ref_frames[ALTREF_FRAME] = 3143 cpi->current_ref_frames[GOLDEN_FRAME]; 3144 #endif 3145 } 3146 } 3147 } 3148 3149 if (cm->refresh_golden_frame) 3150 { 3151 assert(!cm->copy_buffer_to_gf); 3152 3153 cm->yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FRAME; 3154 cm->yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME; 3155 cm->gld_fb_idx = cm->new_fb_idx; 3156 3157 #if CONFIG_MULTI_RES_ENCODING 3158 
cpi->current_ref_frames[GOLDEN_FRAME] = cm->current_video_frame; 3159 #endif 3160 } 3161 else if (cm->copy_buffer_to_gf) 3162 { 3163 assert(!(cm->copy_buffer_to_arf & ~0x3)); 3164 3165 if (cm->copy_buffer_to_gf == 1) 3166 { 3167 if(cm->gld_fb_idx != cm->lst_fb_idx) 3168 { 3169 yv12_fb[cm->lst_fb_idx].flags |= VP8_GOLD_FRAME; 3170 yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME; 3171 cm->gld_fb_idx = cm->lst_fb_idx; 3172 3173 #if CONFIG_MULTI_RES_ENCODING 3174 cpi->current_ref_frames[GOLDEN_FRAME] = 3175 cpi->current_ref_frames[LAST_FRAME]; 3176 #endif 3177 } 3178 } 3179 else /* if (cm->copy_buffer_to_gf == 2) */ 3180 { 3181 if(cm->alt_fb_idx != cm->gld_fb_idx) 3182 { 3183 yv12_fb[cm->alt_fb_idx].flags |= VP8_GOLD_FRAME; 3184 yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME; 3185 cm->gld_fb_idx = cm->alt_fb_idx; 3186 3187 #if CONFIG_MULTI_RES_ENCODING 3188 cpi->current_ref_frames[GOLDEN_FRAME] = 3189 cpi->current_ref_frames[ALTREF_FRAME]; 3190 #endif 3191 } 3192 } 3193 } 3194 } 3195 3196 if (cm->refresh_last_frame) 3197 { 3198 cm->yv12_fb[cm->new_fb_idx].flags |= VP8_LAST_FRAME; 3199 cm->yv12_fb[cm->lst_fb_idx].flags &= ~VP8_LAST_FRAME; 3200 cm->lst_fb_idx = cm->new_fb_idx; 3201 3202 #if CONFIG_MULTI_RES_ENCODING 3203 cpi->current_ref_frames[LAST_FRAME] = cm->current_video_frame; 3204 #endif 3205 } 3206 3207 #if CONFIG_TEMPORAL_DENOISING 3208 if (cpi->oxcf.noise_sensitivity) 3209 { 3210 /* we shouldn't have to keep multiple copies as we know in advance which 3211 * buffer we should start - for now to get something up and running 3212 * I've chosen to copy the buffers 3213 */ 3214 if (cm->frame_type == KEY_FRAME) 3215 { 3216 int i; 3217 vp8_yv12_copy_frame( 3218 cpi->Source, 3219 &cpi->denoiser.yv12_running_avg[LAST_FRAME]); 3220 3221 vp8_yv12_extend_frame_borders( 3222 &cpi->denoiser.yv12_running_avg[LAST_FRAME]); 3223 3224 for (i = 2; i < MAX_REF_FRAMES - 1; i++) 3225 vp8_yv12_copy_frame( 3226 &cpi->denoiser.yv12_running_avg[LAST_FRAME], 3227 &cpi->denoiser.yv12_running_avg[i]); 3228 } 3229 else /* For non key frames */ 3230 { 3231 vp8_yv12_extend_frame_borders( 3232 &cpi->denoiser.yv12_running_avg[INTRA_FRAME]); 3233 3234 if (cm->refresh_alt_ref_frame || cm->copy_buffer_to_arf) 3235 { 3236 vp8_yv12_copy_frame( 3237 &cpi->denoiser.yv12_running_avg[INTRA_FRAME], 3238 &cpi->denoiser.yv12_running_avg[ALTREF_FRAME]); 3239 } 3240 if (cm->refresh_golden_frame || cm->copy_buffer_to_gf) 3241 { 3242 vp8_yv12_copy_frame( 3243 &cpi->denoiser.yv12_running_avg[INTRA_FRAME], 3244 &cpi->denoiser.yv12_running_avg[GOLDEN_FRAME]); 3245 } 3246 if(cm->refresh_last_frame) 3247 { 3248 vp8_yv12_copy_frame( 3249 &cpi->denoiser.yv12_running_avg[INTRA_FRAME], 3250 &cpi->denoiser.yv12_running_avg[LAST_FRAME]); 3251 } 3252 } 3253 3254 } 3255 #endif 3256 3257 } 3258 3259 void vp8_loopfilter_frame(VP8_COMP *cpi, VP8_COMMON *cm) 3260 { 3261 const FRAME_TYPE frame_type = cm->frame_type; 3262 3263 if (cm->no_lpf) 3264 { 3265 cm->filter_level = 0; 3266 } 3267 else 3268 { 3269 struct vpx_usec_timer timer; 3270 3271 vp8_clear_system_state(); 3272 3273 vpx_usec_timer_start(&timer); 3274 if (cpi->sf.auto_filter == 0) 3275 vp8cx_pick_filter_level_fast(cpi->Source, cpi); 3276 3277 else 3278 vp8cx_pick_filter_level(cpi->Source, cpi); 3279 3280 if (cm->filter_level > 0) 3281 { 3282 vp8cx_set_alt_lf_level(cpi, cm->filter_level); 3283 } 3284 3285 vpx_usec_timer_mark(&timer); 3286 cpi->time_pick_lpf += vpx_usec_timer_elapsed(&timer); 3287 } 3288 3289 #if CONFIG_MULTITHREAD 3290 if (cpi->b_multi_threaded) 3291 
sem_post(&cpi->h_event_end_lpf); /* signal that we have set filter_level */ 3292 #endif 3293 3294 if (cm->filter_level > 0) 3295 { 3296 vp8_loop_filter_frame(cm, &cpi->mb.e_mbd, frame_type); 3297 } 3298 3299 vp8_yv12_extend_frame_borders(cm->frame_to_show); 3300 3301 } 3302 3303 static void encode_frame_to_data_rate 3304 ( 3305 VP8_COMP *cpi, 3306 unsigned long *size, 3307 unsigned char *dest, 3308 unsigned char* dest_end, 3309 unsigned int *frame_flags 3310 ) 3311 { 3312 int Q; 3313 int frame_over_shoot_limit; 3314 int frame_under_shoot_limit; 3315 3316 int Loop = 0; 3317 int loop_count; 3318 3319 VP8_COMMON *cm = &cpi->common; 3320 int active_worst_qchanged = 0; 3321 3322 #if !(CONFIG_REALTIME_ONLY) 3323 int q_low; 3324 int q_high; 3325 int zbin_oq_high; 3326 int zbin_oq_low = 0; 3327 int top_index; 3328 int bottom_index; 3329 int overshoot_seen = 0; 3330 int undershoot_seen = 0; 3331 #endif 3332 3333 int drop_mark = (int)(cpi->oxcf.drop_frames_water_mark * 3334 cpi->oxcf.optimal_buffer_level / 100); 3335 int drop_mark75 = drop_mark * 2 / 3; 3336 int drop_mark50 = drop_mark / 4; 3337 int drop_mark25 = drop_mark / 8; 3338 3339 3340 /* Clear down mmx registers to allow floating point in what follows */ 3341 vp8_clear_system_state(); 3342 3343 #if CONFIG_MULTITHREAD 3344 /* wait for the last picture loopfilter thread done */ 3345 if (cpi->b_lpf_running) 3346 { 3347 sem_wait(&cpi->h_event_end_lpf); 3348 cpi->b_lpf_running = 0; 3349 } 3350 #endif 3351 3352 if(cpi->force_next_frame_intra) 3353 { 3354 cm->frame_type = KEY_FRAME; /* delayed intra frame */ 3355 cpi->force_next_frame_intra = 0; 3356 } 3357 3358 /* For an alt ref frame in 2 pass we skip the call to the second pass 3359 * function that sets the target bandwidth 3360 */ 3361 #if !(CONFIG_REALTIME_ONLY) 3362 3363 if (cpi->pass == 2) 3364 { 3365 if (cpi->common.refresh_alt_ref_frame) 3366 { 3367 /* Per frame bit target for the alt ref frame */ 3368 cpi->per_frame_bandwidth = cpi->twopass.gf_bits; 3369 /* per second target bitrate */ 3370 cpi->target_bandwidth = (int)(cpi->twopass.gf_bits * 3371 cpi->output_framerate); 3372 } 3373 } 3374 else 3375 #endif 3376 cpi->per_frame_bandwidth = (int)(cpi->target_bandwidth / cpi->output_framerate); 3377 3378 /* Default turn off buffer to buffer copying */ 3379 cm->copy_buffer_to_gf = 0; 3380 cm->copy_buffer_to_arf = 0; 3381 3382 /* Clear zbin over-quant value and mode boost values. */ 3383 cpi->mb.zbin_over_quant = 0; 3384 cpi->mb.zbin_mode_boost = 0; 3385 3386 /* Enable or disable mode based tweaking of the zbin 3387 * For 2 Pass Only used where GF/ARF prediction quality 3388 * is above a threshold 3389 */ 3390 cpi->mb.zbin_mode_boost_enabled = 1; 3391 if (cpi->pass == 2) 3392 { 3393 if ( cpi->gfu_boost <= 400 ) 3394 { 3395 cpi->mb.zbin_mode_boost_enabled = 0; 3396 } 3397 } 3398 3399 /* Current default encoder behaviour for the altref sign bias */ 3400 if (cpi->source_alt_ref_active) 3401 cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 1; 3402 else 3403 cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 0; 3404 3405 /* Check to see if a key frame is signaled 3406 * For two pass with auto key frame enabled cm->frame_type may already 3407 * be set, but not for one pass. 
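* The test below forces a key frame on frame 0, whenever the application
* sets FRAMEFLAGS_KEY, or when auto keyframing is enabled and
* frames_since_key reaches a multiple of the configured key frame frequency.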
3408 */ 3409 if ((cm->current_video_frame == 0) || 3410 (cm->frame_flags & FRAMEFLAGS_KEY) || 3411 (cpi->oxcf.auto_key && (cpi->frames_since_key % cpi->key_frame_frequency == 0))) 3412 { 3413 /* Key frame from VFW/auto-keyframe/first frame */ 3414 cm->frame_type = KEY_FRAME; 3415 } 3416 3417 #if CONFIG_MULTI_RES_ENCODING 3418 /* In multi-resolution encoding, frame_type is decided by lowest-resolution 3419 * encoder. Same frame_type is adopted while encoding at other resolution. 3420 */ 3421 if (cpi->oxcf.mr_encoder_id) 3422 { 3423 LOWER_RES_FRAME_INFO* low_res_frame_info 3424 = (LOWER_RES_FRAME_INFO*)cpi->oxcf.mr_low_res_mode_info; 3425 3426 cm->frame_type = low_res_frame_info->frame_type; 3427 3428 if(cm->frame_type != KEY_FRAME) 3429 { 3430 cpi->mr_low_res_mv_avail = 1; 3431 cpi->mr_low_res_mv_avail &= !(low_res_frame_info->is_frame_dropped); 3432 3433 if (cpi->ref_frame_flags & VP8_LAST_FRAME) 3434 cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[LAST_FRAME] 3435 == low_res_frame_info->low_res_ref_frames[LAST_FRAME]); 3436 3437 if (cpi->ref_frame_flags & VP8_GOLD_FRAME) 3438 cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[GOLDEN_FRAME] 3439 == low_res_frame_info->low_res_ref_frames[GOLDEN_FRAME]); 3440 3441 if (cpi->ref_frame_flags & VP8_ALTR_FRAME) 3442 cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[ALTREF_FRAME] 3443 == low_res_frame_info->low_res_ref_frames[ALTREF_FRAME]); 3444 } 3445 } 3446 #endif 3447 3448 /* Set various flags etc to special state if it is a key frame */ 3449 if (cm->frame_type == KEY_FRAME) 3450 { 3451 int i; 3452 3453 // Set the loop filter deltas and segmentation map update 3454 setup_features(cpi); 3455 3456 /* The alternate reference frame cannot be active for a key frame */ 3457 cpi->source_alt_ref_active = 0; 3458 3459 /* Reset the RD threshold multipliers to default of * 1 (128) */ 3460 for (i = 0; i < MAX_MODES; i++) 3461 { 3462 cpi->mb.rd_thresh_mult[i] = 128; 3463 } 3464 } 3465 3466 #if 0 3467 /* Experimental code for lagged compress and one pass 3468 * Initialise one_pass GF frames stats 3469 * Update stats used for GF selection 3470 */ 3471 { 3472 cpi->one_pass_frame_index = cm->current_video_frame % MAX_LAG_BUFFERS; 3473 3474 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frames_so_far = 0; 3475 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_intra_error = 0.0; 3476 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_coded_error = 0.0; 3477 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_pcnt_inter = 0.0; 3478 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_pcnt_motion = 0.0; 3479 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvr = 0.0; 3480 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvr_abs = 0.0; 3481 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvc = 0.0; 3482 cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvc_abs = 0.0; 3483 } 3484 #endif 3485 3486 update_rd_ref_frame_probs(cpi); 3487 3488 if (cpi->drop_frames_allowed) 3489 { 3490 /* The reset to decimation 0 is only done here for one pass. 3491 * Once it is set two pass leaves decimation on till the next kf. 
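* The drop_mark thresholds computed earlier (drop_mark, drop_mark75,
* drop_mark50, drop_mark25) act as buffer-fullness watermarks: a healthy
* buffer steps the decimation factor back down, while progressively
* emptier buffers step it up towards dropping more frames.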
3492 */ 3493 if ((cpi->buffer_level > drop_mark) && (cpi->decimation_factor > 0)) 3494 cpi->decimation_factor --; 3495 3496 if (cpi->buffer_level > drop_mark75 && cpi->decimation_factor > 0) 3497 cpi->decimation_factor = 1; 3498 3499 else if (cpi->buffer_level < drop_mark25 && (cpi->decimation_factor == 2 || cpi->decimation_factor == 3)) 3500 { 3501 cpi->decimation_factor = 3; 3502 } 3503 else if (cpi->buffer_level < drop_mark50 && (cpi->decimation_factor == 1 || cpi->decimation_factor == 2)) 3504 { 3505 cpi->decimation_factor = 2; 3506 } 3507 else if (cpi->buffer_level < drop_mark75 && (cpi->decimation_factor == 0 || cpi->decimation_factor == 1)) 3508 { 3509 cpi->decimation_factor = 1; 3510 } 3511 } 3512 3513 /* The following decimates the frame rate according to a regular 3514 * pattern (i.e. to 1/2 or 2/3 frame rate) This can be used to help 3515 * prevent buffer under-run in CBR mode. Alternatively it might be 3516 * desirable in some situations to drop frame rate but throw more bits 3517 * at each frame. 3518 * 3519 * Note that dropping a key frame can be problematic if spatial 3520 * resampling is also active 3521 */ 3522 if (cpi->decimation_factor > 0) 3523 { 3524 switch (cpi->decimation_factor) 3525 { 3526 case 1: 3527 cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 3 / 2; 3528 break; 3529 case 2: 3530 cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 5 / 4; 3531 break; 3532 case 3: 3533 cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 5 / 4; 3534 break; 3535 } 3536 3537 /* Note that we should not throw out a key frame (especially when 3538 * spatial resampling is enabled). 3539 */ 3540 if (cm->frame_type == KEY_FRAME) 3541 { 3542 cpi->decimation_count = cpi->decimation_factor; 3543 } 3544 else if (cpi->decimation_count > 0) 3545 { 3546 cpi->decimation_count --; 3547 3548 cpi->bits_off_target += cpi->av_per_frame_bandwidth; 3549 if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size) 3550 cpi->bits_off_target = cpi->oxcf.maximum_buffer_size; 3551 3552 #if CONFIG_MULTI_RES_ENCODING 3553 vp8_store_drop_frame_info(cpi); 3554 #endif 3555 3556 cm->current_video_frame++; 3557 cpi->frames_since_key++; 3558 // We advance the temporal pattern for dropped frames. 3559 cpi->temporal_pattern_counter++; 3560 3561 #if CONFIG_INTERNAL_STATS 3562 cpi->count ++; 3563 #endif 3564 3565 cpi->buffer_level = cpi->bits_off_target; 3566 3567 if (cpi->oxcf.number_of_layers > 1) 3568 { 3569 unsigned int i; 3570 3571 /* Propagate bits saved by dropping the frame to higher 3572 * layers 3573 */ 3574 for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++) 3575 { 3576 LAYER_CONTEXT *lc = &cpi->layer_context[i]; 3577 lc->bits_off_target += cpi->av_per_frame_bandwidth; 3578 if (lc->bits_off_target > lc->maximum_buffer_size) 3579 lc->bits_off_target = lc->maximum_buffer_size; 3580 lc->buffer_level = lc->bits_off_target; 3581 } 3582 } 3583 3584 return; 3585 } 3586 else 3587 cpi->decimation_count = cpi->decimation_factor; 3588 } 3589 else 3590 cpi->decimation_count = 0; 3591 3592 /* Decide how big to make the frame */ 3593 if (!vp8_pick_frame_size(cpi)) 3594 { 3595 /*TODO: 2 drop_frame and return code could be put together. */ 3596 #if CONFIG_MULTI_RES_ENCODING 3597 vp8_store_drop_frame_info(cpi); 3598 #endif 3599 cm->current_video_frame++; 3600 cpi->frames_since_key++; 3601 // We advance the temporal pattern for dropped frames. 3602 cpi->temporal_pattern_counter++; 3603 return; 3604 } 3605 3606 /* Reduce active_worst_allowed_q for CBR if our buffer is getting too full. 
3607 * This has a knock on effect on active best quality as well. 3608 * For CBR if the buffer reaches its maximum level then we can no longer 3609 * save up bits for later frames so we might as well use them up 3610 * on the current frame. 3611 */ 3612 if ((cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER) && 3613 (cpi->buffer_level >= cpi->oxcf.optimal_buffer_level) && cpi->buffered_mode) 3614 { 3615 /* Max adjustment is 1/4 */ 3616 int Adjustment = cpi->active_worst_quality / 4; 3617 3618 if (Adjustment) 3619 { 3620 int buff_lvl_step; 3621 3622 if (cpi->buffer_level < cpi->oxcf.maximum_buffer_size) 3623 { 3624 buff_lvl_step = (int) 3625 ((cpi->oxcf.maximum_buffer_size - 3626 cpi->oxcf.optimal_buffer_level) / 3627 Adjustment); 3628 3629 if (buff_lvl_step) 3630 Adjustment = (int) 3631 ((cpi->buffer_level - 3632 cpi->oxcf.optimal_buffer_level) / 3633 buff_lvl_step); 3634 else 3635 Adjustment = 0; 3636 } 3637 3638 cpi->active_worst_quality -= Adjustment; 3639 3640 if(cpi->active_worst_quality < cpi->active_best_quality) 3641 cpi->active_worst_quality = cpi->active_best_quality; 3642 } 3643 } 3644 3645 /* Set an active best quality and if necessary active worst quality 3646 * There is some odd behavior for one pass here that needs attention. 3647 */ 3648 if ( (cpi->pass == 2) || (cpi->ni_frames > 150)) 3649 { 3650 vp8_clear_system_state(); 3651 3652 Q = cpi->active_worst_quality; 3653 3654 if ( cm->frame_type == KEY_FRAME ) 3655 { 3656 if ( cpi->pass == 2 ) 3657 { 3658 if (cpi->gfu_boost > 600) 3659 cpi->active_best_quality = kf_low_motion_minq[Q]; 3660 else 3661 cpi->active_best_quality = kf_high_motion_minq[Q]; 3662 3663 /* Special case for key frames forced because we have reached 3664 * the maximum key frame interval. Here force the Q to a range 3665 * based on the ambient Q to reduce the risk of popping 3666 */ 3667 if ( cpi->this_key_frame_forced ) 3668 { 3669 if ( cpi->active_best_quality > cpi->avg_frame_qindex * 7/8) 3670 cpi->active_best_quality = cpi->avg_frame_qindex * 7/8; 3671 else if ( cpi->active_best_quality < cpi->avg_frame_qindex >> 2 ) 3672 cpi->active_best_quality = cpi->avg_frame_qindex >> 2; 3673 } 3674 } 3675 /* One pass more conservative */ 3676 else 3677 cpi->active_best_quality = kf_high_motion_minq[Q]; 3678 } 3679 3680 else if (cpi->oxcf.number_of_layers==1 && 3681 (cm->refresh_golden_frame || cpi->common.refresh_alt_ref_frame)) 3682 { 3683 /* Use the lower of cpi->active_worst_quality and recent 3684 * average Q as basis for GF/ARF Q limit unless last frame was 3685 * a key frame. 3686 */ 3687 if ( (cpi->frames_since_key > 1) && 3688 (cpi->avg_frame_qindex < cpi->active_worst_quality) ) 3689 { 3690 Q = cpi->avg_frame_qindex; 3691 } 3692 3693 /* For constrained quality dont allow Q less than the cq level */ 3694 if ( (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) && 3695 (Q < cpi->cq_target_quality) ) 3696 { 3697 Q = cpi->cq_target_quality; 3698 } 3699 3700 if ( cpi->pass == 2 ) 3701 { 3702 if ( cpi->gfu_boost > 1000 ) 3703 cpi->active_best_quality = gf_low_motion_minq[Q]; 3704 else if ( cpi->gfu_boost < 400 ) 3705 cpi->active_best_quality = gf_high_motion_minq[Q]; 3706 else 3707 cpi->active_best_quality = gf_mid_motion_minq[Q]; 3708 3709 /* Constrained quality use slightly lower active best. 
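* (Scaling the q index by 15/16 lowers it by roughly 6%, i.e. it permits
* slightly higher quality for GF/ARF frames under constrained quality.)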
*/ 3710 if ( cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY ) 3711 { 3712 cpi->active_best_quality = 3713 cpi->active_best_quality * 15/16; 3714 } 3715 } 3716 /* One pass more conservative */ 3717 else 3718 cpi->active_best_quality = gf_high_motion_minq[Q]; 3719 } 3720 else 3721 { 3722 cpi->active_best_quality = inter_minq[Q]; 3723 3724 /* For the constant/constrained quality mode we don't want 3725 * q to fall below the cq level. 3726 */ 3727 if ((cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) && 3728 (cpi->active_best_quality < cpi->cq_target_quality) ) 3729 { 3730 /* If we are strongly undershooting the target rate in the last 3731 * frames then use the user passed in cq value not the auto 3732 * cq value. 3733 */ 3734 if ( cpi->rolling_actual_bits < cpi->min_frame_bandwidth ) 3735 cpi->active_best_quality = cpi->oxcf.cq_level; 3736 else 3737 cpi->active_best_quality = cpi->cq_target_quality; 3738 } 3739 } 3740 3741 /* If CBR and the buffer is this full then it is reasonable to allow 3742 * higher quality on the frames to prevent bits just going to waste. 3743 */ 3744 if (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER) 3745 { 3746 /* Note that the use of >= here eliminates the risk of a divide 3747 * by 0 error in the else if clause 3748 */ 3749 if (cpi->buffer_level >= cpi->oxcf.maximum_buffer_size) 3750 cpi->active_best_quality = cpi->best_quality; 3751 3752 else if (cpi->buffer_level > cpi->oxcf.optimal_buffer_level) 3753 { 3754 int Fraction = (int) 3755 (((cpi->buffer_level - cpi->oxcf.optimal_buffer_level) * 128) 3756 / (cpi->oxcf.maximum_buffer_size - 3757 cpi->oxcf.optimal_buffer_level)); 3758 int min_qadjustment = ((cpi->active_best_quality - 3759 cpi->best_quality) * Fraction) / 128; 3760 3761 cpi->active_best_quality -= min_qadjustment; 3762 } 3763 } 3764 } 3765 /* Make sure constrained quality mode limits are adhered to for the first 3766 * few frames of one pass encodes 3767 */ 3768 else if (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) 3769 { 3770 if ( (cm->frame_type == KEY_FRAME) || 3771 cm->refresh_golden_frame || cpi->common.refresh_alt_ref_frame ) 3772 { 3773 cpi->active_best_quality = cpi->best_quality; 3774 } 3775 else if (cpi->active_best_quality < cpi->cq_target_quality) 3776 { 3777 cpi->active_best_quality = cpi->cq_target_quality; 3778 } 3779 } 3780 3781 /* Clip the active best and worst quality values to limits */ 3782 if (cpi->active_worst_quality > cpi->worst_quality) 3783 cpi->active_worst_quality = cpi->worst_quality; 3784 3785 if (cpi->active_best_quality < cpi->best_quality) 3786 cpi->active_best_quality = cpi->best_quality; 3787 3788 if ( cpi->active_worst_quality < cpi->active_best_quality ) 3789 cpi->active_worst_quality = cpi->active_best_quality; 3790 3791 /* Determine initial Q to try */ 3792 Q = vp8_regulate_q(cpi, cpi->this_frame_target); 3793 3794 #if !(CONFIG_REALTIME_ONLY) 3795 3796 /* Set highest allowed value for Zbin over quant */ 3797 if (cm->frame_type == KEY_FRAME) 3798 zbin_oq_high = 0; 3799 else if ((cpi->oxcf.number_of_layers == 1) && ((cm->refresh_alt_ref_frame || 3800 (cm->refresh_golden_frame && !cpi->source_alt_ref_active)))) 3801 { 3802 zbin_oq_high = 16; 3803 } 3804 else 3805 zbin_oq_high = ZBIN_OQ_MAX; 3806 #endif 3807 3808 /* Set up background Q adjustment for error resilient mode. 3809 * For multi-layer encodes only enable this for the base layer.
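* Cyclic refresh codes a rotating subset of macroblocks each frame at a
* somewhat better quality (via a segment Q delta) so that loss-affected
* areas are progressively repaired; enhancement layers simply disable
* segmentation instead.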
3810 */ 3811 if (cpi->cyclic_refresh_mode_enabled) 3812 { 3813 if (cpi->current_layer==0) 3814 cyclic_background_refresh(cpi, Q, 0); 3815 else 3816 disable_segmentation(cpi); 3817 } 3818 3819 vp8_compute_frame_size_bounds(cpi, &frame_under_shoot_limit, &frame_over_shoot_limit); 3820 3821 #if !(CONFIG_REALTIME_ONLY) 3822 /* Limit Q range for the adaptive loop. */ 3823 bottom_index = cpi->active_best_quality; 3824 top_index = cpi->active_worst_quality; 3825 q_low = cpi->active_best_quality; 3826 q_high = cpi->active_worst_quality; 3827 #endif 3828 3829 vp8_save_coding_context(cpi); 3830 3831 loop_count = 0; 3832 3833 scale_and_extend_source(cpi->un_scaled_source, cpi); 3834 3835 #if !(CONFIG_REALTIME_ONLY) && CONFIG_POSTPROC && !(CONFIG_TEMPORAL_DENOISING) 3836 3837 if (cpi->oxcf.noise_sensitivity > 0) 3838 { 3839 unsigned char *src; 3840 int l = 0; 3841 3842 switch (cpi->oxcf.noise_sensitivity) 3843 { 3844 case 1: 3845 l = 20; 3846 break; 3847 case 2: 3848 l = 40; 3849 break; 3850 case 3: 3851 l = 60; 3852 break; 3853 case 4: 3854 l = 80; 3855 break; 3856 case 5: 3857 l = 100; 3858 break; 3859 case 6: 3860 l = 150; 3861 break; 3862 } 3863 3864 3865 if (cm->frame_type == KEY_FRAME) 3866 { 3867 vp8_de_noise(cm, cpi->Source, cpi->Source, l , 1, 0); 3868 } 3869 else 3870 { 3871 vp8_de_noise(cm, cpi->Source, cpi->Source, l , 1, 0); 3872 3873 src = cpi->Source->y_buffer; 3874 3875 if (cpi->Source->y_stride < 0) 3876 { 3877 src += cpi->Source->y_stride * (cpi->Source->y_height - 1); 3878 } 3879 } 3880 } 3881 3882 #endif 3883 3884 #ifdef OUTPUT_YUV_SRC 3885 vp8_write_yuv_frame(cpi->Source); 3886 #endif 3887 3888 do 3889 { 3890 vp8_clear_system_state(); 3891 3892 vp8_set_quantizer(cpi, Q); 3893 3894 /* setup skip prob for costing in mode/mv decision */ 3895 if (cpi->common.mb_no_coeff_skip) 3896 { 3897 cpi->prob_skip_false = cpi->base_skip_false_prob[Q]; 3898 3899 if (cm->frame_type != KEY_FRAME) 3900 { 3901 if (cpi->common.refresh_alt_ref_frame) 3902 { 3903 if (cpi->last_skip_false_probs[2] != 0) 3904 cpi->prob_skip_false = cpi->last_skip_false_probs[2]; 3905 3906 /* 3907 if(cpi->last_skip_false_probs[2]!=0 && abs(Q- cpi->last_skip_probs_q[2])<=16 ) 3908 cpi->prob_skip_false = cpi->last_skip_false_probs[2]; 3909 else if (cpi->last_skip_false_probs[2]!=0) 3910 cpi->prob_skip_false = (cpi->last_skip_false_probs[2] + cpi->prob_skip_false ) / 2; 3911 */ 3912 } 3913 else if (cpi->common.refresh_golden_frame) 3914 { 3915 if (cpi->last_skip_false_probs[1] != 0) 3916 cpi->prob_skip_false = cpi->last_skip_false_probs[1]; 3917 3918 /* 3919 if(cpi->last_skip_false_probs[1]!=0 && abs(Q- cpi->last_skip_probs_q[1])<=16 ) 3920 cpi->prob_skip_false = cpi->last_skip_false_probs[1]; 3921 else if (cpi->last_skip_false_probs[1]!=0) 3922 cpi->prob_skip_false = (cpi->last_skip_false_probs[1] + cpi->prob_skip_false ) / 2; 3923 */ 3924 } 3925 else 3926 { 3927 if (cpi->last_skip_false_probs[0] != 0) 3928 cpi->prob_skip_false = cpi->last_skip_false_probs[0]; 3929 3930 /* 3931 if(cpi->last_skip_false_probs[0]!=0 && abs(Q- cpi->last_skip_probs_q[0])<=16 ) 3932 cpi->prob_skip_false = cpi->last_skip_false_probs[0]; 3933 else if(cpi->last_skip_false_probs[0]!=0) 3934 cpi->prob_skip_false = (cpi->last_skip_false_probs[0] + cpi->prob_skip_false ) / 2; 3935 */ 3936 } 3937 3938 /* as this is for cost estimate, let's make sure it does not 3939 * go extreme eitehr way 3940 */ 3941 if (cpi->prob_skip_false < 5) 3942 cpi->prob_skip_false = 5; 3943 3944 if (cpi->prob_skip_false > 250) 3945 cpi->prob_skip_false = 250; 3946 3947 if 
(cpi->oxcf.number_of_layers == 1 && cpi->is_src_frame_alt_ref) 3948 cpi->prob_skip_false = 1; 3949 } 3950 3951 #if 0 3952 3953 if (cpi->pass != 1) 3954 { 3955 FILE *f = fopen("skip.stt", "a"); 3956 fprintf(f, "%d, %d, %4d ", cpi->common.refresh_golden_frame, cpi->common.refresh_alt_ref_frame, cpi->prob_skip_false); 3957 fclose(f); 3958 } 3959 3960 #endif 3961 3962 } 3963 3964 if (cm->frame_type == KEY_FRAME) 3965 { 3966 if(resize_key_frame(cpi)) 3967 { 3968 /* If the frame size has changed, need to reset Q, quantizer, 3969 * and background refresh. 3970 */ 3971 Q = vp8_regulate_q(cpi, cpi->this_frame_target); 3972 if (cpi->cyclic_refresh_mode_enabled) 3973 { 3974 if (cpi->current_layer==0) 3975 cyclic_background_refresh(cpi, Q, 0); 3976 else 3977 disable_segmentation(cpi); 3978 } 3979 vp8_set_quantizer(cpi, Q); 3980 } 3981 3982 vp8_setup_key_frame(cpi); 3983 } 3984 3985 3986 3987 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING 3988 { 3989 if(cpi->oxcf.error_resilient_mode) 3990 cm->refresh_entropy_probs = 0; 3991 3992 if (cpi->oxcf.error_resilient_mode & VPX_ERROR_RESILIENT_PARTITIONS) 3993 { 3994 if (cm->frame_type == KEY_FRAME) 3995 cm->refresh_entropy_probs = 1; 3996 } 3997 3998 if (cm->refresh_entropy_probs == 0) 3999 { 4000 /* save a copy for later refresh */ 4001 vpx_memcpy(&cm->lfc, &cm->fc, sizeof(cm->fc)); 4002 } 4003 4004 vp8_update_coef_context(cpi); 4005 4006 vp8_update_coef_probs(cpi); 4007 4008 /* transform / motion compensation build reconstruction frame 4009 * +pack coef partitions 4010 */ 4011 vp8_encode_frame(cpi); 4012 4013 /* cpi->projected_frame_size is not needed for RT mode */ 4014 } 4015 #else 4016 /* transform / motion compensation build reconstruction frame */ 4017 vp8_encode_frame(cpi); 4018 4019 cpi->projected_frame_size -= vp8_estimate_entropy_savings(cpi); 4020 cpi->projected_frame_size = (cpi->projected_frame_size > 0) ? cpi->projected_frame_size : 0; 4021 #endif 4022 vp8_clear_system_state(); 4023 4024 /* Test to see if the stats generated for this frame indicate that 4025 * we should have coded a key frame (assuming that we didn't)! 4026 */ 4027 4028 if (cpi->pass != 2 && cpi->oxcf.auto_key && cm->frame_type != KEY_FRAME 4029 && cpi->compressor_speed != 2) 4030 { 4031 #if !(CONFIG_REALTIME_ONLY) 4032 if (decide_key_frame(cpi)) 4033 { 4034 /* Reset all our sizing numbers and recode */ 4035 cm->frame_type = KEY_FRAME; 4036 4037 vp8_pick_frame_size(cpi); 4038 4039 /* Clear the Alt reference frame active flag when we have 4040 * a key frame 4041 */ 4042 cpi->source_alt_ref_active = 0; 4043 4044 // Set the loop filter deltas and segmentation map update 4045 setup_features(cpi); 4046 4047 vp8_restore_coding_context(cpi); 4048 4049 Q = vp8_regulate_q(cpi, cpi->this_frame_target); 4050 4051 vp8_compute_frame_size_bounds(cpi, &frame_under_shoot_limit, &frame_over_shoot_limit); 4052 4053 /* Limit Q range for the adaptive loop. */ 4054 bottom_index = cpi->active_best_quality; 4055 top_index = cpi->active_worst_quality; 4056 q_low = cpi->active_best_quality; 4057 q_high = cpi->active_worst_quality; 4058 4059 loop_count++; 4060 Loop = 1; 4061 4062 continue; 4063 } 4064 #endif 4065 } 4066 4067 vp8_clear_system_state(); 4068 4069 if (frame_over_shoot_limit == 0) 4070 frame_over_shoot_limit = 1; 4071 4072 /* Are we are overshooting and up against the limit of active max Q. 
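 * For one-pass or CBR encodes: if the frame overshot while already at the
 * active max Q, active_worst_quality is raised one step at a time (each step
 * assumed below to shave roughly 4% off the frame size) until the estimated
 * overshoot is gone or worst_quality is reached.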
*/ 4073 if (((cpi->pass != 2) || (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER)) && 4074 (Q == cpi->active_worst_quality) && 4075 (cpi->active_worst_quality < cpi->worst_quality) && 4076 (cpi->projected_frame_size > frame_over_shoot_limit)) 4077 { 4078 int over_size_percent = ((cpi->projected_frame_size - frame_over_shoot_limit) * 100) / frame_over_shoot_limit; 4079 4080 /* If so is there any scope for relaxing it */ 4081 while ((cpi->active_worst_quality < cpi->worst_quality) && (over_size_percent > 0)) 4082 { 4083 cpi->active_worst_quality++; 4084 /* Assume 1 qstep = about 4% on frame size. */ 4085 over_size_percent = (int)(over_size_percent * 0.96); 4086 } 4087 #if !(CONFIG_REALTIME_ONLY) 4088 top_index = cpi->active_worst_quality; 4089 #endif 4090 /* If we have updated the active max Q do not call 4091 * vp8_update_rate_correction_factors() this loop. 4092 */ 4093 active_worst_qchanged = 1; 4094 } 4095 else 4096 active_worst_qchanged = 0; 4097 4098 #if !(CONFIG_REALTIME_ONLY) 4099 /* Special case handling for forced key frames */ 4100 if ( (cm->frame_type == KEY_FRAME) && cpi->this_key_frame_forced ) 4101 { 4102 int last_q = Q; 4103 int kf_err = vp8_calc_ss_err(cpi->Source, 4104 &cm->yv12_fb[cm->new_fb_idx]); 4105 4106 /* The key frame is not good enough */ 4107 if ( kf_err > ((cpi->ambient_err * 7) >> 3) ) 4108 { 4109 /* Lower q_high */ 4110 q_high = (Q > q_low) ? (Q - 1) : q_low; 4111 4112 /* Adjust Q */ 4113 Q = (q_high + q_low) >> 1; 4114 } 4115 /* The key frame is much better than the previous frame */ 4116 else if ( kf_err < (cpi->ambient_err >> 1) ) 4117 { 4118 /* Raise q_low */ 4119 q_low = (Q < q_high) ? (Q + 1) : q_high; 4120 4121 /* Adjust Q */ 4122 Q = (q_high + q_low + 1) >> 1; 4123 } 4124 4125 /* Clamp Q to upper and lower limits: */ 4126 if (Q > q_high) 4127 Q = q_high; 4128 else if (Q < q_low) 4129 Q = q_low; 4130 4131 Loop = Q != last_q; 4132 } 4133 4134 /* Is the projected frame size out of range and are we allowed 4135 * to attempt to recode. 4136 */ 4137 else if ( recode_loop_test( cpi, 4138 frame_over_shoot_limit, frame_under_shoot_limit, 4139 Q, top_index, bottom_index ) ) 4140 { 4141 int last_q = Q; 4142 int Retries = 0; 4143 4144 /* Frame size out of permitted range. Update correction factor 4145 * & compute new Q to try... 4146 */ 4147 4148 /* Frame is too large */ 4149 if (cpi->projected_frame_size > cpi->this_frame_target) 4150 { 4151 /* Raise Qlow as to at least the current value */ 4152 q_low = (Q < q_high) ? (Q + 1) : q_high; 4153 4154 /* If we are using over quant do the same for zbin_oq_low */ 4155 if (cpi->mb.zbin_over_quant > 0) 4156 zbin_oq_low = (cpi->mb.zbin_over_quant < zbin_oq_high) ? 4157 (cpi->mb.zbin_over_quant + 1) : zbin_oq_high; 4158 4159 if (undershoot_seen) 4160 { 4161 /* Update rate_correction_factor unless 4162 * cpi->active_worst_quality has changed. 4163 */ 4164 if (!active_worst_qchanged) 4165 vp8_update_rate_correction_factors(cpi, 1); 4166 4167 Q = (q_high + q_low + 1) / 2; 4168 4169 /* Adjust cpi->zbin_over_quant (only allowed when Q 4170 * is max) 4171 */ 4172 if (Q < MAXQ) 4173 cpi->mb.zbin_over_quant = 0; 4174 else 4175 { 4176 zbin_oq_low = (cpi->mb.zbin_over_quant < zbin_oq_high) ? 4177 (cpi->mb.zbin_over_quant + 1) : zbin_oq_high; 4178 cpi->mb.zbin_over_quant = 4179 (zbin_oq_high + zbin_oq_low) / 2; 4180 } 4181 } 4182 else 4183 { 4184 /* Update rate_correction_factor unless 4185 * cpi->active_worst_quality has changed. 
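 * (active_worst_qchanged is set above whenever the active max Q was relaxed
 * in this iteration of the recode loop)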
4186 */ 4187 if (!active_worst_qchanged) 4188 vp8_update_rate_correction_factors(cpi, 0); 4189 4190 Q = vp8_regulate_q(cpi, cpi->this_frame_target); 4191 4192 while (((Q < q_low) || 4193 (cpi->mb.zbin_over_quant < zbin_oq_low)) && 4194 (Retries < 10)) 4195 { 4196 vp8_update_rate_correction_factors(cpi, 0); 4197 Q = vp8_regulate_q(cpi, cpi->this_frame_target); 4198 Retries ++; 4199 } 4200 } 4201 4202 overshoot_seen = 1; 4203 } 4204 /* Frame is too small */ 4205 else 4206 { 4207 if (cpi->mb.zbin_over_quant == 0) 4208 /* Lower q_high if not using over quant */ 4209 q_high = (Q > q_low) ? (Q - 1) : q_low; 4210 else 4211 /* else lower zbin_oq_high */ 4212 zbin_oq_high = (cpi->mb.zbin_over_quant > zbin_oq_low) ? 4213 (cpi->mb.zbin_over_quant - 1) : zbin_oq_low; 4214 4215 if (overshoot_seen) 4216 { 4217 /* Update rate_correction_factor unless 4218 * cpi->active_worst_quality has changed. 4219 */ 4220 if (!active_worst_qchanged) 4221 vp8_update_rate_correction_factors(cpi, 1); 4222 4223 Q = (q_high + q_low) / 2; 4224 4225 /* Adjust cpi->zbin_over_quant (only allowed when Q 4226 * is max) 4227 */ 4228 if (Q < MAXQ) 4229 cpi->mb.zbin_over_quant = 0; 4230 else 4231 cpi->mb.zbin_over_quant = 4232 (zbin_oq_high + zbin_oq_low) / 2; 4233 } 4234 else 4235 { 4236 /* Update rate_correction_factor unless 4237 * cpi->active_worst_quality has changed. 4238 */ 4239 if (!active_worst_qchanged) 4240 vp8_update_rate_correction_factors(cpi, 0); 4241 4242 Q = vp8_regulate_q(cpi, cpi->this_frame_target); 4243 4244 /* Special case reset for qlow for constrained quality. 4245 * This should only trigger where there is very substantial 4246 * undershoot on a frame and the auto cq level is above 4247 * the user passsed in value. 4248 */ 4249 if ( (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) && 4250 (Q < q_low) ) 4251 { 4252 q_low = Q; 4253 } 4254 4255 while (((Q > q_high) || 4256 (cpi->mb.zbin_over_quant > zbin_oq_high)) && 4257 (Retries < 10)) 4258 { 4259 vp8_update_rate_correction_factors(cpi, 0); 4260 Q = vp8_regulate_q(cpi, cpi->this_frame_target); 4261 Retries ++; 4262 } 4263 } 4264 4265 undershoot_seen = 1; 4266 } 4267 4268 /* Clamp Q to upper and lower limits: */ 4269 if (Q > q_high) 4270 Q = q_high; 4271 else if (Q < q_low) 4272 Q = q_low; 4273 4274 /* Clamp cpi->zbin_over_quant */ 4275 cpi->mb.zbin_over_quant = (cpi->mb.zbin_over_quant < zbin_oq_low) ? 4276 zbin_oq_low : (cpi->mb.zbin_over_quant > zbin_oq_high) ? 4277 zbin_oq_high : cpi->mb.zbin_over_quant; 4278 4279 Loop = Q != last_q; 4280 } 4281 else 4282 #endif 4283 Loop = 0; 4284 4285 if (cpi->is_src_frame_alt_ref) 4286 Loop = 0; 4287 4288 if (Loop == 1) 4289 { 4290 vp8_restore_coding_context(cpi); 4291 loop_count++; 4292 #if CONFIG_INTERNAL_STATS 4293 cpi->tot_recode_hits++; 4294 #endif 4295 } 4296 } 4297 while (Loop == 1); 4298 4299 #if 0 4300 /* Experimental code for lagged and one pass 4301 * Update stats used for one pass GF selection 4302 */ 4303 { 4304 cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_coded_error = (double)cpi->prediction_error; 4305 cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_intra_error = (double)cpi->intra_error; 4306 cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_pcnt_inter = (double)(100 - cpi->this_frame_percent_intra) / 100.0; 4307 } 4308 #endif 4309 4310 /* Special case code to reduce pulsing when key frames are forced at a 4311 * fixed interval. 
Note the reconstruction error if it is the frame before 4312 * the force key frame 4313 */ 4314 if ( cpi->next_key_frame_forced && (cpi->twopass.frames_to_key == 0) ) 4315 { 4316 cpi->ambient_err = vp8_calc_ss_err(cpi->Source, 4317 &cm->yv12_fb[cm->new_fb_idx]); 4318 } 4319 4320 /* This frame's MVs are saved and will be used in next frame's MV predictor. 4321 * Last frame has one more line(add to bottom) and one more column(add to 4322 * right) than cm->mip. The edge elements are initialized to 0. 4323 */ 4324 #if CONFIG_MULTI_RES_ENCODING 4325 if(!cpi->oxcf.mr_encoder_id && cm->show_frame) 4326 #else 4327 if(cm->show_frame) /* do not save for altref frame */ 4328 #endif 4329 { 4330 int mb_row; 4331 int mb_col; 4332 /* Point to beginning of allocated MODE_INFO arrays. */ 4333 MODE_INFO *tmp = cm->mip; 4334 4335 if(cm->frame_type != KEY_FRAME) 4336 { 4337 for (mb_row = 0; mb_row < cm->mb_rows+1; mb_row ++) 4338 { 4339 for (mb_col = 0; mb_col < cm->mb_cols+1; mb_col ++) 4340 { 4341 if(tmp->mbmi.ref_frame != INTRA_FRAME) 4342 cpi->lfmv[mb_col + mb_row*(cm->mode_info_stride+1)].as_int = tmp->mbmi.mv.as_int; 4343 4344 cpi->lf_ref_frame_sign_bias[mb_col + mb_row*(cm->mode_info_stride+1)] = cm->ref_frame_sign_bias[tmp->mbmi.ref_frame]; 4345 cpi->lf_ref_frame[mb_col + mb_row*(cm->mode_info_stride+1)] = tmp->mbmi.ref_frame; 4346 tmp++; 4347 } 4348 } 4349 } 4350 } 4351 4352 /* Count last ref frame 0,0 usage on current encoded frame. */ 4353 { 4354 int mb_row; 4355 int mb_col; 4356 /* Point to beginning of MODE_INFO arrays. */ 4357 MODE_INFO *tmp = cm->mi; 4358 4359 cpi->zeromv_count = 0; 4360 4361 if(cm->frame_type != KEY_FRAME) 4362 { 4363 for (mb_row = 0; mb_row < cm->mb_rows; mb_row ++) 4364 { 4365 for (mb_col = 0; mb_col < cm->mb_cols; mb_col ++) 4366 { 4367 if(tmp->mbmi.mode == ZEROMV) 4368 cpi->zeromv_count++; 4369 tmp++; 4370 } 4371 tmp++; 4372 } 4373 } 4374 } 4375 4376 #if CONFIG_MULTI_RES_ENCODING 4377 vp8_cal_dissimilarity(cpi); 4378 #endif 4379 4380 /* Update the GF useage maps. 4381 * This is done after completing the compression of a frame when all 4382 * modes etc. are finalized but before loop filter 4383 */ 4384 if (cpi->oxcf.number_of_layers == 1) 4385 vp8_update_gf_useage_maps(cpi, cm, &cpi->mb); 4386 4387 if (cm->frame_type == KEY_FRAME) 4388 cm->refresh_last_frame = 1; 4389 4390 #if 0 4391 { 4392 FILE *f = fopen("gfactive.stt", "a"); 4393 fprintf(f, "%8d %8d %8d %8d %8d\n", cm->current_video_frame, (100 * cpi->gf_active_count) / (cpi->common.mb_rows * cpi->common.mb_cols), cpi->this_iiratio, cpi->next_iiratio, cm->refresh_golden_frame); 4394 fclose(f); 4395 } 4396 #endif 4397 4398 /* For inter frames the current default behavior is that when 4399 * cm->refresh_golden_frame is set we copy the old GF over to the ARF buffer 4400 * This is purely an encoder decision at present. 
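 * (copy_buffer_to_arf == 2 below requests the copy from the golden buffer,
 * as opposed to 1, which would copy the last frame buffer)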
4401 */ 4402 if (!cpi->oxcf.error_resilient_mode && cm->refresh_golden_frame) 4403 cm->copy_buffer_to_arf = 2; 4404 else 4405 cm->copy_buffer_to_arf = 0; 4406 4407 cm->frame_to_show = &cm->yv12_fb[cm->new_fb_idx]; 4408 4409 #if CONFIG_MULTITHREAD 4410 if (cpi->b_multi_threaded) 4411 { 4412 /* start loopfilter in separate thread */ 4413 sem_post(&cpi->h_event_start_lpf); 4414 cpi->b_lpf_running = 1; 4415 } 4416 else 4417 #endif 4418 { 4419 vp8_loopfilter_frame(cpi, cm); 4420 } 4421 4422 update_reference_frames(cpi); 4423 4424 #if !(CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING) 4425 if (cpi->oxcf.error_resilient_mode) 4426 { 4427 cm->refresh_entropy_probs = 0; 4428 } 4429 #endif 4430 4431 #if CONFIG_MULTITHREAD 4432 /* wait that filter_level is picked so that we can continue with stream packing */ 4433 if (cpi->b_multi_threaded) 4434 sem_wait(&cpi->h_event_end_lpf); 4435 #endif 4436 4437 /* build the bitstream */ 4438 vp8_pack_bitstream(cpi, dest, dest_end, size); 4439 4440 #if CONFIG_MULTITHREAD 4441 /* if PSNR packets are generated we have to wait for the lpf */ 4442 if (cpi->b_lpf_running && cpi->b_calculate_psnr) 4443 { 4444 sem_wait(&cpi->h_event_end_lpf); 4445 cpi->b_lpf_running = 0; 4446 } 4447 #endif 4448 4449 /* Move storing frame_type out of the above loop since it is also 4450 * needed in motion search besides loopfilter */ 4451 cm->last_frame_type = cm->frame_type; 4452 4453 /* Update rate control heuristics */ 4454 cpi->total_byte_count += (*size); 4455 cpi->projected_frame_size = (*size) << 3; 4456 4457 if (cpi->oxcf.number_of_layers > 1) 4458 { 4459 unsigned int i; 4460 for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++) 4461 cpi->layer_context[i].total_byte_count += (*size); 4462 } 4463 4464 if (!active_worst_qchanged) 4465 vp8_update_rate_correction_factors(cpi, 2); 4466 4467 cpi->last_q[cm->frame_type] = cm->base_qindex; 4468 4469 if (cm->frame_type == KEY_FRAME) 4470 { 4471 vp8_adjust_key_frame_context(cpi); 4472 } 4473 4474 /* Keep a record of ambient average Q. */ 4475 if (cm->frame_type != KEY_FRAME) 4476 cpi->avg_frame_qindex = (2 + 3 * cpi->avg_frame_qindex + cm->base_qindex) >> 2; 4477 4478 /* Keep a record from which we can calculate the average Q excluding 4479 * GF updates and key frames 4480 */ 4481 if ((cm->frame_type != KEY_FRAME) && ((cpi->oxcf.number_of_layers > 1) || 4482 (!cm->refresh_golden_frame && !cm->refresh_alt_ref_frame))) 4483 { 4484 cpi->ni_frames++; 4485 4486 /* Calculate the average Q for normal inter frames (not key or GFU 4487 * frames). 4488 */ 4489 if ( cpi->pass == 2 ) 4490 { 4491 cpi->ni_tot_qi += Q; 4492 cpi->ni_av_qi = (cpi->ni_tot_qi / cpi->ni_frames); 4493 } 4494 else 4495 { 4496 /* Damp value for first few frames */ 4497 if (cpi->ni_frames > 150 ) 4498 { 4499 cpi->ni_tot_qi += Q; 4500 cpi->ni_av_qi = (cpi->ni_tot_qi / cpi->ni_frames); 4501 } 4502 /* For one pass, early in the clip ... average the current frame Q 4503 * value with the worstq entered by the user as a dampening measure 4504 */ 4505 else 4506 { 4507 cpi->ni_tot_qi += Q; 4508 cpi->ni_av_qi = ((cpi->ni_tot_qi / cpi->ni_frames) + cpi->worst_quality + 1) / 2; 4509 } 4510 4511 /* If the average Q is higher than what was used in the last 4512 * frame (after going through the recode loop to keep the frame 4513 * size within range) then use the last frame value - 1. 
The -1 4514 * is designed to stop Q and hence the data rate, from 4515 * progressively falling away during difficult sections, but at 4516 * the same time reduce the number of itterations around the 4517 * recode loop. 4518 */ 4519 if (Q > cpi->ni_av_qi) 4520 cpi->ni_av_qi = Q - 1; 4521 } 4522 } 4523 4524 /* Update the buffer level variable. */ 4525 /* Non-viewable frames are a special case and are treated as pure overhead. */ 4526 if ( !cm->show_frame ) 4527 cpi->bits_off_target -= cpi->projected_frame_size; 4528 else 4529 cpi->bits_off_target += cpi->av_per_frame_bandwidth - cpi->projected_frame_size; 4530 4531 /* Clip the buffer level to the maximum specified buffer size */ 4532 if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size) 4533 cpi->bits_off_target = cpi->oxcf.maximum_buffer_size; 4534 4535 /* Rolling monitors of whether we are over or underspending used to 4536 * help regulate min and Max Q in two pass. 4537 */ 4538 cpi->rolling_target_bits = ((cpi->rolling_target_bits * 3) + cpi->this_frame_target + 2) / 4; 4539 cpi->rolling_actual_bits = ((cpi->rolling_actual_bits * 3) + cpi->projected_frame_size + 2) / 4; 4540 cpi->long_rolling_target_bits = ((cpi->long_rolling_target_bits * 31) + cpi->this_frame_target + 16) / 32; 4541 cpi->long_rolling_actual_bits = ((cpi->long_rolling_actual_bits * 31) + cpi->projected_frame_size + 16) / 32; 4542 4543 /* Actual bits spent */ 4544 cpi->total_actual_bits += cpi->projected_frame_size; 4545 4546 /* Debug stats */ 4547 cpi->total_target_vs_actual += (cpi->this_frame_target - cpi->projected_frame_size); 4548 4549 cpi->buffer_level = cpi->bits_off_target; 4550 4551 /* Propagate values to higher temporal layers */ 4552 if (cpi->oxcf.number_of_layers > 1) 4553 { 4554 unsigned int i; 4555 4556 for (i=cpi->current_layer+1; i<cpi->oxcf.number_of_layers; i++) 4557 { 4558 LAYER_CONTEXT *lc = &cpi->layer_context[i]; 4559 int bits_off_for_this_layer = 4560 (int)(lc->target_bandwidth / lc->framerate - 4561 cpi->projected_frame_size); 4562 4563 lc->bits_off_target += bits_off_for_this_layer; 4564 4565 /* Clip buffer level to maximum buffer size for the layer */ 4566 if (lc->bits_off_target > lc->maximum_buffer_size) 4567 lc->bits_off_target = lc->maximum_buffer_size; 4568 4569 lc->total_actual_bits += cpi->projected_frame_size; 4570 lc->total_target_vs_actual += bits_off_for_this_layer; 4571 lc->buffer_level = lc->bits_off_target; 4572 } 4573 } 4574 4575 /* Update bits left to the kf and gf groups to account for overshoot 4576 * or undershoot on these frames 4577 */ 4578 if (cm->frame_type == KEY_FRAME) 4579 { 4580 cpi->twopass.kf_group_bits += cpi->this_frame_target - cpi->projected_frame_size; 4581 4582 if (cpi->twopass.kf_group_bits < 0) 4583 cpi->twopass.kf_group_bits = 0 ; 4584 } 4585 else if (cm->refresh_golden_frame || cm->refresh_alt_ref_frame) 4586 { 4587 cpi->twopass.gf_group_bits += cpi->this_frame_target - cpi->projected_frame_size; 4588 4589 if (cpi->twopass.gf_group_bits < 0) 4590 cpi->twopass.gf_group_bits = 0 ; 4591 } 4592 4593 if (cm->frame_type != KEY_FRAME) 4594 { 4595 if (cpi->common.refresh_alt_ref_frame) 4596 { 4597 cpi->last_skip_false_probs[2] = cpi->prob_skip_false; 4598 cpi->last_skip_probs_q[2] = cm->base_qindex; 4599 } 4600 else if (cpi->common.refresh_golden_frame) 4601 { 4602 cpi->last_skip_false_probs[1] = cpi->prob_skip_false; 4603 cpi->last_skip_probs_q[1] = cm->base_qindex; 4604 } 4605 else 4606 { 4607 cpi->last_skip_false_probs[0] = cpi->prob_skip_false; 4608 cpi->last_skip_probs_q[0] = cm->base_qindex; 4609 4610 /* 
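this was a plain inter frame (no golden or ARF refresh), so also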
update the baseline */ 4611 cpi->base_skip_false_prob[cm->base_qindex] = cpi->prob_skip_false; 4612 4613 } 4614 } 4615 4616 #if 0 && CONFIG_INTERNAL_STATS 4617 { 4618 FILE *f = fopen("tmp.stt", "a"); 4619 4620 vp8_clear_system_state(); 4621 4622 if (cpi->twopass.total_left_stats.coded_error != 0.0) 4623 fprintf(f, "%10d %10d %10d %10d %10d %10d %10d %10d %10d %6d %6d" 4624 "%6d %6d %6d %5d %5d %5d %8d %8.2f %10d %10.3f" 4625 "%10.3f %8d\n", 4626 cpi->common.current_video_frame, cpi->this_frame_target, 4627 cpi->projected_frame_size, 4628 (cpi->projected_frame_size - cpi->this_frame_target), 4629 (int)cpi->total_target_vs_actual, 4630 cpi->buffer_level, 4631 (cpi->oxcf.starting_buffer_level-cpi->bits_off_target), 4632 (int)cpi->total_actual_bits, cm->base_qindex, 4633 cpi->active_best_quality, cpi->active_worst_quality, 4634 cpi->ni_av_qi, cpi->cq_target_quality, 4635 cpi->zbin_over_quant, 4636 cm->refresh_golden_frame, cm->refresh_alt_ref_frame, 4637 cm->frame_type, cpi->gfu_boost, 4638 cpi->twopass.est_max_qcorrection_factor, 4639 (int)cpi->twopass.bits_left, 4640 cpi->twopass.total_left_stats.coded_error, 4641 (double)cpi->twopass.bits_left / 4642 cpi->twopass.total_left_stats.coded_error, 4643 cpi->tot_recode_hits); 4644 else 4645 fprintf(f, "%10d %10d %10d %10d %10d %10d %10d %10d %10d %6d %6d" 4646 "%6d %6d %6d %5d %5d %5d %8d %8.2f %10d %10.3f" 4647 "%8d\n", 4648 cpi->common.current_video_frame, 4649 cpi->this_frame_target, cpi->projected_frame_size, 4650 (cpi->projected_frame_size - cpi->this_frame_target), 4651 (int)cpi->total_target_vs_actual, 4652 cpi->buffer_level, 4653 (cpi->oxcf.starting_buffer_level-cpi->bits_off_target), 4654 (int)cpi->total_actual_bits, cm->base_qindex, 4655 cpi->active_best_quality, cpi->active_worst_quality, 4656 cpi->ni_av_qi, cpi->cq_target_quality, 4657 cpi->zbin_over_quant, 4658 cm->refresh_golden_frame, cm->refresh_alt_ref_frame, 4659 cm->frame_type, cpi->gfu_boost, 4660 cpi->twopass.est_max_qcorrection_factor, 4661 (int)cpi->twopass.bits_left, 4662 cpi->twopass.total_left_stats.coded_error, 4663 cpi->tot_recode_hits); 4664 4665 fclose(f); 4666 4667 { 4668 FILE *fmodes = fopen("Modes.stt", "a"); 4669 int i; 4670 4671 fprintf(fmodes, "%6d:%1d:%1d:%1d ", 4672 cpi->common.current_video_frame, 4673 cm->frame_type, cm->refresh_golden_frame, 4674 cm->refresh_alt_ref_frame); 4675 4676 fprintf(fmodes, "\n"); 4677 4678 fclose(fmodes); 4679 } 4680 } 4681 4682 #endif 4683 4684 if (cm->refresh_golden_frame == 1) 4685 cm->frame_flags = cm->frame_flags | FRAMEFLAGS_GOLDEN; 4686 else 4687 cm->frame_flags = cm->frame_flags&~FRAMEFLAGS_GOLDEN; 4688 4689 if (cm->refresh_alt_ref_frame == 1) 4690 cm->frame_flags = cm->frame_flags | FRAMEFLAGS_ALTREF; 4691 else 4692 cm->frame_flags = cm->frame_flags&~FRAMEFLAGS_ALTREF; 4693 4694 4695 if (cm->refresh_last_frame & cm->refresh_golden_frame) 4696 /* both refreshed */ 4697 cpi->gold_is_last = 1; 4698 else if (cm->refresh_last_frame ^ cm->refresh_golden_frame) 4699 /* 1 refreshed but not the other */ 4700 cpi->gold_is_last = 0; 4701 4702 if (cm->refresh_last_frame & cm->refresh_alt_ref_frame) 4703 /* both refreshed */ 4704 cpi->alt_is_last = 1; 4705 else if (cm->refresh_last_frame ^ cm->refresh_alt_ref_frame) 4706 /* 1 refreshed but not the other */ 4707 cpi->alt_is_last = 0; 4708 4709 if (cm->refresh_alt_ref_frame & cm->refresh_golden_frame) 4710 /* both refreshed */ 4711 cpi->gold_is_alt = 1; 4712 else if (cm->refresh_alt_ref_frame ^ cm->refresh_golden_frame) 4713 /* 1 refreshed but not the other */ 4714 cpi->gold_is_alt = 
0; 4715 4716 cpi->ref_frame_flags = VP8_ALTR_FRAME | VP8_GOLD_FRAME | VP8_LAST_FRAME; 4717 4718 if (cpi->gold_is_last) 4719 cpi->ref_frame_flags &= ~VP8_GOLD_FRAME; 4720 4721 if (cpi->alt_is_last) 4722 cpi->ref_frame_flags &= ~VP8_ALTR_FRAME; 4723 4724 if (cpi->gold_is_alt) 4725 cpi->ref_frame_flags &= ~VP8_ALTR_FRAME; 4726 4727 4728 if (!cpi->oxcf.error_resilient_mode) 4729 { 4730 if (cpi->oxcf.play_alternate && cm->refresh_alt_ref_frame && (cm->frame_type != KEY_FRAME)) 4731 /* Update the alternate reference frame stats as appropriate. */ 4732 update_alt_ref_frame_stats(cpi); 4733 else 4734 /* Update the Golden frame stats as appropriate. */ 4735 update_golden_frame_stats(cpi); 4736 } 4737 4738 if (cm->frame_type == KEY_FRAME) 4739 { 4740 /* Tell the caller that the frame was coded as a key frame */ 4741 *frame_flags = cm->frame_flags | FRAMEFLAGS_KEY; 4742 4743 /* As this frame is a key frame the next defaults to an inter frame. */ 4744 cm->frame_type = INTER_FRAME; 4745 4746 cpi->last_frame_percent_intra = 100; 4747 } 4748 else 4749 { 4750 *frame_flags = cm->frame_flags&~FRAMEFLAGS_KEY; 4751 4752 cpi->last_frame_percent_intra = cpi->this_frame_percent_intra; 4753 } 4754 4755 /* Clear the one shot update flags for segmentation map and mode/ref 4756 * loop filter deltas. 4757 */ 4758 cpi->mb.e_mbd.update_mb_segmentation_map = 0; 4759 cpi->mb.e_mbd.update_mb_segmentation_data = 0; 4760 cpi->mb.e_mbd.mode_ref_lf_delta_update = 0; 4761 4762 4763 /* Dont increment frame counters if this was an altref buffer update 4764 * not a real frame 4765 */ 4766 if (cm->show_frame) 4767 { 4768 cm->current_video_frame++; 4769 cpi->frames_since_key++; 4770 cpi->temporal_pattern_counter++; 4771 } 4772 4773 /* reset to normal state now that we are done. */ 4774 4775 4776 4777 #if 0 4778 { 4779 char filename[512]; 4780 FILE *recon_file; 4781 sprintf(filename, "enc%04d.yuv", (int) cm->current_video_frame); 4782 recon_file = fopen(filename, "wb"); 4783 fwrite(cm->yv12_fb[cm->lst_fb_idx].buffer_alloc, 4784 cm->yv12_fb[cm->lst_fb_idx].frame_size, 1, recon_file); 4785 fclose(recon_file); 4786 } 4787 #endif 4788 4789 /* DEBUG */ 4790 /* vp8_write_yuv_frame("encoder_recon.yuv", cm->frame_to_show); */ 4791 4792 4793 } 4794 #if !(CONFIG_REALTIME_ONLY) 4795 static void Pass2Encode(VP8_COMP *cpi, unsigned long *size, unsigned char *dest, unsigned char * dest_end, unsigned int *frame_flags) 4796 { 4797 4798 if (!cpi->common.refresh_alt_ref_frame) 4799 vp8_second_pass(cpi); 4800 4801 encode_frame_to_data_rate(cpi, size, dest, dest_end, frame_flags); 4802 cpi->twopass.bits_left -= 8 * *size; 4803 4804 if (!cpi->common.refresh_alt_ref_frame) 4805 { 4806 double two_pass_min_rate = (double)(cpi->oxcf.target_bandwidth 4807 *cpi->oxcf.two_pass_vbrmin_section / 100); 4808 cpi->twopass.bits_left += (int64_t)(two_pass_min_rate / cpi->framerate); 4809 } 4810 } 4811 #endif 4812 4813 /* For ARM NEON, d8-d15 are callee-saved registers, and need to be saved. 
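 * vp8_push_neon() / vp8_pop_neon() below save and restore them around the
 * public entry points when NEON code may run.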
*/ 4814 #if HAVE_NEON 4815 extern void vp8_push_neon(int64_t *store); 4816 extern void vp8_pop_neon(int64_t *store); 4817 #endif 4818 4819 4820 int vp8_receive_raw_frame(VP8_COMP *cpi, unsigned int frame_flags, YV12_BUFFER_CONFIG *sd, int64_t time_stamp, int64_t end_time) 4821 { 4822 #if HAVE_NEON 4823 int64_t store_reg[8]; 4824 #if CONFIG_RUNTIME_CPU_DETECT 4825 VP8_COMMON *cm = &cpi->common; 4826 #endif 4827 #endif 4828 struct vpx_usec_timer timer; 4829 int res = 0; 4830 4831 #if HAVE_NEON 4832 #if CONFIG_RUNTIME_CPU_DETECT 4833 if (cm->cpu_caps & HAS_NEON) 4834 #endif 4835 { 4836 vp8_push_neon(store_reg); 4837 } 4838 #endif 4839 4840 vpx_usec_timer_start(&timer); 4841 4842 /* Reinit the lookahead buffer if the frame size changes */ 4843 if (sd->y_width != cpi->oxcf.Width || sd->y_height != cpi->oxcf.Height) 4844 { 4845 assert(cpi->oxcf.lag_in_frames < 2); 4846 dealloc_raw_frame_buffers(cpi); 4847 alloc_raw_frame_buffers(cpi); 4848 } 4849 4850 if(vp8_lookahead_push(cpi->lookahead, sd, time_stamp, end_time, 4851 frame_flags, cpi->active_map_enabled ? cpi->active_map : NULL)) 4852 res = -1; 4853 vpx_usec_timer_mark(&timer); 4854 cpi->time_receive_data += vpx_usec_timer_elapsed(&timer); 4855 4856 #if HAVE_NEON 4857 #if CONFIG_RUNTIME_CPU_DETECT 4858 if (cm->cpu_caps & HAS_NEON) 4859 #endif 4860 { 4861 vp8_pop_neon(store_reg); 4862 } 4863 #endif 4864 4865 return res; 4866 } 4867 4868 4869 static int frame_is_reference(const VP8_COMP *cpi) 4870 { 4871 const VP8_COMMON *cm = &cpi->common; 4872 const MACROBLOCKD *xd = &cpi->mb.e_mbd; 4873 4874 return cm->frame_type == KEY_FRAME || cm->refresh_last_frame 4875 || cm->refresh_golden_frame || cm->refresh_alt_ref_frame 4876 || cm->copy_buffer_to_gf || cm->copy_buffer_to_arf 4877 || cm->refresh_entropy_probs 4878 || xd->mode_ref_lf_delta_update 4879 || xd->update_mb_segmentation_map || xd->update_mb_segmentation_data; 4880 } 4881 4882 4883 int vp8_get_compressed_data(VP8_COMP *cpi, unsigned int *frame_flags, unsigned long *size, unsigned char *dest, unsigned char *dest_end, int64_t *time_stamp, int64_t *time_end, int flush) 4884 { 4885 #if HAVE_NEON 4886 int64_t store_reg[8]; 4887 #endif 4888 VP8_COMMON *cm; 4889 struct vpx_usec_timer tsctimer; 4890 struct vpx_usec_timer ticktimer; 4891 struct vpx_usec_timer cmptimer; 4892 YV12_BUFFER_CONFIG *force_src_buffer = NULL; 4893 4894 if (!cpi) 4895 return -1; 4896 4897 cm = &cpi->common; 4898 4899 if (setjmp(cpi->common.error.jmp)) 4900 { 4901 cpi->common.error.setjmp = 0; 4902 return VPX_CODEC_CORRUPT_FRAME; 4903 } 4904 4905 cpi->common.error.setjmp = 1; 4906 4907 #if HAVE_NEON 4908 #if CONFIG_RUNTIME_CPU_DETECT 4909 if (cm->cpu_caps & HAS_NEON) 4910 #endif 4911 { 4912 vp8_push_neon(store_reg); 4913 } 4914 #endif 4915 4916 vpx_usec_timer_start(&cmptimer); 4917 4918 cpi->source = NULL; 4919 4920 #if !(CONFIG_REALTIME_ONLY) 4921 /* Should we code an alternate reference frame */ 4922 if (cpi->oxcf.error_resilient_mode == 0 && 4923 cpi->oxcf.play_alternate && 4924 cpi->source_alt_ref_pending) 4925 { 4926 if ((cpi->source = vp8_lookahead_peek(cpi->lookahead, 4927 cpi->frames_till_gf_update_due, 4928 PEEK_FORWARD))) 4929 { 4930 cpi->alt_ref_source = cpi->source; 4931 if (cpi->oxcf.arnr_max_frames > 0) 4932 { 4933 vp8_temporal_filter_prepare_c(cpi, 4934 cpi->frames_till_gf_update_due); 4935 force_src_buffer = &cpi->alt_ref_buffer; 4936 } 4937 cpi->frames_till_alt_ref_frame = cpi->frames_till_gf_update_due; 4938 cm->refresh_alt_ref_frame = 1; 4939 cm->refresh_golden_frame = 0; 4940 cm->refresh_last_frame = 0; 4941 
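/* The ARF is coded as a hidden frame: it only updates the alt-ref buffer
 * and is never shown.
 */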
cm->show_frame = 0; 4942 /* Clear Pending alt Ref flag. */ 4943 cpi->source_alt_ref_pending = 0; 4944 cpi->is_src_frame_alt_ref = 0; 4945 } 4946 } 4947 #endif 4948 4949 if (!cpi->source) 4950 { 4951 /* Read last frame source if we are encoding first pass. */ 4952 if (cpi->pass == 1 && cm->current_video_frame > 0) 4953 { 4954 if((cpi->last_source = vp8_lookahead_peek(cpi->lookahead, 1, 4955 PEEK_BACKWARD)) == NULL) 4956 return -1; 4957 } 4958 4959 4960 if ((cpi->source = vp8_lookahead_pop(cpi->lookahead, flush))) 4961 { 4962 cm->show_frame = 1; 4963 4964 cpi->is_src_frame_alt_ref = cpi->alt_ref_source 4965 && (cpi->source == cpi->alt_ref_source); 4966 4967 if(cpi->is_src_frame_alt_ref) 4968 cpi->alt_ref_source = NULL; 4969 } 4970 } 4971 4972 if (cpi->source) 4973 { 4974 cpi->Source = force_src_buffer ? force_src_buffer : &cpi->source->img; 4975 cpi->un_scaled_source = cpi->Source; 4976 *time_stamp = cpi->source->ts_start; 4977 *time_end = cpi->source->ts_end; 4978 *frame_flags = cpi->source->flags; 4979 4980 if (cpi->pass == 1 && cm->current_video_frame > 0) 4981 { 4982 cpi->last_frame_unscaled_source = &cpi->last_source->img; 4983 } 4984 } 4985 else 4986 { 4987 *size = 0; 4988 #if !(CONFIG_REALTIME_ONLY) 4989 4990 if (flush && cpi->pass == 1 && !cpi->twopass.first_pass_done) 4991 { 4992 vp8_end_first_pass(cpi); /* get last stats packet */ 4993 cpi->twopass.first_pass_done = 1; 4994 } 4995 4996 #endif 4997 4998 #if HAVE_NEON 4999 #if CONFIG_RUNTIME_CPU_DETECT 5000 if (cm->cpu_caps & HAS_NEON) 5001 #endif 5002 { 5003 vp8_pop_neon(store_reg); 5004 } 5005 #endif 5006 return -1; 5007 } 5008 5009 if (cpi->source->ts_start < cpi->first_time_stamp_ever) 5010 { 5011 cpi->first_time_stamp_ever = cpi->source->ts_start; 5012 cpi->last_end_time_stamp_seen = cpi->source->ts_start; 5013 } 5014 5015 /* adjust frame rates based on timestamps given */ 5016 if (cm->show_frame) 5017 { 5018 int64_t this_duration; 5019 int step = 0; 5020 5021 if (cpi->source->ts_start == cpi->first_time_stamp_ever) 5022 { 5023 this_duration = cpi->source->ts_end - cpi->source->ts_start; 5024 step = 1; 5025 } 5026 else 5027 { 5028 int64_t last_duration; 5029 5030 this_duration = cpi->source->ts_end - cpi->last_end_time_stamp_seen; 5031 last_duration = cpi->last_end_time_stamp_seen 5032 - cpi->last_time_stamp_seen; 5033 /* do a step update if the duration changes by 10% */ 5034 if (last_duration) 5035 step = (int)(((this_duration - last_duration) * 5036 10 / last_duration)); 5037 } 5038 5039 if (this_duration) 5040 { 5041 if (step) 5042 cpi->ref_framerate = 10000000.0 / this_duration; 5043 else 5044 { 5045 double avg_duration, interval; 5046 5047 /* Average this frame's rate into the last second's average 5048 * frame rate. If we haven't seen 1 second yet, then average 5049 * over the whole interval seen. 
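 * Effectively avg_duration += avg_duration * (this_duration - avg_duration)
 * / interval, with interval capped at 10000000 timestamp ticks (roughly the
 * last second).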
5050 */ 5051 interval = (double)(cpi->source->ts_end - 5052 cpi->first_time_stamp_ever); 5053 if(interval > 10000000.0) 5054 interval = 10000000; 5055 5056 avg_duration = 10000000.0 / cpi->ref_framerate; 5057 avg_duration *= (interval - avg_duration + this_duration); 5058 avg_duration /= interval; 5059 5060 cpi->ref_framerate = 10000000.0 / avg_duration; 5061 } 5062 5063 if (cpi->oxcf.number_of_layers > 1) 5064 { 5065 unsigned int i; 5066 5067 /* Update frame rates for each layer */ 5068 for (i=0; i<cpi->oxcf.number_of_layers; i++) 5069 { 5070 LAYER_CONTEXT *lc = &cpi->layer_context[i]; 5071 lc->framerate = cpi->ref_framerate / 5072 cpi->oxcf.rate_decimator[i]; 5073 } 5074 } 5075 else 5076 vp8_new_framerate(cpi, cpi->ref_framerate); 5077 } 5078 5079 cpi->last_time_stamp_seen = cpi->source->ts_start; 5080 cpi->last_end_time_stamp_seen = cpi->source->ts_end; 5081 } 5082 5083 if (cpi->oxcf.number_of_layers > 1) 5084 { 5085 int layer; 5086 5087 update_layer_contexts (cpi); 5088 5089 /* Restore layer specific context & set frame rate */ 5090 layer = cpi->oxcf.layer_id[ 5091 cpi->temporal_pattern_counter % cpi->oxcf.periodicity]; 5092 restore_layer_context (cpi, layer); 5093 vp8_new_framerate(cpi, cpi->layer_context[layer].framerate); 5094 } 5095 5096 if (cpi->compressor_speed == 2) 5097 { 5098 vpx_usec_timer_start(&tsctimer); 5099 vpx_usec_timer_start(&ticktimer); 5100 } 5101 5102 cpi->lf_zeromv_pct = (cpi->zeromv_count * 100)/cm->MBs; 5103 5104 #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING 5105 { 5106 int i; 5107 const int num_part = (1 << cm->multi_token_partition); 5108 /* the available bytes in dest */ 5109 const unsigned long dest_size = dest_end - dest; 5110 const int tok_part_buff_size = (dest_size * 9) / (10 * num_part); 5111 5112 unsigned char *dp = dest; 5113 5114 cpi->partition_d[0] = dp; 5115 dp += dest_size/10; /* reserve 1/10 for control partition */ 5116 cpi->partition_d_end[0] = dp; 5117 5118 for(i = 0; i < num_part; i++) 5119 { 5120 cpi->partition_d[i + 1] = dp; 5121 dp += tok_part_buff_size; 5122 cpi->partition_d_end[i + 1] = dp; 5123 } 5124 } 5125 #endif 5126 5127 /* start with a 0 size frame */ 5128 *size = 0; 5129 5130 /* Clear down mmx registers */ 5131 vp8_clear_system_state(); 5132 5133 cm->frame_type = INTER_FRAME; 5134 cm->frame_flags = *frame_flags; 5135 5136 #if 0 5137 5138 if (cm->refresh_alt_ref_frame) 5139 { 5140 cm->refresh_golden_frame = 0; 5141 cm->refresh_last_frame = 0; 5142 } 5143 else 5144 { 5145 cm->refresh_golden_frame = 0; 5146 cm->refresh_last_frame = 1; 5147 } 5148 5149 #endif 5150 /* find a free buffer for the new frame */ 5151 { 5152 int i = 0; 5153 for(; i < NUM_YV12_BUFFERS; i++) 5154 { 5155 if(!cm->yv12_fb[i].flags) 5156 { 5157 cm->new_fb_idx = i; 5158 break; 5159 } 5160 } 5161 5162 assert(i < NUM_YV12_BUFFERS ); 5163 } 5164 #if !(CONFIG_REALTIME_ONLY) 5165 5166 if (cpi->pass == 1) 5167 { 5168 Pass1Encode(cpi, size, dest, frame_flags); 5169 } 5170 else if (cpi->pass == 2) 5171 { 5172 Pass2Encode(cpi, size, dest, dest_end, frame_flags); 5173 } 5174 else 5175 #endif 5176 encode_frame_to_data_rate(cpi, size, dest, dest_end, frame_flags); 5177 5178 if (cpi->compressor_speed == 2) 5179 { 5180 unsigned int duration, duration2; 5181 vpx_usec_timer_mark(&tsctimer); 5182 vpx_usec_timer_mark(&ticktimer); 5183 5184 duration = (int)(vpx_usec_timer_elapsed(&ticktimer)); 5185 duration2 = (unsigned int)((double)duration / 2); 5186 5187 if (cm->frame_type != KEY_FRAME) 5188 { 5189 if (cpi->avg_encode_time == 0) 5190 cpi->avg_encode_time = duration; 5191 
else 5192 cpi->avg_encode_time = (7 * cpi->avg_encode_time + duration) >> 3; 5193 } 5194 5195 if (duration2) 5196 { 5197 { 5198 5199 if (cpi->avg_pick_mode_time == 0) 5200 cpi->avg_pick_mode_time = duration2; 5201 else 5202 cpi->avg_pick_mode_time = (7 * cpi->avg_pick_mode_time + duration2) >> 3; 5203 } 5204 } 5205 5206 } 5207 5208 if (cm->refresh_entropy_probs == 0) 5209 { 5210 vpx_memcpy(&cm->fc, &cm->lfc, sizeof(cm->fc)); 5211 } 5212 5213 /* Save the contexts separately for alt ref, gold and last. */ 5214 /* (TODO jbb -> Optimize this with pointers to avoid extra copies. ) */ 5215 if(cm->refresh_alt_ref_frame) 5216 vpx_memcpy(&cpi->lfc_a, &cm->fc, sizeof(cm->fc)); 5217 5218 if(cm->refresh_golden_frame) 5219 vpx_memcpy(&cpi->lfc_g, &cm->fc, sizeof(cm->fc)); 5220 5221 if(cm->refresh_last_frame) 5222 vpx_memcpy(&cpi->lfc_n, &cm->fc, sizeof(cm->fc)); 5223 5224 /* if its a dropped frame honor the requests on subsequent frames */ 5225 if (*size > 0) 5226 { 5227 cpi->droppable = !frame_is_reference(cpi); 5228 5229 /* return to normal state */ 5230 cm->refresh_entropy_probs = 1; 5231 cm->refresh_alt_ref_frame = 0; 5232 cm->refresh_golden_frame = 0; 5233 cm->refresh_last_frame = 1; 5234 cm->frame_type = INTER_FRAME; 5235 5236 } 5237 5238 /* Save layer specific state */ 5239 if (cpi->oxcf.number_of_layers > 1) 5240 save_layer_context (cpi); 5241 5242 vpx_usec_timer_mark(&cmptimer); 5243 cpi->time_compress_data += vpx_usec_timer_elapsed(&cmptimer); 5244 5245 if (cpi->b_calculate_psnr && cpi->pass != 1 && cm->show_frame) 5246 { 5247 generate_psnr_packet(cpi); 5248 } 5249 5250 #if CONFIG_INTERNAL_STATS 5251 5252 if (cpi->pass != 1) 5253 { 5254 cpi->bytes += *size; 5255 5256 if (cm->show_frame) 5257 { 5258 cpi->common.show_frame_mi = cpi->common.mi; 5259 cpi->count ++; 5260 5261 if (cpi->b_calculate_psnr) 5262 { 5263 uint64_t ye,ue,ve; 5264 double frame_psnr; 5265 YV12_BUFFER_CONFIG *orig = cpi->Source; 5266 YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show; 5267 int y_samples = orig->y_height * orig->y_width ; 5268 int uv_samples = orig->uv_height * orig->uv_width ; 5269 int t_samples = y_samples + 2 * uv_samples; 5270 double sq_error, sq_error2; 5271 5272 ye = calc_plane_error(orig->y_buffer, orig->y_stride, 5273 recon->y_buffer, recon->y_stride, orig->y_width, orig->y_height); 5274 5275 ue = calc_plane_error(orig->u_buffer, orig->uv_stride, 5276 recon->u_buffer, recon->uv_stride, orig->uv_width, orig->uv_height); 5277 5278 ve = calc_plane_error(orig->v_buffer, orig->uv_stride, 5279 recon->v_buffer, recon->uv_stride, orig->uv_width, orig->uv_height); 5280 5281 sq_error = (double)(ye + ue + ve); 5282 5283 frame_psnr = vp8_mse2psnr(t_samples, 255.0, sq_error); 5284 5285 cpi->total_y += vp8_mse2psnr(y_samples, 255.0, (double)ye); 5286 cpi->total_u += vp8_mse2psnr(uv_samples, 255.0, (double)ue); 5287 cpi->total_v += vp8_mse2psnr(uv_samples, 255.0, (double)ve); 5288 cpi->total_sq_error += sq_error; 5289 cpi->total += frame_psnr; 5290 #if CONFIG_POSTPROC 5291 { 5292 YV12_BUFFER_CONFIG *pp = &cm->post_proc_buffer; 5293 double frame_psnr2, frame_ssim2 = 0; 5294 double weight = 0; 5295 5296 vp8_deblock(cm, cm->frame_to_show, &cm->post_proc_buffer, cm->filter_level * 10 / 6, 1, 0); 5297 vp8_clear_system_state(); 5298 5299 ye = calc_plane_error(orig->y_buffer, orig->y_stride, 5300 pp->y_buffer, pp->y_stride, orig->y_width, orig->y_height); 5301 5302 ue = calc_plane_error(orig->u_buffer, orig->uv_stride, 5303 pp->u_buffer, pp->uv_stride, orig->uv_width, orig->uv_height); 5304 5305 ve = 
calc_plane_error(orig->v_buffer, orig->uv_stride, 5306 pp->v_buffer, pp->uv_stride, orig->uv_width, orig->uv_height); 5307 5308 sq_error2 = (double)(ye + ue + ve); 5309 5310 frame_psnr2 = vp8_mse2psnr(t_samples, 255.0, sq_error2); 5311 5312 cpi->totalp_y += vp8_mse2psnr(y_samples, 5313 255.0, (double)ye); 5314 cpi->totalp_u += vp8_mse2psnr(uv_samples, 5315 255.0, (double)ue); 5316 cpi->totalp_v += vp8_mse2psnr(uv_samples, 5317 255.0, (double)ve); 5318 cpi->total_sq_error2 += sq_error2; 5319 cpi->totalp += frame_psnr2; 5320 5321 frame_ssim2 = vp8_calc_ssim(cpi->Source, 5322 &cm->post_proc_buffer, 1, &weight); 5323 5324 cpi->summed_quality += frame_ssim2 * weight; 5325 cpi->summed_weights += weight; 5326 5327 if (cpi->oxcf.number_of_layers > 1) 5328 { 5329 unsigned int i; 5330 5331 for (i=cpi->current_layer; 5332 i<cpi->oxcf.number_of_layers; i++) 5333 { 5334 cpi->frames_in_layer[i]++; 5335 5336 cpi->bytes_in_layer[i] += *size; 5337 cpi->sum_psnr[i] += frame_psnr; 5338 cpi->sum_psnr_p[i] += frame_psnr2; 5339 cpi->total_error2[i] += sq_error; 5340 cpi->total_error2_p[i] += sq_error2; 5341 cpi->sum_ssim[i] += frame_ssim2 * weight; 5342 cpi->sum_weights[i] += weight; 5343 } 5344 } 5345 } 5346 #endif 5347 } 5348 5349 if (cpi->b_calculate_ssimg) 5350 { 5351 double y, u, v, frame_all; 5352 frame_all = vp8_calc_ssimg(cpi->Source, cm->frame_to_show, 5353 &y, &u, &v); 5354 5355 if (cpi->oxcf.number_of_layers > 1) 5356 { 5357 unsigned int i; 5358 5359 for (i=cpi->current_layer; 5360 i<cpi->oxcf.number_of_layers; i++) 5361 { 5362 if (!cpi->b_calculate_psnr) 5363 cpi->frames_in_layer[i]++; 5364 5365 cpi->total_ssimg_y_in_layer[i] += y; 5366 cpi->total_ssimg_u_in_layer[i] += u; 5367 cpi->total_ssimg_v_in_layer[i] += v; 5368 cpi->total_ssimg_all_in_layer[i] += frame_all; 5369 } 5370 } 5371 else 5372 { 5373 cpi->total_ssimg_y += y; 5374 cpi->total_ssimg_u += u; 5375 cpi->total_ssimg_v += v; 5376 cpi->total_ssimg_all += frame_all; 5377 } 5378 } 5379 5380 } 5381 } 5382 5383 #if 0 5384 5385 if (cpi->common.frame_type != 0 && cpi->common.base_qindex == cpi->oxcf.worst_allowed_q) 5386 { 5387 skiptruecount += cpi->skip_true_count; 5388 skipfalsecount += cpi->skip_false_count; 5389 } 5390 5391 #endif 5392 #if 0 5393 5394 if (cpi->pass != 1) 5395 { 5396 FILE *f = fopen("skip.stt", "a"); 5397 fprintf(f, "frame:%4d flags:%4x Q:%4d P:%4d Size:%5d\n", cpi->common.current_video_frame, *frame_flags, cpi->common.base_qindex, cpi->prob_skip_false, *size); 5398 5399 if (cpi->is_src_frame_alt_ref == 1) 5400 fprintf(f, "skipcount: %4d framesize: %d\n", cpi->skip_true_count , *size); 5401 5402 fclose(f); 5403 } 5404 5405 #endif 5406 #endif 5407 5408 #if HAVE_NEON 5409 #if CONFIG_RUNTIME_CPU_DETECT 5410 if (cm->cpu_caps & HAS_NEON) 5411 #endif 5412 { 5413 vp8_pop_neon(store_reg); 5414 } 5415 #endif 5416 5417 cpi->common.error.setjmp = 0; 5418 5419 return 0; 5420 } 5421 5422 int vp8_get_preview_raw_frame(VP8_COMP *cpi, YV12_BUFFER_CONFIG *dest, vp8_ppflags_t *flags) 5423 { 5424 if (cpi->common.refresh_alt_ref_frame) 5425 return -1; 5426 else 5427 { 5428 int ret; 5429 5430 #if CONFIG_MULTITHREAD 5431 if(cpi->b_lpf_running) 5432 { 5433 sem_wait(&cpi->h_event_end_lpf); 5434 cpi->b_lpf_running = 0; 5435 } 5436 #endif 5437 5438 #if CONFIG_POSTPROC 5439 cpi->common.show_frame_mi = cpi->common.mi; 5440 ret = vp8_post_proc_frame(&cpi->common, dest, flags); 5441 #else 5442 5443 if (cpi->common.frame_to_show) 5444 { 5445 *dest = *cpi->common.frame_to_show; 5446 dest->y_width = cpi->common.Width; 5447 dest->y_height = cpi->common.Height; 
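/* Chroma planes are half the luma height in this 4:2:0 layout. */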
5448 dest->uv_height = cpi->common.Height / 2; 5449 ret = 0; 5450 } 5451 else 5452 { 5453 ret = -1; 5454 } 5455 5456 #endif 5457 vp8_clear_system_state(); 5458 return ret; 5459 } 5460 } 5461 5462 int vp8_set_roimap(VP8_COMP *cpi, unsigned char *map, unsigned int rows, unsigned int cols, int delta_q[4], int delta_lf[4], unsigned int threshold[4]) 5463 { 5464 signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS]; 5465 int internal_delta_q[MAX_MB_SEGMENTS]; 5466 const int range = 63; 5467 int i; 5468 5469 // This method is currently incompatible with the cyclic refresh method 5470 if ( cpi->cyclic_refresh_mode_enabled ) 5471 return -1; 5472 5473 // Check number of rows and columns match 5474 if (cpi->common.mb_rows != rows || cpi->common.mb_cols != cols) 5475 return -1; 5476 5477 // Range check the delta Q values and convert the external Q range values 5478 // to internal ones. 5479 if ( (abs(delta_q[0]) > range) || (abs(delta_q[1]) > range) || 5480 (abs(delta_q[2]) > range) || (abs(delta_q[3]) > range) ) 5481 return -1; 5482 5483 // Range check the delta lf values 5484 if ( (abs(delta_lf[0]) > range) || (abs(delta_lf[1]) > range) || 5485 (abs(delta_lf[2]) > range) || (abs(delta_lf[3]) > range) ) 5486 return -1; 5487 5488 if (!map) 5489 { 5490 disable_segmentation(cpi); 5491 return 0; 5492 } 5493 5494 // Translate the external delta q values to internal values. 5495 for ( i = 0; i < MAX_MB_SEGMENTS; i++ ) 5496 internal_delta_q[i] = 5497 ( delta_q[i] >= 0 ) ? q_trans[delta_q[i]] : -q_trans[-delta_q[i]]; 5498 5499 /* Set the segmentation Map */ 5500 set_segmentation_map(cpi, map); 5501 5502 /* Activate segmentation. */ 5503 enable_segmentation(cpi); 5504 5505 /* Set up the quant segment data */ 5506 feature_data[MB_LVL_ALT_Q][0] = internal_delta_q[0]; 5507 feature_data[MB_LVL_ALT_Q][1] = internal_delta_q[1]; 5508 feature_data[MB_LVL_ALT_Q][2] = internal_delta_q[2]; 5509 feature_data[MB_LVL_ALT_Q][3] = internal_delta_q[3]; 5510 5511 /* Set up the loop segment data s */ 5512 feature_data[MB_LVL_ALT_LF][0] = delta_lf[0]; 5513 feature_data[MB_LVL_ALT_LF][1] = delta_lf[1]; 5514 feature_data[MB_LVL_ALT_LF][2] = delta_lf[2]; 5515 feature_data[MB_LVL_ALT_LF][3] = delta_lf[3]; 5516 5517 cpi->segment_encode_breakout[0] = threshold[0]; 5518 cpi->segment_encode_breakout[1] = threshold[1]; 5519 cpi->segment_encode_breakout[2] = threshold[2]; 5520 cpi->segment_encode_breakout[3] = threshold[3]; 5521 5522 /* Initialise the feature data structure */ 5523 set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA); 5524 5525 return 0; 5526 } 5527 5528 int vp8_set_active_map(VP8_COMP *cpi, unsigned char *map, unsigned int rows, unsigned int cols) 5529 { 5530 if (rows == cpi->common.mb_rows && cols == cpi->common.mb_cols) 5531 { 5532 if (map) 5533 { 5534 vpx_memcpy(cpi->active_map, map, rows * cols); 5535 cpi->active_map_enabled = 1; 5536 } 5537 else 5538 cpi->active_map_enabled = 0; 5539 5540 return 0; 5541 } 5542 else 5543 { 5544 return -1 ; 5545 } 5546 } 5547 5548 int vp8_set_internal_size(VP8_COMP *cpi, VPX_SCALING horiz_mode, VPX_SCALING vert_mode) 5549 { 5550 if (horiz_mode <= ONETWO) 5551 cpi->common.horiz_scale = horiz_mode; 5552 else 5553 return -1; 5554 5555 if (vert_mode <= ONETWO) 5556 cpi->common.vert_scale = vert_mode; 5557 else 5558 return -1; 5559 5560 return 0; 5561 } 5562 5563 5564 5565 int vp8_calc_ss_err(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *dest) 5566 { 5567 int i, j; 5568 int Total = 0; 5569 5570 unsigned char *src = source->y_buffer; 5571 unsigned char *dst = dest->y_buffer; 
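/* Returns the summed SSE over the Y plane; used earlier in this file, for
 * example, as cpi->ambient_err in the forced key frame quality check.
 */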
5572 5573 /* Loop through the Y plane of the raw and reconstructed data, summing 5574 * the squared differences. 5575 */ 5576 for (i = 0; i < source->y_height; i += 16) 5577 { 5578 for (j = 0; j < source->y_width; j += 16) 5579 { 5580 unsigned int sse; 5581 Total += vp8_mse16x16(src + j, source->y_stride, dst + j, dest->y_stride, &sse); 5582 } 5583 5584 src += 16 * source->y_stride; 5585 dst += 16 * dest->y_stride; 5586 } 5587 5588 return Total; 5589 } 5590 5591 5592 int vp8_get_quantizer(VP8_COMP *cpi) 5593 { 5594 return cpi->common.base_qindex; 5595 } 5596