/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h"

#include <stdio.h>
#include <string.h>

#include <iostream>
#include <sstream>
#include <string>

#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"

namespace webrtc {

// Scales |source_frame| to |target_width| x |target_height| using the filter
// mode set by |mode|. |expected_psnr| verifies basic quality when the
// resampled frame is scaled back up/down to the original/source size; it is
// set ~0.1/0.05 dB lower than the actual PSNR measured under the same
// conditions.
void TestSize(const I420VideoFrame& source_frame, int target_width,
              int target_height, int mode, double expected_psnr,
              VideoProcessingModule* vpm);
bool CompareFrames(const webrtc::I420VideoFrame& frame1,
                   const webrtc::I420VideoFrame& frame2);

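// Note on the fixture dimensions below: the source clip is CIF (352x288) in
// I420 layout, so each frame is width*height luma bytes plus two chroma
// planes at half resolution in each dimension:
//   352*288 + 2*(176*144) = 101376 + 50688 = 152064 bytes,
// which is the value CalcBufferSize(kI420, 352, 288) is expected to return
// for frame_length_.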
VideoProcessingModuleTest::VideoProcessingModuleTest()
    : vpm_(NULL),
      source_file_(NULL),
      width_(352),
      half_width_((width_ + 1) / 2),
      height_(288),
      size_y_(width_ * height_),
      size_uv_(half_width_ * ((height_ + 1) / 2)),
      frame_length_(CalcBufferSize(kI420, width_, height_)) {}

void VideoProcessingModuleTest::SetUp() {
  vpm_ = VideoProcessingModule::Create(0);
  ASSERT_TRUE(vpm_ != NULL);

  ASSERT_EQ(0, video_frame_.CreateEmptyFrame(width_, height_, width_,
                                             half_width_, half_width_));
  // Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
  memset(video_frame_.buffer(kYPlane), 0, video_frame_.allocated_size(kYPlane));
  memset(video_frame_.buffer(kUPlane), 0, video_frame_.allocated_size(kUPlane));
  memset(video_frame_.buffer(kVPlane), 0, video_frame_.allocated_size(kVPlane));
  const std::string video_file =
      webrtc::test::ResourcePath("foreman_cif", "yuv");
  source_file_ = fopen(video_file.c_str(), "rb");
  ASSERT_TRUE(source_file_ != NULL) <<
      "Cannot read source file: " + video_file + "\n";
}

void VideoProcessingModuleTest::TearDown() {
  if (source_file_ != NULL) {
    ASSERT_EQ(0, fclose(source_file_));
  }
  source_file_ = NULL;

  if (vpm_ != NULL) {
    VideoProcessingModule::Destroy(vpm_);
  }
  vpm_ = NULL;
}

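// A frame with dimensions set but no allocated buffer is expected to be
// rejected by every VPM entry point.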
TEST_F(VideoProcessingModuleTest, HandleNullBuffer) {
  // TODO(mikhal/stefan): Do we need this one?
  VideoProcessingModule::FrameStats stats;
  // Video frame with unallocated buffer.
  I420VideoFrame videoFrame;
  videoFrame.set_width(width_);
  videoFrame.set_height(height_);

  EXPECT_EQ(-3, vpm_->GetFrameStats(&stats, videoFrame));

  EXPECT_EQ(-1, vpm_->ColorEnhancement(&videoFrame));

  EXPECT_EQ(-1, vpm_->Deflickering(&videoFrame, &stats));

  EXPECT_EQ(-3, vpm_->BrightnessDetection(videoFrame, stats));
}

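// Uninitialized (invalid) frame stats should cause the stats-dependent
// operations to fail even when the frame itself is valid.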
TEST_F(VideoProcessingModuleTest, HandleBadStats) {
  VideoProcessingModule::FrameStats stats;
  scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
  ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
                                 source_file_));
  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
                             width_, height_,
                             0, kRotateNone, &video_frame_));

  EXPECT_EQ(-1, vpm_->Deflickering(&video_frame_, &stats));

  EXPECT_EQ(-3, vpm_->BrightnessDetection(video_frame_, stats));
}

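// A frame whose height has been reset to zero should be rejected by all
// operations, including SetTargetResolution() and PreprocessFrame().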
TEST_F(VideoProcessingModuleTest, HandleBadSize) {
  VideoProcessingModule::FrameStats stats;

  video_frame_.ResetSize();
  video_frame_.set_width(width_);
  video_frame_.set_height(0);
  EXPECT_EQ(-3, vpm_->GetFrameStats(&stats, video_frame_));

  EXPECT_EQ(-1, vpm_->ColorEnhancement(&video_frame_));

  EXPECT_EQ(-1, vpm_->Deflickering(&video_frame_, &stats));

  EXPECT_EQ(-3, vpm_->BrightnessDetection(video_frame_, stats));

  EXPECT_EQ(VPM_PARAMETER_ERROR, vpm_->SetTargetResolution(0, 0, 0));

  I420VideoFrame* out_frame = NULL;
  EXPECT_EQ(VPM_PARAMETER_ERROR, vpm_->PreprocessFrame(video_frame_,
                                                       &out_frame));
}

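// Processing the same frame before and after Reset() should give
// bit-identical results for the stateful, non-static operations.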
TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset) {
  I420VideoFrame video_frame2;
  VideoProcessingModule::FrameStats stats;
  // Only testing non-static functions here.
  scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
  ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
                                 source_file_));
  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
                             width_, height_,
                             0, kRotateNone, &video_frame_));
  ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
  ASSERT_EQ(0, video_frame2.CopyFrame(video_frame_));
  ASSERT_EQ(0, vpm_->Deflickering(&video_frame_, &stats));
  vpm_->Reset();
  // Retrieve frame stats again in case Deflickering() has zeroed them.
  ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame2));
  ASSERT_EQ(0, vpm_->Deflickering(&video_frame2, &stats));
  EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));

  ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
                                 source_file_));
  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
                             width_, height_,
                             0, kRotateNone, &video_frame_));
  ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
  ASSERT_EQ(0, video_frame2.CopyFrame(video_frame_));
  ASSERT_EQ(0, vpm_->BrightnessDetection(video_frame_, stats));
  vpm_->Reset();
  ASSERT_EQ(0, vpm_->BrightnessDetection(video_frame2, stats));
  EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));
}

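// Frame stats should be invalid until GetFrameStats() fills them in, and
// invalid again after ClearFrameStats().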
TEST_F(VideoProcessingModuleTest, FrameStats) {
  VideoProcessingModule::FrameStats stats;
  scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
  ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
                                 source_file_));
  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
                             width_, height_,
                             0, kRotateNone, &video_frame_));

  EXPECT_FALSE(vpm_->ValidFrameStats(stats));
  EXPECT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
  EXPECT_TRUE(vpm_->ValidFrameStats(stats));

  printf("\nFrameStats\n");
  printf("mean: %u\nnum_pixels: %u\nsubSamplWidth: "
         "%u\nsubSamplHeight: %u\nsum: %u\n\n",
         static_cast<unsigned int>(stats.mean),
         static_cast<unsigned int>(stats.num_pixels),
         static_cast<unsigned int>(stats.subSamplWidth),
         static_cast<unsigned int>(stats.subSamplHeight),
         static_cast<unsigned int>(stats.sum));

  vpm_->ClearFrameStats(&stats);
  EXPECT_FALSE(vpm_->ValidFrameStats(stats));
}

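// PreprocessFrame() should hand back a separate, rescaled output frame only
// when rescaling is enabled; with kNoRescaling the output pointer stays NULL,
// presumably so the caller uses the input frame as-is.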
TEST_F(VideoProcessingModuleTest, PreprocessorLogic) {
  // Disable temporal sampling (frame dropping).
  vpm_->EnableTemporalDecimation(false);
  int resolution = 100;
  EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 15));
  EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 30));
  // Disable spatial sampling.
  vpm_->SetInputFrameResampleMode(kNoRescaling);
  EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 30));
  I420VideoFrame* out_frame = NULL;
  // Set rescaling => output frame != NULL.
  vpm_->SetInputFrameResampleMode(kFastRescaling);
  EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 30));
  EXPECT_EQ(VPM_OK, vpm_->PreprocessFrame(video_frame_, &out_frame));
  EXPECT_FALSE(out_frame == NULL);
  if (out_frame) {
    EXPECT_EQ(resolution, out_frame->width());
    EXPECT_EQ(resolution, out_frame->height());
  }
  // No rescaling => output frame == NULL.
  vpm_->SetInputFrameResampleMode(kNoRescaling);
  EXPECT_EQ(VPM_OK, vpm_->PreprocessFrame(video_frame_, &out_frame));
  EXPECT_TRUE(out_frame == NULL);
}

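// Scales a single CIF frame to a range of target resolutions (up, down, and
// odd sizes) and checks, via TestSize(), that scaling back to the source size
// preserves a minimum PSNR. Also reports rough run times.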
TEST_F(VideoProcessingModuleTest, Resampler) {
  enum { NumRuns = 1 };

  int64_t min_runtime = 0;
  int64_t avg_runtime = 0;

  TickTime t0;
  TickTime t1;
  TickInterval acc_ticks;

  rewind(source_file_);
  ASSERT_TRUE(source_file_ != NULL) <<
      "Cannot read input file\n";

  // Content analysis is not needed here.
  vpm_->EnableContentAnalysis(false);
  // No temporal decimation.
  vpm_->EnableTemporalDecimation(false);

  // Read one test frame.
  scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
  ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
                                 source_file_));
  // Use ConvertToI420 to add stride to the image.
  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
                             width_, height_,
                             0, kRotateNone, &video_frame_));

  for (uint32_t run_idx = 0; run_idx < NumRuns; run_idx++) {
    // Start the test timer.
    t0 = TickTime::Now();

    // Initialize the source frame with a timestamp.
    video_frame_.set_render_time_ms(t0.MillisecondTimestamp());
    video_frame_.set_timestamp(t0.MillisecondTimestamp() * 90);

    // Test scaling to different sizes; the source is |width_| x |height_| =
    // 352 x 288. The scaling mode in VPM is currently fixed to kScaleBox
    // (mode = 3).
    TestSize(video_frame_, 100, 50, 3, 24.0, vpm_);
    TestSize(video_frame_, 352 / 4, 288 / 4, 3, 25.2, vpm_);
    TestSize(video_frame_, 352 / 2, 288 / 2, 3, 28.1, vpm_);
    TestSize(video_frame_, 352, 288, 3, -1, vpm_);  // No resampling.
    TestSize(video_frame_, 2 * 352, 2 * 288, 3, 32.2, vpm_);
    TestSize(video_frame_, 400, 256, 3, 31.3, vpm_);
    TestSize(video_frame_, 480, 640, 3, 32.15, vpm_);
    TestSize(video_frame_, 960, 720, 3, 32.2, vpm_);
    TestSize(video_frame_, 1280, 720, 3, 32.15, vpm_);
    // Upsample to an odd size.
    TestSize(video_frame_, 501, 333, 3, 32.05, vpm_);
    // Downsample to an odd size.
    TestSize(video_frame_, 281, 175, 3, 29.3, vpm_);

    // Stop the timer.
    t1 = TickTime::Now();
    acc_ticks += (t1 - t0);

    if (acc_ticks.Microseconds() < min_runtime || run_idx == 0) {
      min_runtime = acc_ticks.Microseconds();
    }
    avg_runtime += acc_ticks.Microseconds();
  }

  printf("\nAverage run time = %d us / frame\n",
         static_cast<int>(avg_runtime));
  printf("Min run time = %d us / frame\n\n",
         static_cast<int>(min_runtime));
}

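// Helper for the Resampler test above. For reference, PSNR for 8-bit video is
// 10 * log10(255^2 / MSE) in dB, so higher values mean the frame that was
// scaled down/up and back again is closer to the original source frame.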
void TestSize(const I420VideoFrame& source_frame, int target_width,
              int target_height, int mode, double expected_psnr,
              VideoProcessingModule* vpm) {
  int source_width = source_frame.width();
  int source_height = source_frame.height();
  I420VideoFrame* out_frame = NULL;

  ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(target_width, target_height, 30));
  ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(source_frame, &out_frame));

  if (out_frame) {
    EXPECT_EQ(source_frame.render_time_ms(), out_frame->render_time_ms());
    EXPECT_EQ(source_frame.timestamp(), out_frame->timestamp());
  }

  // If the frame was resampled (i.e. the scale changed):
  // (1) write the resampled frame (|out_frame|) to file for visual inspection;
  // (2) scale it back to the original/source size;
  // (3) compute the PSNR relative to |source_frame| (for automatic
  //     verification) and write the round-tripped frame to file as well.
  if (target_width != source_width || target_height != source_height) {
    // Write the processed frame to file for visual inspection.
    std::ostringstream filename;
    filename << webrtc::test::OutputPath() << "Resampler_" << mode << "_" <<
        "from_" << source_width << "x" << source_height << "_to_" <<
        target_width << "x" << target_height << "_30Hz_P420.yuv";
    std::cout << "Watch " << filename.str() << " and verify that it is okay."
        << std::endl;
    FILE* stand_alone_file = fopen(filename.str().c_str(), "wb");
    ASSERT_TRUE(stand_alone_file != NULL);
    if (PrintI420VideoFrame(*out_frame, stand_alone_file) < 0) {
      fprintf(stderr, "Failed to write frame for scaling to width/height: "
              "%d %d\n", target_width, target_height);
      fclose(stand_alone_file);
      return;
    }
    fclose(stand_alone_file);

    I420VideoFrame resampled_source_frame;
    resampled_source_frame.CopyFrame(*out_frame);

    // Scale |resampled_source_frame| back to the original/source size.
    ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(source_width,
                                               source_height,
                                               30));
    ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(resampled_source_frame,
                                           &out_frame));

    // Write the processed frame to file for visual inspection.
    std::ostringstream filename2;
    filename2 << webrtc::test::OutputPath() << "Resampler_" << mode << "_" <<
        "from_" << target_width << "x" << target_height << "_to_" <<
        source_width << "x" << source_height << "_30Hz_P420.yuv";
    std::cout << "Watch " << filename2.str() << " and verify that it is okay."
        << std::endl;
    stand_alone_file = fopen(filename2.str().c_str(), "wb");
    ASSERT_TRUE(stand_alone_file != NULL);
    if (PrintI420VideoFrame(*out_frame, stand_alone_file) < 0) {
      fprintf(stderr, "Failed to write frame for scaling to width/height: "
              "%d %d\n", source_width, source_height);
      fclose(stand_alone_file);
      return;
    }
    fclose(stand_alone_file);

    // Compute the PSNR and check the expectation.
    double psnr = I420PSNR(&source_frame, out_frame);
    EXPECT_GT(psnr, expected_psnr);
    printf("PSNR: %f. PSNR is between the source of size %d %d and a modified "
           "source which is scaled down/up to %d %d and back to the source "
           "size.\n",
           psnr, source_width, source_height, target_width, target_height);
  }
}

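// Returns true when both frames have identically sized planes with identical
// contents.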
bool CompareFrames(const webrtc::I420VideoFrame& frame1,
                   const webrtc::I420VideoFrame& frame2) {
  for (int plane = 0; plane < webrtc::kNumOfPlanes; plane++) {
    webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane);
    int allocated_size1 = frame1.allocated_size(plane_type);
    int allocated_size2 = frame2.allocated_size(plane_type);
    if (allocated_size1 != allocated_size2)
      return false;
    const uint8_t* plane_buffer1 = frame1.buffer(plane_type);
    const uint8_t* plane_buffer2 = frame2.buffer(plane_type);
    if (memcmp(plane_buffer1, plane_buffer2, allocated_size1))
      return false;
  }
  return true;
}

}  // namespace webrtc