/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h"

#include <string>

#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/test/testsupport/fileutils.h"

namespace webrtc {

// Scales |source_frame| to |target_width| x |target_height| using the filter
// mode given by |mode|. |expected_psnr| provides a basic quality check: the
// resampled frame is scaled back to the source size and its PSNR against the
// source must exceed |expected_psnr|, which is set roughly 0.05-0.1 dB below
// the PSNR measured under the same conditions.
void TestSize(const I420VideoFrame& source_frame, int target_width,
              int target_height, int mode, double expected_psnr,
              VideoProcessingModule* vpm);
bool CompareFrames(const webrtc::I420VideoFrame& frame1,
                   const webrtc::I420VideoFrame& frame2);
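
// For reference: PSNR (peak signal-to-noise ratio) for 8-bit samples is
// 10 * log10(255^2 / MSE), so higher values mean the round-tripped frame is
// closer to the source. The thresholds used in the Resampler test below range
// from about 24 dB for heavy downscaling to about 32 dB for mild rescaling.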

VideoProcessingModuleTest::VideoProcessingModuleTest()
    : vpm_(NULL),
      source_file_(NULL),
      width_(352),
      half_width_((width_ + 1) / 2),
      height_(288),
      size_y_(width_ * height_),
      size_uv_(half_width_ * ((height_ + 1) / 2)),
      frame_length_(CalcBufferSize(kI420, width_, height_)) {}
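// For the 352x288 (CIF) I420 source used in these tests, the members above
// work out to:
//   size_y_       = 352 * 288              = 101376 bytes
//   size_uv_      = 176 * 144              =  25344 bytes per chroma plane
//   frame_length_ = size_y_ + 2 * size_uv_ = 152064 bytes,
// which matches what CalcBufferSize(kI420, 352, 288) returns for one frame.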

void VideoProcessingModuleTest::SetUp() {
  vpm_ = VideoProcessingModule::Create(0);
  ASSERT_TRUE(vpm_ != NULL);

  ASSERT_EQ(0, video_frame_.CreateEmptyFrame(width_, height_, width_,
                                             half_width_, half_width_));
  // Clear video frame so DrMemory/Valgrind will allow reads of the buffer.
  memset(video_frame_.buffer(kYPlane), 0, video_frame_.allocated_size(kYPlane));
  memset(video_frame_.buffer(kUPlane), 0, video_frame_.allocated_size(kUPlane));
  memset(video_frame_.buffer(kVPlane), 0, video_frame_.allocated_size(kVPlane));
  const std::string video_file =
      webrtc::test::ResourcePath("foreman_cif", "yuv");
  source_file_ = fopen(video_file.c_str(), "rb");
  ASSERT_TRUE(source_file_ != NULL) <<
      "Cannot read source file: " + video_file + "\n";
}

void VideoProcessingModuleTest::TearDown() {
  if (source_file_ != NULL) {
    ASSERT_EQ(0, fclose(source_file_));
  }
  source_file_ = NULL;

  if (vpm_ != NULL) {
    VideoProcessingModule::Destroy(vpm_);
  }
  vpm_ = NULL;
}

TEST_F(VideoProcessingModuleTest, HandleNullBuffer) {
  // TODO(mikhal/stefan): Do we need this one?
  VideoProcessingModule::FrameStats stats;
  // Video frame with unallocated buffer.
  I420VideoFrame videoFrame;
  videoFrame.set_width(width_);
  videoFrame.set_height(height_);

  EXPECT_EQ(-3, vpm_->GetFrameStats(&stats, videoFrame));

  EXPECT_EQ(-1, vpm_->ColorEnhancement(&videoFrame));

  EXPECT_EQ(-1, vpm_->Deflickering(&videoFrame, &stats));

  EXPECT_EQ(-1, vpm_->Denoising(&videoFrame));

  EXPECT_EQ(-3, vpm_->BrightnessDetection(videoFrame, stats));
}

TEST_F(VideoProcessingModuleTest, HandleBadStats) {
  VideoProcessingModule::FrameStats stats;
  scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
  ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
                                 source_file_));
  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
                             width_, height_,
                             0, kRotateNone, &video_frame_));

  EXPECT_EQ(-1, vpm_->Deflickering(&video_frame_, &stats));

  EXPECT_EQ(-3, vpm_->BrightnessDetection(video_frame_, stats));
}

TEST_F(VideoProcessingModuleTest, HandleBadSize) {
  VideoProcessingModule::FrameStats stats;

  video_frame_.ResetSize();
  video_frame_.set_width(width_);
  video_frame_.set_height(0);
  EXPECT_EQ(-3, vpm_->GetFrameStats(&stats, video_frame_));

  EXPECT_EQ(-1, vpm_->ColorEnhancement(&video_frame_));

  EXPECT_EQ(-1, vpm_->Deflickering(&video_frame_, &stats));

  EXPECT_EQ(-1, vpm_->Denoising(&video_frame_));

  EXPECT_EQ(-3, vpm_->BrightnessDetection(video_frame_, stats));

  EXPECT_EQ(VPM_PARAMETER_ERROR, vpm_->SetTargetResolution(0, 0, 0));

  I420VideoFrame* out_frame = NULL;
  EXPECT_EQ(VPM_PARAMETER_ERROR, vpm_->PreprocessFrame(video_frame_,
                                                       &out_frame));
}

TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset) {
  I420VideoFrame video_frame2;
  VideoProcessingModule::FrameStats stats;
  // Only testing non-static functions here.
  scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
  ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
                                 source_file_));
  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
                             width_, height_,
                             0, kRotateNone, &video_frame_));
  ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
  ASSERT_EQ(0, video_frame2.CopyFrame(video_frame_));
  ASSERT_EQ(0, vpm_->Deflickering(&video_frame_, &stats));
  vpm_->Reset();
  // Retrieve frame stats again in case Deflickering() has zeroed them.
  ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame2));
  ASSERT_EQ(0, vpm_->Deflickering(&video_frame2, &stats));
  EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));

  ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
                                 source_file_));
  // Using ConvertToI420 to add stride to the image.
  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
                             width_, height_,
                             0, kRotateNone, &video_frame_));
  video_frame2.CopyFrame(video_frame_);
  EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));
  ASSERT_GE(vpm_->Denoising(&video_frame_), 0);
  vpm_->Reset();
  ASSERT_GE(vpm_->Denoising(&video_frame2), 0);
  EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));

  ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
                                 source_file_));
  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
                             width_, height_,
                             0, kRotateNone, &video_frame_));
  ASSERT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
  video_frame2.CopyFrame(video_frame_);
  ASSERT_EQ(0, vpm_->BrightnessDetection(video_frame_, stats));
  vpm_->Reset();
  ASSERT_EQ(0, vpm_->BrightnessDetection(video_frame2, stats));
  EXPECT_TRUE(CompareFrames(video_frame_, video_frame2));
}

TEST_F(VideoProcessingModuleTest, FrameStats) {
  VideoProcessingModule::FrameStats stats;
  scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
  ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
                                 source_file_));
  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
                             width_, height_,
                             0, kRotateNone, &video_frame_));

  EXPECT_FALSE(vpm_->ValidFrameStats(stats));
  EXPECT_EQ(0, vpm_->GetFrameStats(&stats, video_frame_));
  EXPECT_TRUE(vpm_->ValidFrameStats(stats));

  printf("\nFrameStats\n");
  printf("mean: %u\nnum_pixels: %u\nsubSamplWidth: "
         "%u\nsubSamplHeight: %u\nsum: %u\n\n",
         static_cast<unsigned int>(stats.mean),
         static_cast<unsigned int>(stats.num_pixels),
         static_cast<unsigned int>(stats.subSamplWidth),
         static_cast<unsigned int>(stats.subSamplHeight),
         static_cast<unsigned int>(stats.sum));

  vpm_->ClearFrameStats(&stats);
  EXPECT_FALSE(vpm_->ValidFrameStats(stats));
}
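
// FrameStats is the handshake between GetFrameStats() and the functions that
// consume it: as HandleBadStats above shows, Deflickering() and
// BrightnessDetection() reject stats that have not been filled in for the
// frame being processed, and ClearFrameStats() invalidates them again.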

TEST_F(VideoProcessingModuleTest, PreprocessorLogic) {
  // Disable temporal sampling (frame dropping).
  vpm_->EnableTemporalDecimation(false);
  int resolution = 100;
  EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 15));
  EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 30));
  // Disable spatial sampling.
  vpm_->SetInputFrameResampleMode(kNoRescaling);
  EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 30));
  I420VideoFrame* out_frame = NULL;
  // Set rescaling => output frame != NULL.
  vpm_->SetInputFrameResampleMode(kFastRescaling);
  EXPECT_EQ(VPM_OK, vpm_->SetTargetResolution(resolution, resolution, 30));
  EXPECT_EQ(VPM_OK, vpm_->PreprocessFrame(video_frame_, &out_frame));
  EXPECT_FALSE(out_frame == NULL);
  if (out_frame) {
    EXPECT_EQ(resolution, out_frame->width());
    EXPECT_EQ(resolution, out_frame->height());
  }
  // No rescaling => output frame == NULL.
  vpm_->SetInputFrameResampleMode(kNoRescaling);
  EXPECT_EQ(VPM_OK, vpm_->PreprocessFrame(video_frame_, &out_frame));
  EXPECT_TRUE(out_frame == NULL);
}
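
// The contract exercised above: after PreprocessFrame() succeeds, a NULL
// |out_frame| means the input frame was not resampled and should be used
// as-is, while a non-NULL |out_frame| points to the rescaled frame. A caller
// sketch (illustrative only, not part of these tests):
//
//   I420VideoFrame* processed = NULL;
//   if (vpm->PreprocessFrame(input_frame, &processed) == VPM_OK) {
//     const I420VideoFrame& frame_to_encode =
//         (processed != NULL) ? *processed : input_frame;
//     // Hand |frame_to_encode| to the encoder.
//   }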

TEST_F(VideoProcessingModuleTest, Resampler) {
  enum { NumRuns = 1 };

  int64_t min_runtime = 0;
  int64_t avg_runtime = 0;

  TickTime t0;
  TickTime t1;
  TickInterval acc_ticks;

  ASSERT_TRUE(source_file_ != NULL) <<
      "Cannot read input file\n";
  rewind(source_file_);

  // Content analysis is not needed here.
  vpm_->EnableContentAnalysis(false);
  // No temporal decimation.
  vpm_->EnableTemporalDecimation(false);

  // Read one source frame.
  scoped_ptr<uint8_t[]> video_buffer(new uint8_t[frame_length_]);
  ASSERT_EQ(frame_length_, fread(video_buffer.get(), 1, frame_length_,
                                 source_file_));
  // Using ConvertToI420 to add stride to the image.
  EXPECT_EQ(0, ConvertToI420(kI420, video_buffer.get(), 0, 0,
                             width_, height_,
                             0, kRotateNone, &video_frame_));

  for (int run_idx = 0; run_idx < NumRuns; run_idx++) {
    // Start the test timer.
    t0 = TickTime::Now();

    // Initialize the source frame with a render time and the corresponding
    // 90 kHz RTP timestamp.
    video_frame_.set_render_time_ms(t0.MillisecondTimestamp());
    video_frame_.set_timestamp(t0.MillisecondTimestamp() * 90);

    // Test scaling to different sizes: the source is |width_| x |height_| =
    // 352x288. Scaling mode in VPM is currently fixed to kScaleBox (mode = 3).
    TestSize(video_frame_, 100, 50, 3, 24.0, vpm_);
    TestSize(video_frame_, 352 / 4, 288 / 4, 3, 25.2, vpm_);
    TestSize(video_frame_, 352 / 2, 288 / 2, 3, 28.1, vpm_);
    TestSize(video_frame_, 352, 288, 3, -1, vpm_);  // No resampling.
    TestSize(video_frame_, 2 * 352, 2 * 288, 3, 32.2, vpm_);
    TestSize(video_frame_, 400, 256, 3, 31.3, vpm_);
    TestSize(video_frame_, 480, 640, 3, 32.15, vpm_);
    TestSize(video_frame_, 960, 720, 3, 32.2, vpm_);
    TestSize(video_frame_, 1280, 720, 3, 32.15, vpm_);
    // Upsample to an odd size.
    TestSize(video_frame_, 501, 333, 3, 32.05, vpm_);
    // Downsample to an odd size.
    TestSize(video_frame_, 281, 175, 3, 29.3, vpm_);

    // Stop the timer.
    t1 = TickTime::Now();
    acc_ticks += (t1 - t0);

    if (acc_ticks.Microseconds() < min_runtime || run_idx == 0) {
      min_runtime = acc_ticks.Microseconds();
    }
    avg_runtime += acc_ticks.Microseconds();
  }

  printf("\nAverage run time = %d us / frame\n",
         static_cast<int>(avg_runtime / NumRuns));
  printf("Min run time = %d us / frame\n\n",
         static_cast<int>(min_runtime));
}

void TestSize(const I420VideoFrame& source_frame, int target_width,
              int target_height, int mode, double expected_psnr,
              VideoProcessingModule* vpm) {
  int source_width = source_frame.width();
  int source_height = source_frame.height();
  I420VideoFrame* out_frame = NULL;

  ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(target_width, target_height, 30));
  ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(source_frame, &out_frame));

  if (out_frame) {
    EXPECT_EQ(source_frame.render_time_ms(), out_frame->render_time_ms());
    EXPECT_EQ(source_frame.timestamp(), out_frame->timestamp());
  }

  // If the frame was resampled (i.e. the size changed):
  // (1) Write the resampled frame to file for visual inspection.
  // (2) Scale the resampled frame (|out_frame|) back to the original size and
  //     write that frame to file as well.
  // (3) Compute the PSNR of the restored frame relative to |source_frame| and
  //     check it against |expected_psnr|.
  if (target_width != source_width || target_height != source_height) {
    // Write the processed frame to file for visual inspection.
    std::ostringstream filename;
    filename << webrtc::test::OutputPath() << "Resampler_" << mode << "_" <<
        "from_" << source_width << "x" << source_height << "_to_" <<
        target_width << "x" << target_height << "_30Hz_P420.yuv";
    std::cout << "Watch " << filename.str() << " and verify that it is okay."
        << std::endl;
    FILE* stand_alone_file = fopen(filename.str().c_str(), "wb");
    ASSERT_TRUE(stand_alone_file != NULL);
    if (PrintI420VideoFrame(*out_frame, stand_alone_file) < 0) {
      fprintf(stderr, "Failed to write frame for scaling to width/height: "
              "%d %d\n", target_width, target_height);
      fclose(stand_alone_file);
      return;
    }
    fclose(stand_alone_file);

    I420VideoFrame resampled_source_frame;
    resampled_source_frame.CopyFrame(*out_frame);

    // Scale |resampled_source_frame| back to the original/source size.
    ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(source_width,
                                               source_height,
                                               30));
    ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(resampled_source_frame,
                                           &out_frame));

    // Write the processed frame to file for visual inspection.
    std::ostringstream filename2;
    filename2 << webrtc::test::OutputPath() << "Resampler_" << mode << "_" <<
        "from_" << target_width << "x" << target_height << "_to_" <<
        source_width << "x" << source_height << "_30Hz_P420.yuv";
    std::cout << "Watch " << filename2.str() << " and verify that it is okay."
        << std::endl;
    stand_alone_file = fopen(filename2.str().c_str(), "wb");
    ASSERT_TRUE(stand_alone_file != NULL);
    if (PrintI420VideoFrame(*out_frame, stand_alone_file) < 0) {
      fprintf(stderr, "Failed to write frame for scaling to width/height: "
              "%d %d\n", source_width, source_height);
      fclose(stand_alone_file);
      return;
    }
    fclose(stand_alone_file);

    // Compute the PSNR and check it against the expectation.
    double psnr = I420PSNR(&source_frame, out_frame);
    EXPECT_GT(psnr, expected_psnr);
    printf("PSNR: %f, between the %dx%d source and a version scaled to %dx%d "
           "and back to the source size.\n",
           psnr, source_width, source_height, target_width, target_height);
  }
}

bool CompareFrames(const webrtc::I420VideoFrame& frame1,
                   const webrtc::I420VideoFrame& frame2) {
  for (int plane = 0; plane < webrtc::kNumOfPlanes; ++plane) {
    webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane);
    int allocated_size1 = frame1.allocated_size(plane_type);
    int allocated_size2 = frame2.allocated_size(plane_type);
    if (allocated_size1 != allocated_size2)
      return false;
    const uint8_t* plane_buffer1 = frame1.buffer(plane_type);
    const uint8_t* plane_buffer2 = frame2.buffer(plane_type);
    if (memcmp(plane_buffer1, plane_buffer2, allocated_size1))
      return false;
  }
  return true;
}
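
// Note: CompareFrames() compares the full allocated buffer of each plane, so
// two frames with identical pixel content but different strides (and hence
// different allocated sizes) will not compare equal.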

}  // namespace webrtc