/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

/* This file is a modification of the TensorFlow Lite Micro file
 * micro_speech_test.cc */

#include "tensorflow/lite/c/c_api_internal.h"
#include "tensorflow/lite/experimental/micro/examples/micro_speech/simple_features/simple_features_generator.h"
#include "tensorflow/lite/experimental/micro/examples/micro_speech/simple_features/tiny_conv_simple_features_model_data.h"
#include "tensorflow/lite/experimental/micro/kernels/all_ops_resolver.h"
#include "tensorflow/lite/experimental/micro/micro_error_reporter.h"
#include "tensorflow/lite/experimental/micro/micro_interpreter.h"
#include "tensorflow/lite/experimental/micro/testing/micro_test.h"
#include "tensorflow/lite/schema/schema_generated.h"
#include "tensorflow/lite/version.h"

// One second of 16 kHz audio samples, defined by the capture code elsewhere.
extern int16_t captured_data[16000];

// Scores for the four output classes, exported so code outside this test can
// read the most recent inference result.
uint8_t g_silence_score = 0;
uint8_t g_unknown_score = 0;
uint8_t g_yes_score = 0;
uint8_t g_no_score = 0;

namespace {

// Converts one second of audio into 49 slices of 43 spectrogram features
// each, stepping a 480-sample analysis window forward by 320 samples (20 ms)
// per slice.
TfLiteStatus GenerateSimpleFeatures_1sec(tflite::ErrorReporter* error_reporter,
                                         const int16_t* input,
                                         uint8_t* output) {
  for (int i = 0; i < 49; i++) {
    TfLiteStatus status = GenerateSimpleFeatures(
        error_reporter, input + i * 320, 480, 43, output + i * 43);
    if (status != kTfLiteOk) {
      return status;
    }
  }
  return kTfLiteOk;
}

}  // namespace

TF_LITE_MICRO_TESTS_BEGIN

TF_LITE_MICRO_TEST(TestSimpleFeaturesGenerator) {
  tflite::MicroErrorReporter micro_error_reporter;
  tflite::ErrorReporter* error_reporter = &micro_error_reporter;

  uint8_t preprocessed_data[43 * 49];
  TfLiteStatus generate_1sec_status = GenerateSimpleFeatures_1sec(
      error_reporter, captured_data, preprocessed_data);
  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, generate_1sec_status);

  // Map the model into a usable data structure. This doesn't involve any
  // copying or parsing; it's a very lightweight operation.
  const tflite::Model* model =
      ::tflite::GetModel(g_tiny_conv_simple_features_model_data);
  if (model->version() != TFLITE_SCHEMA_VERSION) {
    error_reporter->Report(
        "Model provided is schema version %d not equal "
        "to supported version %d.\n",
        model->version(), TFLITE_SCHEMA_VERSION);
  }

  // This pulls in all the operation implementations we need.
  tflite::ops::micro::AllOpsResolver resolver;

  // Create an area of memory to use for input, output, and intermediate
  // arrays.
  const int tensor_arena_size = 10 * 1024;
  uint8_t tensor_arena[tensor_arena_size];
  tflite::SimpleTensorAllocator tensor_allocator(tensor_arena,
                                                 tensor_arena_size);

  // Build an interpreter to run the model with.
  tflite::MicroInterpreter interpreter(model, resolver, &tensor_allocator,
                                       error_reporter);

  // Get information about the memory area to use for the model's input.
  TfLiteTensor* input = interpreter.input(0);

  // Make sure the input has the properties we expect.
  TF_LITE_MICRO_EXPECT_NE(nullptr, input);
  TF_LITE_MICRO_EXPECT_EQ(4, input->dims->size);
  TF_LITE_MICRO_EXPECT_EQ(1, input->dims->data[0]);
  TF_LITE_MICRO_EXPECT_EQ(49, input->dims->data[1]);
  TF_LITE_MICRO_EXPECT_EQ(43, input->dims->data[2]);
  TF_LITE_MICRO_EXPECT_EQ(kTfLiteUInt8, input->type);

  // Copy the spectrogram generated from the captured audio into the memory
  // area used for the input.
  for (int i = 0; i < input->bytes; ++i) {
    input->data.uint8[i] = preprocessed_data[i];
  }

  // Run the model on this input and make sure it succeeds.
  TfLiteStatus invoke_status = interpreter.Invoke();
  if (invoke_status != kTfLiteOk) {
    error_reporter->Report("Invoke failed\n");
  }
  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, invoke_status);

  // Get the output from the model, and make sure it's the expected size and
  // type.
  TfLiteTensor* output = interpreter.output(0);
  TF_LITE_MICRO_EXPECT_EQ(2, output->dims->size);
  TF_LITE_MICRO_EXPECT_EQ(1, output->dims->data[0]);
  TF_LITE_MICRO_EXPECT_EQ(4, output->dims->data[1]);
  TF_LITE_MICRO_EXPECT_EQ(kTfLiteUInt8, output->type);

  // There are four possible classes in the output, each with a score.
  const int kSilenceIndex = 0;
  const int kUnknownIndex = 1;
  const int kYesIndex = 2;
  const int kNoIndex = 3;

  // Store each class's score in the globals so code outside the test can
  // compare them; the class with the highest score is the recognized word.
  g_silence_score = output->data.uint8[kSilenceIndex];
  g_unknown_score = output->data.uint8[kUnknownIndex];
  g_yes_score = output->data.uint8[kYesIndex];
  g_no_score = output->data.uint8[kNoIndex];

  error_reporter->Report("Ran successfully\n");
}

TF_LITE_MICRO_TESTS_END