/*
 * SPDX-FileCopyrightText: Copyright 2021 Arm Limited and/or its affiliates <open-source-office@arm.com>
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "TensorFlowLiteMicro.hpp"
#include "RNNoiseModel.hpp"
#include "TestData_noise_reduction.hpp"
#include "BufAttributes.hpp"

#include <catch.hpp>

#include <algorithm>
#include <array>
#include <cstring>
#include <random>
#include <vector>

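/* Tensor arena (activation buffer) used by TensorFlow Lite Micro, and accessors
 * for the compiled-in RNNoise model data. */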
namespace arm {
namespace app {
    static uint8_t tensorArena[ACTIVATION_BUF_SZ] ACTIVATION_BUF_ATTRIBUTE;
    namespace rnn {
        extern uint8_t* GetModelPointer();
        extern size_t GetModelLen();
    } /* namespace rnn */
} /* namespace app */
} /* namespace arm */

namespace test {
namespace noise_reduction {

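    /* Copy the supplied feature data into each model input tensor and run a single inference. */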
    bool RunInference(arm::app::Model& model, const std::vector<std::vector<int8_t>>& inData)
    {
        for (size_t i = 0; i < model.GetNumInputs(); ++i) {
            TfLiteTensor* inputTensor = model.GetInputTensor(i);
            REQUIRE(inputTensor);
            memcpy(inputTensor->data.data, inData[i].data(), inData[i].size());
        }

        return model.RunInference();
    }

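    /* Fill every input tensor with random int8 data and check that inference succeeds. */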
    bool RunInferenceRandom(arm::app::Model& model)
    {
        std::random_device rndDevice;
        std::mt19937 mersenneGen{rndDevice()};
        /* std::uniform_int_distribution does not support 8-bit types, so draw shorts
         * in the int8 range and narrow to int8_t on assignment. */
        std::uniform_int_distribution<short> dist {-128, 127};

        auto gen = [&dist, &mersenneGen](){
            return dist(mersenneGen);
        };

        std::vector<std::vector<int8_t>> randomInput{NUMBER_OF_IFM_FILES};
        for (size_t i = 0; i < model.GetNumInputs(); ++i) {
            TfLiteTensor* inputTensor = model.GetInputTensor(i);
            REQUIRE(inputTensor);
            randomInput[i].resize(inputTensor->bytes);
            std::generate(std::begin(randomInput[i]), std::end(randomInput[i]), gen);
        }

        REQUIRE(RunInference(model, randomInput));
        return true;
    }

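    /* Sanity check: the int8 RNNoise model should initialise and run on random input. */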
    TEST_CASE("Running random inference with Tflu and RNNoise Int8", "[RNNoise]")
    {
        arm::app::RNNoiseModel model{};

        REQUIRE_FALSE(model.IsInited());
        REQUIRE(model.Init(arm::app::tensorArena,
                           sizeof(arm::app::tensorArena),
                           arm::app::rnn::GetModelPointer(),
                           arm::app::rnn::GetModelLen()));
        REQUIRE(model.IsInited());

        REQUIRE(RunInferenceRandom(model));
    }

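    /* Run inference on golden input feature vectors and compare every output element
     * with the golden reference outputs. */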
    template<typename T>
    void TestInference(const std::vector<std::vector<T>>& input_goldenFV,
                       const std::vector<std::vector<T>>& output_goldenFV,
                       arm::app::Model& model)
    {
        for (size_t i = 0; i < model.GetNumInputs(); ++i) {
            TfLiteTensor* inputTensor = model.GetInputTensor(i);
            REQUIRE(inputTensor);
        }

        REQUIRE(RunInference(model, input_goldenFV));

        for (size_t i = 0; i < model.GetNumOutputs(); ++i) {
            TfLiteTensor* outputTensor = model.GetOutputTensor(i);

            REQUIRE(outputTensor);
            auto tensorData = tflite::GetTensorData<T>(outputTensor);
            REQUIRE(tensorData);

            /* Element-by-element comparison; iterating over 'bytes' assumes a
             * single-byte element type (the test only instantiates T = int8_t). */
            for (size_t j = 0; j < outputTensor->bytes; j++) {
                REQUIRE(static_cast<int>(tensorData[j]) == static_cast<int>(output_goldenFV[i][j]));
            }
        }
    }

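    /* Golden-vector test: the recorded input feature vectors should reproduce the
     * recorded outputs exactly. */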
    TEST_CASE("Running inference with Tflu and RNNoise Int8", "[RNNoise]")
    {
        std::vector<std::vector<int8_t>> goldenInputFV {NUMBER_OF_IFM_FILES};
        std::vector<std::vector<int8_t>> goldenOutputFV {NUMBER_OF_OFM_FILES};

        std::array<size_t, NUMBER_OF_IFM_FILES> inputSizes = {IFM_0_DATA_SIZE,
                                                              IFM_1_DATA_SIZE,
                                                              IFM_2_DATA_SIZE,
                                                              IFM_3_DATA_SIZE};

        std::array<size_t, NUMBER_OF_OFM_FILES> outputSizes = {OFM_0_DATA_SIZE,
                                                               OFM_1_DATA_SIZE,
                                                               OFM_2_DATA_SIZE,
                                                               OFM_3_DATA_SIZE,
                                                               OFM_4_DATA_SIZE};

        for (uint32_t i = 0; i < NUMBER_OF_IFM_FILES; ++i) {
            goldenInputFV[i].resize(inputSizes[i]);
            std::memcpy(goldenInputFV[i].data(), get_ifm_data_array(i), inputSizes[i]);
        }
        for (uint32_t i = 0; i < NUMBER_OF_OFM_FILES; ++i) {
            goldenOutputFV[i].resize(outputSizes[i]);
            std::memcpy(goldenOutputFV[i].data(), get_ofm_data_array(i), outputSizes[i]);
        }

        DYNAMIC_SECTION("Executing inference with re-init")
        {
            arm::app::RNNoiseModel model{};

            REQUIRE_FALSE(model.IsInited());
            REQUIRE(model.Init(arm::app::tensorArena,
                               sizeof(arm::app::tensorArena),
                               arm::app::rnn::GetModelPointer(),
                               arm::app::rnn::GetModelLen()));
            REQUIRE(model.IsInited());

            TestInference<int8_t>(goldenInputFV, goldenOutputFV, model);
        }
    }

} /* namespace noise_reduction */
} /* namespace test */