/*
 * Copyright (c) 2021 Arm Limited. All rights reserved.
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
17#include "hal.h"
18#include "TensorFlowLiteMicro.hpp"
19#include "Wav2LetterModel.hpp"
20#include "TestData_asr.hpp"
21
22#include <catch.hpp>
23#include <random>
24
using namespace test;

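/* Copies the provided feature vector into the model's input tensor and runs
 * a single inference. */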
bool RunInference(arm::app::Model& model, const int8_t vec[], const size_t copySz)
{
    TfLiteTensor* inputTensor = model.GetInputTensor(0);
    REQUIRE(inputTensor);

    std::memcpy(inputTensor->data.data, vec, copySz);

    return model.RunInference();
}

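/* Fills the model's input tensor with random int8 data and checks that
 * inference completes successfully. */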
bool RunInferenceRandom(arm::app::Model& model)
{
    TfLiteTensor* inputTensor = model.GetInputTensor(0);
    REQUIRE(inputTensor);

    std::random_device rndDevice;
    std::mt19937 mersenneGen{rndDevice()};

    /* std::uniform_int_distribution does not support int8_t, so generate
     * shorts within the int8_t range and narrow the result. */
    std::uniform_int_distribution<short> dist{-128, 127};

    auto gen = [&dist, &mersenneGen]() {
        return static_cast<int8_t>(dist(mersenneGen));
    };

    std::vector<int8_t> randomAudio(inputTensor->bytes);
    std::generate(std::begin(randomAudio), std::end(randomAudio), gen);

    REQUIRE(RunInference(model, randomAudio.data(), inputTensor->bytes));
    return true;
}

/* Note: this test is hidden by the "[.]" tag. A Wav2LetterModel that has not
 * been Vela optimised (i.e. taken directly from the ML-zoo) will fail here. */
TEST_CASE("Running random inference with TensorFlow Lite Micro and Wav2LetterModel Int8", "[Wav2Letter][.]")
{
    arm::app::Wav2LetterModel model{};

    REQUIRE_FALSE(model.IsInited());
    REQUIRE(model.Init());
    REQUIRE(model.IsInited());

    REQUIRE(RunInferenceRandom(model));
}

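/* Runs inference on a golden input feature vector and compares every output
 * element against the corresponding golden output value. */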
template<typename T>
void TestInference(const T* input_goldenFV, const T* output_goldenFV, arm::app::Model& model)
{
    TfLiteTensor* inputTensor = model.GetInputTensor(0);
    REQUIRE(inputTensor);

    REQUIRE(RunInference(model, input_goldenFV, inputTensor->bytes));

    TfLiteTensor* outputTensor = model.GetOutputTensor(0);

    REQUIRE(outputTensor);
    REQUIRE(outputTensor->bytes == OFM_DATA_SIZE);
    auto tensorData = tflite::GetTensorData<T>(outputTensor);
    REQUIRE(tensorData);

    for (size_t i = 0; i < outputTensor->bytes; i++) {
        REQUIRE(static_cast<int>(tensorData[i]) == static_cast<int>(output_goldenFV[i]));
    }
}

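/* Hidden by the "[.]" tag like the test above: for each golden feature-vector
 * pair the model is re-initialised and the inference output is checked
 * against the golden output. */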
TEST_CASE("Running inference with Tflu and Wav2LetterModel Int8", "[Wav2Letter][.]")
{
    for (uint32_t i = 0; i < NUMBER_OF_FM_FILES; ++i) {
        auto input_goldenFV = get_ifm_data_array(i);
        auto output_goldenFV = get_ofm_data_array(i);

        DYNAMIC_SECTION("Executing inference with re-init")
        {
            arm::app::Wav2LetterModel model{};

            REQUIRE_FALSE(model.IsInited());
            REQUIRE(model.Init());
            REQUIRE(model.IsInited());

            TestInference<int8_t>(input_goldenFV, output_goldenFV, model);
        }
    }
}