/*
 * Copyright (c) 2021 Arm Limited. All rights reserved.
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
17#include "DsCnnModel.hpp"
18#include "hal.h"
19#include "TestData_kws.hpp"
20#include "TensorFlowLiteMicro.hpp"
21
22#include <catch.hpp>
23#include <random>
24
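/* Copies the given feature vector into the model's input tensor (clamped to
 * the tensor size) and runs a single inference. */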
bool RunInference(arm::app::Model& model, const int8_t vec[])
{
    TfLiteTensor* inputTensor = model.GetInputTensor(0);
    REQUIRE(inputTensor);

    const size_t copySz = inputTensor->bytes < IFM_DATA_SIZE ?
                          inputTensor->bytes :
                          IFM_DATA_SIZE;
    memcpy(inputTensor->data.data, vec, copySz);

    return model.RunInference();
}

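/* Fills the input tensor with uniformly distributed random int8 values and
 * runs a single inference. */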
bool RunInferenceRandom(arm::app::Model& model)
{
    TfLiteTensor* inputTensor = model.GetInputTensor(0);
    REQUIRE(inputTensor);

    std::random_device rndDevice;
    std::mt19937 mersenneGen{rndDevice()};

    /* uniform_int_distribution does not support 8-bit types, so draw shorts
     * limited to the int8 range and narrow them on assignment. */
    std::uniform_int_distribution<short> dist{-128, 127};

    auto gen = [&dist, &mersenneGen]() {
        return dist(mersenneGen);
    };

    std::vector<int8_t> randomAudio(inputTensor->bytes);
    std::generate(std::begin(randomAudio), std::end(randomAudio), gen);

    REQUIRE(RunInference(model, randomAudio.data()));
    return true;
}

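/* Runs inference on a golden input feature vector and compares the output
 * tensor element-by-element against the golden output feature vector. */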
template<typename T>
void TestInference(const T* input_goldenFV, const T* output_goldenFV, arm::app::Model& model)
{
    REQUIRE(RunInference(model, input_goldenFV));

    TfLiteTensor* outputTensor = model.GetOutputTensor(0);

    REQUIRE(outputTensor);
    REQUIRE(outputTensor->bytes == OFM_DATA_SIZE);
    auto tensorData = tflite::GetTensorData<T>(outputTensor);
    REQUIRE(tensorData);

    /* Iterating over bytes is valid here because T is instantiated as int8_t,
     * so the element count equals the byte count. */
    for (size_t i = 0; i < outputTensor->bytes; i++) {
        REQUIRE(static_cast<int>(tensorData[i]) == static_cast<int>(output_goldenFV[i]));
    }
}

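/* Sanity test: the model must initialise from a clean state and handle
 * arbitrary (random) input data. */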
75TEST_CASE("Running random inference with TensorFlow Lite Micro and DsCnnModel Int8", "[DS_CNN]")
76{
77 arm::app::DsCnnModel model{};
78
79 REQUIRE_FALSE(model.IsInited());
80 REQUIRE(model.Init());
81 REQUIRE(model.IsInited());
82
83 REQUIRE(RunInferenceRandom(model));
84}
85
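/* Golden-vector test: each stored input feature map is run through a freshly
 * initialised model and the result is checked against the expected output. */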
86TEST_CASE("Running inference with TensorFlow Lite Micro and DsCnnModel Uint8", "[DS_CNN]")
87{
88 for (uint32_t i = 0 ; i < NUMBER_OF_FM_FILES; ++i) {
89 const int8_t* input_goldenFV = get_ifm_data_array(i);;
90 const int8_t* output_goldenFV = get_ofm_data_array(i);
91
92 DYNAMIC_SECTION("Executing inference with re-init")
93 {
94 arm::app::DsCnnModel model{};
95
96 REQUIRE_FALSE(model.IsInited());
97 REQUIRE(model.Init());
98 REQUIRE(model.IsInited());
99
100 TestInference<int8_t>(input_goldenFV, output_goldenFV, model);
101
102 }
103 }
104}