/*
 * Copyright (c) 2021 Arm Limited. All rights reserved.
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "hal.h"
#include "ImageUtils.hpp"
#include "MobileNetModel.hpp"
#include "TensorFlowLiteMicro.hpp"
#include "TestData_img_class.hpp"

#include <catch.hpp>
#include <cstring>

using namespace test;

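/* Copies the test image into the model's input tensor, converts the tensor
 * data in place to int8 if the model expects signed input, and runs a
 * single inference. */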
bool RunInference(arm::app::Model& model, const uint8_t imageData[])
{
    TfLiteTensor* inputTensor = model.GetInputTensor(0);
    REQUIRE(inputTensor);

    const size_t copySz = inputTensor->bytes < IFM_DATA_SIZE ?
                          inputTensor->bytes :
                          IFM_DATA_SIZE;
    memcpy(inputTensor->data.data, imageData, copySz);

    if (model.IsDataSigned()) {
        convertImgIoInt8(inputTensor->data.data, copySz);
    }

    return model.RunInference();
}

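/* Runs an inference for the image at imageIdx and checks every element of
 * the output tensor against the corresponding golden output value, within
 * the given tolerance. */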
template<typename T>
void TestInference(int imageIdx, arm::app::Model& model, T tolerance) {
    auto image = get_ifm_data_array(imageIdx);
    auto goldenFV = get_ofm_data_array(imageIdx);

    REQUIRE(RunInference(model, image));

    TfLiteTensor* outputTensor = model.GetOutputTensor(0);

    REQUIRE(outputTensor);
    REQUIRE(outputTensor->bytes == OFM_DATA_SIZE);
    auto tensorData = tflite::GetTensorData<T>(outputTensor);
    REQUIRE(tensorData);

    /* Compare each output value with the golden reference; for the uint8_t
     * instantiation used below, bytes equals the number of elements. */
    for (size_t i = 0; i < outputTensor->bytes; i++) {
        REQUIRE((int)tensorData[i] == Approx((int)((T)goldenFV[i])).epsilon(tolerance));
    }
}

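/* Runs the uint8 MobileNetV2 model end to end: once with a single model
 * initialisation shared across all test images, and once re-initialising
 * the model before each image. */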
TEST_CASE("Running inference with TensorFlow Lite Micro and MobileNetV2 Uint8", "[MobileNetV2]")
{
    SECTION("Executing inferences sequentially")
    {
        arm::app::MobileNetModel model{};

        REQUIRE_FALSE(model.IsInited());
        REQUIRE(model.Init());
        REQUIRE(model.IsInited());

        for (uint32_t i = 0; i < NUMBER_OF_FM_FILES; ++i) {
            TestInference<uint8_t>(i, model, 1);
        }
    }

    for (uint32_t i = 0; i < NUMBER_OF_FM_FILES; ++i) {
        /* Include the image index in the section name so that each loop
         * iteration produces a uniquely named dynamic section. */
        DYNAMIC_SECTION("Executing inference with re-init " << i)
        {
            arm::app::MobileNetModel model{};

            REQUIRE_FALSE(model.IsInited());
            REQUIRE(model.Init());
            REQUIRE(model.IsInited());

            TestInference<uint8_t>(i, model, 1);
        }
    }
}