/*
 * Copyright (c) 2021 Arm Limited. All rights reserved.
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "ImageUtils.hpp"
#include "MobileNetModel.hpp"
#include "TensorFlowLiteMicro.hpp"
#include "TestData_img_class.hpp"

#include <catch.hpp>

using namespace test;

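/* Copy one test image into the model's input tensor, convert it to the signed
 * representation if the model expects int8 data, and run a single inference.
 * Returns true if the inference completed successfully. */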
bool RunInference(arm::app::Model& model, const int8_t imageData[])
{
    TfLiteTensor* inputTensor = model.GetInputTensor(0);
    REQUIRE(inputTensor);

    const size_t copySz = inputTensor->bytes < IFM_0_DATA_SIZE ?
                          inputTensor->bytes :
                          IFM_0_DATA_SIZE;
    memcpy(inputTensor->data.data, imageData, copySz);

    if (model.IsDataSigned()) {
        convertImgIoInt8(inputTensor->data.data, copySz);
    }

    return model.RunInference();
}

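/* Run inference on the test image at imageIdx and compare every element of the
 * output tensor against the pre-computed golden reference, within the given
 * tolerance. */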
template<typename T>
void TestInference(int imageIdx, arm::app::Model& model, T tolerance) {
    auto image = get_ifm_data_array(imageIdx);
    auto goldenFV = get_ofm_data_array(imageIdx);

    REQUIRE(RunInference(model, image));

    TfLiteTensor* outputTensor = model.GetOutputTensor(0);

    REQUIRE(outputTensor);
    REQUIRE(outputTensor->bytes == OFM_0_DATA_SIZE);
    auto tensorData = tflite::GetTensorData<T>(outputTensor);
    REQUIRE(tensorData);

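    /* These tests instantiate T as uint8_t, so the tensor's byte count equals
     * its element count and each element can be compared directly against the
     * golden reference vector. */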
    for (size_t i = 0; i < outputTensor->bytes; i++) {
        REQUIRE(static_cast<int>(tensorData[i]) ==
                Approx(static_cast<int>((T)goldenFV[i])).epsilon(tolerance));
    }
}


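/* Exercise the MobileNetV2 uint8 model in two ways: running all test vectors
 * through a single model instance, and re-initialising a fresh model instance
 * for each test vector. */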
TEST_CASE("Running inference with TensorFlow Lite Micro and MobileNetV2 Uint8", "[MobileNetV2]")
{
    SECTION("Executing inferences sequentially")
    {
        arm::app::MobileNetModel model{};

        REQUIRE_FALSE(model.IsInited());
        REQUIRE(model.Init());
        REQUIRE(model.IsInited());

        for (uint32_t i = 0; i < NUMBER_OF_IFM_FILES; ++i) {
            TestInference<uint8_t>(i, model, 1);
        }
    }

    for (uint32_t i = 0; i < NUMBER_OF_IFM_FILES; ++i) {
        DYNAMIC_SECTION("Executing inference with re-init")
        {
            arm::app::MobileNetModel model{};

            REQUIRE_FALSE(model.IsInited());
            REQUIRE(model.Init());
            REQUIRE(model.IsInited());

            TestInference<uint8_t>(i, model, 1);
        }
    }
}