/*
 * Copyright (c) 2021 Arm Limited. All rights reserved.
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "UseCaseHandler.hpp"

#include "TestModel.hpp"
#include "UseCaseCommonUtils.hpp"
#include "hal.h"

#include <cstdlib>

namespace arm {
namespace app {

    bool RunInferenceHandler(ApplicationContext& ctx)
    {
        auto& platform = ctx.Get<hal_platform&>("platform");
        auto& profiler = ctx.Get<Profiler&>("profiler");
        auto& model    = ctx.Get<Model&>("model");

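        /* Start coordinates for the inference status text shown via the data presentation (LCD) interface. */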
        constexpr uint32_t dataPsnTxtInfStartX = 150;
        constexpr uint32_t dataPsnTxtInfStartY = 40;

        if (!model.IsInited()) {
            printf_err("Model is not initialised! Terminating processing.\n");
            return false;
        }

        const size_t numInputs = model.GetNumInputs();

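        /* Optionally dump the initial tensor contents (build-time VERIFY_TEST_OUTPUT flag). */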
#if VERIFY_TEST_OUTPUT
        info("Initial input tensors values:\n");
        for (size_t inputIndex = 0; inputIndex < model.GetNumInputs(); inputIndex++) {
            arm::app::DumpTensor(model.GetInputTensor(inputIndex));
        }
        info("Initial output tensors values:\n");
        for (size_t outputIndex = 0; outputIndex < model.GetNumOutputs(); outputIndex++) {
            arm::app::DumpTensor(model.GetOutputTensor(outputIndex));
        }
#endif /* VERIFY_TEST_OUTPUT */

        /* Populate each input tensor with random data. */
        for (size_t inputIndex = 0; inputIndex < numInputs; inputIndex++) {

            TfLiteTensor* inputTensor = model.GetInputTensor(inputIndex);

            debug("Populating input tensor %zu@%p\n", inputIndex, inputTensor);
            debug("Total input size to be populated: %zu\n", inputTensor->bytes);

            /* Create a random input. */
            if (inputTensor->bytes > 0) {

                uint8_t* tData = tflite::GetTensorData<uint8_t>(inputTensor);

                for (size_t j = 0; j < inputTensor->bytes; ++j) {
                    tData[j] = static_cast<uint8_t>(std::rand() & 0xFF);
                }
            }
        }

        /* Strings for presentation/logging. */
        std::string str_inf{"Running inference... "};

        /* Display message on the LCD - inference running. */
        platform.data_psn->present_data_text(
            str_inf.c_str(), str_inf.size(),
            dataPsnTxtInfStartX, dataPsnTxtInfStartY, 0);

        if (!RunInference(model, profiler)) {
            return false;
        }

        /* Erase the status text by overwriting it with spaces at the same position. */
        str_inf = std::string(str_inf.size(), ' ');
        platform.data_psn->present_data_text(
            str_inf.c_str(), str_inf.size(),
            dataPsnTxtInfStartX, dataPsnTxtInfStartY, 0);

        info("Final results:\n");
        profiler.PrintProfilingResult();

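        /* Dump the final output tensors so results can be verified (build-time VERIFY_TEST_OUTPUT flag). */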
#if VERIFY_TEST_OUTPUT
        for (size_t outputIndex = 0; outputIndex < model.GetNumOutputs(); outputIndex++) {
            arm::app::DumpTensor(model.GetOutputTensor(outputIndex));
        }
#endif /* VERIFY_TEST_OUTPUT */

        return true;
    }

} /* namespace app */
} /* namespace arm */