/*
 * Copyright (c) 2021-2022 Arm Limited. All rights reserved.
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef USECASE_COMMON_UTILS_HPP
#define USECASE_COMMON_UTILS_HPP

#include "hal.h"
#include "Model.hpp"
#include "AppContext.hpp"
#include "Profiler.hpp"
#include "UseCaseHandler.hpp"   /* Handlers for different user options. */
#include "Classifier.hpp"       /* Classifier. */
#include "InputFiles.hpp"
#include "BaseProcessing.hpp"

/**
 * @brief  Displays the common menu of user options.
 **/
void DisplayCommonMenu();

/**
 * @brief       Presents inference results using the data presentation object.
 * @param[in]   results   Vector of classification results to be displayed.
 * @return      true if successful, false otherwise.
 **/
bool PresentInferenceResult(const std::vector<arm::app::ClassificationResult>& results);

/**
 * @brief           Helper function to increment the current input feature vector index.
 * @param[in,out]   ctx       Reference to the application context object.
 * @param[in]       useCase   Use case name.
 **/
void IncrementAppCtxIfmIdx(arm::app::ApplicationContext& ctx, const std::string& useCase);

/**
 * @brief           Helper function to set the input feature map index.
 * @param[in,out]   ctx          Reference to the application context object.
 * @param[in]       idx          Value to be set.
 * @param[in]       ctxIfmName   Input feature map name.
 * @return          true if the index is set, false otherwise.
 **/
bool SetAppCtxIfmIdx(arm::app::ApplicationContext& ctx, uint32_t idx, const std::string& ctxIfmName);
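
/*
 * Illustrative sketch (not part of this interface): a use case would typically
 * seed the index once at start-up and then advance it after each inference.
 * The context key names used below ("imgIndex", "img") are hypothetical examples.
 *
 *   arm::app::ApplicationContext ctx;
 *   SetAppCtxIfmIdx(ctx, 0, "imgIndex");   // start at the first input feature map
 *   IncrementAppCtxIfmIdx(ctx, "img");     // move to the next input after inference
 */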

namespace common {

    enum OPCODES {
        MENU_OPT_RUN_INF_NEXT = 1,    /* Run on next vector. */
        MENU_OPT_RUN_INF_CHOSEN,      /* Run on a user provided vector index. */
        MENU_OPT_RUN_INF_ALL,         /* Run inference on all. */
        MENU_OPT_SHOW_MODEL_INFO,     /* Show model info. */
        MENU_OPT_LIST_IFM             /* List the current IFM. */
    };

} /* namespace common */
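
/*
 * Illustrative sketch (not part of this interface): a main loop is expected to
 * display the menu, read the user's choice and dispatch on the opcodes above.
 * The handler name ClassifyImageHandler and the runAll flag are hypothetical
 * placeholders for a use case specific handler.
 *
 *   DisplayCommonMenu();
 *   int menuOption = arm::app::ReadUserInputAsInt();
 *   switch (menuOption) {
 *       case common::MENU_OPT_RUN_INF_NEXT:
 *           ClassifyImageHandler(ctx, false);   // hypothetical handler
 *           break;
 *       case common::MENU_OPT_LIST_IFM:
 *           arm::app::ListFilesHandler(ctx);
 *           break;
 *       default:
 *           break;
 *   }
 */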

namespace arm {
namespace app {

    /**
     * @brief       Run inference using the given model object. If profiling is
     *              enabled, the statistics are logged too.
     * @param[in]   model      Reference to the initialised model.
     * @param[in]   profiler   Reference to the initialised profiler.
     * @return      true if inference succeeds, false otherwise.
     **/
    bool RunInference(arm::app::Model& model, Profiler& profiler);

    /**
     * @brief   Read user input and return it as an integer.
     * @return  Integer value corresponding to the user input.
     **/
    int ReadUserInputAsInt();

#if VERIFY_TEST_OUTPUT
    /**
     * @brief       Helper function to dump a tensor to stdout.
     * @param[in]   tensor                    Tensor to be dumped.
     * @param[in]   lineBreakForNumElements   Number of elements after which a
     *                                        line break is added.
     **/
    void DumpTensor(const TfLiteTensor* tensor,
                    size_t lineBreakForNumElements = 16);

    /**
     * @brief       Helper function to dump raw tensor data to stdout.
     * @param[in]   tensorData                Pointer to the tensor data to be dumped.
     * @param[in]   size                      Number of bytes to dump.
     * @param[in]   lineBreakForNumElements   Number of elements after which a
     *                                        line break is added.
     **/
    void DumpTensorData(const uint8_t* tensorData,
                        size_t size,
                        size_t lineBreakForNumElements = 16);
#endif /* VERIFY_TEST_OUTPUT */

    /**
     * @brief       List the files baked into the application.
     * @param[in]   ctx   Reference to the application context.
     * @return      true if the event was handled, false otherwise.
     **/
    bool ListFilesHandler(ApplicationContext& ctx);

    /**
     * @brief   Use case runner class that handles calling pre-processing,
     *          inference and post-processing.
     *          After constructing an instance of this class, the user can call
     *          PreProcess(), RunInference() and PostProcess() to perform inference.
     */
    class UseCaseRunner {

    private:
        BasePreProcess* m_preProcess;
        BasePostProcess* m_postProcess;
        Model* m_model;

    public:
        explicit UseCaseRunner(BasePreProcess* preprocess, BasePostProcess* postprocess, Model* model)
            : m_preProcess{preprocess},
              m_postProcess{postprocess},
              m_model{model}
        {}

        /**
         * @brief       Runs pre-processing as defined by the PreProcess object within
         *              the runner. Templated on the input data type.
         * @param[in]   inputData   Pointer to the data that inference will be performed on.
         * @param[in]   inputSize   Size of the input data that inference will be performed on.
         * @return      true if successful, false otherwise.
         **/
        template<typename T>
        bool PreProcess(T* inputData, size_t inputSize) {
            if (!this->m_preProcess->DoPreProcess(inputData, inputSize)) {
                printf_err("Pre-processing failed.");
                return false;
            }
            return true;
        }

        /**
         * @brief   Runs inference with the Model object within the runner.
         * @return  true if successful, false otherwise.
         **/
        bool RunInference() {
            if (!this->m_model->RunInference()) {
                printf_err("Inference failed.");
                return false;
            }
            return true;
        }

        /**
         * @brief   Runs post-processing as defined by the PostProcess object within the runner.
         * @return  true if successful, false otherwise.
         **/
        bool PostProcess() {
            if (!this->m_postProcess->DoPostProcess()) {
                printf_err("Post-processing failed.");
                return false;
            }
            return true;
        }
    };
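
    /*
     * Illustrative sketch (not part of this interface): driving the runner from a
     * use case handler. The processor types ImgClassPreProcess/ImgClassPostProcess
     * and the input buffer are hypothetical placeholders; any concrete
     * BasePreProcess/BasePostProcess pair initialised for the model can be used.
     *
     *   ImgClassPreProcess preProcess(...);     // hypothetical BasePreProcess subclass
     *   ImgClassPostProcess postProcess(...);   // hypothetical BasePostProcess subclass
     *   UseCaseRunner runner(&preProcess, &postProcess, &model);
     *
     *   if (runner.PreProcess(inputData, inputSize) &&
     *       runner.RunInference()) {
     *       runner.PostProcess();
     *   }
     */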

} /* namespace app */
} /* namespace arm */

#endif /* USECASE_COMMON_UTILS_HPP */