/*
 * Copyright (c) 2019-2021 Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: Apache-2.0
 *
 * Licensed under the Apache License, Version 2.0 (the License); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "tensorflow/lite/micro/all_ops_resolver.h"
#include "tensorflow/lite/micro/cortex_m_generic/debug_log_callback.h"
#include "tensorflow/lite/micro/micro_error_reporter.h"
#include "tensorflow/lite/micro/micro_interpreter.h"
#include "tensorflow/lite/micro/micro_profiler.h"
#include "tensorflow/lite/schema/schema_generated.h"

#include "arm_profiler.hpp"
#ifdef ETHOSU
#include "layer_by_layer_profiler.hpp"
#endif
#include "ethosu_log.h"

#include "inference_process.hpp"

#include "cmsis_compiler.h"

#include <inttypes.h>

using namespace std;

namespace {

void tflu_debug_log(const char *s) {
    LOG_DEBUG("%s", s);
}

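// Print the dimensions and up to bytesToPrint bytes of the data of an output
// tensor as a JSON-like record on the log.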
void print_output_data(TfLiteTensor *output, size_t bytesToPrint) {
    const int numBytesToPrint = min(output->bytes, bytesToPrint);
    int dims_size = output->dims->size;
    LOG("{\n");
    LOG("\"dims\": [%d,", dims_size);
    for (int i = 0; i < output->dims->size - 1; ++i) {
        LOG("%d,", output->dims->data[i]);
    }
    LOG("%d],\n", output->dims->data[dims_size - 1]);
    LOG("\"data_address\": \"%08" PRIx32 "\",\n", (uint32_t)output->data.data);
    LOG("\"data\":\"");
    for (int i = 0; i < numBytesToPrint - 1; ++i) {
        if (i % 16 == 0 && i != 0) {
            LOG("\n");
        }
        LOG("0x%02x,", output->data.uint8[i]);
    }
    LOG("0x%02x\"\n", output->data.uint8[numBytesToPrint - 1]);
    LOG("}");
}

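// Copy an output tensor into the caller supplied buffer. Destinations with a
// null data pointer are silently skipped. Returns true on error (destination
// buffer too small), false otherwise.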
bool copyOutput(const TfLiteTensor &src, InferenceProcess::DataPtr &dst) {
    if (dst.data == nullptr) {
        return false;
    }

    if (src.bytes > dst.size) {
        LOG_ERR("Tensor size mismatch (bytes): actual=%zu, expected=%zu.\n", src.bytes, dst.size);
        return true;
    }

    copy(src.data.uint8, src.data.uint8 + src.bytes, static_cast<uint8_t *>(dst.data));
    dst.size = src.bytes;

    return false;
}

} // namespace

namespace InferenceProcess {
DataPtr::DataPtr(void *_data, size_t _size) : data(_data), size(_size) {}

void DataPtr::invalidate() {
#if defined(__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    SCB_InvalidateDCache_by_Addr(reinterpret_cast<uint32_t *>(data), size);
#endif
}

void DataPtr::clean() {
#if defined(__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    SCB_CleanDCache_by_Addr(reinterpret_cast<uint32_t *>(data), size);
#endif
}

InferenceJob::InferenceJob() : numBytesToPrint(0) {}

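// An InferenceJob bundles a network model with its input/output buffers and
// the optional expected output used for verification. A minimal usage sketch
// (illustrative only; the buffers, sizes and the inferenceProcess instance are
// hypothetical):
//
//   InferenceProcess::DataPtr networkModel(modelData, modelSize);
//   InferenceProcess::DataPtr input(inputData, inputSize);
//   InferenceProcess::DataPtr output(outputData, outputSize);
//   InferenceProcess::InferenceJob job("job0", networkModel, {input}, {output}, {}, 0, {}, 0);
//   inferenceProcess.push(job);
//   inferenceProcess.run(true);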
InferenceJob::InferenceJob(const string &_name,
                           const DataPtr &_networkModel,
                           const vector<DataPtr> &_input,
                           const vector<DataPtr> &_output,
                           const vector<DataPtr> &_expectedOutput,
                           size_t _numBytesToPrint,
                           const vector<uint8_t> &_pmuEventConfig,
                           const uint32_t _pmuCycleCounterEnable) :
    name(_name),
    networkModel(_networkModel), input(_input), output(_output), expectedOutput(_expectedOutput),
    numBytesToPrint(_numBytesToPrint), pmuEventConfig(_pmuEventConfig), pmuCycleCounterEnable(_pmuCycleCounterEnable),
    pmuEventCount(), pmuCycleCounterCount(0) {}

void InferenceJob::invalidate() {
    networkModel.invalidate();

    for (auto &it : input) {
        it.invalidate();
    }

    for (auto &it : output) {
        it.invalidate();
    }

    for (auto &it : expectedOutput) {
        it.invalidate();
    }
}

void InferenceJob::clean() {
    networkModel.clean();

    for (auto &it : input) {
        it.clean();
    }

    for (auto &it : output) {
        it.clean();
    }

    for (auto &it : expectedOutput) {
        it.clean();
    }
}

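// getLock()/freeLock() implement a minimal spinlock on top of the Cortex-M
// exclusive monitor (LDREX/STREX). It guards the shared inference job queue
// against concurrent access.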
// NOTE: The get_lock/free_lock sequence below is adapted, with some corrections, from
// http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dai0321a/BIHEJCHB.html
// TODO: check correctness?
void InferenceProcess::getLock() {
    int status = 0;

    do {
        // Wait until lock_var is free
        while (__LDREXW(&lock) != 0)
            ;

        // Try to set lock_var
        status = __STREXW(1, &lock);
    } while (status != 0);

    // Do not start any other memory access until the memory barrier has completed
    __DMB();
}

// TODO: check correctness?
void InferenceProcess::freeLock() {
    // Ensure memory operations have completed before releasing the lock
    __DMB();

    lock = 0;
}

bool InferenceProcess::push(const InferenceJob &job) {
    getLock();
    inferenceJobQueue.push(job);
    freeLock();

    return true;
}

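// Run a single inference job: verify the model schema version, allocate
// tensors from the arena, copy the job inputs into the network, invoke the
// interpreter and finally copy back, optionally print and verify the outputs.
// Returns true on failure, false on success.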
bool InferenceProcess::runJob(InferenceJob &job) {
    LOG_INFO("Running inference job: %s\n", job.name.c_str());

    // Register debug log callback for profiling
    RegisterDebugLogCallback(tflu_debug_log);

    tflite::MicroErrorReporter microErrorReporter;
    tflite::ErrorReporter *reporter = &microErrorReporter;

    // Get model handle and verify that the version is correct
    const tflite::Model *model = ::tflite::GetModel(job.networkModel.data);
    if (model->version() != TFLITE_SCHEMA_VERSION) {
        LOG_ERR("Model schema version unsupported: version=%" PRIu32 ", supported=%d.\n",
                model->version(),
                TFLITE_SCHEMA_VERSION);
        return true;
    }

    // Create the TFL micro interpreter
    tflite::AllOpsResolver resolver;
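    // Select a profiler: the layer-by-layer profiler when built with Ethos-U
    // support, otherwise the generic Arm profiler.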
#ifdef ETHOSU
    tflite::LayerByLayerProfiler profiler;
#else
    tflite::ArmProfiler profiler;
#endif

    tflite::MicroInterpreter interpreter(model, resolver, tensorArena, tensorArenaSize, reporter, &profiler);

    // Allocate tensors
    TfLiteStatus allocate_status = interpreter.AllocateTensors();
    if (allocate_status != kTfLiteOk) {
        LOG_ERR("Failed to allocate tensors for inference: job=%s\n", job.name.c_str());
        return true;
    }

    // Create a filtered list of non empty input tensors
    vector<TfLiteTensor *> inputTensors;
    for (size_t i = 0; i < interpreter.inputs_size(); ++i) {
        TfLiteTensor *tensor = interpreter.input(i);

        if (tensor->bytes > 0) {
            inputTensors.push_back(tensor);
        }
    }
    if (job.input.size() != inputTensors.size()) {
        LOG_ERR("Number of input buffers does not match number of non empty network tensors: input=%zu, network=%zu\n",
                job.input.size(),
                inputTensors.size());
        return true;
    }

    // Copy input data
    for (size_t i = 0; i < inputTensors.size(); ++i) {
        const DataPtr &input = job.input[i];
        const TfLiteTensor *tensor = inputTensors[i];

        if (input.size != tensor->bytes) {
            LOG_ERR("Job input size does not match network input size: job=%s, index=%zu, input=%zu, network=%u\n",
                    job.name.c_str(),
                    i,
                    input.size,
                    tensor->bytes);
            return true;
        }

        copy(static_cast<char *>(input.data), static_cast<char *>(input.data) + input.size, tensor->data.uint8);
    }

    // Run the inference
    TfLiteStatus invoke_status = interpreter.Invoke();
    if (invoke_status != kTfLiteOk) {
        LOG_ERR("Invoke failed for inference: job=%s\n", job.name.c_str());
        return true;
    }

    LOG("arena_used_bytes : %zu\n", interpreter.arena_used_bytes());

    LOG("Inference runtime: %u cycles\n", (unsigned int)profiler.GetTotalTicks());

    if (job.pmuCycleCounterEnable != 0) {
        job.pmuCycleCounterCount = profiler.GetTotalTicks();
    }

    // Copy output data
    if (job.output.size() > 0) {
        if (interpreter.outputs_size() != job.output.size()) {
            LOG_ERR("Output size mismatch: job=%zu, network=%u\n", job.output.size(), interpreter.outputs_size());
            return true;
        }

        for (unsigned i = 0; i < interpreter.outputs_size(); ++i) {
            if (copyOutput(*interpreter.output(i), job.output[i])) {
                return true;
            }
        }
    }

    if (job.numBytesToPrint > 0) {
        // Print all of the output data, or the first NUM_BYTES_TO_PRINT bytes,
        // whichever comes first as well as the output shape.
        LOG("num_of_outputs: %d\n", interpreter.outputs_size());
        LOG("output_begin\n");
        LOG("[\n");
        for (unsigned int i = 0; i < interpreter.outputs_size(); i++) {
            TfLiteTensor *output = interpreter.output(i);
            print_output_data(output, job.numBytesToPrint);
            if (i != interpreter.outputs_size() - 1) {
                LOG(",\n");
            }
        }
        LOG("]\n");
        LOG("output_end\n");
    }

    if (job.expectedOutput.size() > 0) {
        if (job.expectedOutput.size() != interpreter.outputs_size()) {
            LOG_ERR("Expected number of output tensors mismatch: job=%s, expected=%zu, network=%zu\n",
                    job.name.c_str(),
                    job.expectedOutput.size(),
                    interpreter.outputs_size());
            return true;
        }

        for (unsigned int i = 0; i < interpreter.outputs_size(); i++) {
            const DataPtr &expected = job.expectedOutput[i];
            const TfLiteTensor *output = interpreter.output(i);

            if (expected.size != output->bytes) {
                LOG_ERR("Expected output tensor size mismatch: job=%s, index=%u, expected=%zu, network=%zu\n",
                        job.name.c_str(),
                        i,
                        expected.size,
                        output->bytes);
                return true;
            }

            for (unsigned int j = 0; j < output->bytes; ++j) {
                if (output->data.uint8[j] != static_cast<uint8_t *>(expected.data)[j]) {
                    LOG_ERR("Expected output tensor data mismatch: job=%s, index=%u, offset=%u, "
                            "expected=%02x, network=%02x\n",
                            job.name.c_str(),
                            i,
                            j,
                            static_cast<uint8_t *>(expected.data)[j],
                            output->data.uint8[j]);
                    return true;
                }
            }
        }
    }

    LOG_INFO("Finished running job: %s\n", job.name.c_str());

    return false;
}

bool InferenceProcess::run(bool exitOnEmpty) {
    bool anyJobFailed = false;

    while (true) {
        getLock();
        bool empty = inferenceJobQueue.empty();
        freeLock();

        if (empty) {
            if (exitOnEmpty) {
                LOG_INFO("Exit from InferenceProcess::run() due to empty job queue\n");
                break;
            }

            continue;
        }

        getLock();
        InferenceJob job = inferenceJobQueue.front();
        inferenceJobQueue.pop();
        freeLock();

        if (runJob(job)) {
            anyJobFailed = true;
            continue;
        }
    }

    return anyJobFailed;
}

} // namespace InferenceProcess