blob: ecf6144a119ded46568104fb85b51726080039f0 [file] [log] [blame]
Kristofer Jonsson641c0912020-08-31 11:34:14 +02001/*
2 * Copyright (c) 2019-2020 Arm Limited. All rights reserved.
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 *
6 * Licensed under the Apache License, Version 2.0 (the License); you may
7 * not use this file except in compliance with the License.
8 * You may obtain a copy of the License at
9 *
10 * www.apache.org/licenses/LICENSE-2.0
11 *
12 * Unless required by applicable law or agreed to in writing, software
13 * distributed under the License is distributed on an AS IS BASIS, WITHOUT
14 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 * See the License for the specific language governing permissions and
16 * limitations under the License.
17 */
18
19#include "tensorflow/lite/micro/all_ops_resolver.h"
Måns Nilsson231e1d92020-11-05 12:19:34 +010020#include "tensorflow/lite/micro/cortex_m_generic/debug_log_callback.h"
Kristofer Jonsson641c0912020-08-31 11:34:14 +020021#include "tensorflow/lite/micro/micro_error_reporter.h"
22#include "tensorflow/lite/micro/micro_interpreter.h"
23#include "tensorflow/lite/schema/schema_generated.h"
24#include "tensorflow/lite/version.h"
25
26#include "inference_process.hpp"
27
Per Åstrandd9afc082020-10-06 13:25:08 +020028#include "cmsis_compiler.h"
29
Per Åstrand91a91732020-09-25 15:04:26 +020030#include <inttypes.h>
31
Kristofer Jonsson641c0912020-08-31 11:34:14 +020032#ifndef TENSOR_ARENA_SIZE
33#define TENSOR_ARENA_SIZE (1024)
34#endif
35
Kristofer Jonsson72fa50b2020-09-10 13:26:41 +020036using namespace std;
37
// Scratch memory ("tensor arena") handed to the TFLu interpreter for tensor
// allocation. Placed in the uninitialized .bss.NoInit section and 16-byte
// aligned (presumably to satisfy TFLu's internal buffer alignment — confirm
// against the TFLM kBufferAlignment requirement).
__attribute__((section(".bss.NoInit"), aligned(16))) uint8_t inferenceProcessTensorArena[TENSOR_ARENA_SIZE];
39
40namespace {
Måns Nilsson231e1d92020-11-05 12:19:34 +010041
// Debug-log callback handed to TFLu (see RegisterDebugLogCallback below):
// forwards each message verbatim to stderr.
void tflu_debug_log(const char *s) {
    fputs(s, stderr);
}
45
Kristofer Jonsson641c0912020-08-31 11:34:14 +020046void print_output_data(TfLiteTensor *output, size_t bytesToPrint) {
Kristofer Jonsson72fa50b2020-09-10 13:26:41 +020047 const int numBytesToPrint = min(output->bytes, bytesToPrint);
Kristofer Jonsson641c0912020-08-31 11:34:14 +020048
49 int dims_size = output->dims->size;
50 printf("{\n");
51 printf("\"dims\": [%d,", dims_size);
52 for (int i = 0; i < output->dims->size - 1; ++i) {
53 printf("%d,", output->dims->data[i]);
54 }
55 printf("%d],\n", output->dims->data[dims_size - 1]);
56
Per Åstrand91a91732020-09-25 15:04:26 +020057 printf("\"data_address\": \"%08" PRIx32 "\",\n", (uint32_t)output->data.data);
Kristofer Jonsson641c0912020-08-31 11:34:14 +020058 printf("\"data\":\"");
59 for (int i = 0; i < numBytesToPrint - 1; ++i) {
60 if (i % 16 == 0 && i != 0) {
61 printf("\n");
62 }
63 printf("0x%02x,", output->data.uint8[i]);
64 }
65 printf("0x%02x\"\n", output->data.uint8[numBytesToPrint - 1]);
66 printf("}");
67}
68
69bool copyOutput(const TfLiteTensor &src, InferenceProcess::DataPtr &dst) {
70 if (dst.data == nullptr) {
71 return false;
72 }
73
74 if (src.bytes > dst.size) {
75 printf("Tensor size %d does not match output size %d.\n", src.bytes, dst.size);
76 return true;
77 }
78
Kristofer Jonsson72fa50b2020-09-10 13:26:41 +020079 copy(src.data.uint8, src.data.uint8 + src.bytes, static_cast<uint8_t *>(dst.data));
Kristofer Jonsson641c0912020-08-31 11:34:14 +020080 dst.size = src.bytes;
81
82 return false;
83}
84
85} // namespace
86
87namespace InferenceProcess {
// Wrap a raw buffer pointer and its size in bytes; does not take ownership.
DataPtr::DataPtr(void *_data, size_t _size) : data(_data), size(_size) {}
Kristofer Jonsson641c0912020-08-31 11:34:14 +020089
// Invalidate the D-cache lines covering this buffer so subsequent CPU reads
// fetch fresh data from main memory (needed when another bus master —
// presumably the NPU — has written the buffer; confirm with callers).
// Compiles to a no-op on cores without a data cache.
void DataPtr::invalidate() {
#if defined(__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    // CMSIS-Core cache op: word-aligned address pointer plus byte count.
    SCB_InvalidateDCache_by_Addr(reinterpret_cast<uint32_t *>(data), size);
#endif
}
95
// Clean (write back) the D-cache lines covering this buffer so that main
// memory holds the CPU's latest writes before another bus master reads it.
// Compiles to a no-op on cores without a data cache.
void DataPtr::clean() {
#if defined(__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    // CMSIS-Core cache op: word-aligned address pointer plus byte count.
    SCB_CleanDCache_by_Addr(reinterpret_cast<uint32_t *>(data), size);
#endif
}
101
// Default-construct an empty job; output printing disabled (0 bytes).
InferenceJob::InferenceJob() : numBytesToPrint(0) {}
103
Per Åstrandbbd9c8f2020-09-25 15:07:35 +0200104InferenceJob::InferenceJob(const string &_name,
105 const DataPtr &_networkModel,
106 const vector<DataPtr> &_input,
107 const vector<DataPtr> &_output,
108 const vector<DataPtr> &_expectedOutput,
109 size_t _numBytesToPrint) :
110 name(_name),
111 networkModel(_networkModel), input(_input), output(_output), expectedOutput(_expectedOutput),
112 numBytesToPrint(_numBytesToPrint) {}
Kristofer Jonsson641c0912020-08-31 11:34:14 +0200113
Kristofer Jonsson34e24962020-11-23 16:22:10 +0100114void InferenceJob::invalidate() {
115 networkModel.invalidate();
116
117 for (auto &it : input) {
118 it.invalidate();
119 }
120
121 for (auto &it : output) {
122 it.invalidate();
123 }
124
125 for (auto &it : expectedOutput) {
126 it.invalidate();
127 }
128}
129
130void InferenceJob::clean() {
131 networkModel.clean();
132
133 for (auto &it : input) {
134 it.clean();
135 }
136
137 for (auto &it : output) {
138 it.clean();
139 }
140
141 for (auto &it : expectedOutput) {
142 it.clean();
143 }
144}
145
// Construct with the spin lock in the released state (0 == unlocked).
InferenceProcess::InferenceProcess() : lock(0) {}
147
// NOTE: Adding code for get_lock & free_lock with some corrections from
// http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dai0321a/BIHEJCHB.html
// TODO: check correctness?
// Acquire the spin lock: busy-wait using LDREX/STREX exclusive accesses until
// we successfully transition `lock` from 0 to 1.
void InferenceProcess::getLock() {
    int status = 0;

    do {
        // Wait until lock_var is free
        while (__LDREXW(&lock) != 0)
            ;

        // Try to set lock_var. STREX returns non-zero if the exclusive
        // monitor was lost between the LDREX and the STREX, in which case
        // we retry the whole sequence.
        status = __STREXW(1, &lock);
    } while (status != 0);

    // Do not start any other memory access until memory barrier is completed
    __DMB();
}
166
// TODO: check correctness?
// Release the spin lock taken by getLock(). A plain store is sufficient; the
// barrier orders all prior memory operations before the release is visible.
void InferenceProcess::freeLock() {
    // Ensure memory operations completed before releasing lock
    __DMB();

    lock = 0;
}
174
// Enqueue a job (by copy) for later execution by run(). Guarded by the spin
// lock so producers and the run() consumer can touch the queue concurrently.
// Always returns true — the queue has no capacity limit.
bool InferenceProcess::push(const InferenceJob &job) {
    getLock();
    inferenceJobQueue.push(job);
    freeLock();

    return true;
}
182
183bool InferenceProcess::runJob(InferenceJob &job) {
184 printf("Running inference job: %s\n", job.name.c_str());
185
186 tflite::MicroErrorReporter microErrorReporter;
187 tflite::ErrorReporter *reporter = &microErrorReporter;
188
Kristofer Jonsson72fa50b2020-09-10 13:26:41 +0200189 // Get model handle and verify that the version is correct
Kristofer Jonsson641c0912020-08-31 11:34:14 +0200190 const tflite::Model *model = ::tflite::GetModel(job.networkModel.data);
191 if (model->version() != TFLITE_SCHEMA_VERSION) {
Per Åstrand91a91732020-09-25 15:04:26 +0200192 printf("Model provided is schema version %" PRIu32 " not equal to supported version %d.\n",
Kristofer Jonsson641c0912020-08-31 11:34:14 +0200193 model->version(),
194 TFLITE_SCHEMA_VERSION);
195 return true;
196 }
197
Kristofer Jonsson72fa50b2020-09-10 13:26:41 +0200198 // Create the TFL micro interpreter
Kristofer Jonsson641c0912020-08-31 11:34:14 +0200199 tflite::AllOpsResolver resolver;
Kristofer Jonsson641c0912020-08-31 11:34:14 +0200200 tflite::MicroInterpreter interpreter(model, resolver, inferenceProcessTensorArena, TENSOR_ARENA_SIZE, reporter);
201
Kristofer Jonsson72fa50b2020-09-10 13:26:41 +0200202 // Allocate tensors
Kristofer Jonsson641c0912020-08-31 11:34:14 +0200203 TfLiteStatus allocate_status = interpreter.AllocateTensors();
204 if (allocate_status != kTfLiteOk) {
205 printf("AllocateTensors failed for inference job: %s\n", job.name.c_str());
206 return true;
207 }
208
Kristofer Jonsson72fa50b2020-09-10 13:26:41 +0200209 // Create a filtered list of non empty input tensors
210 vector<TfLiteTensor *> inputTensors;
211 for (size_t i = 0; i < interpreter.inputs_size(); ++i) {
212 TfLiteTensor *tensor = interpreter.input(i);
213
214 if (tensor->bytes > 0) {
215 inputTensors.push_back(tensor);
Kristofer Jonsson641c0912020-08-31 11:34:14 +0200216 }
Kristofer Jonsson641c0912020-08-31 11:34:14 +0200217 }
Kristofer Jonsson72fa50b2020-09-10 13:26:41 +0200218
219 if (job.input.size() != inputTensors.size()) {
220 printf("Number of input buffers does not match number of non empty network tensors. input=%zu, network=%zu\n",
221 job.input.size(),
222 inputTensors.size());
Kristofer Jonsson641c0912020-08-31 11:34:14 +0200223 return true;
224 }
225
Kristofer Jonsson72fa50b2020-09-10 13:26:41 +0200226 // Copy input data
227 for (size_t i = 0; i < inputTensors.size(); ++i) {
228 const DataPtr &input = job.input[i];
229 const TfLiteTensor *tensor = inputTensors[i];
230
231 if (input.size != tensor->bytes) {
232 printf("Input size does not match network size. job=%s, index=%zu, input=%zu, network=%u\n",
233 job.name.c_str(),
234 i,
235 input.size,
236 tensor->bytes);
237 return true;
238 }
239
240 copy(static_cast<char *>(input.data), static_cast<char *>(input.data) + input.size, tensor->data.uint8);
241 }
242
Måns Nilsson231e1d92020-11-05 12:19:34 +0100243 // Register debug log callback for profiling
244 RegisterDebugLogCallback(tflu_debug_log);
245
Kristofer Jonsson72fa50b2020-09-10 13:26:41 +0200246 // Run the inference
Kristofer Jonsson641c0912020-08-31 11:34:14 +0200247 TfLiteStatus invoke_status = interpreter.Invoke();
248 if (invoke_status != kTfLiteOk) {
249 printf("Invoke failed for inference job: %s\n", job.name.c_str());
250 return true;
251 }
252
Kristofer Jonsson72fa50b2020-09-10 13:26:41 +0200253 // Copy output data
254 if (job.output.size() > 0) {
255 if (interpreter.outputs_size() != job.output.size()) {
256 printf("Number of outputs mismatch. job=%zu, network=%u\n", job.output.size(), interpreter.outputs_size());
257 return true;
258 }
259
260 for (unsigned i = 0; i < interpreter.outputs_size(); ++i) {
261 if (copyOutput(*interpreter.output(i), job.output[i])) {
262 return true;
263 }
264 }
265 }
Kristofer Jonsson641c0912020-08-31 11:34:14 +0200266
267 if (job.numBytesToPrint > 0) {
268 // Print all of the output data, or the first NUM_BYTES_TO_PRINT bytes,
269 // whichever comes first as well as the output shape.
270 printf("num_of_outputs: %d\n", interpreter.outputs_size());
271 printf("output_begin\n");
272 printf("[\n");
Kristofer Jonsson72fa50b2020-09-10 13:26:41 +0200273
Kristofer Jonsson641c0912020-08-31 11:34:14 +0200274 for (unsigned int i = 0; i < interpreter.outputs_size(); i++) {
275 TfLiteTensor *output = interpreter.output(i);
276 print_output_data(output, job.numBytesToPrint);
277 if (i != interpreter.outputs_size() - 1) {
278 printf(",\n");
279 }
280 }
Kristofer Jonsson72fa50b2020-09-10 13:26:41 +0200281
Kristofer Jonsson641c0912020-08-31 11:34:14 +0200282 printf("]\n");
283 printf("output_end\n");
284 }
285
Kristofer Jonsson72fa50b2020-09-10 13:26:41 +0200286 if (job.expectedOutput.size() > 0) {
287 if (job.expectedOutput.size() != interpreter.outputs_size()) {
288 printf("Expeded number of output tensors does not match network. job=%s, expected=%zu, network=%zu\n",
289 job.name.c_str(),
290 job.expectedOutput.size(),
291 interpreter.outputs_size());
292 return true;
293 }
294
295 for (unsigned int i = 0; i < interpreter.outputs_size(); i++) {
296 const DataPtr &expected = job.expectedOutput[i];
297 const TfLiteTensor *output = interpreter.output(i);
298
299 if (expected.size != output->bytes) {
300 printf(
301 "Expected tensor size does not match network size. job=%s, index=%u, expected=%zu, network=%zu\n",
302 job.name.c_str(),
303 i,
304 expected.size,
305 output->bytes);
Kristofer Jonsson641c0912020-08-31 11:34:14 +0200306 return true;
307 }
Kristofer Jonsson72fa50b2020-09-10 13:26:41 +0200308
Kristofer Jonsson641c0912020-08-31 11:34:14 +0200309 for (unsigned int j = 0; j < output->bytes; ++j) {
Kristofer Jonsson72fa50b2020-09-10 13:26:41 +0200310 if (output->data.uint8[j] != static_cast<uint8_t *>(expected.data)[j]) {
311 printf("Expected tensor size does not match network size. job=%s, index=%u, offset=%u, "
312 "expected=%02x, network=%02x\n",
313 job.name.c_str(),
314 i,
Kristofer Jonsson641c0912020-08-31 11:34:14 +0200315 j,
Kristofer Jonsson72fa50b2020-09-10 13:26:41 +0200316 static_cast<uint8_t *>(expected.data)[j],
Kristofer Jonsson641c0912020-08-31 11:34:14 +0200317 output->data.uint8[j]);
318 }
319 }
320 }
Kristofer Jonsson641c0912020-08-31 11:34:14 +0200321 }
Kristofer Jonsson72fa50b2020-09-10 13:26:41 +0200322
Kristofer Jonsson641c0912020-08-31 11:34:14 +0200323 printf("Finished running job: %s\n", job.name.c_str());
324
325 return false;
326}
327
328bool InferenceProcess::run(bool exitOnEmpty) {
329 bool anyJobFailed = false;
330
331 while (true) {
332 getLock();
333 bool empty = inferenceJobQueue.empty();
334 freeLock();
335
336 if (empty) {
337 if (exitOnEmpty) {
338 printf("Exit from InferenceProcess::run() on empty job queue!\n");
339 break;
340 }
341
342 continue;
343 }
344
345 getLock();
346 InferenceJob job = inferenceJobQueue.front();
347 inferenceJobQueue.pop();
348 freeLock();
349
350 if (runJob(job)) {
351 anyJobFailed = true;
352 continue;
353 }
354 }
355
356 return anyJobFailed;
357}
358
359} // namespace InferenceProcess