Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 1 | /* |
Per Åstrand | 9045545 | 2021-02-25 11:10:08 +0100 | [diff] [blame] | 2 | * Copyright (c) 2019-2021 Arm Limited. All rights reserved. |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 3 | * |
| 4 | * SPDX-License-Identifier: Apache-2.0 |
| 5 | * |
| 6 | * Licensed under the Apache License, Version 2.0 (the License); you may |
| 7 | * not use this file except in compliance with the License. |
| 8 | * You may obtain a copy of the License at |
| 9 | * |
| 10 | * www.apache.org/licenses/LICENSE-2.0 |
| 11 | * |
| 12 | * Unless required by applicable law or agreed to in writing, software |
| 13 | * distributed under the License is distributed on an AS IS BASIS, WITHOUT |
| 14 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 15 | * See the License for the specific language governing permissions and |
| 16 | * limitations under the License. |
| 17 | */ |
| 18 | |
| 19 | #include "tensorflow/lite/micro/all_ops_resolver.h" |
Måns Nilsson | 231e1d9 | 2020-11-05 12:19:34 +0100 | [diff] [blame] | 20 | #include "tensorflow/lite/micro/cortex_m_generic/debug_log_callback.h" |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 21 | #include "tensorflow/lite/micro/micro_error_reporter.h" |
| 22 | #include "tensorflow/lite/micro/micro_interpreter.h" |
Bhavik Patel | ffe845d | 2020-11-16 12:13:56 +0100 | [diff] [blame] | 23 | #include "tensorflow/lite/micro/micro_profiler.h" |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 24 | #include "tensorflow/lite/schema/schema_generated.h" |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 25 | |
Jens Elofsson | 955288a | 2021-04-22 20:57:15 +0200 | [diff] [blame] | 26 | #include "arm_profiler.hpp" |
Kristofer Jonsson | 3bd3423 | 2021-08-30 13:55:55 +0200 | [diff] [blame] | 27 | #ifdef LAYER_BY_LAYER_PROFILER |
Jens Elofsson | 701a63b | 2021-05-23 17:37:07 +0200 | [diff] [blame] | 28 | #include "layer_by_layer_profiler.hpp" |
Jens Elofsson | 955288a | 2021-04-22 20:57:15 +0200 | [diff] [blame] | 29 | #endif |
Anton Moberg | 07cf70b | 2021-07-07 11:08:17 +0200 | [diff] [blame] | 30 | #include "ethosu_log.h" |
Jens Elofsson | 955288a | 2021-04-22 20:57:15 +0200 | [diff] [blame] | 31 | |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 32 | #include "inference_process.hpp" |
| 33 | |
Per Åstrand | d9afc08 | 2020-10-06 13:25:08 +0200 | [diff] [blame] | 34 | #include "cmsis_compiler.h" |
| 35 | |
Per Åstrand | 91a9173 | 2020-09-25 15:04:26 +0200 | [diff] [blame] | 36 | #include <inttypes.h> |
| 37 | |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 38 | using namespace std; |
| 39 | |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 40 | namespace { |
Måns Nilsson | 231e1d9 | 2020-11-05 12:19:34 +0100 | [diff] [blame] | 41 | |
// Debug-log callback handed to TensorFlow Lite Micro (see
// RegisterDebugLogCallback() in runJob); forwards framework debug prints to
// the application log.
void tflu_debug_log(const char *s) {
    LOG("%s", s);
}
| 45 | |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 46 | void print_output_data(TfLiteTensor *output, size_t bytesToPrint) { |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 47 | const int numBytesToPrint = min(output->bytes, bytesToPrint); |
Anton Moberg | 07cf70b | 2021-07-07 11:08:17 +0200 | [diff] [blame] | 48 | int dims_size = output->dims->size; |
| 49 | LOG("{\n"); |
| 50 | LOG("\"dims\": [%d,", dims_size); |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 51 | for (int i = 0; i < output->dims->size - 1; ++i) { |
Anton Moberg | 07cf70b | 2021-07-07 11:08:17 +0200 | [diff] [blame] | 52 | LOG("%d,", output->dims->data[i]); |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 53 | } |
Anton Moberg | 07cf70b | 2021-07-07 11:08:17 +0200 | [diff] [blame] | 54 | LOG("%d],\n", output->dims->data[dims_size - 1]); |
| 55 | LOG("\"data_address\": \"%08" PRIx32 "\",\n", (uint32_t)output->data.data); |
| 56 | LOG("\"data\":\""); |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 57 | for (int i = 0; i < numBytesToPrint - 1; ++i) { |
Davide Grohmann | fa479e4 | 2021-08-11 13:23:09 +0200 | [diff] [blame] | 58 | /* |
| 59 | * Workaround an issue when compiling with GCC where by |
| 60 | * printing only a '\n' the produced global output is wrong. |
| 61 | */ |
| 62 | if (i % 15 == 0 && i != 0) { |
| 63 | LOG("0x%02x,\n", output->data.uint8[i]); |
| 64 | } else { |
| 65 | LOG("0x%02x,", output->data.uint8[i]); |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 66 | } |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 67 | } |
Anton Moberg | 07cf70b | 2021-07-07 11:08:17 +0200 | [diff] [blame] | 68 | LOG("0x%02x\"\n", output->data.uint8[numBytesToPrint - 1]); |
| 69 | LOG("}"); |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 70 | } |
| 71 | |
| 72 | bool copyOutput(const TfLiteTensor &src, InferenceProcess::DataPtr &dst) { |
| 73 | if (dst.data == nullptr) { |
| 74 | return false; |
| 75 | } |
| 76 | |
| 77 | if (src.bytes > dst.size) { |
Kristofer Jonsson | eb91239 | 2021-11-12 12:51:27 +0100 | [diff] [blame^] | 78 | LOG_ERR("Tensor size mismatch (bytes): actual=%d, expected%d.", src.bytes, dst.size); |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 79 | return true; |
| 80 | } |
| 81 | |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 82 | copy(src.data.uint8, src.data.uint8 + src.bytes, static_cast<uint8_t *>(dst.data)); |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 83 | dst.size = src.bytes; |
| 84 | |
| 85 | return false; |
| 86 | } |
| 87 | |
| 88 | } // namespace |
| 89 | |
| 90 | namespace InferenceProcess { |
Per Åstrand | bbd9c8f | 2020-09-25 15:07:35 +0200 | [diff] [blame] | 91 | DataPtr::DataPtr(void *_data, size_t _size) : data(_data), size(_size) {} |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 92 | |
// Invalidate the data cache lines covering the buffer so the CPU re-reads
// memory written by another bus master (presumably the NPU — the callers in
// this file invalidate job buffers around inference). Compiled out on cores
// without a data cache.
void DataPtr::invalidate() {
#if defined(__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    SCB_InvalidateDCache_by_Addr(reinterpret_cast<uint32_t *>(data), size);
#endif
}
| 98 | |
// Clean (write back) the data cache lines covering the buffer so that other
// bus masters observe any CPU writes. Compiled out on cores without a data
// cache.
void DataPtr::clean() {
#if defined(__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    SCB_CleanDCache_by_Addr(reinterpret_cast<uint32_t *>(data), size);
#endif
}
| 104 | |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 105 | InferenceJob::InferenceJob() : numBytesToPrint(0) {} |
| 106 | |
// Construct a fully specified inference job.
//
// _name                  Job identifier used in log messages.
// _networkModel          TFLite flatbuffer to run.
// _input                 One buffer per non-empty network input tensor.
// _output                Destination buffers for the network outputs (may be
//                        empty to skip the copy).
// _expectedOutput        Reference outputs to compare against (may be empty).
// _numBytesToPrint       Max bytes of each output tensor to print (0 = none).
// _pmuEventConfig        PMU event ids to configure for profiling.
// _pmuCycleCounterEnable Non-zero to report the total cycle count via
//                        pmuCycleCounterCount after the run.
// The result members pmuEventCount and pmuCycleCounterCount start out
// zero-initialized.
InferenceJob::InferenceJob(const string &_name,
                           const DataPtr &_networkModel,
                           const vector<DataPtr> &_input,
                           const vector<DataPtr> &_output,
                           const vector<DataPtr> &_expectedOutput,
                           size_t _numBytesToPrint,
                           const vector<uint8_t> &_pmuEventConfig,
                           const uint32_t _pmuCycleCounterEnable) :
    name(_name),
    networkModel(_networkModel), input(_input), output(_output), expectedOutput(_expectedOutput),
    numBytesToPrint(_numBytesToPrint), pmuEventConfig(_pmuEventConfig), pmuCycleCounterEnable(_pmuCycleCounterEnable),
    pmuEventCount(), pmuCycleCounterCount(0) {}
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 119 | |
Kristofer Jonsson | 34e2496 | 2020-11-23 16:22:10 +0100 | [diff] [blame] | 120 | void InferenceJob::invalidate() { |
| 121 | networkModel.invalidate(); |
| 122 | |
| 123 | for (auto &it : input) { |
| 124 | it.invalidate(); |
| 125 | } |
| 126 | |
| 127 | for (auto &it : output) { |
| 128 | it.invalidate(); |
| 129 | } |
| 130 | |
| 131 | for (auto &it : expectedOutput) { |
| 132 | it.invalidate(); |
| 133 | } |
| 134 | } |
| 135 | |
| 136 | void InferenceJob::clean() { |
| 137 | networkModel.clean(); |
| 138 | |
| 139 | for (auto &it : input) { |
| 140 | it.clean(); |
| 141 | } |
| 142 | |
| 143 | for (auto &it : output) { |
| 144 | it.clean(); |
| 145 | } |
| 146 | |
| 147 | for (auto &it : expectedOutput) { |
| 148 | it.clean(); |
| 149 | } |
| 150 | } |
| 151 | |
// Acquire the spin lock protecting inferenceJobQueue.
//
// Busy-waits using exclusive load/store (LDREX/STREX), adapted with some
// corrections from
// http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dai0321a/BIHEJCHB.html
// TODO: check correctness?
void InferenceProcess::getLock() {
    int status = 0;

    do {
        // Wait until lock_var is free
        while (__LDREXW(&lock) != 0)
            ;

        // Try to set lock_var; __STREXW returns non-zero if the exclusive
        // store failed and the acquisition must be retried.
        status = __STREXW(1, &lock);
    } while (status != 0);

    // Do not start any other memory access until memory barrier is completed
    __DMB();
}
| 170 | |
// Release the spin lock taken by getLock().
// TODO: check correctness?
void InferenceProcess::freeLock() {
    // Ensure memory operations completed before releasing lock
    __DMB();

    lock = 0;
}
| 178 | |
// Enqueue a copy of `job` on the shared job queue under the spin lock.
// Always returns true.
bool InferenceProcess::push(const InferenceJob &job) {
    getLock();
    inferenceJobQueue.push(job);
    freeLock();

    return true;
}
| 186 | |
// Run a single inference job through TensorFlow Lite Micro.
//
// Returns true on failure, false on success. Steps: verify the model schema
// version, build an interpreter over the shared tensor arena, copy job inputs
// into the network, invoke, then optionally copy outputs to job.output,
// print them, and compare them against job.expectedOutput.
//
// NOTE(review): several LOG_ERR format strings below pass size_t arguments
// to %u/%d — harmless on 32-bit Cortex-M where size_t is unsigned int, but
// not portable; confirm before reusing on other targets.
bool InferenceProcess::runJob(InferenceJob &job) {
    LOG_INFO("Running inference job: %s", job.name.c_str());

    // Register debug log callback for profiling
    RegisterDebugLogCallback(tflu_debug_log);

    tflite::MicroErrorReporter microErrorReporter;
    tflite::ErrorReporter *reporter = &microErrorReporter;

    // Get model handle and verify that the version is correct
    const tflite::Model *model = ::tflite::GetModel(job.networkModel.data);
    if (model->version() != TFLITE_SCHEMA_VERSION) {
        LOG_ERR("Model schema version unsupported: version=%" PRIu32 ", supported=%d.",
                model->version(),
                TFLITE_SCHEMA_VERSION);
        return true;
    }

    // Create the TFL micro interpreter
    tflite::AllOpsResolver resolver;
#ifdef LAYER_BY_LAYER_PROFILER
    // Per-layer profiler selected at build time.
    tflite::LayerByLayerProfiler profiler;
#else
    tflite::ArmProfiler profiler;
#endif

    tflite::MicroInterpreter interpreter(model, resolver, tensorArena, tensorArenaSize, reporter, nullptr, &profiler);

    // Allocate tensors
    TfLiteStatus allocate_status = interpreter.AllocateTensors();
    if (allocate_status != kTfLiteOk) {
        LOG_ERR("Failed to allocate tensors for inference: job=%s", job.name.c_str());
        return true;
    }

    // Create a filtered list of non empty input tensors
    vector<TfLiteTensor *> inputTensors;
    for (size_t i = 0; i < interpreter.inputs_size(); ++i) {
        TfLiteTensor *tensor = interpreter.input(i);

        if (tensor->bytes > 0) {
            inputTensors.push_back(tensor);
        }
    }
    if (job.input.size() != inputTensors.size()) {
        LOG_ERR("Number of input buffers does not match number of non empty network tensors: input=%zu, network=%zu",
                job.input.size(),
                inputTensors.size());
        return true;
    }

    // Copy input data into the network's input tensors; each job buffer must
    // match its tensor size exactly.
    for (size_t i = 0; i < inputTensors.size(); ++i) {
        const DataPtr &input       = job.input[i];
        const TfLiteTensor *tensor = inputTensors[i];

        if (input.size != tensor->bytes) {
            LOG_ERR("Job input size does not match network input size: job=%s, index=%zu, input=%zu, network=%u",
                    job.name.c_str(),
                    i,
                    input.size,
                    tensor->bytes);
            return true;
        }

        copy(static_cast<char *>(input.data), static_cast<char *>(input.data) + input.size, tensor->data.uint8);
    }

    // Run the inference
    TfLiteStatus invoke_status = interpreter.Invoke();
    if (invoke_status != kTfLiteOk) {
        LOG_ERR("Invoke failed for inference: job=%s", job.name.c_str());
        return true;
    }

    LOG("arena_used_bytes : %zu\n", interpreter.arena_used_bytes());

    LOG("Inference runtime: %u cycles\n", (unsigned int)profiler.GetTotalTicks());

    // Report the total cycle count through the job when requested.
    if (job.pmuCycleCounterEnable != 0) {
        job.pmuCycleCounterCount = profiler.GetTotalTicks();
    }

    // Copy output data to the job's output buffers (skipped when none given).
    if (job.output.size() > 0) {
        if (interpreter.outputs_size() != job.output.size()) {
            LOG_ERR("Output size mismatch: job=%zu, network=%u", job.output.size(), interpreter.outputs_size());
            return true;
        }

        for (unsigned i = 0; i < interpreter.outputs_size(); ++i) {
            if (copyOutput(*interpreter.output(i), job.output[i])) {
                return true;
            }
        }
    }

    if (job.numBytesToPrint > 0) {
        // Print all of the output data, or the first NUM_BYTES_TO_PRINT bytes,
        // whichever comes first as well as the output shape.
        LOG("num_of_outputs: %d\n", interpreter.outputs_size());
        LOG("output_begin\n");
        LOG("[\n");
        for (unsigned int i = 0; i < interpreter.outputs_size(); i++) {
            TfLiteTensor *output = interpreter.output(i);
            print_output_data(output, job.numBytesToPrint);
            if (i != interpreter.outputs_size() - 1) {
                LOG(",\n");
            }
        }
        LOG("]\n");
        LOG("output_end\n");
    }

    // Verify outputs against the expected data, when supplied.
    if (job.expectedOutput.size() > 0) {
        if (job.expectedOutput.size() != interpreter.outputs_size()) {
            LOG_ERR("Expected number of output tensors mismatch: job=%s, expected=%zu, network=%zu",
                    job.name.c_str(),
                    job.expectedOutput.size(),
                    interpreter.outputs_size());
            return true;
        }

        for (unsigned int i = 0; i < interpreter.outputs_size(); i++) {
            const DataPtr &expected    = job.expectedOutput[i];
            const TfLiteTensor *output = interpreter.output(i);

            if (expected.size != output->bytes) {
                LOG_ERR("Expected output tensor size mismatch: job=%s, index=%u, expected=%zu, network=%zu",
                        job.name.c_str(),
                        i,
                        expected.size,
                        output->bytes);
                return true;
            }

            // Byte-wise comparison; the first mismatch fails the job.
            for (unsigned int j = 0; j < output->bytes; ++j) {
                if (output->data.uint8[j] != static_cast<uint8_t *>(expected.data)[j]) {
                    LOG_ERR("Expected output tensor data mismatch: job=%s, index=%u, offset=%u, "
                            "expected=%02x, network=%02x\n",
                            job.name.c_str(),
                            i,
                            j,
                            static_cast<uint8_t *>(expected.data)[j],
                            output->data.uint8[j]);
                    return true;
                }
            }
        }
    }

    LOG_INFO("Finished running job: %s", job.name.c_str());

    return false;
} // Fix: the closing-brace comment wrongly said "namespace InferenceProcess".
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 342 | |
| 343 | bool InferenceProcess::run(bool exitOnEmpty) { |
| 344 | bool anyJobFailed = false; |
| 345 | |
| 346 | while (true) { |
| 347 | getLock(); |
| 348 | bool empty = inferenceJobQueue.empty(); |
| 349 | freeLock(); |
| 350 | |
| 351 | if (empty) { |
| 352 | if (exitOnEmpty) { |
Kristofer Jonsson | eb91239 | 2021-11-12 12:51:27 +0100 | [diff] [blame^] | 353 | LOG_INFO("Exit from InferenceProcess::run() due to empty job queue"); |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 354 | break; |
| 355 | } |
| 356 | |
| 357 | continue; |
| 358 | } |
| 359 | |
| 360 | getLock(); |
| 361 | InferenceJob job = inferenceJobQueue.front(); |
| 362 | inferenceJobQueue.pop(); |
| 363 | freeLock(); |
| 364 | |
| 365 | if (runJob(job)) { |
| 366 | anyJobFailed = true; |
| 367 | continue; |
| 368 | } |
| 369 | } |
| 370 | |
| 371 | return anyJobFailed; |
| 372 | } |
| 373 | |
| 374 | } // namespace InferenceProcess |