Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 1 | /* |
Per Åstrand | 9045545 | 2021-02-25 11:10:08 +0100 | [diff] [blame] | 2 | * Copyright (c) 2019-2021 Arm Limited. All rights reserved. |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 3 | * |
| 4 | * SPDX-License-Identifier: Apache-2.0 |
| 5 | * |
| 6 | * Licensed under the Apache License, Version 2.0 (the License); you may |
| 7 | * not use this file except in compliance with the License. |
| 8 | * You may obtain a copy of the License at |
| 9 | * |
| 10 | * www.apache.org/licenses/LICENSE-2.0 |
| 11 | * |
| 12 | * Unless required by applicable law or agreed to in writing, software |
| 13 | * distributed under the License is distributed on an AS IS BASIS, WITHOUT |
| 14 | * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 15 | * See the License for the specific language governing permissions and |
| 16 | * limitations under the License. |
| 17 | */ |
| 18 | |
| 19 | #include "tensorflow/lite/micro/all_ops_resolver.h" |
Måns Nilsson | 231e1d9 | 2020-11-05 12:19:34 +0100 | [diff] [blame] | 20 | #include "tensorflow/lite/micro/cortex_m_generic/debug_log_callback.h" |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 21 | #include "tensorflow/lite/micro/micro_error_reporter.h" |
| 22 | #include "tensorflow/lite/micro/micro_interpreter.h" |
Bhavik Patel | ffe845d | 2020-11-16 12:13:56 +0100 | [diff] [blame] | 23 | #include "tensorflow/lite/micro/micro_profiler.h" |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 24 | #include "tensorflow/lite/schema/schema_generated.h" |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 25 | |
Jens Elofsson | 955288a | 2021-04-22 20:57:15 +0200 | [diff] [blame] | 26 | #include "arm_profiler.hpp" |
| 27 | #ifdef ETHOSU |
Jens Elofsson | 701a63b | 2021-05-23 17:37:07 +0200 | [diff] [blame] | 28 | #include "layer_by_layer_profiler.hpp" |
Jens Elofsson | 955288a | 2021-04-22 20:57:15 +0200 | [diff] [blame] | 29 | #endif |
| 30 | |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 31 | #include "inference_process.hpp" |
| 32 | |
Per Åstrand | d9afc08 | 2020-10-06 13:25:08 +0200 | [diff] [blame] | 33 | #include "cmsis_compiler.h" |
| 34 | |
Per Åstrand | 91a9173 | 2020-09-25 15:04:26 +0200 | [diff] [blame] | 35 | #include <inttypes.h> |
| 36 | |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 37 | using namespace std; |
| 38 | |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 39 | namespace { |
Måns Nilsson | 231e1d9 | 2020-11-05 12:19:34 +0100 | [diff] [blame] | 40 | |
// Debug-log callback registered with TFLu (see RegisterDebugLogCallback in
// runJob); forwards every message verbatim to stderr.
void tflu_debug_log(const char *s) {
    fputs(s, stderr);
}
| 44 | |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 45 | void print_output_data(TfLiteTensor *output, size_t bytesToPrint) { |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 46 | const int numBytesToPrint = min(output->bytes, bytesToPrint); |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 47 | |
| 48 | int dims_size = output->dims->size; |
| 49 | printf("{\n"); |
| 50 | printf("\"dims\": [%d,", dims_size); |
| 51 | for (int i = 0; i < output->dims->size - 1; ++i) { |
| 52 | printf("%d,", output->dims->data[i]); |
| 53 | } |
| 54 | printf("%d],\n", output->dims->data[dims_size - 1]); |
| 55 | |
Per Åstrand | 91a9173 | 2020-09-25 15:04:26 +0200 | [diff] [blame] | 56 | printf("\"data_address\": \"%08" PRIx32 "\",\n", (uint32_t)output->data.data); |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 57 | printf("\"data\":\""); |
| 58 | for (int i = 0; i < numBytesToPrint - 1; ++i) { |
| 59 | if (i % 16 == 0 && i != 0) { |
| 60 | printf("\n"); |
| 61 | } |
| 62 | printf("0x%02x,", output->data.uint8[i]); |
| 63 | } |
| 64 | printf("0x%02x\"\n", output->data.uint8[numBytesToPrint - 1]); |
| 65 | printf("}"); |
| 66 | } |
| 67 | |
| 68 | bool copyOutput(const TfLiteTensor &src, InferenceProcess::DataPtr &dst) { |
| 69 | if (dst.data == nullptr) { |
| 70 | return false; |
| 71 | } |
| 72 | |
| 73 | if (src.bytes > dst.size) { |
| 74 | printf("Tensor size %d does not match output size %d.\n", src.bytes, dst.size); |
| 75 | return true; |
| 76 | } |
| 77 | |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 78 | copy(src.data.uint8, src.data.uint8 + src.bytes, static_cast<uint8_t *>(dst.data)); |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 79 | dst.size = src.bytes; |
| 80 | |
| 81 | return false; |
| 82 | } |
| 83 | |
| 84 | } // namespace |
| 85 | |
| 86 | namespace InferenceProcess { |
Per Åstrand | bbd9c8f | 2020-09-25 15:07:35 +0200 | [diff] [blame] | 87 | DataPtr::DataPtr(void *_data, size_t _size) : data(_data), size(_size) {} |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 88 | |
// Invalidate (discard) the D-cache lines covering [data, data + size) so
// subsequent CPU reads fetch fresh contents from memory. Compiles to a
// no-op on cores without a data cache.
void DataPtr::invalidate() {
#if defined(__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    // CMSIS cache-maintenance helper; takes the start address and byte count.
    SCB_InvalidateDCache_by_Addr(reinterpret_cast<uint32_t *>(data), size);
#endif
}
| 94 | |
// Clean (write back) the D-cache lines covering [data, data + size) so the
// CPU's writes reach main memory. Compiles to a no-op on cores without a
// data cache.
void DataPtr::clean() {
#if defined(__DCACHE_PRESENT) && (__DCACHE_PRESENT == 1U)
    // CMSIS cache-maintenance helper; takes the start address and byte count.
    SCB_CleanDCache_by_Addr(reinterpret_cast<uint32_t *>(data), size);
#endif
}
| 100 | |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 101 | InferenceJob::InferenceJob() : numBytesToPrint(0) {} |
| 102 | |
// Construct a fully specified inference job.
//
// _name                  - label used in log prints while the job runs
// _networkModel          - flatbuffer model buffer passed to tflite::GetModel
// _input                 - one buffer per non-empty network input tensor
// _output                - destination buffers for network outputs (may be empty)
// _expectedOutput        - reference data compared byte-wise against outputs (may be empty)
// _numBytesToPrint       - max number of output bytes to hex-dump after inference
// _pmuEventConfig        - PMU event configuration (assumption: consumed by profiling
//                          code not visible in this file -- confirm)
// _pmuCycleCounterEnable - non-zero to record the profiler tick count into
//                          pmuCycleCounterCount after a successful run
//
// pmuEventCount and pmuCycleCounterCount are result fields, zero-initialized here.
InferenceJob::InferenceJob(const string &_name,
                           const DataPtr &_networkModel,
                           const vector<DataPtr> &_input,
                           const vector<DataPtr> &_output,
                           const vector<DataPtr> &_expectedOutput,
                           size_t _numBytesToPrint,
                           const vector<uint8_t> &_pmuEventConfig,
                           const uint32_t _pmuCycleCounterEnable) :
    name(_name),
    networkModel(_networkModel), input(_input), output(_output), expectedOutput(_expectedOutput),
    numBytesToPrint(_numBytesToPrint), pmuEventConfig(_pmuEventConfig), pmuCycleCounterEnable(_pmuCycleCounterEnable),
    pmuEventCount(), pmuCycleCounterCount(0) {}
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 115 | |
Kristofer Jonsson | 34e2496 | 2020-11-23 16:22:10 +0100 | [diff] [blame] | 116 | void InferenceJob::invalidate() { |
| 117 | networkModel.invalidate(); |
| 118 | |
| 119 | for (auto &it : input) { |
| 120 | it.invalidate(); |
| 121 | } |
| 122 | |
| 123 | for (auto &it : output) { |
| 124 | it.invalidate(); |
| 125 | } |
| 126 | |
| 127 | for (auto &it : expectedOutput) { |
| 128 | it.invalidate(); |
| 129 | } |
| 130 | } |
| 131 | |
| 132 | void InferenceJob::clean() { |
| 133 | networkModel.clean(); |
| 134 | |
| 135 | for (auto &it : input) { |
| 136 | it.clean(); |
| 137 | } |
| 138 | |
| 139 | for (auto &it : output) { |
| 140 | it.clean(); |
| 141 | } |
| 142 | |
| 143 | for (auto &it : expectedOutput) { |
| 144 | it.clean(); |
| 145 | } |
| 146 | } |
| 147 | |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 148 | // NOTE: Adding code for get_lock & free_lock with some corrections from |
| 149 | // http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dai0321a/BIHEJCHB.html |
| 150 | // TODO: check correctness? |
| 151 | void InferenceProcess::getLock() { |
| 152 | int status = 0; |
| 153 | |
| 154 | do { |
| 155 | // Wait until lock_var is free |
| 156 | while (__LDREXW(&lock) != 0) |
| 157 | ; |
| 158 | |
| 159 | // Try to set lock_var |
| 160 | status = __STREXW(1, &lock); |
| 161 | } while (status != 0); |
| 162 | |
| 163 | // Do not start any other memory access until memory barrier is completed |
| 164 | __DMB(); |
| 165 | } |
| 166 | |
// TODO: check correctness?
//
// Release the spinlock taken by getLock(); the barrier orders all earlier
// memory operations before the unlocking store.
void InferenceProcess::freeLock() {
    // Ensure memory operations completed before releasing lock
    __DMB();

    lock = 0;
}
| 174 | |
// Enqueue a copy of `job` on the shared job queue.
//
// Access to inferenceJobQueue is serialized with the getLock()/freeLock()
// spinlock. Always returns true.
bool InferenceProcess::push(const InferenceJob &job) {
    getLock();
    inferenceJobQueue.push(job);
    freeLock();

    return true;
}
| 182 | |
| 183 | bool InferenceProcess::runJob(InferenceJob &job) { |
| 184 | printf("Running inference job: %s\n", job.name.c_str()); |
| 185 | |
Bhavik Patel | ffe845d | 2020-11-16 12:13:56 +0100 | [diff] [blame] | 186 | // Register debug log callback for profiling |
| 187 | RegisterDebugLogCallback(tflu_debug_log); |
| 188 | |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 189 | tflite::MicroErrorReporter microErrorReporter; |
| 190 | tflite::ErrorReporter *reporter = µErrorReporter; |
| 191 | |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 192 | // Get model handle and verify that the version is correct |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 193 | const tflite::Model *model = ::tflite::GetModel(job.networkModel.data); |
| 194 | if (model->version() != TFLITE_SCHEMA_VERSION) { |
Per Åstrand | 91a9173 | 2020-09-25 15:04:26 +0200 | [diff] [blame] | 195 | printf("Model provided is schema version %" PRIu32 " not equal to supported version %d.\n", |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 196 | model->version(), |
| 197 | TFLITE_SCHEMA_VERSION); |
| 198 | return true; |
| 199 | } |
| 200 | |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 201 | // Create the TFL micro interpreter |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 202 | tflite::AllOpsResolver resolver; |
Jens Elofsson | 955288a | 2021-04-22 20:57:15 +0200 | [diff] [blame] | 203 | #ifdef ETHOSU |
Jens Elofsson | 701a63b | 2021-05-23 17:37:07 +0200 | [diff] [blame] | 204 | tflite::LayerByLayerProfiler profiler; |
Jens Elofsson | 955288a | 2021-04-22 20:57:15 +0200 | [diff] [blame] | 205 | #else |
| 206 | tflite::ArmProfiler profiler; |
Bhavik Patel | ffe845d | 2020-11-16 12:13:56 +0100 | [diff] [blame] | 207 | #endif |
Jens Elofsson | de044c3 | 2021-05-06 16:21:29 +0200 | [diff] [blame] | 208 | |
Anton Moberg | 66ed182 | 2021-02-10 08:49:28 +0100 | [diff] [blame] | 209 | tflite::MicroInterpreter interpreter(model, resolver, tensorArena, tensorArenaSize, reporter, &profiler); |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 210 | |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 211 | // Allocate tensors |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 212 | TfLiteStatus allocate_status = interpreter.AllocateTensors(); |
| 213 | if (allocate_status != kTfLiteOk) { |
| 214 | printf("AllocateTensors failed for inference job: %s\n", job.name.c_str()); |
| 215 | return true; |
| 216 | } |
| 217 | |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 218 | // Create a filtered list of non empty input tensors |
| 219 | vector<TfLiteTensor *> inputTensors; |
| 220 | for (size_t i = 0; i < interpreter.inputs_size(); ++i) { |
| 221 | TfLiteTensor *tensor = interpreter.input(i); |
| 222 | |
| 223 | if (tensor->bytes > 0) { |
| 224 | inputTensors.push_back(tensor); |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 225 | } |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 226 | } |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 227 | if (job.input.size() != inputTensors.size()) { |
| 228 | printf("Number of input buffers does not match number of non empty network tensors. input=%zu, network=%zu\n", |
| 229 | job.input.size(), |
| 230 | inputTensors.size()); |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 231 | return true; |
| 232 | } |
| 233 | |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 234 | // Copy input data |
| 235 | for (size_t i = 0; i < inputTensors.size(); ++i) { |
| 236 | const DataPtr &input = job.input[i]; |
| 237 | const TfLiteTensor *tensor = inputTensors[i]; |
| 238 | |
| 239 | if (input.size != tensor->bytes) { |
| 240 | printf("Input size does not match network size. job=%s, index=%zu, input=%zu, network=%u\n", |
| 241 | job.name.c_str(), |
| 242 | i, |
| 243 | input.size, |
| 244 | tensor->bytes); |
| 245 | return true; |
| 246 | } |
| 247 | |
| 248 | copy(static_cast<char *>(input.data), static_cast<char *>(input.data) + input.size, tensor->data.uint8); |
| 249 | } |
| 250 | |
| 251 | // Run the inference |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 252 | TfLiteStatus invoke_status = interpreter.Invoke(); |
| 253 | if (invoke_status != kTfLiteOk) { |
| 254 | printf("Invoke failed for inference job: %s\n", job.name.c_str()); |
| 255 | return true; |
| 256 | } |
| 257 | |
Bhavik Patel | ffe845d | 2020-11-16 12:13:56 +0100 | [diff] [blame] | 258 | printf("%s : %zu\r\n", "arena_used_bytes", interpreter.arena_used_bytes()); |
| 259 | |
Kristofer Jonsson | 91f600c | 2021-02-10 11:29:52 +0100 | [diff] [blame] | 260 | printf("Inference runtime: %u cycles\r\n", (unsigned int)profiler.GetTotalTicks()); |
Bhavik Patel | ffe845d | 2020-11-16 12:13:56 +0100 | [diff] [blame] | 261 | |
| 262 | if (job.pmuCycleCounterEnable != 0) { |
Kristofer Jonsson | 91f600c | 2021-02-10 11:29:52 +0100 | [diff] [blame] | 263 | job.pmuCycleCounterCount = profiler.GetTotalTicks(); |
Bhavik Patel | ffe845d | 2020-11-16 12:13:56 +0100 | [diff] [blame] | 264 | } |
| 265 | |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 266 | // Copy output data |
| 267 | if (job.output.size() > 0) { |
| 268 | if (interpreter.outputs_size() != job.output.size()) { |
| 269 | printf("Number of outputs mismatch. job=%zu, network=%u\n", job.output.size(), interpreter.outputs_size()); |
| 270 | return true; |
| 271 | } |
| 272 | |
| 273 | for (unsigned i = 0; i < interpreter.outputs_size(); ++i) { |
| 274 | if (copyOutput(*interpreter.output(i), job.output[i])) { |
| 275 | return true; |
| 276 | } |
| 277 | } |
| 278 | } |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 279 | |
| 280 | if (job.numBytesToPrint > 0) { |
| 281 | // Print all of the output data, or the first NUM_BYTES_TO_PRINT bytes, |
| 282 | // whichever comes first as well as the output shape. |
| 283 | printf("num_of_outputs: %d\n", interpreter.outputs_size()); |
| 284 | printf("output_begin\n"); |
| 285 | printf("[\n"); |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 286 | |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 287 | for (unsigned int i = 0; i < interpreter.outputs_size(); i++) { |
| 288 | TfLiteTensor *output = interpreter.output(i); |
| 289 | print_output_data(output, job.numBytesToPrint); |
| 290 | if (i != interpreter.outputs_size() - 1) { |
| 291 | printf(",\n"); |
| 292 | } |
| 293 | } |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 294 | |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 295 | printf("]\n"); |
| 296 | printf("output_end\n"); |
| 297 | } |
| 298 | |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 299 | if (job.expectedOutput.size() > 0) { |
| 300 | if (job.expectedOutput.size() != interpreter.outputs_size()) { |
Bhavik Patel | ffe845d | 2020-11-16 12:13:56 +0100 | [diff] [blame] | 301 | printf("Expected number of output tensors does not match network. job=%s, expected=%zu, network=%zu\n", |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 302 | job.name.c_str(), |
| 303 | job.expectedOutput.size(), |
| 304 | interpreter.outputs_size()); |
| 305 | return true; |
| 306 | } |
| 307 | |
| 308 | for (unsigned int i = 0; i < interpreter.outputs_size(); i++) { |
| 309 | const DataPtr &expected = job.expectedOutput[i]; |
| 310 | const TfLiteTensor *output = interpreter.output(i); |
| 311 | |
| 312 | if (expected.size != output->bytes) { |
Per Åstrand | 9045545 | 2021-02-25 11:10:08 +0100 | [diff] [blame] | 313 | printf("Expected tensor size does not match output size. job=%s, index=%u, expected=%zu, network=%zu\n", |
| 314 | job.name.c_str(), |
| 315 | i, |
| 316 | expected.size, |
| 317 | output->bytes); |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 318 | return true; |
| 319 | } |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 320 | |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 321 | for (unsigned int j = 0; j < output->bytes; ++j) { |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 322 | if (output->data.uint8[j] != static_cast<uint8_t *>(expected.data)[j]) { |
Per Åstrand | 9045545 | 2021-02-25 11:10:08 +0100 | [diff] [blame] | 323 | printf("Expected data does not match output data. job=%s, index=%u, offset=%u, " |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 324 | "expected=%02x, network=%02x\n", |
| 325 | job.name.c_str(), |
| 326 | i, |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 327 | j, |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 328 | static_cast<uint8_t *>(expected.data)[j], |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 329 | output->data.uint8[j]); |
Per Åstrand | 9045545 | 2021-02-25 11:10:08 +0100 | [diff] [blame] | 330 | return true; |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 331 | } |
| 332 | } |
| 333 | } |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 334 | } |
Kristofer Jonsson | 72fa50b | 2020-09-10 13:26:41 +0200 | [diff] [blame] | 335 | |
Kristofer Jonsson | 641c091 | 2020-08-31 11:34:14 +0200 | [diff] [blame] | 336 | printf("Finished running job: %s\n", job.name.c_str()); |
| 337 | |
| 338 | return false; |
| 339 | } |
| 340 | |
| 341 | bool InferenceProcess::run(bool exitOnEmpty) { |
| 342 | bool anyJobFailed = false; |
| 343 | |
| 344 | while (true) { |
| 345 | getLock(); |
| 346 | bool empty = inferenceJobQueue.empty(); |
| 347 | freeLock(); |
| 348 | |
| 349 | if (empty) { |
| 350 | if (exitOnEmpty) { |
| 351 | printf("Exit from InferenceProcess::run() on empty job queue!\n"); |
| 352 | break; |
| 353 | } |
| 354 | |
| 355 | continue; |
| 356 | } |
| 357 | |
| 358 | getLock(); |
| 359 | InferenceJob job = inferenceJobQueue.front(); |
| 360 | inferenceJobQueue.pop(); |
| 361 | freeLock(); |
| 362 | |
| 363 | if (runJob(job)) { |
| 364 | anyJobFailed = true; |
| 365 | continue; |
| 366 | } |
| 367 | } |
| 368 | |
| 369 | return anyJobFailed; |
| 370 | } |
| 371 | |
| 372 | } // namespace InferenceProcess |