Inference app: Set external context after allocating tensors

TFLM now requires the external context to be set while the interpreter
is in the prepare or invoke state, so the call is moved to after
AllocateTensors().

Change-Id: I87d3f386aac5c47b71a7abfb9a8c48ff0255fc67
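
A minimal sketch of the new call order, assuming a model buffer, an op
resolver and a tensor arena are already available. The helper name, the
type of externalContext and the exact constructor arguments are
illustrative only and not part of the application's actual code:

    #include <cstddef>
    #include <cstdint>

    #include "tensorflow/lite/micro/micro_interpreter.h"
    #include "tensorflow/lite/micro/micro_op_resolver.h"

    bool runInference(const tflite::Model *model,
                      const tflite::MicroOpResolver &resolver,
                      uint8_t *arena, size_t arenaSize,
                      void *externalContext) {
        // No resource variables and no profiler in this sketch.
        tflite::MicroInterpreter interpreter(model, resolver, arena, arenaSize,
                                             nullptr, nullptr);

        // Allocate tensors first; TFLM now expects the external context to
        // be set during the prepare or invoke state, not before allocation.
        if (interpreter.AllocateTensors() != kTfLiteOk) {
            return false;
        }

        // Only now attach the external context, mirroring the reordering in
        // this patch.
        if (externalContext != nullptr) {
            interpreter.SetMicroExternalContext(externalContext);
        }

        // Run inference as usual.
        return interpreter.Invoke() == kTfLiteOk;
    }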
diff --git a/applications/inference_process/src/inference_process.cpp b/applications/inference_process/src/inference_process.cpp
index a5bf642..3cbc8e0 100644
--- a/applications/inference_process/src/inference_process.cpp
+++ b/applications/inference_process/src/inference_process.cpp
@@ -158,11 +158,6 @@
     tflite::ArmProfiler profiler;
     tflite::MicroInterpreter interpreter(model, resolver, tensorArena, tensorArenaSize, nullptr, &profiler);
 
-    // Set external context
-    if (job.externalContext != nullptr) {
-        interpreter.SetMicroExternalContext(job.externalContext);
-    }
-
     // Allocate tensors
     TfLiteStatus status = interpreter.AllocateTensors();
     if (status != kTfLiteOk) {
@@ -170,6 +165,11 @@
         return true;
     }
 
+    // Set external context
+    if (job.externalContext != nullptr) {
+        interpreter.SetMicroExternalContext(job.externalContext);
+    }
+
     // Copy IFM data from job descriptor to TFLu arena
     if (copyIfm(job, interpreter)) {
         return true;