Inference process: refactor runJob() and fix profiler return type

Split runJob() into smaller functions to make the code easier to
read and maintain.

Correct the return type of the Layer By Layer profiler's GetTotalTicks().

Change-Id: Ie414522017b3b6c6da9a09182439dbe4b2efdb1f
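
As a rough sketch only (the actual control flow, error-handling convention
and interpreter setup live in inference_process.cpp and may differ), the
refactored runJob() could delegate to the new private helpers declared in
the header below roughly like this:

    // Hypothetical outline; model parsing, op registration, tensor arena
    // allocation and profiler wiring are elided.
    bool InferenceProcess::runJob(InferenceJob &job) {
        // ... construct a tflite::MicroInterpreter 'interpreter' over
        //     tensorArena/tensorArenaSize and call AllocateTensors() ...

        // Copy the job's input feature maps into the interpreter's input tensors.
        if (!copyIfm(job, interpreter)) {
            return false; // assumption: false signals failure in this sketch
        }

        // Log network, tensor and arena details for debugging.
        printJob(job, interpreter);

        // Run inference.
        if (interpreter.Invoke() != kTfLiteOk) {
            return false;
        }

        // Copy output tensors back to the job's output buffers and, if
        // expected output was supplied, compare against it.
        if (!copyOfm(job, interpreter)) {
            return false;
        }
        if (!compareOfm(job, interpreter)) {
            return false;
        }

        return true;
    }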
diff --git a/applications/inference_process/include/inference_process.hpp b/applications/inference_process/include/inference_process.hpp
index b8b2775..6ab453c 100644
--- a/applications/inference_process/include/inference_process.hpp
+++ b/applications/inference_process/include/inference_process.hpp
@@ -24,6 +24,14 @@
 #include <string>
 #include <vector>
 
+struct TfLiteTensor;
+
+namespace tflite {
+// Forward declarations
+class MicroInterpreter;
+class MicroResourceVariables;
+} // namespace tflite
+
 namespace InferenceProcess {
 struct DataPtr {
     void *data;
@@ -33,6 +41,9 @@
 
     void invalidate();
     void clean();
+
+    char *begin() const;
+    char *end() const;
 };
 
 struct InferenceJob {
@@ -68,6 +79,13 @@
     bool runJob(InferenceJob &job);
 
 private:
+    static bool copyIfm(InferenceJob &job, tflite::MicroInterpreter &interpreter);
+    static bool copyOfm(InferenceJob &job, tflite::MicroInterpreter &interpreter);
+    static bool compareOfm(InferenceJob &job, tflite::MicroInterpreter &interpreter);
+    static void printJob(InferenceJob &job, tflite::MicroInterpreter &interpreter);
+    static void printOutputTensor(TfLiteTensor *output, size_t bytesToPrint);
+    static void tfluDebugLog(const char *s);
+
     uint8_t *tensorArena;
     const size_t tensorArenaSize;
 };
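
For context, the new begin()/end() accessors on DataPtr would plausibly be
implemented as below; this is a minimal sketch that assumes DataPtr also
carries a size member (not visible in this hunk):

    // Hypothetical implementation, assuming a 'size' field next to 'data'.
    char *DataPtr::begin() const {
        return static_cast<char *>(data);
    }

    char *DataPtr::end() const {
        return static_cast<char *>(data) + size;
    }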