Make inference process reusable

Private members are changed to protected and runJob is made virtual,
making it easier to customize how jobs are run in subclasses.

Change-Id: I2e4a0760000a451eead062ef758b467ea248dffe
diff --git a/applications/inference_process/include/inference_process.hpp b/applications/inference_process/include/inference_process.hpp
index f8d7fd8..9bc0b56 100644
--- a/applications/inference_process/include/inference_process.hpp
+++ b/applications/inference_process/include/inference_process.hpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2019-2022 Arm Limited. All rights reserved.
+ * SPDX-FileCopyrightText: Copyright 2019-2023 Arm Limited and/or its affiliates <open-source-office@arm.com>
  *
  * SPDX-License-Identifier: Apache-2.0
  *
@@ -75,9 +75,9 @@
 public:
     InferenceProcess(uint8_t *_tensorArena, size_t _tensorArenaSize);
 
-    bool runJob(InferenceJob &job);
+    virtual bool runJob(InferenceJob &job);
 
-private:
+protected:
     static bool copyIfm(InferenceJob &job, tflite::MicroInterpreter &interpreter);
     static bool copyOfm(InferenceJob &job, tflite::MicroInterpreter &interpreter);
     static bool compareOfm(InferenceJob &job, tflite::MicroInterpreter &interpreter);