PMU multi-NPU support

Update the sample applications to use an external context for the PMU
configuration. The external context stored in the InferenceJob will be
set as the TFLu external context and passed back in the
ethosu_inference_begin() and ethosu_inference_end() callbacks.

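A minimal sketch of the callback side of this flow. PmuContext and its
fields are illustrative only, not part of the samples (the baremetal
sample below keeps its PMU setup in a global EthosUMonitor and ignores
the context argument); the callback signatures match the void * user
argument introduced in this change:

    #include <cstdint>

    struct ethosu_driver; // from ethosu_driver.h

    // Hypothetical per-job PMU configuration carried as the external
    // context; one event id per hardware counter is an assumption.
    struct PmuContext {
        uint32_t events[4];
    };

    // The context stored in the InferenceJob is handed back by the
    // driver, so in a multi-NPU setup each (driver, job) pair can
    // carry its own PMU configuration.
    void ethosu_inference_begin(struct ethosu_driver *drv, void *userArg) {
        auto *ctx = static_cast<PmuContext *>(userArg);
        if (ctx != nullptr) {
            // program the PMU counters on 'drv' from ctx->events
        }
    }

    void ethosu_inference_end(struct ethosu_driver *drv, void *userArg) {
        (void)drv;
        (void)userArg;
        // stop and read back the counters configured in _begin()
    }
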
Change-Id: Ief1f0943e322c2b50e8b964017af59161f67de6b
diff --git a/applications/baremetal/main.cpp b/applications/baremetal/main.cpp
index ea5f03c..e9b398a 100644
--- a/applications/baremetal/main.cpp
+++ b/applications/baremetal/main.cpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2021 Arm Limited. All rights reserved.
+ * Copyright (c) 2021-2022 Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: Apache-2.0
  *
@@ -39,6 +39,7 @@
 #endif
 
 using namespace std;
+using namespace InferenceProcess;
 
 /****************************************************************************
  * InferenceJob
@@ -50,7 +51,7 @@
 
 __attribute__((section(".bss.tensor_arena"), aligned(16))) uint8_t TFLuTensorArena[TENSOR_ARENA_SIZE];
 
-InferenceProcess::InferenceProcess inferenceProcess(TFLuTensorArena, TENSOR_ARENA_SIZE);
+class InferenceProcess inferenceProcess(TFLuTensorArena, TENSOR_ARENA_SIZE);
 
 uint8_t outputData[sizeof(expectedOutputData)] __attribute__((aligned(16), section("output_data_sec")));
 
@@ -97,7 +98,7 @@
     ethosuMonitor.monitorSample(ethosuDrv);
 }
 
-void ethosu_inference_begin(struct ethosu_driver *drv, const void *) {
+void ethosu_inference_begin(struct ethosu_driver *drv, void *) {
     ethosuDrv = drv;
     ethosuMonitor.configure(drv, pmuEventConfig);
 
@@ -105,7 +106,7 @@
     SysTick_Config(delayMs);
 }
 
-void ethosu_inference_end(struct ethosu_driver *drv, const void *) {
+void ethosu_inference_end(struct ethosu_driver *drv, void *) {
     // Disable polling
     SysTick->CTRL = 0;
 
@@ -117,25 +118,13 @@
 #endif
 
 int runInference() {
-    // Load inference data
-    vector<InferenceProcess::DataPtr> input;
-    input.push_back(InferenceProcess::DataPtr(inputData, sizeof(inputData)));
-
-    vector<InferenceProcess::DataPtr> output;
-    output.push_back(InferenceProcess::DataPtr(outputData, sizeof(outputData)));
-
-    vector<InferenceProcess::DataPtr> expected;
-    expected.push_back(InferenceProcess::DataPtr(expectedOutputData, sizeof(expectedOutputData)));
-
     // Create job
-    InferenceProcess::InferenceJob job(string(modelName),
-                                       InferenceProcess::DataPtr(networkModelData, sizeof(networkModelData)),
-                                       input,
-                                       output,
-                                       expected,
-                                       512,
-                                       std::vector<uint8_t>(4),
-                                       false);
+    InferenceJob job(string(modelName),
+                     DataPtr(networkModelData, sizeof(networkModelData)),
+                     {DataPtr(inputData, sizeof(inputData))},
+                     {DataPtr(outputData, sizeof(outputData))},
+                     {DataPtr(expectedOutputData, sizeof(expectedOutputData))},
+                     512);
 
     // Run job
     bool failed = inferenceProcess.runJob(job);