Support inferences with multiple inputs and outputs

Update the inference process APIs to support inferences with multiple
inputs and multiple outputs.
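
For illustration, a minimal sketch of building a job against the updated
API. The buffer names and sizes are caller-supplied placeholders, and
DataPtr is assumed to offer a (pointer, size) constructor as elsewhere in
this header:

    #include <cstddef>
    #include <cstdint>
    #include <vector>
    #include "inference_process.hpp"

    using namespace InferenceProcess;

    // Build a job with two inputs and one output using the new
    // vector-based InferenceJob constructor.
    InferenceJob makeJob(uint8_t *model, size_t modelSize,
                         uint8_t *in0, size_t in0Size,
                         uint8_t *in1, size_t in1Size,
                         uint8_t *out, size_t outSize) {
        std::vector<DataPtr> inputs  = {DataPtr(in0, in0Size),
                                        DataPtr(in1, in1Size)};
        std::vector<DataPtr> outputs = {DataPtr(out, outSize)};
        std::vector<DataPtr> expected; // empty: skip output verification

        return InferenceJob("multi_io_job",
                            DataPtr(model, modelSize),
                            inputs,
                            outputs,
                            expected,
                            0 /* numBytesToPrint */);
    }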

Update the message process to handle the new inference request message,
which carries arrays of input and output buffers.
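
The wire format itself is not shown in this patch; a hypothetical sketch
of such a request message, with explicit counts ahead of fixed-capacity
buffer arrays (all names and the MAX_IO constant are illustrative, not
the actual protocol):

    #include <cstdint>

    constexpr uint32_t MAX_IO = 16; // illustrative capacity, not from the patch

    struct Buffer {
        uint32_t ptr;  // base address of the buffer
        uint32_t size; // size in bytes
    };

    // Hypothetical request layout: counts followed by buffer arrays, so a
    // single message can describe N inputs and M outputs.
    struct InferenceRequest {
        Buffer   network;          // the network model
        uint32_t inputCount;       // number of valid entries in input[]
        Buffer   input[MAX_IO];
        uint32_t outputCount;      // number of valid entries in output[]
        Buffer   output[MAX_IO];
    };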

Change-Id: Ide0897385a1d829f58edace79140d01d8e3b85a3
diff --git a/applications/inference_process/include/inference_process.hpp b/applications/inference_process/include/inference_process.hpp
index a5fef2c..53b9331 100644
--- a/applications/inference_process/include/inference_process.hpp
+++ b/applications/inference_process/include/inference_process.hpp
@@ -21,6 +21,7 @@
 #include <queue>
 #include <stdlib.h>
 #include <string>
+#include <vector>
 
 namespace InferenceProcess {
 struct DataPtr {
@@ -33,17 +34,17 @@
 struct InferenceJob {
     std::string name;
     DataPtr networkModel;
-    DataPtr input;
-    DataPtr output;
-    DataPtr expectedOutput;
+    std::vector<DataPtr> input;
+    std::vector<DataPtr> output;
+    std::vector<DataPtr> expectedOutput;
     size_t numBytesToPrint;
 
     InferenceJob();
     InferenceJob(const std::string &name,
                  const DataPtr &networkModel,
-                 const DataPtr &input,
-                 const DataPtr &output,
-                 const DataPtr &expectedOutput,
+                 const std::vector<DataPtr> &input,
+                 const std::vector<DataPtr> &output,
+                 const std::vector<DataPtr> &expectedOutput,
                  size_t numBytesToPrint);
 };