Support inferences with multiple inputs and outputs

Update inference process apis to support inferences with multiple inputs
and multiple outputs.

Update message process to handle the new inference request message,
which carries arrays of input and output buffers.

Change-Id: Ide0897385a1d829f58edace79140d01d8e3b85a3
diff --git a/applications/message_process/include/message_process.hpp b/applications/message_process/include/message_process.hpp
index 8044f7c..51f474d 100644
--- a/applications/message_process/include/message_process.hpp
+++ b/applications/message_process/include/message_process.hpp
@@ -24,6 +24,7 @@
 
 #include <cstddef>
 #include <cstdio>
+#include <vector>
 
 namespace MessageProcess {
 
@@ -77,7 +78,7 @@
     void handleIrq();
     bool handleMessage();
     void sendPong();
-    void sendInferenceRsp(uint64_t userArg, size_t ofmSize, bool failed);
+    void sendInferenceRsp(uint64_t userArg, std::vector<InferenceProcess::DataPtr> &ofm, bool failed);
 
 private:
     QueueImpl queueIn;