MLECO-3174: Minor refactoring to implemented use case APIs

Looks large, but it is mostly a collection of small adjustments:
- Removed the inference runner code as it wasn't used
- Fixed up the doc strings
- Made naming consistent, e.g. Asr/Kws instead of ASR/KWS

Signed-off-by: Richard Burton <richard.burton@arm.com>
Change-Id: I43b620b5c51d7910a29a63b509ac4d8a82c3a8fc
diff --git a/source/use_case/vww/include/VisualWakeWordProcessing.hpp b/source/use_case/vww/include/VisualWakeWordProcessing.hpp
index b1d68ce..bef161f 100644
--- a/source/use_case/vww/include/VisualWakeWordProcessing.hpp
+++ b/source/use_case/vww/include/VisualWakeWordProcessing.hpp
@@ -34,9 +34,9 @@
     public:
         /**
          * @brief       Constructor
-         * @param[in]   model   Pointer to the the Image classification Model object.
+         * @param[in]   inputTensor   Pointer to the TFLite Micro input Tensor.
          **/
-        explicit VisualWakeWordPreProcess(Model* model);
+        explicit VisualWakeWordPreProcess(TfLiteTensor* inputTensor);
 
         /**
          * @brief       Should perform pre-processing of 'raw' input image data and load it into
@@ -46,6 +46,9 @@
          * @return      true if successful, false otherwise.
          **/
         bool DoPreProcess(const void* input, size_t inputSize) override;
+
+    private:
+        TfLiteTensor* m_inputTensor;
     };
 
     /**
@@ -56,6 +59,7 @@
     class VisualWakeWordPostProcess : public BasePostProcess {
 
     private:
+        TfLiteTensor* m_outputTensor;
         Classifier& m_vwwClassifier;
         const std::vector<std::string>& m_labels;
         std::vector<ClassificationResult>& m_results;
@@ -63,19 +67,19 @@
     public:
         /**
          * @brief       Constructor
-         * @param[in]   classifier   Classifier object used to get top N results from classification.
-         * @param[in]   model        Pointer to the VWW classification Model object.
-         * @param[in]   labels       Vector of string labels to identify each output of the model.
-         * @param[out]  results      Vector of classification results to store decoded outputs.
+         * @param[in]   outputTensor   Pointer to the TFLite Micro output Tensor.
+         * @param[in]   classifier     Classifier object used to get top N results from classification.
+         * @param[in]   labels         Vector of string labels to identify each output of the model.
+         * @param[out]  results        Vector of classification results to store decoded outputs.
          **/
-        VisualWakeWordPostProcess(Classifier& classifier, Model* model,
+        VisualWakeWordPostProcess(TfLiteTensor* outputTensor, Classifier& classifier,
                 const std::vector<std::string>& labels,
                 std::vector<ClassificationResult>& results);
 
         /**
-         * @brief       Should perform post-processing of the result of inference then
-         *              populate classification result data for any later use.
-         * @return      true if successful, false otherwise.
+         * @brief    Should perform post-processing of the result of inference then
+         *           populate classification result data for any later use.
+         * @return   true if successful, false otherwise.
          **/
         bool DoPostProcess() override;
     };
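
For reference, a minimal usage sketch of the refactored API, assuming the calling
use case code already has a loaded Model exposing GetInputTensor(0),
GetOutputTensor(0) and RunInference(), plus a Classifier, a label vector, a
results vector and a raw image buffer; all of the names below are illustrative
assumptions rather than part of this patch:

    /* Sketch only: model, classifier, labels, results, imgSrc and imgSz are
     * assumed to be set up elsewhere in the use case handler. */
    VisualWakeWordPreProcess preProcess(model.GetInputTensor(0));
    VisualWakeWordPostProcess postProcess(model.GetOutputTensor(0),
                                          classifier, labels, results);

    /* Fill the input tensor from the raw image, run inference, then decode
     * the output tensor into classification results. */
    if (preProcess.DoPreProcess(imgSrc, imgSz) &&
        model.RunInference() &&
        postProcess.DoPostProcess()) {
        /* 'results' now holds the decoded classifications. */
    }

Passing the tensors directly keeps the pre/post-processing classes decoupled
from any particular Model subclass.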