IVGCVSW-3168 Refactor reference softmax workload into a single workload

Change-Id: Ie290efcbb9e3a6365cbd630cb2041e7b0f542505
Signed-off-by: nikraj01 <nikhil.raj@arm.com>
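
The change swaps the raw float* pointers for armnn's Decoder<float>/Encoder<float>
abstraction, so a single workload can serve every data type the reference backend
supports instead of one workload per type. As a rough sketch of the interfaces the
new signature relies on (simplified from the calls visible in the diff; in the real
codebase they live in BaseIterator.hpp and operator[] is declared on a shared
iterator base class, so the exact signatures differ):

    // Simplified sketch, inferred from the calls made in the diff below.
    template <typename IType>
    class Decoder
    {
    public:
        virtual ~Decoder() = default;
        // Reposition the iterator to a flat element index. This is a seek,
        // not a read, which is why it appears as a standalone statement.
        virtual Decoder& operator[](unsigned int index) = 0;
        // Decode the element at the current position (e.g. dequantize) to IType.
        virtual IType Get() const = 0;
    };

    template <typename IType>
    class Encoder
    {
    public:
        virtual ~Encoder() = default;
        virtual Encoder& operator[](unsigned int index) = 0;
        // Encode IType (e.g. quantize) and store it at the current position.
        virtual void Set(IType value) = 0;
    };

Read this way, the apparently dangling statements in the diff (in[n * numChannels];
and out[n * numChannels + c];) are seeks that position the iterator before the
following Get() or Set() touches the element.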
diff --git a/src/backends/reference/workloads/Softmax.cpp b/src/backends/reference/workloads/Softmax.cpp
index 4f1016e..6cb219a 100644
--- a/src/backends/reference/workloads/Softmax.cpp
+++ b/src/backends/reference/workloads/Softmax.cpp
@@ -12,16 +12,19 @@
 {
 
-/// Computes the softmax function on some inputs, into outputs, with a shape given by tensorInfo.
-void Softmax(const float* in, float* out, const TensorInfo& tensorInfo, float beta)
+/// Computes the softmax function on some inputs, into outputs, with a shape given by inputTensorInfo.
+void Softmax(Decoder<float>& in, Encoder<float>& out, const TensorInfo& inputTensorInfo, float beta)
 {
-    unsigned int numChannels = tensorInfo.GetShape()[1];
-    for (unsigned int n = 0; n < tensorInfo.GetShape()[0]; n++)
+    unsigned int numChannels = inputTensorInfo.GetShape()[1];
+
+    for (unsigned int n = 0; n < inputTensorInfo.GetShape()[0]; n++)
     {
         // Find maximum channel.
-        float max = in[n * numChannels];
+        in[n * numChannels];
+        float max = in.Get();
         for (unsigned int c = 1; c < numChannels; c++)
         {
-            float val = in[n * numChannels + c];
+            in[n * numChannels + c];
+            float val = in.Get();
             if (val > max)
             {
                 max = val;
@@ -33,7 +36,8 @@
         float              sum = 0.0f;
         for (unsigned int c = 0; c < numChannels; c++)
         {
-            float val       = in[n * numChannels + c];
+            in[n * numChannels + c];
+            float val = in.Get();
             exponentials[c] = expf((val - max) * beta);
             sum += exponentials[c];
         }
@@ -41,7 +45,8 @@
         // Divide exponentials by sum to give outputs.
         for (unsigned int c = 0; c < numChannels; c++)
         {
-            out[n * numChannels + c] = exponentials[c] / sum;
+            out[n * numChannels + c];
+            out.Set(exponentials[c] / sum);
         }
     }
 }
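
For illustration, a minimal self-contained float implementation of the two
interfaces (FloatDecoder/FloatEncoder are hypothetical names for this sketch;
armnn's real implementations also handle quantized formats) shows the
seek-then-access pattern in action:

    #include <cstdio>

    // Hypothetical float-only decoder: operator[] seeks, Get() reads.
    class FloatDecoder
    {
    public:
        explicit FloatDecoder(const float* data) : m_Start(data), m_Current(data) {}
        FloatDecoder& operator[](unsigned int index) { m_Current = m_Start + index; return *this; }
        float Get() const { return *m_Current; }
    private:
        const float* m_Start;
        const float* m_Current;
    };

    // Hypothetical float-only encoder: operator[] seeks, Set() writes.
    class FloatEncoder
    {
    public:
        explicit FloatEncoder(float* data) : m_Start(data), m_Current(data) {}
        FloatEncoder& operator[](unsigned int index) { m_Current = m_Start + index; return *this; }
        void Set(float value) { *m_Current = value; }
    private:
        float* m_Start;
        float* m_Current;
    };

    int main()
    {
        const float in[] = {1.0f, 2.0f, 3.0f};
        float out[3] = {};

        FloatDecoder dec(in);
        FloatEncoder enc(out);

        dec[2];               // seek: position the decoder on element 2
        float v = dec.Get();  // read: decode the element just selected

        enc[0];               // seek the encoder the same way, then store
        enc.Set(v);

        std::printf("%f\n", out[0]); // prints 3.000000
    }

The max-subtraction in the loop body is the usual numerically stable softmax:
exp((val - max) * beta) rescales numerator and denominator by the same factor
exp(-max * beta), so the result is unchanged while expf is kept away from
overflow.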