IVGCVSW-4440 : Add HARD_SWISH Support to Activation in CpuRef

* Add a new Activation type of HardSwish.
* Add CpuRef support and tests.
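
The reference implementation and the expected test values below both follow
hard_swish(x) = x * relu6(x + 3) / 6, with relu6(x) = min(max(x, 0), 6).
A standalone spot-check of that formula (illustrative only; the helper name
HardSwishRef is not part of this patch):

    #include <algorithm>
    #include <cassert>
    #include <cmath>

    float HardSwishRef(float x)
    {
        // hard_swish(x) = x * relu6(x + 3) / 6, relu6(x) = min(max(x, 0), 6)
        return x * std::min(std::max(x + 3.0f, 0.0f), 6.0f) / 6.0f;
    }

    int main()
    {
        // Inputs at or below -3 are clamped to 0; inputs at or above +3 pass through unchanged.
        assert(std::fabs(HardSwishRef(-4.0f) - 0.0f)            < 1e-6f);
        assert(std::fabs(HardSwishRef(-1.0f) - (-1.0f / 3.0f))  < 1e-6f); // -1 * 2 / 6
        assert(std::fabs(HardSwishRef( 4.0f) - 4.0f)            < 1e-6f);
        return 0;
    }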

Signed-off-by: Colm Donelan <Colm.Donelan@arm.com>
Change-Id: I68c3840aa45b7a27d5e416a5d50fe8f99f003ce8
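
For context, a minimal sketch of how a graph could use the new activation
through the public INetwork API (the binding ids, layer name and tensor shape
are illustrative and not part of this patch):

    #include <armnn/ArmNN.hpp>

    int main()
    {
        armnn::INetworkPtr network = armnn::INetwork::Create();

        // HardSwish takes no extra parameters, so m_A/m_B keep their defaults.
        armnn::ActivationDescriptor desc;
        desc.m_Function = armnn::ActivationFunction::HardSwish;

        armnn::IConnectableLayer* input      = network->AddInputLayer(0);
        armnn::IConnectableLayer* activation = network->AddActivationLayer(desc, "hard_swish");
        armnn::IConnectableLayer* output     = network->AddOutputLayer(0);

        input->GetOutputSlot(0).Connect(activation->GetInputSlot(0));
        activation->GetOutputSlot(0).Connect(output->GetInputSlot(0));

        armnn::TensorInfo info({ 1, 1, 2, 2 }, armnn::DataType::Float32);
        input->GetOutputSlot(0).SetTensorInfo(info);
        activation->GetOutputSlot(0).SetTensorInfo(info);

        return 0;
    }
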
diff --git a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp
index 1b6e782..6993b9e 100644
--- a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.cpp
@@ -1076,6 +1076,69 @@
 
 
 template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
+LayerTestResult<T, 4> HardSwishTestCommon(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    float qScale,
+    int32_t qOffset)
+{
+    std::vector<float> inputData = {
+        -0.1f, -0.2f, -0.3f, -0.4f,
+        0.1f,  0.2f,  0.3f,  0.4f,
+        -1.0f, -2.0f, -3.0f, -4.0f,
+        1.0f,  2.0f,  3.0f,  4.0f
+    };
+    // Calculate output values for input.
+    auto f = [](float x)
+        {
+            // Break down the calculation to help with verification.
+            // hard_swish(x) = x * relu6(x+3) / 6
+            // relu6(x) = min(max(x,0),6)
+            float reLu6_step1 = std::max((x + 3), 0.0f);
+            float reLu6Complete = std::min(reLu6_step1, 6.0f);
+            float hardSwish_step1 = x * reLu6Complete;
+            float result = hardSwish_step1 / 6;
+            return result;
+        };
+    std::vector<float> outputExpectedData(inputData.size());
+    std::transform(inputData.begin(), inputData.end(), outputExpectedData.begin(), f);
+
+    return SimpleActivationTest<ArmnnType>(workloadFactory,
+                                           memoryManager,
+                                           armnn::ActivationFunction::HardSwish,
+                                           0.f,
+                                           0.f,
+                                           qScale,
+                                           qOffset,
+                                           inputData,
+                                           qScale,
+                                           qOffset,
+                                           outputExpectedData);
+}
+
+LayerTestResult<float, 4> HardSwishTest(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
+{
+    return HardSwishTestCommon<armnn::DataType::Float32>(workloadFactory, memoryManager, 0.1f, 0);
+}
+
+LayerTestResult<uint8_t, 4> HardSwishUint8Test(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
+{
+    return HardSwishTestCommon<armnn::DataType::QAsymmU8>(workloadFactory, memoryManager, 0.1f, 64);
+}
+
+LayerTestResult<int16_t, 4> HardSwishInt16Test(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager)
+{
+    return HardSwishTestCommon<armnn::DataType::QSymmS16>(workloadFactory, memoryManager, 0.1f, 0);
+}
+
+
+template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
 LayerTestResult<T,4> CompareActivationTestImpl(
     armnn::IWorkloadFactory& workloadFactory,
     const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
diff --git a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp
index 2830118..2bd5171 100644
--- a/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp
+++ b/src/backends/backendsCommon/test/layerTests/ActivationTestImpl.hpp
@@ -217,6 +217,22 @@
         const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
 
 //
+// HardSwish
+//
+
+LayerTestResult<float, 4> HardSwishTest(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
+
+LayerTestResult<uint8_t, 4> HardSwishUint8Test(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
+
+LayerTestResult<int16_t, 4> HardSwishInt16Test(
+    armnn::IWorkloadFactory& workloadFactory,
+    const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager);
+
+//
 // Other
 //
 
diff --git a/src/backends/reference/RefLayerSupport.cpp b/src/backends/reference/RefLayerSupport.cpp
index 25334c3..7d5c3b5 100644
--- a/src/backends/reference/RefLayerSupport.cpp
+++ b/src/backends/reference/RefLayerSupport.cpp
@@ -109,6 +109,7 @@
                 case ActivationFunction::Abs:
                 case ActivationFunction::BoundedReLu:
                 case ActivationFunction::Elu:
+                case ActivationFunction::HardSwish:
                 case ActivationFunction::LeakyReLu:
                 case ActivationFunction::Linear:
                 case ActivationFunction::ReLu:
diff --git a/src/backends/reference/test/RefLayerTests.cpp b/src/backends/reference/test/RefLayerTests.cpp
index ed2b995..40bf600 100644
--- a/src/backends/reference/test/RefLayerTests.cpp
+++ b/src/backends/reference/test/RefLayerTests.cpp
@@ -466,6 +466,10 @@
 ARMNN_AUTO_TEST_CASE(Elu, EluTest)
 ARMNN_AUTO_TEST_CASE(EluUint8, EluUint8Test)
 ARMNN_AUTO_TEST_CASE(EluInt16, EluInt16Test)
+// HardSwish Activation
+ARMNN_AUTO_TEST_CASE(HardSwish, HardSwishTest)
+ARMNN_AUTO_TEST_CASE(HardSwishUint8, HardSwishUint8Test)
+ARMNN_AUTO_TEST_CASE(HardSwishInt16, HardSwishInt16Test)
 
 // Fully Connected
 ARMNN_AUTO_TEST_CASE(SimpleFullyConnected, FullyConnectedFloat32Test, false, false)
diff --git a/src/backends/reference/workloads/Activation.cpp b/src/backends/reference/workloads/Activation.cpp
index 82dd919..798c6e4 100644
--- a/src/backends/reference/workloads/Activation.cpp
+++ b/src/backends/reference/workloads/Activation.cpp
@@ -9,6 +9,7 @@
 
 namespace armnn
 {
+
 float Activation(float in,
                  ActivationFunction function,
                  float a,
@@ -74,6 +75,13 @@
             output = (in >= 0) ? in : a * (expf(in) - 1);
             break;
         }
+        case ActivationFunction::HardSwish:
+        {
+            // hard_swish(x) = x * relu6(x+3) / 6
+            // relu6(x) = min(max(x,0),6)
+            output = in * std::min(std::max((in + 3), 0.0f), 6.0f) / 6;
+            break;
+        }
         default:
         {
             throw InvalidArgumentException("Unsupported activation function");