Add and tidy up activation and elementwise binary end to end tests


Signed-off-by: Teresa Charlin <teresa.charlinreyes@arm.com>
Change-Id: I9714c4c57e923ac775dcde2951de07cea35c40ee
diff --git a/src/backends/backendsCommon/test/ActivationEndToEndTestImpl.hpp b/src/backends/backendsCommon/test/ActivationEndToEndTestImpl.hpp
index 996e760..09d7192 100644
--- a/src/backends/backendsCommon/test/ActivationEndToEndTestImpl.hpp
+++ b/src/backends/backendsCommon/test/ActivationEndToEndTestImpl.hpp
@@ -57,12 +57,12 @@
 
     INetworkPtr net(INetwork::Create());
 
-    IConnectableLayer* input = net->AddInputLayer(0, "input");
-    IConnectableLayer* prelu = net->AddActivationLayer(descriptor, ActivationName);
-    IConnectableLayer* output = net->AddOutputLayer(0, "output");
+    IConnectableLayer* inputLayer = net->AddInputLayer(0, "input");
+    IConnectableLayer* activationLayer = net->AddActivationLayer(descriptor, ActivationName);
+    IConnectableLayer* outputLayer = net->AddOutputLayer(0, "output");
 
-    Connect(input, prelu, inputInfo, 0, 0);
-    Connect(prelu, output, outputInfo, 0, 0);
+    Connect(inputLayer, activationLayer, inputInfo, 0, 0);
+    Connect(activationLayer, outputLayer, outputInfo, 0, 0);
 
     return net;
 }
@@ -113,83 +113,169 @@
                                                 tolerance);
 }
 
-/** Executes an end to end test for Elu activation with specific input and expected-output data
- *
- * @tparam ArmnnType  The armnn data type for the input and expected-output data
- * @param backends  The backends on which to run the test
- */
-template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
-void EluEndToEndTest(const std::vector<BackendId>& backends)
+inline std::vector<float> Activation(const std::vector<float>& input,
+                                     const ActivationDescriptor& descriptor)
+{
-    std::vector<float> floatInputData{ -2.0f, -1.0f, -0.0f, 0.0f,
-                                        1.0f,  2.0f,  3.0f, 4.0f };
+    float a = descriptor.m_A;
+    float b = descriptor.m_B;
 
-    std::vector<float> floatExpectedOutputData{ -0.86466471676f,  -0.63212055882f,  -0.0f, 0.0f,
-                                                 1.0f          ,   2.0f          ,   3.0f, 4.0f };
+    std::vector<float> output;
+    output.reserve(input.size());
 
-    float qScale = 1.0f;
-    int32_t qOffset = 0;
-    armnn::TensorInfo inputInfo({ 2, 2, 2, 1 }, ArmnnType, qScale, qOffset, true);
-    armnn::TensorInfo outputInfo({ 2, 2, 2, 1 }, ArmnnType, qScale, qOffset);
-
-    armnn::ActivationDescriptor descriptor(ActivationFunction::Elu, 1.0);
-
-    ActivationEndToEndImpl<ArmnnType>(backends,
-                                      floatInputData,
-                                      floatExpectedOutputData,
-                                      inputInfo,
-                                      outputInfo,
-                                      descriptor);
+    // Compute the result of the activation function.
+    switch (descriptor.m_Function)
+    {
+        case ActivationFunction::Linear:
+        {
+            for (auto in :input)
+            {
+                auto out = a * in + b;
+                output.push_back(out);
+            }
+            break;
+        }
+        case ActivationFunction::Sigmoid:
+        {
+            for (auto in :input)
+            {
+                auto out = 1.f / (1.f + expf(-in));
+                output.push_back(out);
+            }
+            break;
+        }
+        case ActivationFunction::ReLu:
+        {
+            for (auto in :input)
+            {
+                auto out = std::max(0.f, in);
+                output.push_back(out);
+            }
+            break;
+        }
+        case ActivationFunction::BoundedReLu:
+        {
+            for (auto in :input)
+            {
+                auto out = std::min(a, std::max(b, in));
+                output.push_back(out);
+            }
+            break;
+        }
+        case ActivationFunction::SoftReLu:
+        {
+            for (auto in :input)
+            {
+                auto out = logf(1.0f + expf(in));
+                output.push_back(out);
+            }
+            break;
+        }
+        case ActivationFunction::LeakyReLu:
+        {
+            for (auto in :input)
+            {
+                auto out = in > 0.0f ? in : (in * a);
+                output.push_back(out);
+            }
+            break;
+        }
+        case ActivationFunction::Abs:
+        {
+            for (auto in :input)
+            {
+                auto out = in < 0 ? -in : in;
+                output.push_back(out);
+            }
+            break;
+        }
+        case ActivationFunction::Sqrt:
+        {
+            for (auto in :input)
+            {
+                auto out = sqrtf(in); // NaN for negative in: Sqrt test data must be non-negative
+                output.push_back(out);
+            }
+            break;
+        }
+        case ActivationFunction::Square:
+        {
+            for (auto in :input)
+            {
+                auto out = in * in;
+                output.push_back(out);
+            }
+            break;
+        }
+        case ActivationFunction::TanH:
+        {
+            for (auto in :input)
+            {
+                auto out = a * tanhf(b * in);
+                output.push_back(out);
+            }
+            break;
+        }
+        case ActivationFunction::Elu:
+        {
+            for (auto in :input) {
+                auto out = (in >= 0) ? in : a * (expf(in) - 1);
+                output.push_back(out);
+            }
+            break;
+        }
+        case ActivationFunction::HardSwish:
+        {
+            for (auto in :input)
+            {
+                // hard_swish(x) = x * relu6(x+3) / 6
+                // relu6(x) = min(max(x,0),6)
+                auto out = in * (std::min(std::max((in + 3), 0.0f), 6.0f)) / 6;
+                output.push_back(out);
+            }
+            break;
+        }
+        case ActivationFunction::Gelu:
+        {
+            for (auto in :input)
+            {
+                // gelu(x) = x * 1/2 * (1 + erf(x / sqrt(2))),
+                // where erf is Gaussian error function
+                auto out = in * (0.5f * (1.0f + erff(static_cast<float>(in / std::sqrt(2)))));
+                output.push_back(out);
+            }
+            break;
+        }
+        default:
+        {
+            throw InvalidArgumentException("Unsupported activation function");
+        }
+    }
+    return output;
 }
 
-/** Executes an end to end test for HardSwish activation with specific input and expected-output data
+/** Executes an end to end test for activation layers with specific input and expected-output data
  *
  * @tparam ArmnnType  The armnn data type for the input and expected-output data
  * @param backends  The backends on which to run the test
  */
 template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
-void HardSwishEndToEndTest(const std::vector<BackendId>& backends)
+void ActivationEndToEndTest(const std::vector<BackendId>& backends,
+                            const ActivationFunction activationFunction,
+                            const float qScale = 1.0f,
+                            const int32_t qOffset = 0,
+                            const float a = 1.0f,
+                            const float b = 0.0f)
 {
-    std::vector<float> floatInputData{ -2.0f, -1.0f, -0.5f, 0.0f,
+    std::vector<float> floatInputData{ -2.0f, -1.0f, -0.0f, 0.0f,
                                        1.0f,  2.0f,  3.0f, 4.0f };
 
-    std::vector<float> floatExpectedOutputData{ -0.33333333333f,  -0.33333333333f, -0.208333f, 0.0f,
-                                                 0.66666666667f,   1.66666666667f,  3.0f     , 4.0f };
+    ActivationDescriptor descriptor(activationFunction, a, b);
 
-    float qScale = 1.0f;
-    int32_t qOffset = 0;
-    armnn::TensorInfo inputInfo({ 2, 2, 2, 1 }, ArmnnType, qScale, qOffset, true);
-    armnn::TensorInfo outputInfo({ 2, 2, 2, 1 }, ArmnnType, qScale, qOffset);
-
-    armnn::ActivationDescriptor descriptor(ActivationFunction::HardSwish, 1.0);
-
-    ActivationEndToEndImpl<ArmnnType>(backends,
-                                      floatInputData,
-                                      floatExpectedOutputData,
-                                      inputInfo,
-                                      outputInfo,
-                                      descriptor);
-}
-
-/** Executes an end to end test for Leaky Relu activation with specific input and expected-output data
- *
- * @tparam ArmnnType  The armnn data type for the input and expected-output data
- * @param backends  The backends on which to run the test
- */
-template<armnn::DataType ArmnnType>
-void LeakyReluEndToEndTest(const std::vector<BackendId>& backends, const float qScale=1.0f, const int32_t qOffset=0)
-{
-    std::vector<float> floatInputData{ -2.0f, -1.0f, -0.0f, 0.0f,
-                                        1.0f,  2.0f,  3.0f, 4.0f };
-
-    std::vector<float> floatExpectedOutputData{ -0.02f, -0.01f, -0.0f, 0.0f,
-                                                 1.0f,   2.0f,   3.0f, 4.0f };
+    std::vector<float> floatExpectedOutputData = Activation(floatInputData, descriptor);
 
     armnn::TensorInfo inputInfo({ 2, 2, 2, 1 }, ArmnnType, qScale, qOffset, true);
     armnn::TensorInfo outputInfo({ 2, 2, 2, 1 }, ArmnnType, qScale, qOffset);
 
-    armnn::ActivationDescriptor descriptor(ActivationFunction::LeakyReLu, static_cast<float>(0.01));
-
     ActivationEndToEndImpl<ArmnnType>(backends,
                                       floatInputData,
                                       floatExpectedOutputData,
diff --git a/src/backends/cl/test/ClEndToEndTests.cpp b/src/backends/cl/test/ClEndToEndTests.cpp
index 3acd7dc..c48aa8a 100644
--- a/src/backends/cl/test/ClEndToEndTests.cpp
+++ b/src/backends/cl/test/ClEndToEndTests.cpp
@@ -39,18 +39,106 @@
 {
 std::vector<armnn::BackendId> clDefaultBackends = {armnn::Compute::GpuAcc};
 
+// Activations
+// Linear
+TEST_CASE("ClLinearEndToEndTestFloat32")
+{
+    ActivationEndToEndTest<armnn::DataType::Float32>(clDefaultBackends, ActivationFunction::Linear);
+}
+
+TEST_CASE("ClLinearEndToEndTestFloat16")
+{
+    ActivationEndToEndTest<armnn::DataType::Float16>(clDefaultBackends, ActivationFunction::Linear);
+}
+
+// Sigmoid
+TEST_CASE("ClSigmoidEndToEndTestFloat32")
+{
+    ActivationEndToEndTest<armnn::DataType::Float32>(clDefaultBackends, ActivationFunction::Sigmoid);
+}
+
+// ReLu
+TEST_CASE("ClReLuEndToEndTestFloat32")
+{
+    ActivationEndToEndTest<armnn::DataType::Float32>(clDefaultBackends, ActivationFunction::ReLu);
+}
+
+TEST_CASE("ClReLuEndToEndTestFloat16")
+{
+    ActivationEndToEndTest<armnn::DataType::Float16>(clDefaultBackends, ActivationFunction::ReLu);
+}
+
+// BoundedReLu
+TEST_CASE("ClBoundedReLuEndToEndTestFloat32")
+{
+    ActivationEndToEndTest<armnn::DataType::Float32>(clDefaultBackends, ActivationFunction::BoundedReLu);
+}
+
+TEST_CASE("ClBoundedReLuEndToEndTestFloat16")
+{
+    ActivationEndToEndTest<armnn::DataType::Float16>(clDefaultBackends, ActivationFunction::BoundedReLu);
+}
+
+// SoftReLu
+TEST_CASE("ClSoftReLuEndToEndTestFloat32")
+{
+    ActivationEndToEndTest<armnn::DataType::Float32>(clDefaultBackends, ActivationFunction::SoftReLu);
+}
+
+// LeakyRelu
+TEST_CASE("ClLeakyReluActivationFloat32")
+{
+    ActivationEndToEndTest<DataType::Float32>(clDefaultBackends, ActivationFunction::LeakyReLu, 1.f, 0, 0.01f);
+}
+
+TEST_CASE("ClLeakyReluActivationFloat16")
+{
+    ActivationEndToEndTest<DataType::Float16>(clDefaultBackends, ActivationFunction::LeakyReLu, 0.3f, 5, 0.01f);
+}
+
+// Elu
+TEST_CASE("ClEluEndToEndTestFloat32")
+{
+    ActivationEndToEndTest<DataType::Float32>(clDefaultBackends, ActivationFunction::Elu);
+}
+
+TEST_CASE("ClEluEndToEndTestFloat16")
+{
+    ActivationEndToEndTest<DataType::Float16>(clDefaultBackends, ActivationFunction::Elu);
+}
+
+// HardSwish
+TEST_CASE("ClHardSwishEndToEndTestFloat32")
+{
+    ActivationEndToEndTest<DataType::Float32>(clDefaultBackends, ActivationFunction::HardSwish);
+}
+
+TEST_CASE("ClHardSwishEndToEndTestFloat16")
+{
+    ActivationEndToEndTest<DataType::Float16>(clDefaultBackends, ActivationFunction::HardSwish);
+}
+
+// TanH
+TEST_CASE("ClTanHEndToEndTestFloat32")
+{
+    ActivationEndToEndTest<DataType::Float32>(clDefaultBackends, ActivationFunction::TanH, 1.f, 0, 2, 3);
+}
+
+TEST_CASE("ClTanHEndToEndTestFloat16")
+{
+    ActivationEndToEndTest<DataType::Float16>(clDefaultBackends, ActivationFunction::TanH, 1.f, 0, 2, 3);
+}
+
 // ElementwiseUnary
 // Abs
 TEST_CASE("ClAbsEndToEndTestFloat32")
 {
-    ElementwiseUnarySimpleEndToEnd<armnn::DataType::Float32>(clDefaultBackends,
-                                                             UnaryOperation::Abs);
+    ElementwiseUnarySimpleEndToEnd<armnn::DataType::Float32>(clDefaultBackends, UnaryOperation::Abs);
 }
 // Rsqrt
 TEST_CASE("ClRsqrtEndToEndTestFloat32")
 {
-    ElementwiseUnarySimpleEndToEnd<armnn::DataType::Float32>(clDefaultBackends,
-                                                             UnaryOperation::Rsqrt);
+    ElementwiseUnarySimpleEndToEnd<armnn::DataType::Float32>(clDefaultBackends, UnaryOperation::Rsqrt);
 }
 
 // ElementwiseBinary
@@ -59,7 +147,6 @@
 {
     AdditionEndToEnd<armnn::DataType::Float32>(clDefaultBackends);
 }
-
 TEST_CASE("ClAdditionEndToEndUint8Test")
 {
     AdditionEndToEnd<armnn::DataType::QAsymmU8>(clDefaultBackends);
@@ -74,6 +161,52 @@
     ElementwiseBinarySimple3DEndToEnd<armnn::DataType::Float16>(clDefaultBackends, BinaryOperation::Add);
 }
 
+// Div
+TEST_CASE("ClDivEndToEndTestFloat32")
+{
+    ElementwiseBinarySimpleEndToEnd<armnn::DataType::Float32>(clDefaultBackends, BinaryOperation::Div);
+}
+
+// Mul
+TEST_CASE("ClMulEndToEndTestFloat32")
+{
+    ElementwiseBinarySimpleEndToEnd<armnn::DataType::Float32>(clDefaultBackends, BinaryOperation::Mul);
+}
+TEST_CASE("ClMulEndToEndTestUint8")
+{
+    ElementwiseBinarySimpleEndToEnd<armnn::DataType::QAsymmU8>(clDefaultBackends, BinaryOperation::Mul);
+}
+
+// Sub
+TEST_CASE("ClSubtractionEndToEndFloat32Simple3DTest")
+{
+    ElementwiseBinarySimple3DEndToEnd<armnn::DataType::Float32>(clDefaultBackends, BinaryOperation::Sub);
+}
+TEST_CASE("ClSubtractionEndToEndFloat16Simple3DTest")
+{
+    ElementwiseBinarySimple3DEndToEnd<armnn::DataType::Float16>(clDefaultBackends, BinaryOperation::Sub);
+}
+
+// Max
+TEST_CASE("ClMaximumEndToEndTestFloat32")
+{
+    ElementwiseBinarySimpleEndToEnd<armnn::DataType::Float32>(clDefaultBackends, BinaryOperation::Maximum);
+}
+TEST_CASE("ClMaximumEndToEndTestUint8")
+{
+    ElementwiseBinarySimpleEndToEnd<armnn::DataType::QAsymmU8>(clDefaultBackends, BinaryOperation::Maximum);
+}
+
+// Min
+TEST_CASE("ClMinimumEndToEndTestFloat32")
+{
+    ElementwiseBinarySimpleEndToEnd<armnn::DataType::Float32>(clDefaultBackends, BinaryOperation::Minimum);
+}
+TEST_CASE("ClMinimumEndToEndTestUint8")
+{
+    ElementwiseBinarySimpleEndToEnd<armnn::DataType::QAsymmU8>(clDefaultBackends, BinaryOperation::Minimum);
+}
+
 // Power
 TEST_CASE("ClPowerEndToEndTestFloat32")
 {
@@ -85,21 +218,11 @@
 {
     ElementwiseBinarySimpleEndToEnd<armnn::DataType::Float32>(clDefaultBackends, BinaryOperation::SqDiff);
 }
-
 TEST_CASE("ClSquaredDifferenceEndToEndTestUint8")
 {
     ElementwiseBinarySimpleEndToEnd<armnn::DataType::QAsymmU8>(clDefaultBackends, BinaryOperation::SqDiff);
 }
 
-TEST_CASE("ClSubtractionEndToEndFloat32Simple3DTest")
-{
-    ElementwiseBinarySimple3DEndToEnd<armnn::DataType::Float32>(clDefaultBackends, BinaryOperation::Sub);
-}
-TEST_CASE("ClSubtractionEndToEndFloat16Simple3DTest")
-{
-    ElementwiseBinarySimple3DEndToEnd<armnn::DataType::Float16>(clDefaultBackends, BinaryOperation::Sub);
-}
-
 // Batch Mat Mul
 TEST_CASE("ClBatchMatMulEndToEndFloat32Test")
 {
@@ -220,16 +343,6 @@
     StridedSliceInvalidSliceEndToEndTest(clDefaultBackends);
 }
 
-TEST_CASE("ClEluEndToEndTestFloat32")
-{
-    EluEndToEndTest<armnn::DataType::Float32>(clDefaultBackends);
-}
-
-TEST_CASE("ClEluEndToEndTestFloat16")
-{
-    EluEndToEndTest<armnn::DataType::Float16>(clDefaultBackends);
-}
-
 TEST_CASE("ClGreaterSimpleEndToEndTest")
 {
     const std::vector<uint8_t> expectedOutput({ 0, 0, 0, 0,  1, 1, 1, 1,
@@ -270,32 +383,6 @@
                                                            expectedOutput);
 }
 
-// HardSwish
-TEST_CASE("ClHardSwishEndToEndTestFloat32")
-{
-    HardSwishEndToEndTest<armnn::DataType::Float32>(clDefaultBackends);
-}
-
-TEST_CASE("ClHardSwishEndToEndTestFloat16")
-{
-    HardSwishEndToEndTest<armnn::DataType::Float16>(clDefaultBackends);
-}
-
-TEST_CASE("ClHardSwishEndToEndTestQAsymmS8")
-{
-    HardSwishEndToEndTest<armnn::DataType::QAsymmS8>(clDefaultBackends);
-}
-
-TEST_CASE("ClHardSwishEndToEndTestQAsymmU8")
-{
-    HardSwishEndToEndTest<armnn::DataType::QAsymmU8>(clDefaultBackends);
-}
-
-TEST_CASE("ClHardSwishEndToEndTestQSymmS16")
-{
-    HardSwishEndToEndTest<armnn::DataType::QSymmS16>(clDefaultBackends);
-}
-
 // InstanceNormalization
 TEST_CASE("ClInstanceNormalizationNhwcEndToEndTest1")
 {
@@ -366,7 +453,7 @@
     FillEndToEnd<armnn::DataType::Float32>(clDefaultBackends);
 }
 
-TEST_CASE("RefFillEndToEndTestFloat16")
+TEST_CASE("ClFillEndToEndTestFloat16")
 {
     FillEndToEnd<armnn::DataType::Float16>(clDefaultBackends);
 }
diff --git a/src/backends/neon/test/NeonEndToEndTests.cpp b/src/backends/neon/test/NeonEndToEndTests.cpp
index 1bf9344..f505b7f 100644
--- a/src/backends/neon/test/NeonEndToEndTests.cpp
+++ b/src/backends/neon/test/NeonEndToEndTests.cpp
@@ -161,6 +161,32 @@
     ElementwiseBinarySimple3DEndToEnd<armnn::DataType::Float16>(neonDefaultBackends, BinaryOperation::Add);
 }
 
+// Div
+TEST_CASE("NeonDivEndToEndTestFloat32")
+{
+    ElementwiseBinarySimpleEndToEnd<armnn::DataType::Float32>(neonDefaultBackends, BinaryOperation::Div);
+}
+
+// Mul
+TEST_CASE("NeonMulEndToEndTestFloat32")
+{
+    ElementwiseBinarySimpleEndToEnd<armnn::DataType::Float32>(neonDefaultBackends, BinaryOperation::Mul);
+}
+TEST_CASE("NeonMulEndToEndTestUint8")
+{
+    ElementwiseBinarySimpleEndToEnd<armnn::DataType::QAsymmU8>(neonDefaultBackends, BinaryOperation::Mul);
+}
+
+// Sub
+TEST_CASE("NeonSubtractionEndToEndFloat32Simple3DTest")
+{
+    ElementwiseBinarySimple3DEndToEnd<armnn::DataType::Float32>(neonDefaultBackends, BinaryOperation::Sub);
+}
+TEST_CASE("NeonSubtractionEndToEndFloat16Simple3DTest")
+{
+    ElementwiseBinarySimple3DEndToEnd<armnn::DataType::Float16>(neonDefaultBackends, BinaryOperation::Sub);
+}
+
 // Power
 TEST_CASE("NeonPowerEndToEndTestFloat32")
 {
@@ -178,15 +204,6 @@
     ElementwiseBinarySimpleEndToEnd<armnn::DataType::QAsymmU8>(neonDefaultBackends, BinaryOperation::SqDiff);
 }
 
-TEST_CASE("NeonSubtractionEndToEndFloat32Simple3DTest")
-{
-    ElementwiseBinarySimple3DEndToEnd<armnn::DataType::Float32>(neonDefaultBackends, BinaryOperation::Sub);
-}
-TEST_CASE("NeonSubtractionEndToEndFloat16Simple3DTest")
-{
-    ElementwiseBinarySimple3DEndToEnd<armnn::DataType::Float16>(neonDefaultBackends, BinaryOperation::Sub);
-}
-
 // Batch Mat Mul
 TEST_CASE("NeonBatchMatMulEndToEndFloat32Test")
 {
@@ -281,37 +298,56 @@
     DequantizeEndToEndOffset<armnn::DataType::QAsymmU8>(neonDefaultBackends);
 }
 
-TEST_CASE("NeonEluEndToEndTestFloat32")
+// Activations
+// Linear
+TEST_CASE("NeonLinearEndToEndTestFloat32")
 {
-    EluEndToEndTest<armnn::DataType::Float32>(neonDefaultBackends);
+    ActivationEndToEndTest<armnn::DataType::Float32>(neonDefaultBackends, ActivationFunction::Linear);
 }
 
-TEST_CASE("NeonEluEndToEndTestFloat16")
+// Sigmoid
+TEST_CASE("NeonSigmoidEndToEndTestFloat32")
 {
-    EluEndToEndTest<armnn::DataType::Float16>(neonDefaultBackends);
+    ActivationEndToEndTest<armnn::DataType::Float32>(neonDefaultBackends, ActivationFunction::Sigmoid);
+}
+
+// ReLu
+TEST_CASE("NeonReLuEndToEndTestFloat32")
+{
+    ActivationEndToEndTest<armnn::DataType::Float32>(neonDefaultBackends, ActivationFunction::ReLu);
+}
+
+// BoundedReLu
+TEST_CASE("NeonBoundedReLuEndToEndTestFloat32")
+{
+    ActivationEndToEndTest<armnn::DataType::Float32>(neonDefaultBackends, ActivationFunction::BoundedReLu);
+}
+
+// LeakyRelu
+TEST_CASE("NeonLeakyReluActivationFloat32")
+{
+    ActivationEndToEndTest<DataType::Float32>(neonDefaultBackends, ActivationFunction::LeakyReLu, 1.f, 0, 0.01f);
+}
+
+// Elu
+TEST_CASE("NeonEluEndToEndTestFloat32")
+{
+    ActivationEndToEndTest<DataType::Float32>(neonDefaultBackends, ActivationFunction::Elu);
 }
 
 // HardSwish
 TEST_CASE("NeonHardSwishEndToEndTestFloat32")
 {
-    HardSwishEndToEndTest<armnn::DataType::Float32>(neonDefaultBackends);
+    ActivationEndToEndTest<DataType::Float32>(neonDefaultBackends, ActivationFunction::HardSwish);
 }
 
-TEST_CASE("NeonHardSwishEndToEndTestFloat16")
+// TanH
+TEST_CASE("NeonTanHEndToEndTestFloat32")
 {
-    HardSwishEndToEndTest<armnn::DataType::Float16>(neonDefaultBackends);
+    ActivationEndToEndTest<DataType::Float32>(neonDefaultBackends, ActivationFunction::TanH, 1.f, 0, 2, 3);
 }
 
-TEST_CASE("NeonHardSwishEndToEndTestQAsymmS8")
-{
-    HardSwishEndToEndTest<armnn::DataType::QAsymmS8>(neonDefaultBackends);
-}
-
-TEST_CASE("NeonHardSwishEndToEndTestQAsymmU8")
-{
-    HardSwishEndToEndTest<armnn::DataType::QAsymmU8>(neonDefaultBackends);
-}
-
+// Prelu
 TEST_CASE("NeonPreluEndToEndFloat32Test")
 {
     PreluEndToEndNegativeTest<armnn::DataType::Float32>(neonDefaultBackends);
@@ -322,6 +358,7 @@
     PreluEndToEndPositiveTest<armnn::DataType::QAsymmU8>(neonDefaultBackends);
 }
 
+// SpaceToDepth
 TEST_CASE("NeonSpaceToDepthNhwcEndToEndTest1")
 {
     SpaceToDepthNhwcEndToEndTest1(neonDefaultBackends);
@@ -342,6 +379,7 @@
     SpaceToDepthNchwEndToEndTest2(neonDefaultBackends);
 }
 
+// Split
 TEST_CASE("NeonSplitter1dEndToEndTest")
 {
     Splitter1dEndToEnd<armnn::DataType::Float32>(neonDefaultBackends);
diff --git a/src/backends/reference/test/RefEndToEndTests.cpp b/src/backends/reference/test/RefEndToEndTests.cpp
index 9f80059..2c5dc37 100644
--- a/src/backends/reference/test/RefEndToEndTests.cpp
+++ b/src/backends/reference/test/RefEndToEndTests.cpp
@@ -660,31 +660,6 @@
                                                                                        armnn::DataLayout::NHWC);
 }
 
-TEST_CASE("RefEluEndToEndTestFloat32")
-{
-    EluEndToEndTest<armnn::DataType::Float32>(defaultBackends);
-}
-
-TEST_CASE("RefEluEndToEndTestFloat16")
-{
-    EluEndToEndTest<armnn::DataType::Float16>(defaultBackends);
-}
-
-TEST_CASE("RefEluEndToEndTestQAsymmS8")
-{
-    EluEndToEndTest<armnn::DataType::QAsymmS8>(defaultBackends);
-}
-
-TEST_CASE("RefEluEndToEndTestQAsymmU8")
-{
-    EluEndToEndTest<armnn::DataType::QAsymmU8>(defaultBackends);
-}
-
-TEST_CASE("RefEluEndToEndTestQSymmS16")
-{
-    EluEndToEndTest<armnn::DataType::QSymmS16>(defaultBackends);
-}
-
 TEST_CASE("RefFillEndToEndTest")
 {
     FillEndToEnd<armnn::DataType::Float32>(defaultBackends);
@@ -1026,56 +1001,239 @@
                                                                           1.0f, 1, 0.01f, 0, 0.5f, 0);
 }
 
-// HardSwish
-TEST_CASE("RefHardSwishEndToEndTestFloat32")
+// Activations
+// Linear
+TEST_CASE("RefLinearEndToEndTestFloat32")
 {
-    HardSwishEndToEndTest<armnn::DataType::Float32>(defaultBackends);
+    ActivationEndToEndTest<armnn::DataType::Float32>(defaultBackends, ActivationFunction::Linear);
 }
 
-TEST_CASE("RefHardSwishEndToEndTestFloat16")
+TEST_CASE("RefLinearEndToEndTestFloat16")
 {
-    HardSwishEndToEndTest<armnn::DataType::Float16>(defaultBackends);
+    ActivationEndToEndTest<armnn::DataType::Float16>(defaultBackends, ActivationFunction::Linear);
 }
 
-TEST_CASE("RefHardSwishEndToEndTestQAsymmS8")
+TEST_CASE("RefLinearEndToEndTestQAsymmS8")
 {
-    HardSwishEndToEndTest<armnn::DataType::QAsymmS8>(defaultBackends);
+    ActivationEndToEndTest<armnn::DataType::QAsymmS8>(defaultBackends, ActivationFunction::Linear);
 }
 
-TEST_CASE("RefHardSwishEndToEndTestQAsymmU8")
+TEST_CASE("RefLinearEndToEndTestQAsymmU8")
 {
-    HardSwishEndToEndTest<armnn::DataType::QAsymmU8>(defaultBackends);
+    ActivationEndToEndTest<armnn::DataType::QAsymmU8>(defaultBackends, ActivationFunction::Linear);
 }
 
-TEST_CASE("RefHardSwishEndToEndTestQSymmS16")
+TEST_CASE("RefLinearEndToEndTestQSymmS16")
 {
-    HardSwishEndToEndTest<armnn::DataType::QSymmS16>(defaultBackends);
+    ActivationEndToEndTest<armnn::DataType::QSymmS16>(defaultBackends, ActivationFunction::Linear);
+}
+
+// Sigmoid
+TEST_CASE("RefSigmoidEndToEndTestFloat32")
+{
+    ActivationEndToEndTest<armnn::DataType::Float32>(defaultBackends, ActivationFunction::Sigmoid);
+}
+
+TEST_CASE("RefSigmoidEndToEndTestFloat16")
+{
+    ActivationEndToEndTest<armnn::DataType::Float16>(defaultBackends, ActivationFunction::Sigmoid);
+}
+
+TEST_CASE("RefSigmoidEndToEndTestQAsymmS8")
+{
+    ActivationEndToEndTest<armnn::DataType::QAsymmS8>(defaultBackends, ActivationFunction::Sigmoid);
+}
+
+TEST_CASE("RefSigmoidEndToEndTestQAsymmU8")
+{
+    ActivationEndToEndTest<armnn::DataType::QAsymmU8>(defaultBackends, ActivationFunction::Sigmoid, 0.12f, 15);
+}
+
+TEST_CASE("RefSigmoidEndToEndTestQSymmS16")
+{
+    ActivationEndToEndTest<armnn::DataType::QSymmS16>(defaultBackends, ActivationFunction::Sigmoid);
+}
+
+// ReLu
+TEST_CASE("RefReLuEndToEndTestFloat32")
+{
+    ActivationEndToEndTest<armnn::DataType::Float32>(defaultBackends, ActivationFunction::ReLu);
+}
+
+TEST_CASE("RefReLuEndToEndTestFloat16")
+{
+    ActivationEndToEndTest<armnn::DataType::Float16>(defaultBackends, ActivationFunction::ReLu);
+}
+
+TEST_CASE("RefReLuEndToEndTestQAsymmS8")
+{
+    ActivationEndToEndTest<armnn::DataType::QAsymmS8>(defaultBackends, ActivationFunction::ReLu);
+}
+
+TEST_CASE("RefReLuEndToEndTestQAsymmU8")
+{
+    ActivationEndToEndTest<armnn::DataType::QAsymmU8>(defaultBackends, ActivationFunction::ReLu);
+}
+
+TEST_CASE("RefReLuEndToEndTestQSymmS16")
+{
+    ActivationEndToEndTest<armnn::DataType::QSymmS16>(defaultBackends, ActivationFunction::ReLu);
+}
+
+// BoundedReLu
+TEST_CASE("RefBoundedReLuEndToEndTestFloat32")
+{
+    ActivationEndToEndTest<armnn::DataType::Float32>(defaultBackends, ActivationFunction::BoundedReLu);
+}
+
+TEST_CASE("RefBoundedReLuEndToEndTestFloat16")
+{
+    ActivationEndToEndTest<armnn::DataType::Float16>(defaultBackends, ActivationFunction::BoundedReLu);
+}
+
+TEST_CASE("RefBoundedReLuEndToEndTestQAsymmS8")
+{
+    ActivationEndToEndTest<armnn::DataType::QAsymmS8>(defaultBackends, ActivationFunction::BoundedReLu);
+}
+
+TEST_CASE("RefBoundedReLuEndToEndTestQAsymmU8")
+{
+    ActivationEndToEndTest<armnn::DataType::QAsymmU8>(defaultBackends, ActivationFunction::BoundedReLu);
+}
+
+TEST_CASE("RefBoundedReLuEndToEndTestQSymmS16")
+{
+    ActivationEndToEndTest<armnn::DataType::QSymmS16>(defaultBackends, ActivationFunction::BoundedReLu);
+}
+
+// SoftReLu
+TEST_CASE("RefSoftReLuEndToEndTestFloat32")
+{
+    ActivationEndToEndTest<armnn::DataType::Float32>(defaultBackends, ActivationFunction::SoftReLu);
+}
+
+TEST_CASE("RefSoftReLuEndToEndTestFloat16")
+{
+    ActivationEndToEndTest<armnn::DataType::Float16>(defaultBackends, ActivationFunction::SoftReLu);
+}
+
+TEST_CASE("RefSoftReLuEndToEndTestQAsymmS8")
+{
+    ActivationEndToEndTest<armnn::DataType::QAsymmS8>(defaultBackends, ActivationFunction::SoftReLu);
+}
+
+TEST_CASE("RefSoftReLuEndToEndTestQAsymmU8")
+{
+    ActivationEndToEndTest<armnn::DataType::QAsymmU8>(defaultBackends, ActivationFunction::SoftReLu, 0.12f, 15);
+}
+
+TEST_CASE("RefSoftReLuEndToEndTestQSymmS16")
+{
+    ActivationEndToEndTest<armnn::DataType::QSymmS16>(defaultBackends, ActivationFunction::SoftReLu);
 }
 
 // LeakyRelu
 TEST_CASE("RefLeakyReluActivationFloat32")
 {
-    LeakyReluEndToEndTest<DataType::Float32>(defaultBackends);
+    ActivationEndToEndTest<DataType::Float32>(defaultBackends, ActivationFunction::LeakyReLu, 1.f, 0, 0.01f);
 }
 
 TEST_CASE("RefLeakyReluActivationFloat16")
 {
-    LeakyReluEndToEndTest<DataType::Float16>(defaultBackends, 0.3f, 5);
+    ActivationEndToEndTest<DataType::Float16>(defaultBackends, ActivationFunction::LeakyReLu, 0.3f, 5, 0.01f);
 }
 
 TEST_CASE("RefLeakyReluActivationInt8")
 {
-    LeakyReluEndToEndTest<DataType::QAsymmS8>(defaultBackends, 0.6f, 7);
+    ActivationEndToEndTest<DataType::QAsymmS8>(defaultBackends, ActivationFunction::LeakyReLu, 0.6f, 7, 0.01f);
 }
 
 TEST_CASE("RefLeakyReluActivationUInt8")
 {
-    LeakyReluEndToEndTest<DataType::QAsymmU8>(defaultBackends, 0.12f, 15);
+    ActivationEndToEndTest<DataType::QAsymmU8>(defaultBackends, ActivationFunction::LeakyReLu, 0.12f, 15, 0.01f);
 }
 
 TEST_CASE("RefLeakyReluActivationInt16")
 {
-    LeakyReluEndToEndTest<DataType::QSymmS16>(defaultBackends, 0.15f, 55);
+    ActivationEndToEndTest<DataType::QSymmS16>(defaultBackends, ActivationFunction::LeakyReLu, 0.15f, 55, 0.01f);
+}
+
+// Elu
+TEST_CASE("RefEluEndToEndTestFloat32")
+{
+    ActivationEndToEndTest<DataType::Float32>(defaultBackends, ActivationFunction::Elu);
+}
+
+TEST_CASE("RefEluEndToEndTestFloat16")
+{
+    ActivationEndToEndTest<DataType::Float16>(defaultBackends, ActivationFunction::Elu);
+}
+
+TEST_CASE("RefEluEndToEndTestQAsymmS8")
+{
+    ActivationEndToEndTest<DataType::QAsymmS8>(defaultBackends, ActivationFunction::Elu);
+}
+
+TEST_CASE("RefEluEndToEndTestQAsymmU8")
+{
+    ActivationEndToEndTest<DataType::QAsymmU8>(defaultBackends, ActivationFunction::Elu);
+}
+
+TEST_CASE("RefEluEndToEndTestQSymmS16")
+{
+    ActivationEndToEndTest<DataType::QSymmS16>(defaultBackends, ActivationFunction::Elu);
+}
+
+// HardSwish
+TEST_CASE("RefHardSwishEndToEndTestFloat32")
+{
+    ActivationEndToEndTest<DataType::Float32>(defaultBackends, ActivationFunction::HardSwish);
+}
+
+TEST_CASE("RefHardSwishEndToEndTestFloat16")
+{
+    ActivationEndToEndTest<DataType::Float16>(defaultBackends, ActivationFunction::HardSwish);
+}
+
+TEST_CASE("RefHardSwishEndToEndTestQAsymmS8")
+{
+    ActivationEndToEndTest<DataType::QAsymmS8>(defaultBackends, ActivationFunction::HardSwish);
+}
+
+TEST_CASE("RefHardSwishEndToEndTestQAsymmU8")
+{
+    ActivationEndToEndTest<DataType::QAsymmU8>(defaultBackends, ActivationFunction::HardSwish);
+}
+
+TEST_CASE("RefHardSwishEndToEndTestQSymmS16")
+{
+    ActivationEndToEndTest<DataType::QSymmS16>(defaultBackends, ActivationFunction::HardSwish);
+}
+
+// TanH
+TEST_CASE("RefTanHEndToEndTestFloat32")
+{
+    ActivationEndToEndTest<DataType::Float32>(defaultBackends, ActivationFunction::TanH, 1.f, 0, 2, 3);
+}
+
+TEST_CASE("RefTanHEndToEndTestFloat16")
+{
+    ActivationEndToEndTest<DataType::Float16>(defaultBackends, ActivationFunction::TanH, 1.f, 0, 2, 3);
+}
+
+TEST_CASE("RefTanHEndToEndTestQAsymmS8")
+{
+    ActivationEndToEndTest<DataType::QAsymmS8>(defaultBackends, ActivationFunction::TanH, 1.f, 0, 3, 2);
+}
+
+TEST_CASE("RefTanHEndToEndTestQAsymmU8")
+{
+    ActivationEndToEndTest<DataType::QAsymmU8>(defaultBackends, ActivationFunction::TanH, 1.f, 0, 3, 2);
+}
+
+TEST_CASE("RefTanHEndToEndTestQSymmS16")
+{
+    ActivationEndToEndTest<DataType::QSymmS16>(defaultBackends, ActivationFunction::TanH, 1.f, 0, 3, 2);
 }
 
 // LogSoftmax
@@ -1084,6 +1242,7 @@
     LogSoftmaxEndToEndTest(defaultBackends);
 }
 
+// Prelu
 TEST_CASE("RefPreluEndToEndTestFloat32")
 {
     PreluEndToEndNegativeTest<armnn::DataType::Float32>(defaultBackends);
diff --git a/src/backends/tosaCommon/test/OneToManyMappingTests.cpp b/src/backends/tosaCommon/test/OneToManyMappingTests.cpp
index dde4d79..cc129f3 100644
--- a/src/backends/tosaCommon/test/OneToManyMappingTests.cpp
+++ b/src/backends/tosaCommon/test/OneToManyMappingTests.cpp
@@ -197,14 +197,15 @@
 
 TEST_CASE("GetTosaMapping_ActivationFloat32")
 {
-    LeakyReluEndToEndTest<DataType::Float32>(tosaDefaultBackends);
+    ActivationEndToEndTest<DataType::Float32>(tosaDefaultBackends, ActivationFunction::LeakyReLu, 1.f, 0, 0.01f);
 }
 
-TEST_CASE("GetTosaMapping_ActivationFloat16")
+TEST_CASE("UNSUPPORTED_GetTosaMapping_ActivationFloat16")
 {
     try
     {
-        LeakyReluEndToEndTest<DataType::Float16>(tosaDefaultBackends);
+        ActivationEndToEndTest<DataType::Float16>(tosaDefaultBackends, ActivationFunction::LeakyReLu, 1.f, 0, 0.01f);
+        FAIL("An exception should have been thrown");
     }
     catch (armnn::Exception& e)
     {
@@ -214,24 +215,25 @@
 
 TEST_CASE("GetTosaMapping_ActivationInt32")
 {
-    LeakyReluEndToEndTest<DataType::Signed32>(tosaDefaultBackends, 0.15f, 0);
+    ActivationEndToEndTest<DataType::Signed32>(tosaDefaultBackends, ActivationFunction::LeakyReLu, 0.15f, 0, 0.01f);
 }
 
 TEST_CASE("GetTosaMapping_ActivationInt16")
 {
-    LeakyReluEndToEndTest<DataType::QSymmS16>(tosaDefaultBackends, 0.35f, 0);
+    ActivationEndToEndTest<DataType::QSymmS16>(tosaDefaultBackends, ActivationFunction::LeakyReLu, 0.35f, 0, 0.01f);
 }
 
 TEST_CASE("GetTosaMapping_ActivationInt8")
 {
-    LeakyReluEndToEndTest<DataType::QSymmS8>(tosaDefaultBackends, 0.75f, 0);
+    ActivationEndToEndTest<DataType::QSymmS8>(tosaDefaultBackends, ActivationFunction::LeakyReLu, 0.75f, 0, 0.01f);
 }
 
-TEST_CASE("GetTosaMapping_ActivationUInt8")
+TEST_CASE("UNSUPPORTED_GetTosaMapping_ActivationUInt8")
 {
     try
     {
-        LeakyReluEndToEndTest<DataType::QAsymmU8>(tosaDefaultBackends);
+        ActivationEndToEndTest<DataType::QAsymmU8>(tosaDefaultBackends, ActivationFunction::LeakyReLu, 1.f, 0, 0.01f);
+        FAIL("An exception should have been thrown");
     }
     catch (armnn::Exception& e)
     {