COMPMID-427: Port NEActivationLayer to 16-bit fixed point.

Change-Id: Iebd61807f7b597c6bd990673bc7655c68ee16f4b
Reviewed-on: http://mpd-gerrit.cambridge.arm.com/79085
Reviewed-by: Moritz Pflanzer <moritz.pflanzer@arm.com>
Tested-by: Kaizen <jeremy.johnson+kaizengerrit@arm.com>
Reviewed-by: Gian Marco Iodice <gianmarco.iodice@arm.com>
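
Note: the new QS16 suite exercises the same scaled tanh as the QS8 one. The reference in
TensorOperations.h now computes alpha * tanh(beta * x) with the framework's fixed-point
helpers (mul, tanh), as shown in the last hunk below. The stand-alone sketch that follows is
only an illustration of the intended numerics: the helper names (to_qs16, scaled_tanh_qs16)
are hypothetical and it round-trips through float rather than using the library's
fixed-point arithmetic, so rounding and saturation behaviour will differ.

    // Hypothetical stand-alone sketch (not the library's fixed_point_arithmetic):
    // model a QS16 value with 'fp_pos' fractional bits and evaluate
    // alpha * tanh(beta * x) by round-tripping through float. This mirrors what
    // out[i] = mul(a, tanh(mul(b, x))).raw() computes, up to rounding/saturation.
    #include <cmath>
    #include <cstdint>
    #include <iostream>

    int16_t to_qs16(float v, int fp_pos)
    {
        return static_cast<int16_t>(std::lround(v * (1 << fp_pos)));
    }

    float from_qs16(int16_t v, int fp_pos)
    {
        return static_cast<float>(v) / (1 << fp_pos);
    }

    int16_t scaled_tanh_qs16(int16_t x, float alpha, float beta, int fp_pos)
    {
        return to_qs16(alpha * std::tanh(beta * from_qs16(x, fp_pos)), fp_pos);
    }

    int main()
    {
        const int     fp_pos = 4;                      // within the xrange(3, 6, 1) used by the tests
        const int16_t x      = to_qs16(0.75f, fp_pos); // sample input
        // alpha = beta = 0.5f, as in the { 0.5f, 1.f } data set used by the tests
        std::cout << from_qs16(scaled_tanh_qs16(x, 0.5f, 0.5f, fp_pos), fp_pos) << "\n";
        return 0;
    }
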
diff --git a/tests/validation/Helpers.h b/tests/validation/Helpers.h
index d92699d..a551da7 100644
--- a/tests/validation/Helpers.h
+++ b/tests/validation/Helpers.h
@@ -90,7 +90,7 @@
             break;
         case ActivationLayerInfo::ActivationFunction::SQRT:
             // Reduce range as sqrt should take a non-negative number
-            bounds.first = (is_float) ? 0 : 1 << (fixed_point_position);
+            bounds.first = (is_float) ? 0 : 1;
             break;
         default:
             break;
diff --git a/tests/validation/NEON/ActivationLayer.cpp b/tests/validation/NEON/ActivationLayer.cpp
index 40be322..71dfcdc 100644
--- a/tests/validation/NEON/ActivationLayer.cpp
+++ b/tests/validation/NEON/ActivationLayer.cpp
@@ -193,10 +193,11 @@
 
 BOOST_AUTO_TEST_SUITE(Float)
 BOOST_TEST_DECORATOR(*boost::unit_test::label("precommit"))
-BOOST_DATA_TEST_CASE(RunSmall, boost::unit_test::data::make({ false, true }) * SmallShapes() * CNNFloatDataTypes() * ActivationFunctions(), in_place, shape, dt, act_function)
+BOOST_DATA_TEST_CASE(RunSmall, boost::unit_test::data::make({ false, true }) * SmallShapes() * CNNFloatDataTypes() * ActivationFunctions() * boost::unit_test::data::make({ 0.5f, 1.f }),
+                     in_place, shape, dt, act_function, alpha_beta)
 {
     // Create activation layer info
-    ActivationLayerInfo act_info(act_function, 1.f, 1.f);
+    ActivationLayerInfo act_info(act_function, alpha_beta, alpha_beta);
 
     // Compute function
     Tensor dst = compute_activation_layer(in_place, shape, dt, act_info);
@@ -209,10 +210,11 @@
 }
 
 BOOST_TEST_DECORATOR(*boost::unit_test::label("nightly"))
-BOOST_DATA_TEST_CASE(RunLarge, boost::unit_test::data::make({ false, true }) * LargeShapes() * CNNFloatDataTypes() * ActivationFunctions(), in_place, shape, dt, act_function)
+BOOST_DATA_TEST_CASE(RunLarge, boost::unit_test::data::make({ false, true }) * LargeShapes() * CNNFloatDataTypes() * ActivationFunctions() * boost::unit_test::data::make({ 0.5f, 1.f }),
+                     in_place, shape, dt, act_function, alpha_beta)
 {
     // Create activation layer info
-    ActivationLayerInfo act_info(act_function, 1.f, 1.f);
+    ActivationLayerInfo act_info(act_function, alpha_beta, alpha_beta);
 
     // Compute function
     Tensor dst = compute_activation_layer(in_place, shape, dt, act_info);
@@ -229,12 +231,13 @@
 *        cause overflowing issues in most of the transcendental functions.
  */
 BOOST_AUTO_TEST_SUITE(Quantized)
+BOOST_AUTO_TEST_SUITE(QS8)
 BOOST_TEST_DECORATOR(*boost::unit_test::label("precommit"))
-BOOST_DATA_TEST_CASE(RunSmall, boost::unit_test::data::make({ false, true }) * SmallShapes() * ActivationFunctions() * boost::unit_test::data::xrange(3, 6, 1),
-                     in_place, shape, act_function, fixed_point_position)
+BOOST_DATA_TEST_CASE(RunSmall, boost::unit_test::data::make({ false, true }) * SmallShapes() * ActivationFunctions() * boost::unit_test::data::xrange(3, 6, 1) * boost::unit_test::data::make({ 0.5f, 1.f }),
+                     in_place, shape, act_function, fixed_point_position, alpha_beta)
 {
     // Create activation layer info
-    ActivationLayerInfo act_info(act_function, 1.f, 1.f);
+    ActivationLayerInfo act_info(act_function, alpha_beta, alpha_beta);
 
     // Compute function
     Tensor dst = compute_activation_layer(in_place, shape, DataType::QS8, act_info, fixed_point_position);
@@ -247,6 +250,27 @@
 }
 BOOST_AUTO_TEST_SUITE_END()
 
+BOOST_AUTO_TEST_SUITE(QS16)
+BOOST_TEST_DECORATOR(*boost::unit_test::label("precommit"))
+BOOST_DATA_TEST_CASE(RunSmall, boost::unit_test::data::make({ false, true }) * SmallShapes() * ActivationFunctions() * boost::unit_test::data::xrange(3, 6, 1) * boost::unit_test::data::make({ 0.5f, 1.f }),
+                     in_place, shape, act_function, fixed_point_position, alpha_beta)
+{
+    // Create activation layer info
+    ActivationLayerInfo act_info(act_function, alpha_beta, alpha_beta);
+
+    // Compute function
+    Tensor dst = compute_activation_layer(in_place, shape, DataType::QS16, act_info, fixed_point_position);
+
+    // Compute reference
+    RawTensor ref_dst = Reference::compute_reference_activation_layer(shape, DataType::QS16, act_info, fixed_point_position);
+
+    // Validate output
+    validate(NEAccessor(dst), ref_dst, activation_layer_tolerance(act_function, fixed_point_position));
+}
+BOOST_AUTO_TEST_SUITE_END()
+
+BOOST_AUTO_TEST_SUITE_END()
+
 BOOST_AUTO_TEST_SUITE_END()
 BOOST_AUTO_TEST_SUITE_END()
 #endif /* DOXYGEN_SKIP_THIS */
diff --git a/tests/validation/TensorOperations.h b/tests/validation/TensorOperations.h
index 0d752ee..adac709 100644
--- a/tests/validation/TensorOperations.h
+++ b/tests/validation/TensorOperations.h
@@ -930,7 +930,7 @@
                 out[i] = mul(x, x).raw();
                 break;
             case ActivationLayerInfo::ActivationFunction::TANH:
-                out[i] = tanh(x).raw();
+                out[i] = mul(a, tanh(mul(b, x))).raw();
                 break;
             default:
                 ARM_COMPUTE_ERROR("Activation function not recognised");