COMPMID-415: Use absolute and relative tolerance

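Return AbsoluteTolerance<float> from the CL ActivationLayer tolerance()
helper instead of a bare float, and replace the exact-match tolerance of
0.f with machine epsilon for floating-point data types.
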
Change-Id: Ib779fa307e05fa67172ddaf521239b4c746debc8
Reviewed-on: http://mpd-gerrit.cambridge.arm.com/82229
Reviewed-by: Anthony Barbier <anthony.barbier@arm.com>
Tested-by: Kaizen <jeremy.johnson+kaizengerrit@arm.com>
diff --git a/tests/validation_new/CL/ActivationLayer.cpp b/tests/validation_new/CL/ActivationLayer.cpp
index e1cc4e5..7f9bccc 100644
--- a/tests/validation_new/CL/ActivationLayer.cpp
+++ b/tests/validation_new/CL/ActivationLayer.cpp
@@ -51,46 +51,48 @@
  *
  * @return Tolerance depending on the activation function.
  */
-float tolerance(ActivationLayerInfo::ActivationFunction activation, DataType data_type)
+AbsoluteTolerance<float> tolerance(ActivationLayerInfo::ActivationFunction activation, DataType data_type)
 {
+    constexpr float epsilon = std::numeric_limits<float>::epsilon();
+
     switch(activation)
     {
         case ActivationLayerInfo::ActivationFunction::LINEAR:
-            return data_type == DataType::F16 ? 0.2f : 0.f;
+            return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.2f : epsilon);
         case ActivationLayerInfo::ActivationFunction::SQUARE:
-            return data_type == DataType::F16 ? 0.1f : 0.f;
+            return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.1f : epsilon);
         case ActivationLayerInfo::ActivationFunction::LOGISTIC:
             if(is_data_type_fixed_point(data_type))
             {
-                return 5.f;
+                return AbsoluteTolerance<float>(5.f);
             }
             else
             {
-                return data_type == DataType::F16 ? 0.001f : 0.f;
+                return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.001f : epsilon);
             }
         case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
-            return data_type == DataType::F16 ? 0.00001f : 0.f;
+            return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.00001f : epsilon);
         case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
         case ActivationLayerInfo::ActivationFunction::SQRT:
             if(is_data_type_fixed_point(data_type))
             {
-                return 5.f;
+                return AbsoluteTolerance<float>(5.f);
             }
             else
             {
-                return data_type == DataType::F16 ? 0.01f : 0.00001f;
+                return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.01f : 0.00001f);
             }
         case ActivationLayerInfo::ActivationFunction::TANH:
             if(is_data_type_fixed_point(data_type))
             {
-                return 5.f;
+                return AbsoluteTolerance<float>(5.f);
             }
             else
             {
-                return data_type == DataType::F16 ? 0.001f : 0.00001f;
+                return AbsoluteTolerance<float>(data_type == DataType::F16 ? 0.001f : 0.00001f);
             }
         default:
-            return 0.f;
+            return AbsoluteTolerance<float>(epsilon);
     }
 }
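
The hunk above shows only the call sites of AbsoluteTolerance<float>. As a
sketch of the semantics such a wrapper presumably encodes, the class body and
the is_within() helper below are illustrative assumptions rather than the
library's actual definitions: a distinct tolerance type keeps a bare float
from being passed where an absolute (as opposed to relative) bound is
expected, and the epsilon default mirrors the fallback used in the diff.

// Minimal sketch, assuming the wrapper's job is type-safety around a float.
// AbsoluteTolerance and is_within() here are illustrative, not the library's
// actual definitions.
#include <cmath>
#include <cstdio>
#include <limits>

template <typename T>
class AbsoluteTolerance
{
public:
    // Default tolerance: machine epsilon, mirroring the epsilon fallback
    // introduced in the diff above.
    constexpr AbsoluteTolerance() = default;

    // explicit: a bare float no longer converts silently into a tolerance.
    constexpr explicit AbsoluteTolerance(T value)
        : _value{ value }
    {
    }

    constexpr operator T() const
    {
        return _value;
    }

private:
    T _value{ std::numeric_limits<T>::epsilon() };
};

// Hypothetical comparison helper: target and reference match when their
// absolute difference does not exceed the tolerance.
template <typename T>
bool is_within(T target, T reference, AbsoluteTolerance<T> tolerance)
{
    return std::fabs(target - reference) <= static_cast<T>(tolerance);
}

int main()
{
    // e.g. the 0.001f tolerance the diff picks for LOGISTIC on F16
    AbsoluteTolerance<float> tolerance{ 0.001f };

    std::printf("%d\n", is_within(0.5004f, 0.5f, tolerance)); // 1: within bound
    std::printf("%d\n", is_within(0.51f, 0.5f, tolerance));   // 0: outside bound
}

The explicit constructor is what gives the wrapper its value: every call site
must spell out which kind of tolerance it is handing to the comparison
routine, so an absolute bound cannot be mistaken for a relative one.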