COMPMID-417: Add Leaky ReLU support for both NEON and CL.

- Adds the parametrizable Leaky ReLU activation: f(x) = (x > 0) ? x : a * x.
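
A minimal reference sketch of the new activation in plain C++; the helper name leaky_relu is illustrative only and is not part of the library:

    // Leaky ReLU: identity for positive inputs, scaled by the slope 'a' otherwise.
    template <typename T>
    T leaky_relu(T x, T a)
    {
        return (x > T(0)) ? x : a * x;
    }

    // e.g. leaky_relu(3.0f, 0.1f) == 3.0f, leaky_relu(-2.0f, 0.1f) == -0.2f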

Change-Id: Ief19a435b5832a30b56f4aaaf55125787addee94
Reviewed-on: http://mpd-gerrit.cambridge.arm.com/80575
Reviewed-by: Anthony Barbier <anthony.barbier@arm.com>
Tested-by: Kaizen <jeremy.johnson+kaizengerrit@arm.com>
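
A hedged usage sketch (not part of this patch), assuming the existing NEActivationLayer / ActivationLayerInfo runtime API; the CL path is analogous via CLActivationLayer:

    #include "arm_compute/runtime/NEON/functions/NEActivationLayer.h"
    #include "arm_compute/runtime/Tensor.h"

    using namespace arm_compute;

    // Configure and run the NEON activation function in LEAKY_RELU mode
    // with slope a = 0.1f; src and dst are assumed to be allocated F32 tensors.
    void run_leaky_relu(Tensor &src, Tensor &dst)
    {
        NEActivationLayer act;
        act.configure(&src, &dst,
                      ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LEAKY_RELU, 0.1f));
        act.run();
    }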
diff --git a/tests/validation/TensorOperations.h b/tests/validation/TensorOperations.h
index e274724..27c50cf 100644
--- a/tests/validation/TensorOperations.h
+++ b/tests/validation/TensorOperations.h
@@ -868,9 +868,6 @@
             case ActivationLayerInfo::ActivationFunction::ABS:
                 out[i] = std::abs(x);
                 break;
-            case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
-                out[i] = std::min<T>(a, std::max<T>(0, x));
-                break;
             case ActivationLayerInfo::ActivationFunction::LINEAR:
                 out[i] = a * x + b;
                 break;
@@ -880,6 +877,12 @@
             case ActivationLayerInfo::ActivationFunction::RELU:
                 out[i] = std::max<T>(0, x);
                 break;
+            case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
+                out[i] = std::min<T>(a, std::max<T>(0, x));
+                break;
+            case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
+                out[i] = (x > 0) ? x : a * x;
+                break;
             case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
                 out[i] = std::log(static_cast<T>(1) + std::exp(x));
                 break;
@@ -919,9 +922,6 @@
             case ActivationLayerInfo::ActivationFunction::ABS:
                 out[i] = abs(x).raw();
                 break;
-            case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
-                out[i] = min(a, max(const_0, x)).raw();
-                break;
             case ActivationLayerInfo::ActivationFunction::LINEAR:
                 out[i] = add(b, mul(a, x)).raw();
                 break;
@@ -931,6 +931,12 @@
             case ActivationLayerInfo::ActivationFunction::RELU:
                 out[i] = max(const_0, x).raw();
                 break;
+            case ActivationLayerInfo::ActivationFunction::BOUNDED_RELU:
+                out[i] = min(a, max(const_0, x)).raw();
+                break;
+            case ActivationLayerInfo::ActivationFunction::LEAKY_RELU:
+                out[i] = (x > const_0) ? x.raw() : mul(a, x).raw();
+                break;
             case ActivationLayerInfo::ActivationFunction::SOFT_RELU:
                 out[i] = log(const_1 + exp(x)).raw();
                 break;