COMPMID-3862: Add support for QASYMM8 LEAKY RELU activation

- LEAKY RELU activation is supported for the QASYMM8 data type
  (semantics sketched below)
- vquantize on the NEON side has been modified to match the
  other backends (OpenCL and reference)
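
For context, LEAKY RELU on QASYMM8 data can be viewed as: dequantize,
apply f(x) = x for x >= 0 and alpha * x otherwise, then requantize.
Below is a minimal, self-contained sketch of those semantics in plain
C++. The helper names (dequantize_qasymm8_sketch, quantize_qasymm8_sketch,
leaky_relu_qasymm8_sketch) are hypothetical, and the round-to-nearest
rounding is an assumption meant to mirror the vquantize alignment noted
above; this is not the actual kernel implementation.

    // Minimal sketch (hypothetical helpers, not the library kernels).
    // QASYMM8 represents real values as: real = scale * (q - offset).
    #include <algorithm>
    #include <cmath>
    #include <cstdint>

    // Dequantize a single QASYMM8 value to float.
    static float dequantize_qasymm8_sketch(uint8_t q, float scale, int32_t offset)
    {
        return scale * (static_cast<int32_t>(q) - offset);
    }

    // Quantize back with round-to-nearest (assumed to mirror the
    // vquantize behaviour shared by NEON, OpenCL and the reference).
    static uint8_t quantize_qasymm8_sketch(float x, float scale, int32_t offset)
    {
        const int32_t q = static_cast<int32_t>(std::lround(x / scale)) + offset;
        return static_cast<uint8_t>(std::min(255, std::max(0, q)));
    }

    // LEAKY RELU: f(x) = x if x >= 0, alpha * x otherwise.
    static uint8_t leaky_relu_qasymm8_sketch(uint8_t q, float alpha, float scale, int32_t offset)
    {
        const float x = dequantize_qasymm8_sketch(q, scale, offset);
        const float y = (x >= 0.f) ? x : alpha * x;
        return quantize_qasymm8_sketch(y, scale, offset);
    }

An optimized kernel would typically stay in the integer domain; a float
round-trip like the above is only meant to describe the expected result.
The test dataset changes below exercise LEAKY_RELU alongside HARD_SWISH
with AlphaBeta values 0.5 and 1.0.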

Change-Id: I194631225c8d4f3cc96027d64812ec2be2b4328a
Signed-off-by: Sang-Hoon Park <sang-hoon.park@arm.com>
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/4593
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Georgios Pinitas <georgios.pinitas@arm.com>
Reviewed-by: Manuel Bottini <manuel.bottini@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
diff --git a/tests/validation/CL/ActivationLayer.cpp b/tests/validation/CL/ActivationLayer.cpp
index 9b725a4..fa95594 100644
--- a/tests/validation/CL/ActivationLayer.cpp
+++ b/tests/validation/CL/ActivationLayer.cpp
@@ -168,8 +168,10 @@
 using CLActivationLayerQuantizedFixture = ActivationValidationQuantizedFixture<CLTensor, CLAccessor, CLActivationLayer, T>;
 
 const auto QuantizedActivationDataset8 = combine(combine(framework::dataset::make("InPlace", { false }),
-                                                         concat(datasets::ActivationFunctionsQuantized(), framework::dataset::make("ActivationFunction", ActivationLayerInfo::ActivationFunction::HARD_SWISH))),
-                                                 framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
+                                                         concat(datasets::ActivationFunctionsQuantized(),
+                                                                framework::dataset::make("ActivationFunction",
+                                                                                         { ActivationLayerInfo::ActivationFunction::HARD_SWISH, ActivationLayerInfo::ActivationFunction::LEAKY_RELU }))),
+                                                 framework::dataset::make("AlphaBeta", { 0.5f, 1.f }));
 
 const auto QuantizedActivationDataset16 = combine(combine(framework::dataset::make("InPlace", { false }),
                                                           datasets::ActivationFunctionsQuantized()),