Remove (CL/NE)UpsampleLayer in favor of (NE/CL)Scale

The Upsample functions and kernels can be replaced with Scale, as it
provides the same functionality.
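
For reference, a minimal sketch of the equivalent runtime-level migration is
shown below. It assumes the ScaleKernelInfo-based overload of
NEScale::configure(); the tensor shapes, main() scaffolding and exact set of
includes are illustrative only.

    #include "arm_compute/core/KernelDescriptors.h"
    #include "arm_compute/core/TensorInfo.h"
    #include "arm_compute/core/Types.h"
    #include "arm_compute/runtime/NEON/functions/NEScale.h"
    #include "arm_compute/runtime/Tensor.h"

    using namespace arm_compute;

    int main()
    {
        // Illustrative 2x nearest-neighbour upsample of a 13x13x256 F32 tensor.
        Tensor src{};
        Tensor dst{};
        src.allocator()->init(TensorInfo(TensorShape(13U, 13U, 256U), 1, DataType::F32));
        dst.allocator()->init(TensorInfo(TensorShape(26U, 26U, 256U), 1, DataType::F32));

        // Before: NEUpsampleLayer upsample;
        //         upsample.configure(&src, &dst, Size2D(2, 2), InterpolationPolicy::NEAREST_NEIGHBOR);
        // After: the same result via NEScale with nearest-neighbour interpolation.
        NEScale scale{};
        scale.configure(&src, &dst,
                        ScaleKernelInfo{ InterpolationPolicy::NEAREST_NEIGHBOR, BorderMode::UNDEFINED });

        src.allocator()->allocate();
        dst.allocator()->allocate();

        scale.run();
        return 0;
    }

At the graph-API level the same substitution is expressed by replacing
UpsampleLayer with ResizeLayer, as in the diff below.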

Partially resolves: COMPMID-3996

Signed-off-by: Georgios Pinitas <georgios.pinitas@arm.com>
Change-Id: Ic2f9ba352c183aa87d69d551d5c172d0f22119e8
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/4679
Reviewed-by: Michele Di Giorgio <michele.digiorgio@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Tested-by: Arm Jenkins <bsgcomp@arm.com>
diff --git a/examples/graph_yolov3.cpp b/examples/graph_yolov3.cpp
index 9eb24a1..54aaf20 100644
--- a/examples/graph_yolov3.cpp
+++ b/examples/graph_yolov3.cpp
@@ -187,7 +187,7 @@
                     0.000001f)
                 .set_name("conv2d_59/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LEAKY_RELU, 0.1f)).set_name("conv2d_60/LeakyRelu")
-                << UpsampleLayer(Size2D(2, 2), InterpolationPolicy::NEAREST_NEIGHBOR).set_name("Upsample_60");
+                << ResizeLayer(InterpolationPolicy::NEAREST_NEIGHBOR, 2, 2).set_name("Upsample_60");
         SubStream concat_1(route_1);
         concat_1 << ConcatLayer(std::move(route_1), std::move(intermediate_layers.second)).set_name("Route1")
                  << ConvolutionLayer(
@@ -298,7 +298,7 @@
                     0.000001f)
                 .set_name("conv2d_66/BatchNorm")
                 << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::LEAKY_RELU, 0.1f)).set_name("conv2d_68/LeakyRelu")
-                << UpsampleLayer(Size2D(2, 2), InterpolationPolicy::NEAREST_NEIGHBOR).set_name("Upsample_68");
+                << ResizeLayer(InterpolationPolicy::NEAREST_NEIGHBOR, 2, 2).set_name("Upsample_68");
         SubStream concat_2(route_2);
         concat_2 << ConcatLayer(std::move(route_2), std::move(intermediate_layers.first)).set_name("Route2")
                  << ConvolutionLayer(