COMPMID-2500: Report error in Int8 Conv2d if multiplier > 1.

Change-Id: I7d0263eddfb4f9cf0145e94b35d5f9e18737cd2d
Signed-off-by: Georgios Pinitas <georgios.pinitas@arm.com>
Reviewed-on: https://review.mlplatform.org/c/1653
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Pablo Marquez <pablo.tello@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
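
Background for the change below: in the quantized (QASYMM8) convolution path, the requantization multiplier is computed as multiplier = iqinfo.scale * wqinfo.scale / oqinfo.scale, and quantization::calculate_quantized_multiplier_less_than_one encodes it as a fixed-point multiplier plus a right shift. That encoding only covers multipliers strictly below 1, so the first hunk propagates the helper's Status out of validate() via ARM_COMPUTE_RETURN_ON_ERROR instead of dropping it. The sketch below is a minimal, illustrative version of that "less than one" decomposition (modelled on the well-known gemmlowp scheme, not the library's exact implementation; the function name, rounding details and the example scales are assumptions), showing why a multiplier at or above 1 has to be rejected:

    #include <cmath>
    #include <cstdint>

    // Illustrative sketch (not the library's exact code): a real multiplier m in
    // (0, 1) is stored as a Q0.31 integer q and a right shift s such that
    //   m ~= (q / 2^31) * 2^-s.
    // Returns false when m is outside (0, 1), mirroring the error condition that
    // the patched validate() now reports instead of ignoring.
    bool quantize_multiplier_smaller_than_one(double m, int32_t *quantized_multiplier, int *right_shift)
    {
        if(m <= 0.0 || m >= 1.0)
        {
            // Hypothetical example: scales 0.5 * 0.25 / 0.004 give m = 31.25,
            // which this representation cannot encode.
            return false;
        }
        int exponent    = 0;
        const double q  = std::frexp(m, &exponent); // m = q * 2^exponent, q in [0.5, 1)
        *right_shift    = -exponent;                // exponent <= 0 here, so this is a non-negative right shift
        int64_t q_fixed = static_cast<int64_t>(std::round(q * (1ll << 31)));
        if(q_fixed == (1ll << 31))                  // rounding pushed q up to 1.0: renormalise
        {
            q_fixed /= 2;
            --*right_shift;
        }
        *quantized_multiplier = static_cast<int32_t>(q_fixed);
        return true;
    }

With that representation, requantization becomes an integer multiply-high followed by a rounding right shift, so an out-of-range multiplier must surface as an error rather than be silently mis-encoded.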
diff --git a/src/runtime/NEON/functions/NEGEMMConvolutionLayer.cpp b/src/runtime/NEON/functions/NEGEMMConvolutionLayer.cpp
index bd46944..e94c893 100644
--- a/src/runtime/NEON/functions/NEGEMMConvolutionLayer.cpp
+++ b/src/runtime/NEON/functions/NEGEMMConvolutionLayer.cpp
@@ -187,7 +187,7 @@
         float multiplier = iqinfo.scale * wqinfo.scale / oqinfo.scale;
         int   output_multiplier;
         int   output_shift;
-        quantization::calculate_quantized_multiplier_less_than_one(multiplier, &output_multiplier, &output_shift);
+        ARM_COMPUTE_RETURN_ON_ERROR(quantization::calculate_quantized_multiplier_less_than_one(multiplier, &output_multiplier, &output_shift));
 
         // Merge activation with output stage
         int min_activation = 0;
@@ -492,6 +492,7 @@
     // Output tensor auto inizialization if not yet initialized
     ARM_COMPUTE_RETURN_ON_ERROR(NEConvolutionLayerReshapeWeights::validate(weights, biases_to_use, nullptr));
     weights_reshaped_info = TensorInfo(compute_weights_reshaped_shape(*weights, (append_bias && !skip_im2col)), 1, data_type);
+    weights_reshaped_info.set_quantization_info(weights->quantization_info());
     weights_to_use        = &weights_reshaped_info;
 
     if(!skip_im2col)
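
The second hunk closes a related gap in the same validate() path: weights_reshaped_info is constructed from scratch, and without the added set_quantization_info() call it would carry a default QuantizationInfo rather than the weights' real scale that the multiplier computation above depends on. A minimal sketch of that pattern against the library's TensorInfo API (the helper name here is hypothetical):

    #include "arm_compute/core/TensorInfo.h"

    using namespace arm_compute;

    // Illustrative helper: a TensorInfo built from only a shape and data type
    // starts with a default QuantizationInfo, so the source weights' scale must
    // be copied across explicitly, as the patch does, for the later
    // multiplier = iqinfo.scale * wqinfo.scale / oqinfo.scale computation to see
    // the real weights scale.
    TensorInfo make_reshaped_weights_info(const ITensorInfo &weights, const TensorShape &reshaped_shape)
    {
        TensorInfo reshaped(reshaped_shape, 1, weights.data_type());
        reshaped.set_quantization_info(weights.quantization_info());
        return reshaped;
    }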