COMPMID-1995: Fixed graph fusion mutator for float types.

-Fixes the precondition checks for fusing activations with other nodes.
-Fixes the is_relu6 check to also capture BOUNDED_RELU: an upper bound of 6 with the implicit lower bound of 0 is equivalent to LU_BOUNDED_RELU(6, 0), i.e. ReLU6.

Change-Id: Iba193af51491b537c884a35ca85172151534f3ec
Signed-off-by: Georgios Pinitas <georgios.pinitas@arm.com>
Reviewed-on: https://review.mlplatform.org/c/918
Reviewed-by: Michele Di Giorgio <michele.digiorgio@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Tested-by: Arm Jenkins <bsgcomp@arm.com>
diff --git a/arm_compute/core/utils/misc/InfoHelpers.h b/arm_compute/core/utils/misc/InfoHelpers.h
index 704e178..8197862 100644
--- a/arm_compute/core/utils/misc/InfoHelpers.h
+++ b/arm_compute/core/utils/misc/InfoHelpers.h
@@ -52,9 +52,11 @@
  */
 inline bool is_relu6(ActivationLayerInfo activation_info)
 {
-    return activation_info.enabled()
-           && activation_info.activation() == ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU
-           && activation_info.a() == 6.f && activation_info.b() == 0.f;
+    const bool is_lu_bounded_relu = activation_info.activation() == ActivationLayerInfo::ActivationFunction::LU_BOUNDED_RELU
+                                    && activation_info.a() == 6.f && activation_info.b() == 0.f;
+    const bool is_bounded_relu = activation_info.activation() == ActivationLayerInfo::ActivationFunction::BOUNDED_RELU
+                                 && activation_info.a() == 6.f;
+    return activation_info.enabled() && (is_lu_bounded_relu || is_bounded_relu);
 }
 } // namespace info_helpers
 } // namespace utils
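
Illustrative usage (not part of the patch): a minimal sketch of the two activation configurations that is_relu6 now accepts. It assumes the Arm Compute Library headers are available and that InfoHelpers.h is included as below; the equivalence holds because BOUNDED_RELU clamps to [0, a] while LU_BOUNDED_RELU clamps to [b, a], so BOUNDED_RELU with a = 6 matches LU_BOUNDED_RELU with a = 6, b = 0.

    #include "arm_compute/core/Types.h"
    #include "arm_compute/core/utils/misc/InfoHelpers.h"
    #include <iostream>

    int main()
    {
        using AF = arm_compute::ActivationLayerInfo::ActivationFunction;

        // ReLU6 expressed as a two-sided bound: min(max(x, 0.f), 6.f)
        const arm_compute::ActivationLayerInfo lu_bounded(AF::LU_BOUNDED_RELU, 6.f, 0.f);
        // ReLU6 expressed as an upper bound only: min(max(x, 0.f), 6.f)
        const arm_compute::ActivationLayerInfo bounded(AF::BOUNDED_RELU, 6.f);

        // Both now report true; before this patch only the LU_BOUNDED_RELU form did.
        std::cout << arm_compute::utils::info_helpers::is_relu6(lu_bounded) << '\n'; // 1
        std::cout << arm_compute::utils::info_helpers::is_relu6(bounded) << '\n';    // 1
    }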