COMPMID-3101 Fuse activation with floating-point elementwise operation layers in CL

Signed-off-by: Giorgio Arena <giorgio.arena@arm.com>
Change-Id: I1693f8664ba7c0dc8c076bbe7365cef1e667bd25
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/2718
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Gian Marco Iodice <gianmarco.iodice@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
diff --git a/src/graph/nodes/EltwiseLayerNode.cpp b/src/graph/nodes/EltwiseLayerNode.cpp
index a83a5fb..92d183e 100644
--- a/src/graph/nodes/EltwiseLayerNode.cpp
+++ b/src/graph/nodes/EltwiseLayerNode.cpp
@@ -52,6 +52,16 @@
     return descriptor.r_policy;
 }
 
+ActivationLayerInfo EltwiseLayerNode::fused_activation() const
+{
+    return descriptor.fused_activation;
+}
+
+void EltwiseLayerNode::set_fused_activation(ActivationLayerInfo fused_activation)
+{
+    descriptor.fused_activation = fused_activation;
+}
+
 bool EltwiseLayerNode::forward_descriptors()
 {
     if((input_id(0) != NullTensorID) && (output_id(0) != NullTensorID))
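For illustration, a minimal sketch of how a graph mutator could consume the new accessor pair added by this patch. The helper name try_fuse_eltwise_activation and the surrounding logic are hypothetical, not part of this change; the actual edge rewiring is elided. ActivationLayerNode::activation_info() and ActivationLayerInfo::enabled() are existing Compute Library APIs.

#include "arm_compute/graph/Graph.h"
#include "arm_compute/graph/nodes/ActivationLayerNode.h"
#include "arm_compute/graph/nodes/EltwiseLayerNode.h"

using namespace arm_compute::graph;

// Hypothetical fusion step: if an EltwiseLayerNode feeds a single
// ActivationLayerNode, record the activation on the eltwise node via the
// new setter so the backend kernel can apply it inline, then the
// standalone activation node can be dropped from the graph.
void try_fuse_eltwise_activation(Graph &g, EltwiseLayerNode &eltwise, ActivationLayerNode &act)
{
    // Do not overwrite an activation that has already been fused.
    if(eltwise.fused_activation().enabled())
    {
        return;
    }
    eltwise.set_fused_activation(act.activation_info());
    // ... rewire act's output edges onto eltwise and remove act from g
    //     (omitted here; a graph fusion mutator would handle this) ...
    (void)g; // placeholder until the rewiring above is filled in
}

The getter/setter pair keeps the fused activation on the node's descriptor, so backends can query fused_activation() at configure time without the mutator needing backend-specific knowledge.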