COMPMID-1188: Add quantization info support in graph FC layer.

Change-Id: Ie9a6a896da142198243139fb9f8be0f83b87ccce
Reviewed-on: https://eu-gerrit-1.euhpc.arm.com/142130
Tested-by: Jenkins <bsgcomp@arm.com>
Reviewed-by: Vidhya Sudhan Loganathan <vidhyasudhan.loganathan@arm.com>
Reviewed-by: Anthony Barbier <anthony.barbier@arm.com>
diff --git a/arm_compute/graph/GraphBuilder.h b/arm_compute/graph/GraphBuilder.h
index 191848c..5bb1df4 100644
--- a/arm_compute/graph/GraphBuilder.h
+++ b/arm_compute/graph/GraphBuilder.h
@@ -212,17 +212,21 @@
     static NodeID add_flatten_node(Graph &g, NodeParams params, NodeIdxPair input);
     /** Adds a fully connected layer node to the graph
      *
-     * @param[in] g                Graph to add the layer to
-     * @param[in] params           Common node parameters
-     * @param[in] input            Input to the fully connected layer node as a NodeID-Index pair
-     * @param[in] num_outputs      Number of output neurons
-     * @param[in] weights_accessor (Optional) Accessor of the weights node data
-     * @param[in] bias_accessor    (Optional) Accessor of the bias node data
+     * @param[in] g                  Graph to add the layer to
+     * @param[in] params             Common node parameters
+     * @param[in] input              Input to the fully connected layer node as a NodeID-Index pair
+     * @param[in] num_outputs        Number of output neurons
+     * @param[in] weights_accessor   (Optional) Accessor of the weights node data
+     * @param[in] bias_accessor      (Optional) Accessor of the bias node data
+     * @param[in] weights_quant_info (Optional) Weights quantization info
+     * @param[in] out_quant_info     (Optional) Output quantization info
      *
      * @return Node ID of the created node, EmptyNodeID in case of error
      */
     static NodeID add_fully_connected_layer(Graph &g, NodeParams params, NodeIdxPair input, unsigned int num_outputs,
-                                            ITensorAccessorUPtr weights_accessor = nullptr, ITensorAccessorUPtr bias_accessor = nullptr);
+                                            ITensorAccessorUPtr weights_accessor = nullptr, ITensorAccessorUPtr bias_accessor = nullptr,
+                                            const QuantizationInfo weights_quant_info = QuantizationInfo(),
+                                            const QuantizationInfo out_quant_info     = QuantizationInfo());
     /** Adds a normalization layer node to the graph
      *
      * @param[in] g         Graph to add the node to
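With this change the quantization parameters no longer have to be baked into the tensors up front; a caller can hand them straight to the builder. A minimal sketch against the new signature, assuming an existing graph g, an input node input_id, and illustrative scale/offset values (the accessors are left as nullptr placeholders):

    using namespace arm_compute;
    using namespace arm_compute::graph;

    NodeParams  fc_params = { "fc1", Target::NEON };
    NodeIdxPair fc_input  = { input_id, 0 };

    // Illustrative asymmetric quantization parameters (scale, offset)
    const QuantizationInfo weights_qinfo(0.005f, 120);
    const QuantizationInfo output_qinfo(0.1f, 0);

    NodeID fc_id = GraphBuilder::add_fully_connected_layer(g, fc_params, fc_input, 1000U /* num_outputs */,
                                                           nullptr /* weights accessor */, nullptr /* bias accessor */,
                                                           weights_qinfo, output_qinfo);

Both QuantizationInfo arguments default to an empty QuantizationInfo(), so callers that do not pass them keep the previous behaviour.
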
diff --git a/arm_compute/graph/backends/FunctionHelpers.h b/arm_compute/graph/backends/FunctionHelpers.h
index 172f002..b7c9c57 100644
--- a/arm_compute/graph/backends/FunctionHelpers.h
+++ b/arm_compute/graph/backends/FunctionHelpers.h
@@ -541,6 +541,8 @@
     ARM_COMPUTE_LOG_GRAPH_INFO("Instantiated " << node.type()
                                << " Target " << TargetInfo::TargetType
                                << " Data Type: " << input->info()->data_type()
+                               << " Input QuantInfo: " << input->info()->quantization_info()
+                               << " Weights QuantInfo: " << weights->info()->quantization_info()
                                << " Input shape: " << input->info()->tensor_shape()
                                << " Weights shape: " << weights->info()->tensor_shape()
                                << " Output shape: " << output->info()->tensor_shape()
diff --git a/arm_compute/graph/frontend/Layers.h b/arm_compute/graph/frontend/Layers.h
index 02ef569..a222c85 100644
--- a/arm_compute/graph/frontend/Layers.h
+++ b/arm_compute/graph/frontend/Layers.h
@@ -381,14 +381,22 @@
 public:
     /** Construct a fully connected layer.
      *
-     * @param[in] num_outputs Number of outputs.
-     * @param[in] weights     Accessor to get weights from.
-     * @param[in] bias        Accessor to get bias from.
+     * @param[in] num_outputs        Number of outputs.
+     * @param[in] weights            Accessor to get weights from.
+     * @param[in] bias               Accessor to get bias from.
+     * @param[in] weights_quant_info (Optional) Weights quantization info
+     * @param[in] out_quant_info     (Optional) Output quantization info
      */
-    FullyConnectedLayer(unsigned int        num_outputs,
-                        ITensorAccessorUPtr weights,
-                        ITensorAccessorUPtr bias)
-        : _num_outputs(num_outputs), _weights(std::move(weights)), _bias(std::move(bias))
+    FullyConnectedLayer(unsigned int           num_outputs,
+                        ITensorAccessorUPtr    weights,
+                        ITensorAccessorUPtr    bias,
+                        const QuantizationInfo weights_quant_info = QuantizationInfo(),
+                        const QuantizationInfo out_quant_info     = QuantizationInfo())
+        : _num_outputs(num_outputs),
+          _weights(std::move(weights)),
+          _bias(std::move(bias)),
+          _weights_quant_info(std::move(weights_quant_info)),
+          _out_quant_info(std::move(out_quant_info))
     {
     }
 
@@ -397,13 +405,16 @@
         NodeParams  common_params = { name(), s.hints().target_hint };
         NodeIdxPair input         = { s.tail_node(), 0 };
         return GraphBuilder::add_fully_connected_layer(s.graph(), common_params, input, _num_outputs,
-                                                       std::move(_weights), std::move(_bias));
+                                                       std::move(_weights), std::move(_bias),
+                                                       std::move(_weights_quant_info), std::move(_out_quant_info));
     }
 
 private:
-    unsigned int        _num_outputs;
-    ITensorAccessorUPtr _weights;
-    ITensorAccessorUPtr _bias;
+    unsigned int           _num_outputs;
+    ITensorAccessorUPtr    _weights;
+    ITensorAccessorUPtr    _bias;
+    const QuantizationInfo _weights_quant_info;
+    const QuantizationInfo _out_quant_info;
 };
 
 /** Normalization Layer */
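At the frontend level the extra constructor arguments let a stream describe a quantized fully connected layer inline. A sketch assuming a frontend Stream named graph whose tail node already carries a QASYMM8 tensor; the accessors are placeholders and the quantization values are illustrative:

    using namespace arm_compute;
    using namespace arm_compute::graph;
    using namespace arm_compute::graph::frontend;

    // Placeholder accessors; a real application would load trained weights/bias here
    ITensorAccessorUPtr fc_weights = nullptr;
    ITensorAccessorUPtr fc_bias    = nullptr;

    graph << FullyConnectedLayer(256U,                          // num_outputs
                                 std::move(fc_weights),
                                 std::move(fc_bias),
                                 QuantizationInfo(0.005f, 120), // weights_quant_info
                                 QuantizationInfo(0.1f, 0));    // out_quant_info

Omitting the two QuantizationInfo arguments falls back to the defaults, so existing float graphs are unaffected.
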
diff --git a/arm_compute/graph/nodes/FullyConnectedLayerNode.h b/arm_compute/graph/nodes/FullyConnectedLayerNode.h
index 79201c8..1bff600 100644
--- a/arm_compute/graph/nodes/FullyConnectedLayerNode.h
+++ b/arm_compute/graph/nodes/FullyConnectedLayerNode.h
@@ -36,30 +36,39 @@
 public:
     /** Constructor
      *
-     * @param[in] num_outputs Number of neurons in the layer
-     * @param[in] fc_info     (Optional) Additional information about the fully connected layer
+     * @param[in] num_outputs    Number of neurons in the layer
+     * @param[in] out_quant_info (Optional) Output quantization info
+     * @param[in] fc_info        (Optional) Additional information about the fully connected layer
      */
-    FullyConnectedLayerNode(unsigned int num_outputs, FullyConnectedLayerInfo fc_info = FullyConnectedLayerInfo());
+    FullyConnectedLayerNode(unsigned int            num_outputs,
+                            QuantizationInfo        out_quant_info = QuantizationInfo(),
+                            FullyConnectedLayerInfo fc_info        = FullyConnectedLayerInfo());
     /** Computes weights descriptor
      *
      * @warning Works for inputs with 1D batch space
      *
-     * @param[in] input_descriptor Input descriptor
-     * @param[in] num_outputs      Number of output neurons
+     * @param[in] input_descriptor   Input descriptor
+     * @param[in] num_outputs        Number of output neurons
+     * @param[in] weights_quant_info (Optional) Weights quantization info
      *
      * @return Weights descriptor
      */
-    static TensorDescriptor compute_weights_descriptor(const TensorDescriptor &input_descriptor, unsigned int num_outputs);
+    static TensorDescriptor compute_weights_descriptor(const TensorDescriptor &input_descriptor,
+                                                       unsigned int            num_outputs,
+                                                       QuantizationInfo        weights_quant_info = QuantizationInfo());
     /** Computes fully connected layer output descriptor
      *
      * @warning Works for inputs with 1D batch space
      *
      * @param[in] input_descriptor Input descriptor
      * @param[in] num_outputs      Number of output neurons
+     * @param[in] out_quant_info   (Optional) Output quantization info
      *
      * @return Output descriptor
      */
-    static TensorDescriptor compute_output_descriptor(const TensorDescriptor &input_descriptor, unsigned int num_outputs);
+    static TensorDescriptor compute_output_descriptor(const TensorDescriptor &input_descriptor,
+                                                      unsigned int            num_outputs,
+                                                      QuantizationInfo        out_quant_info = QuantizationInfo());
     /** Fully connected layer addition information
      *
      * @return Additional information about the fully connected layer
@@ -74,6 +83,7 @@
 
 private:
     unsigned int            _num_outputs;
+    QuantizationInfo        _out_quant_info;
     FullyConnectedLayerInfo _info;
 };
 } // namespace graph
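
The header only declares the descriptor helpers; the matching source file is not part of this hunk. A plausible sketch of how compute_output_descriptor could honour the new out_quant_info parameter, consistent with the 1D-batch warning above (the batch extraction and the empty() check are assumptions, not code from this patch):

    TensorDescriptor FullyConnectedLayerNode::compute_output_descriptor(const TensorDescriptor &input_descriptor,
                                                                        unsigned int            num_outputs,
                                                                        QuantizationInfo        out_quant_info)
    {
        // Assumes a 1D batch space: batches live in dimension 1 (or 3 for 4D inputs)
        unsigned int batches = input_descriptor.shape[1];
        if(input_descriptor.shape.num_dimensions() > 2)
        {
            batches = input_descriptor.shape[3];
        }

        // Keep data type and target from the input, flatten the shape to (num_outputs, batches)
        TensorDescriptor output_descriptor = input_descriptor;
        output_descriptor.shape            = TensorShape(num_outputs, batches);

        // Only override the inherited quantization info when the caller supplied one
        if(!out_quant_info.empty())
        {
            output_descriptor.quant_info = out_quant_info;
        }

        return output_descriptor;
    }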