Add Bias to MatMul Kernels and support its use in the Fully Connected Layer

Resolves: [COMPMID-6316]
Signed-off-by: Mohammed Suhail Munshi <MohammedSuhail.Munshi@arm.com>
Change-Id: I08e6bac9e6b46b76978da0dc6a48ccfe3dde5086
Reviewed-on: https://review.mlplatform.org/c/ml/ComputeLibrary/+/9833
Tested-by: Arm Jenkins <bsgcomp@arm.com>
Reviewed-by: Gunes Bayir <gunes.bayir@arm.com>
Comments-Addressed: Arm Jenkins <bsgcomp@arm.com>
Benchmark: Arm Jenkins <bsgcomp@arm.com>
diff --git a/src/gpu/cl/kernels/ClMatMulNativeKernel.h b/src/gpu/cl/kernels/ClMatMulNativeKernel.h
index 02d8ac3..fe2b787 100644
--- a/src/gpu/cl/kernels/ClMatMulNativeKernel.h
+++ b/src/gpu/cl/kernels/ClMatMulNativeKernel.h
@@ -43,15 +43,16 @@
     /** Initialise the kernel's input and output.
      *
      * @param[in]  compile_context    The compile context to be used.
-     * @param[in]  lhs                Input tensor for the LHS matrix. Data type supported: F32/F16.
+     * @param[in]  lhs                Input tensor info for the LHS matrix. Data type supported: F32/F16.
      *                                Dimensions above 2 are collapsed onto dimension 2 and represent the batch.
-     * @param[in]  rhs                Input tensor for the RHS matrix. Data type supported: same as @p lhs.
+     * @param[in]  rhs                Input tensor info for the RHS matrix. Data type supported: same as @p lhs.
      *                                Dimensions above 2 are collapsed onto dimension 2 and represent the batch.
+     * @param[in]  bias               Bias tensor info for bias matrix. Can be nullptr. Data type supported: same as @p lhs.
      * @param[out] dst                Output tensor info. Data type supported: same as @p lhs
      * @param[in]  matmul_kernel_info Attributes for Batch MatMul Kernel
-     * @param[in]  act_info           Specifies activation function to use after Matrix multiplication. Default is Identity function.
+     * @param[in]  act_info           (Optional) Specifies activation function to use after Matrix multiplication. Default is Identity function.
      */
-    void configure(const ClCompileContext &compile_context, ITensorInfo *lhs, ITensorInfo *rhs, ITensorInfo *dst, const MatMulKernelInfo &matmul_kernel_info,
+    void configure(const ClCompileContext &compile_context, ITensorInfo *lhs, ITensorInfo *rhs, ITensorInfo *bias, ITensorInfo *dst, const MatMulKernelInfo &matmul_kernel_info,
                    const ActivationLayerInfo &act_info = ActivationLayerInfo());
     /** Static function to check if given info will lead to a valid configuration
      *
@@ -59,7 +60,8 @@
      *
      * @return a status
      */
-    static Status validate(const ITensorInfo *lhs, const ITensorInfo *rhs, const ITensorInfo *dst, const MatMulKernelInfo &matmul_kernel_info, const ActivationLayerInfo &act_info = ActivationLayerInfo());
+    static Status validate(const ITensorInfo *lhs, const ITensorInfo *rhs, const ITensorInfo *bias, const ITensorInfo *dst, const MatMulKernelInfo &matmul_kernel_info,
+                           const ActivationLayerInfo &act_info = ActivationLayerInfo());
 
     // Inherited methods overridden:
     void run_op(ITensorPack &tensors, const Window &window, cl::CommandQueue &queue) override;
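
For reference, below is a minimal usage sketch (not part of the patch) of how the updated validate()/configure() interface might be exercised once a bias tensor is involved. It assumes ACL's public TensorInfo/TensorShape/DataType types and the MatMulKernelInfo descriptor; the header locations, tensor shapes, tile sizes, and the helper function name are illustrative assumptions only, and the kernel header is internal, so such code would have to live inside the library tree.

// Sketch only: exercises the new bias parameter of ClMatMulNativeKernel::validate().
// Header locations below are assumed; ClMatMulNativeKernel.h is an internal header (see diff).
#include "arm_compute/core/Error.h"             // Status
#include "arm_compute/core/KernelDescriptors.h" // MatMulKernelInfo (assumed location)
#include "arm_compute/core/TensorInfo.h"
#include "arm_compute/core/TensorShape.h"
#include "arm_compute/core/Types.h"

#include "src/gpu/cl/kernels/ClMatMulNativeKernel.h"

using namespace arm_compute;

// Hypothetical helper: returns true if a biased F32 matmul of these shapes is supported.
bool matmul_with_bias_is_supported()
{
    // Illustrative shapes (innermost dimension first, as ACL stores them):
    // lhs is [K, M], rhs is [N, K], bias is [N], dst is [N, M], with K=64, M=32, N=16.
    const TensorInfo lhs(TensorShape(64U, 32U), 1, DataType::F32);
    const TensorInfo rhs(TensorShape(16U, 64U), 1, DataType::F32);
    const TensorInfo bias(TensorShape(16U), 1, DataType::F32);
    const TensorInfo dst(TensorShape(16U, 32U), 1, DataType::F32);

    MatMulKernelInfo mm_info{};
    mm_info.adj_lhs = false;
    mm_info.adj_rhs = false;
    mm_info.m0      = 4; // illustrative tile sizes
    mm_info.n0      = 4;
    mm_info.k0      = 4;

    // The bias argument is the new addition; passing nullptr instead keeps the
    // previous bias-free behaviour. configure() takes the same tensor infos,
    // preceded by a ClCompileContext.
    const Status status = opencl::kernels::ClMatMulNativeKernel::validate(&lhs, &rhs, &bias, &dst, mm_info);
    return bool(status);
}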