IVGCVSW-5252 Use the CreateTensorHandle() function from TensorHandleFactory in the tests for layers G to L

Signed-off-by: Finn Williams <Finn.Williams@arm.com>
Change-Id: I197351a479fb211787bd12a73c9618d2ded95898
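
For reference, the pattern applied throughout this change: each test helper gains a
const armnn::ITensorHandleFactory& parameter, and the deprecated
IWorkloadFactory::CreateTensorHandle() calls (previously wrapped in
ARMNN_NO_DEPRECATE_WARN_BEGIN/END) are replaced with calls on that factory. Below is a
minimal sketch of the pattern; the helper name is hypothetical and the header paths are
assumptions that may differ between Arm NN versions.

    // Illustrative sketch only -- not part of the patch.
    // Header paths are assumptions and may vary by Arm NN version.
    #include <armnn/Tensor.hpp>
    #include <armnn/backends/ITensorHandle.hpp>
    #include <armnn/backends/ITensorHandleFactory.hpp>

    #include <memory>

    // Hypothetical helper showing the new path: the caller passes in the
    // ITensorHandleFactory, which decides how the handle is created.
    std::unique_ptr<armnn::ITensorHandle> MakeHandle(
        const armnn::ITensorHandleFactory& tensorHandleFactory,
        const armnn::TensorInfo& tensorInfo)
    {
        // Old (deprecated) path, removed by this patch:
        //     ARMNN_NO_DEPRECATE_WARN_BEGIN
        //     auto handle = workloadFactory.CreateTensorHandle(tensorInfo);
        //     ARMNN_NO_DEPRECATE_WARN_END
        return tensorHandleFactory.CreateTensorHandle(tensorInfo);
    }
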
diff --git a/src/backends/backendsCommon/test/layerTests/InstanceNormalizationTestImpl.cpp b/src/backends/backendsCommon/test/layerTests/InstanceNormalizationTestImpl.cpp
index 58ac0e7..2e205dd 100644
--- a/src/backends/backendsCommon/test/layerTests/InstanceNormalizationTestImpl.cpp
+++ b/src/backends/backendsCommon/test/layerTests/InstanceNormalizationTestImpl.cpp
@@ -26,6 +26,7 @@
 LayerTestResult<T, 4> InstanceNormTestImpl(
     armnn::IWorkloadFactory& workloadFactory,
     const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory,
     const armnn::TensorInfo& inputTensorInfo,
     const armnn::TensorInfo& outputTensorInfo,
     const std::vector<float>& inputValues,
@@ -42,10 +43,8 @@
     result.outputExpected = MakeTensor<T, 4>(outputTensorInfo,
                                              armnnUtils::QuantizedVector<T>(expectedOutputValues, qScale, qOffset));
 
-    ARMNN_NO_DEPRECATE_WARN_BEGIN
-    std::unique_ptr<armnn::ITensorHandle> inputHandle  = workloadFactory.CreateTensorHandle(inputTensorInfo);
-    std::unique_ptr<armnn::ITensorHandle> outputHandle = workloadFactory.CreateTensorHandle(outputTensorInfo);
-    ARMNN_NO_DEPRECATE_WARN_END
+    std::unique_ptr<armnn::ITensorHandle> inputHandle  = tensorHandleFactory.CreateTensorHandle(inputTensorInfo);
+    std::unique_ptr<armnn::ITensorHandle> outputHandle = tensorHandleFactory.CreateTensorHandle(outputTensorInfo);
 
     armnn::WorkloadInfo info;
 
@@ -71,6 +70,7 @@
 LayerTestResult<T, 4> InstanceNormTest(
     armnn::IWorkloadFactory& workloadFactory,
     const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory,
     armnn::DataLayout dataLayout)
 {
     // BatchSize: 2
@@ -143,6 +143,7 @@
     return InstanceNormTestImpl<ArmnnType>(
         workloadFactory,
         memoryManager,
+        tensorHandleFactory,
         inputTensorInfo,
         outputTensorInfo,
         inputValues,
@@ -154,6 +155,7 @@
 LayerTestResult<T, 4> InstanceNormTest2(
     armnn::IWorkloadFactory& workloadFactory,
     const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory,
     armnn::DataLayout dataLayout)
 {
     // BatchSize: 2
@@ -227,6 +229,7 @@
     return InstanceNormTestImpl<ArmnnType>(
         workloadFactory,
         memoryManager,
+        tensorHandleFactory,
         inputTensorInfo,
         outputTensorInfo,
         inputValues,
@@ -239,31 +242,35 @@
 LayerTestResult<float, 4> InstanceNormFloat32Test(
     armnn::IWorkloadFactory& workloadFactory,
     const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory,
     armnn::DataLayout dataLayout)
 {
-    return InstanceNormTest<armnn::DataType::Float32>(workloadFactory, memoryManager, dataLayout);
+    return InstanceNormTest<armnn::DataType::Float32>(workloadFactory, memoryManager, tensorHandleFactory, dataLayout);
 }
 
 LayerTestResult<armnn::Half, 4> InstanceNormFloat16Test(
     armnn::IWorkloadFactory& workloadFactory,
     const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory,
     armnn::DataLayout dataLayout)
 {
-    return InstanceNormTest<armnn::DataType::Float16>(workloadFactory, memoryManager, dataLayout);
+    return InstanceNormTest<armnn::DataType::Float16>(workloadFactory, memoryManager, tensorHandleFactory, dataLayout);
 }
 
 LayerTestResult<float, 4> InstanceNormFloat32Test2(
     armnn::IWorkloadFactory& workloadFactory,
     const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory,
     armnn::DataLayout dataLayout)
 {
-    return InstanceNormTest2<armnn::DataType::Float32>(workloadFactory, memoryManager, dataLayout);
+    return InstanceNormTest2<armnn::DataType::Float32>(workloadFactory, memoryManager, tensorHandleFactory, dataLayout);
 }
 
 LayerTestResult<armnn::Half, 4> InstanceNormFloat16Test2(
     armnn::IWorkloadFactory& workloadFactory,
     const armnn::IBackendInternal::IMemoryManagerSharedPtr& memoryManager,
+    const armnn::ITensorHandleFactory& tensorHandleFactory,
     armnn::DataLayout dataLayout)
 {
-    return InstanceNormTest2<armnn::DataType::Float16>(workloadFactory, memoryManager, dataLayout);
+    return InstanceNormTest2<armnn::DataType::Float16>(workloadFactory, memoryManager, tensorHandleFactory, dataLayout);
 }