IVGCVSW-6632 OptimizationViews: has INetwork rather than Graph for holding layers

  * Deprecate the GetGraph() function in OptimizationViews & remove/fix
    occurrences where OptimizationViews.GetGraph() is called.
  * OptimizationViews has member INetworkPtr.
  * OptimizationViews has GetINetwork() method.
  * Unit test added to OptimizationViewsTests.cpp.

Signed-off-by: Cathal Corbett <cathal.corbett@arm.com>
Change-Id: Ifc1e53f1c34d786502279631942f0472f401038e
diff --git a/src/armnn/Network.cpp b/src/armnn/Network.cpp
index 3198c22..c0cfe42 100644
--- a/src/armnn/Network.cpp
+++ b/src/armnn/Network.cpp
@@ -215,10 +215,11 @@
 }
 
 IConnectableLayer* INetwork::AddPrecompiledLayer(const PreCompiledDescriptor& preCompiledDescriptor,
-                                                    CompiledBlobPtr& compiledBlobPtr,
-                                                    const Optional<BackendId>& backend)
+                                                 CompiledBlobPtr& compiledBlobPtr,
+                                                 const Optional<BackendId>& backend,
+                                                 const char* name)
 {
-    return pNetworkImpl->AddPrecompiledLayer(preCompiledDescriptor, compiledBlobPtr, backend);
+    return pNetworkImpl->AddPrecompiledLayer(preCompiledDescriptor, compiledBlobPtr, backend, name);
 }
 
 IConnectableLayer* INetwork::AddActivationLayer(const ActivationDescriptor& activationDescriptor,
@@ -2772,10 +2773,19 @@
 
 IConnectableLayer* NetworkImpl::AddPrecompiledLayer(const PreCompiledDescriptor& preCompiledDescriptor,
                                                     CompiledBlobPtr& compiledBlobPtr,
-                                                    const Optional<BackendId>& backend)
+                                                    const Optional<BackendId>& backend,
+                                                    const char* name)
 {
     // Method use is for backend users.
-    const auto layer = m_Graph->AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
+    PreCompiledLayer* layer;
+    if (name)
+    {
+        layer = m_Graph->AddLayer<PreCompiledLayer>(preCompiledDescriptor, name);
+    }
+    else
+    {
+        layer = m_Graph->AddLayer<PreCompiledLayer>(preCompiledDescriptor, "pre-compiled");
+    }
 
     // Assign the pre-compiled object to layer
     // Pass only one compiled network, Arm NN does not handle multiple
diff --git a/src/armnn/Network.hpp b/src/armnn/Network.hpp
index 3fdc140..db9c374 100644
--- a/src/armnn/Network.hpp
+++ b/src/armnn/Network.hpp
@@ -172,7 +172,8 @@
 
     IConnectableLayer* AddPrecompiledLayer(const PreCompiledDescriptor& preCompiledDescriptor,
                                            CompiledBlobPtr& compiledBlobPtr,
-                                           const Optional<BackendId>& backend);
+                                           const Optional<BackendId>& backend,
+                                           const char* name = nullptr);
 
     IConnectableLayer* AddPreluLayer(const char* name = nullptr);
 
diff --git a/src/armnn/test/SubgraphViewTests.cpp b/src/armnn/test/SubgraphViewTests.cpp
index a1a57c1..639e832 100644
--- a/src/armnn/test/SubgraphViewTests.cpp
+++ b/src/armnn/test/SubgraphViewTests.cpp
@@ -200,7 +200,9 @@
 
     // Construct dummy pre-compiled layer
     INetworkPtr network = INetwork::Create();
-    IConnectableLayer* preCompiledLayer = network->AddPrecompiledLayer(preCompiledDescriptor, compiledBlobPtr, backend);
+    IConnectableLayer* preCompiledLayer = network->AddPrecompiledLayer(preCompiledDescriptor,
+                                                                       compiledBlobPtr,
+                                                                       backend);
 
     // Substitute sub-graph with pre-compiled layer
     graph.SubstituteSubgraph(*subgraph, preCompiledLayer);
@@ -242,7 +244,9 @@
 
     // Construct dummy pre-compiled layer
     INetworkPtr network = INetwork::Create();
-    IConnectableLayer* preCompiledLayer = network->AddPrecompiledLayer(preCompiledDescriptor, compiledBlobPtr, backend);
+    IConnectableLayer* preCompiledLayer = network->AddPrecompiledLayer(preCompiledDescriptor,
+                                                                       compiledBlobPtr,
+                                                                       backend);
     SubgraphView substituteSubgraph(preCompiledLayer);
 
     // Substitute sub-graph with pre-compiled layer
diff --git a/src/backends/aclCommon/ArmComputeSubgraphUtils.hpp b/src/backends/aclCommon/ArmComputeSubgraphUtils.hpp
index 521c17c..4367de1 100644
--- a/src/backends/aclCommon/ArmComputeSubgraphUtils.hpp
+++ b/src/backends/aclCommon/ArmComputeSubgraphUtils.hpp
@@ -91,15 +91,14 @@
 }
 
 template<typename LayerType>
-LayerType* FuseLayerWithoutParameters(OptimizationViews& optimizationViews,
-                                      LayerType* baseLayer,
-                                      ActivationLayer* activationLayer,
-                                      ActivationDescriptor& activationDesc,
-                                      std::string name)
+LayerType* FuseLayer(OptimizationViews& optimizationViews,
+                     LayerType* baseLayer,
+                     LayerType* replacementLayer,
+                     ActivationLayer* activationLayer,
+                     ActivationDescriptor& activationDesc)
 {
-    LayerType* replacementLayer = optimizationViews.GetGraph().AddLayer<LayerType>(name.c_str());
-
-    replacementLayer->SetAdditionalInfoForObject(std::make_shared<ActivationDescriptor>(activationDesc));
+    replacementLayer->SetAdditionalInfoForObject(
+        std::make_shared<ActivationDescriptor>(activationDesc));
 
     SubgraphView substitutionSubgraph(CreateInputsFrom({baseLayer}),
                                       CreateOutputsFrom({activationLayer}),
@@ -107,42 +106,204 @@
     SubgraphView replacementSubgraph(replacementLayer);
 
     optimizationViews.AddSubstitution({substitutionSubgraph, replacementSubgraph});
+
     return replacementLayer;
 }
 
 template<typename LayerType>
-LayerType* FuseLayerWithParameters(OptimizationViews& optimizationViews,
+LayerType* FuseAdditionLayer(OptimizationViews& optimizationViews,
+                             LayerType* baseLayer,
+                             ActivationLayer* activationLayer,
+                             ActivationDescriptor& activationDesc,
+                             std::string name)
+{
+    IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddAdditionLayer(name.c_str());
+    LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
+
+    FuseLayer(optimizationViews,
+              baseLayer,
+              replacementLayer,
+              activationLayer,
+              activationDesc);
+
+    return replacementLayer;
+}
+
+template<typename LayerType>
+LayerType* FuseSubtractionLayer(OptimizationViews& optimizationViews,
+                                LayerType* baseLayer,
+                                ActivationLayer* activationLayer,
+                                ActivationDescriptor& activationDesc,
+                                std::string name)
+{
+    IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddSubtractionLayer(name.c_str());
+    LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
+
+    FuseLayer(optimizationViews,
+              baseLayer,
+              replacementLayer,
+              activationLayer,
+              activationDesc);
+
+    return replacementLayer;
+}
+
+template<typename LayerType>
+LayerType* FuseDivisionLayer(OptimizationViews& optimizationViews,
+                             LayerType* baseLayer,
+                             ActivationLayer* activationLayer,
+                             ActivationDescriptor& activationDesc,
+                             std::string name)
+{
+    IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddDivisionLayer(name.c_str());
+    LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
+
+    FuseLayer(optimizationViews,
+              baseLayer,
+              replacementLayer,
+              activationLayer,
+              activationDesc);
+
+    return replacementLayer;
+}
+
+template<typename LayerType>
+LayerType* FuseMultiplicationLayer(OptimizationViews& optimizationViews,
                                    LayerType* baseLayer,
                                    ActivationLayer* activationLayer,
                                    ActivationDescriptor& activationDesc,
                                    std::string name)
 {
-    LayerType* replacementLayer = optimizationViews.GetGraph().AddLayer<LayerType>(baseLayer->GetParameters(),
-                                                                                   name.c_str());
+    IConnectableLayer* replacement = optimizationViews.GetINetwork()->AddMultiplicationLayer(name.c_str());
+    LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
 
-    replacementLayer->SetAdditionalInfoForObject(std::make_shared<ActivationDescriptor>(activationDesc));
+    FuseLayer(optimizationViews,
+              baseLayer,
+              replacementLayer,
+              activationLayer,
+              activationDesc);
 
-    SubgraphView substitutionSubgraph(CreateInputsFrom({baseLayer}),
-                                      CreateOutputsFrom({activationLayer}),
-                                      {baseLayer, activationLayer});
-    SubgraphView replacementSubgraph(replacementLayer);
-
-    optimizationViews.AddSubstitution({substitutionSubgraph, replacementSubgraph});
     return replacementLayer;
 }
 
 template<typename LayerType>
-LayerType* FuseLayerWithWeightsAndBiases(OptimizationViews& optimizationViews,
-                                         LayerType* baseLayer,
-                                         ActivationLayer* activationLayer,
-                                         ActivationDescriptor& activationDesc,
-                                         std::string name)
+LayerType* FuseBatchNormalizationLayer(OptimizationViews& optimizationViews,
+                                       LayerType* baseLayer,
+                                       ActivationLayer* activationLayer,
+                                       ActivationDescriptor& activationDesc,
+                                       std::string name)
 {
-    LayerType* replacementLayer = FuseLayerWithParameters(optimizationViews,
-                                                          baseLayer,
-                                                          activationLayer,
-                                                          activationDesc,
-                                                          name);
+    IConnectableLayer* replacement =
+        optimizationViews.GetINetwork()->AddBatchNormalizationLayer(baseLayer->GetParameters(),
+                                                                    ConstTensor(),
+                                                                    ConstTensor(),
+                                                                    ConstTensor(),
+                                                                    ConstTensor(),
+                                                                    name.c_str());
+    LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
+
+    FuseLayer(optimizationViews,
+              baseLayer,
+              replacementLayer,
+              activationLayer,
+              activationDesc);
+
+    return replacementLayer;
+}
+
+template<typename LayerType>
+LayerType* FuseConvolution2dLayer(OptimizationViews& optimizationViews,
+                                  LayerType* baseLayer,
+                                  ActivationLayer* activationLayer,
+                                  ActivationDescriptor& activationDesc,
+                                  std::string name)
+{
+    std::shared_ptr<ConstTensorHandle> weightHandle = baseLayer->m_Weight;
+    TensorInfo weightInfo = weightHandle->GetTensorInfo();
+
+    std::shared_ptr<ConstTensorHandle> biasHandle = baseLayer->m_Bias;
+    ConstTensor biasTensor;
+    if (!biasHandle)
+    {
+        biasTensor = ConstTensor();
+    }
+    else
+    {
+        biasTensor = ConstTensor(biasHandle->GetTensorInfo(), biasHandle->Map(true));
+    }
+
+    IConnectableLayer* replacement =
+        optimizationViews.GetINetwork()->
+            AddConvolution2dLayer(baseLayer->GetParameters(),
+                                  ConstTensor(weightInfo, weightHandle->Map(true)),
+                                  Optional<ConstTensor>(biasTensor),
+                                  name.c_str());
+    LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
+
+    FuseLayer(optimizationViews,
+              baseLayer,
+              replacementLayer,
+              activationLayer,
+              activationDesc);
+
+    return replacementLayer;
+}
+
+template<typename LayerType>
+LayerType* FuseDepthwiseConvolution2dLayer(OptimizationViews& optimizationViews,
+                                           LayerType* baseLayer,
+                                           ActivationLayer* activationLayer,
+                                           ActivationDescriptor& activationDesc,
+                                           std::string name)
+{
+    std::shared_ptr<ConstTensorHandle> weightHandle = baseLayer->m_Weight;
+    TensorInfo weightInfo = weightHandle->GetTensorInfo();
+
+    std::shared_ptr<ConstTensorHandle> biasHandle = baseLayer->m_Bias;
+    ConstTensor biasTensor;
+    if (!biasHandle)
+    {
+        biasTensor = ConstTensor();
+    }
+    else
+    {
+        biasTensor = ConstTensor(biasHandle->GetTensorInfo(), biasHandle->Map(true));
+    }
+
+    IConnectableLayer* replacement =
+        optimizationViews.GetINetwork()->
+            AddDepthwiseConvolution2dLayer(baseLayer->GetParameters(),
+                                           ConstTensor(weightInfo, weightHandle->Map(true)),
+                                           Optional<ConstTensor>(biasTensor),
+                                           name.c_str());
+    LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
+
+    FuseLayer(optimizationViews,
+              baseLayer,
+              replacementLayer,
+              activationLayer,
+              activationDesc);
+
+    return replacementLayer;
+}
+
+template<typename LayerType>
+LayerType* FuseFullyConnectedLayer(OptimizationViews& optimizationViews,
+                                   LayerType* baseLayer,
+                                   ActivationLayer* activationLayer,
+                                   ActivationDescriptor& activationDesc,
+                                   std::string name)
+{
+    IConnectableLayer* replacement =
+        optimizationViews.GetINetwork()->AddFullyConnectedLayer(baseLayer->GetParameters(),
+                                                                name.c_str());
+    LayerType* replacementLayer = PolymorphicDowncast<LayerType*>(replacement);
+
+    FuseLayer(optimizationViews,
+              baseLayer,
+              replacementLayer,
+              activationLayer,
+              activationDesc);
 
     replacementLayer->m_Weight = std::move(baseLayer->m_Weight);
     replacementLayer->m_Bias   = std::move(baseLayer->m_Bias);
@@ -187,8 +348,9 @@
 
         // Add new layer to graph.
         std::string layerName = "reduce_layer_" + std::to_string(i);
-        Layer* replacementLayer = optimizationViews.GetGraph().AddLayer<LayerType>(newReduceDescriptor,
-                                                                                   layerName.c_str());
+        Layer* replacementLayer = PolymorphicDowncast<Layer*>(
+            optimizationViews.GetINetwork()->AddReduceLayer(newReduceDescriptor,
+                                                            layerName.c_str()));
         // Connect previous layer with new layer.
         // The first and last layer will be connected when the subgraph is replaced.
         if (!layers.empty())
diff --git a/src/backends/backendsCommon/test/MockBackend.cpp b/src/backends/backendsCommon/test/MockBackend.cpp
index 4bdb8ce..a34cfe9 100644
--- a/src/backends/backendsCommon/test/MockBackend.cpp
+++ b/src/backends/backendsCommon/test/MockBackend.cpp
@@ -187,12 +187,16 @@
         {
             ARMNN_ASSERT(supportedSubgraph != nullptr);
 
-            PreCompiledLayer* preCompiledLayer =
-                optimizationViews.GetGraph().AddLayer<PreCompiledLayer>(
+            CompiledBlobPtr blobPtr;
+            BackendId backend = MockBackendId();
+
+            IConnectableLayer* preCompiledLayer =
+                optimizationViews.GetINetwork()->AddPrecompiledLayer(
                         PreCompiledDescriptor(supportedSubgraph->GetNumInputSlots(),
                                               supportedSubgraph->GetNumOutputSlots()),
-                        "pre-compiled");
-            preCompiledLayer->SetBackendId(MockBackendId());
+                                              blobPtr,
+                                              backend,
+                                              nullptr);
 
             SubgraphView substitutionSubgraph(*supportedSubgraph);
             SubgraphView replacementSubgraph(preCompiledLayer);
diff --git a/src/backends/backendsCommon/test/OptimizationViewsTests.cpp b/src/backends/backendsCommon/test/OptimizationViewsTests.cpp
index bbae229..c40c513 100644
--- a/src/backends/backendsCommon/test/OptimizationViewsTests.cpp
+++ b/src/backends/backendsCommon/test/OptimizationViewsTests.cpp
@@ -55,7 +55,7 @@
 {
     OptimizationViews view;
     // Construct a graph with 3 layers
-    Graph& baseGraph = view.GetGraph();
+    Graph baseGraph;
 
     Layer* const inputLayer = baseGraph.AddLayer<InputLayer>(0, "input");
 
@@ -119,11 +119,78 @@
     CHECK(view.Validate(*originalSubgraph));
 }
 
+
+TEST_CASE("OptimizedViewsSubgraphLayerCountUsingGetINetwork")
+{
+    OptimizationViews view;
+
+    IConnectableLayer* const inputLayer = view.GetINetwork()->AddInputLayer(0, "input");
+
+    DepthwiseConvolution2dDescriptor convDescriptor;
+    PreCompiledDescriptor substitutionLayerDescriptor(1, 1);
+    CompiledBlobPtr blobPtr;
+    BackendId backend = Compute::CpuRef;
+
+    Layer* convLayer1 = PolymorphicDowncast<Layer*>(
+        view.GetINetwork()->AddDepthwiseConvolution2dLayer(convDescriptor,
+                                                           ConstTensor(),
+                                                           Optional<ConstTensor>(),
+                                                           "conv1"));
+
+    Layer* convLayer2 = PolymorphicDowncast<Layer*>(
+        view.GetINetwork()->AddDepthwiseConvolution2dLayer(convDescriptor,
+                                                           ConstTensor(),
+                                                           Optional<ConstTensor>(),
+                                                           "conv2"));
+
+    IConnectableLayer* const outputLayer = view.GetINetwork()->AddOutputLayer(0, "output");
+
+    inputLayer->GetOutputSlot(0).Connect(convLayer1->GetInputSlot(0));
+    convLayer1->GetOutputSlot(0).Connect(convLayer2->GetInputSlot(0));
+    convLayer2->GetOutputSlot(0).Connect(outputLayer->GetInputSlot(0));
+
+    // Subgraph for a failed layer
+    SubgraphViewSelector::SubgraphViewPtr failedSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({convLayer1}),
+                                                                                  CreateOutputsFrom({convLayer1}),
+                                                                                  {convLayer1});
+    // Subgraph for an untouched layer
+    SubgraphViewSelector::SubgraphViewPtr untouchedSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({convLayer2}),
+                                                                                     CreateOutputsFrom({convLayer2}),
+                                                                                     {convLayer2});
+
+    // Create a Network containing a layer to substitute in
+    NetworkImpl net;
+    Layer* substitutionpreCompiledLayer = PolymorphicDowncast<Layer*>(
+        net.AddPrecompiledLayer(substitutionLayerDescriptor, blobPtr, backend));
+
+    // Subgraph for a substitution layer
+    SubgraphViewSelector::SubgraphViewPtr substitutionSubgraph =
+        CreateSubgraphViewFrom(CreateInputsFrom({substitutionpreCompiledLayer}),
+                                                CreateOutputsFrom({substitutionpreCompiledLayer}),
+                                                {substitutionpreCompiledLayer});
+
+    view.AddFailedSubgraph(SubgraphView(*failedSubgraph));
+    view.AddUntouchedSubgraph(SubgraphView(*untouchedSubgraph));
+
+    SubgraphViewSelector::SubgraphViewPtr baseSubgraph = CreateSubgraphViewFrom(CreateInputsFrom({convLayer1}),
+                                                                                CreateOutputsFrom({convLayer2}),
+                                                                                {substitutionpreCompiledLayer});
+    view.AddSubstitution({*baseSubgraph, *substitutionSubgraph});
+
+    // Construct original subgraph to compare against
+    SubgraphViewSelector::SubgraphViewPtr originalSubgraph =
+        CreateSubgraphViewFrom(CreateInputsFrom({convLayer1}),
+                                                CreateOutputsFrom({convLayer2}),
+                                                {convLayer1, convLayer2, substitutionpreCompiledLayer});
+
+    CHECK(view.Validate(*originalSubgraph));
+}
+
 TEST_CASE("OptimizedViewsSubgraphLayerCountFailValidate")
 {
     OptimizationViews view;
     // Construct a graph with 3 layers
-    Graph& baseGraph = view.GetGraph();
+    Graph baseGraph;
 
     Layer* const inputLayer = baseGraph.AddLayer<InputLayer>(0, "input");
 
diff --git a/src/backends/cl/ClBackend.cpp b/src/backends/cl/ClBackend.cpp
index 5c58269..339c1aa 100644
--- a/src/backends/cl/ClBackend.cpp
+++ b/src/backends/cl/ClBackend.cpp
@@ -304,11 +304,11 @@
 
                                 if (status)
                                 {
-                                    FuseLayerWithWeightsAndBiases<Convolution2dLayer>(optimizationViews,
-                                                                                      baseLayer,
-                                                                                      activationLayer,
-                                                                                      activationDesc,
-                                                                                      name);
+                                    FuseConvolution2dLayer<Convolution2dLayer>(optimizationViews,
+                                                                               baseLayer,
+                                                                               activationLayer,
+                                                                               activationDesc,
+                                                                               name);
                                     untouched.erase(baseLayer->GetGuid());
                                     untouched.erase(activationLayer->GetGuid());
                                 }
@@ -335,11 +335,11 @@
 
                                 if (status)
                                 {
-                                    FuseLayerWithWeightsAndBiases<DepthwiseConvolution2dLayer>(optimizationViews,
-                                                                                               baseLayer,
-                                                                                               activationLayer,
-                                                                                               activationDesc,
-                                                                                               name);
+                                    FuseDepthwiseConvolution2dLayer<DepthwiseConvolution2dLayer>(optimizationViews,
+                                                                                                 baseLayer,
+                                                                                                 activationLayer,
+                                                                                                 activationDesc,
+                                                                                                 name);
                                     untouched.erase(baseLayer->GetGuid());
                                     untouched.erase(activationLayer->GetGuid());
                                 }
@@ -358,11 +358,11 @@
 
                                 if (status)
                                 {
-                                    FuseLayerWithWeightsAndBiases<FullyConnectedLayer>(optimizationViews,
-                                                                                       baseLayer,
-                                                                                       activationLayer,
-                                                                                       activationDesc,
-                                                                                       name);
+                                    FuseFullyConnectedLayer<FullyConnectedLayer>(optimizationViews,
+                                                                                 baseLayer,
+                                                                                 activationLayer,
+                                                                                 activationDesc,
+                                                                                 name);
                                     untouched.erase(baseLayer->GetGuid());
                                     untouched.erase(activationLayer->GetGuid());
                                 }
@@ -385,7 +385,7 @@
                                 if (status)
                                 {
                                     BatchNormalizationLayer* replacementLayer =
-                                            FuseLayerWithParameters<BatchNormalizationLayer>(optimizationViews,
+                                        FuseBatchNormalizationLayer<BatchNormalizationLayer>(optimizationViews,
                                                                                              baseLayer,
                                                                                              activationLayer,
                                                                                              activationDesc,
@@ -411,11 +411,11 @@
 
                                 if (status)
                                 {
-                                    FuseLayerWithoutParameters<AdditionLayer>(optimizationViews,
-                                                                              baseLayer,
-                                                                              activationLayer,
-                                                                              activationDesc,
-                                                                              name);
+                                    FuseAdditionLayer<AdditionLayer>(optimizationViews,
+                                                                     baseLayer,
+                                                                     activationLayer,
+                                                                     activationDesc,
+                                                                     name);
                                     untouched.erase(baseLayer->GetGuid());
                                     untouched.erase(activationLayer->GetGuid());
                                 }
@@ -432,11 +432,11 @@
 
                                 if (status)
                                 {
-                                    FuseLayerWithoutParameters<DivisionLayer>(optimizationViews,
-                                                                              baseLayer,
-                                                                              activationLayer,
-                                                                              activationDesc,
-                                                                              name);
+                                    FuseDivisionLayer<DivisionLayer>(optimizationViews,
+                                                                     baseLayer,
+                                                                     activationLayer,
+                                                                     activationDesc,
+                                                                     name);
                                     untouched.erase(baseLayer->GetGuid());
                                     untouched.erase(activationLayer->GetGuid());
                                 }
@@ -453,11 +453,11 @@
 
                                 if (status)
                                 {
-                                    FuseLayerWithoutParameters<MultiplicationLayer>(optimizationViews,
-                                                                                    baseLayer,
-                                                                                    activationLayer,
-                                                                                    activationDesc,
-                                                                                    name);
+                                    FuseMultiplicationLayer<MultiplicationLayer>(optimizationViews,
+                                                                                 baseLayer,
+                                                                                 activationLayer,
+                                                                                 activationDesc,
+                                                                                 name);
                                     untouched.erase(baseLayer->GetGuid());
                                     untouched.erase(activationLayer->GetGuid());
                                 }
@@ -474,11 +474,11 @@
 
                                 if (status)
                                 {
-                                    FuseLayerWithoutParameters<SubtractionLayer>(optimizationViews,
-                                                                                 baseLayer,
-                                                                                 activationLayer,
-                                                                                 activationDesc,
-                                                                                 name);
+                                    FuseSubtractionLayer<SubtractionLayer>(optimizationViews,
+                                                                           baseLayer,
+                                                                           activationLayer,
+                                                                           activationDesc,
+                                                                           name);
                                     untouched.erase(baseLayer->GetGuid());
                                     untouched.erase(activationLayer->GetGuid());
                                 }
diff --git a/src/backends/neon/NeonBackend.cpp b/src/backends/neon/NeonBackend.cpp
index 0500ee3..aa5ba03 100644
--- a/src/backends/neon/NeonBackend.cpp
+++ b/src/backends/neon/NeonBackend.cpp
@@ -196,11 +196,11 @@
 
                                 if (status)
                                 {
-                                    FuseLayerWithWeightsAndBiases<Convolution2dLayer>(optimizationViews,
-                                                                                      baseLayer,
-                                                                                      activationLayer,
-                                                                                      activationDesc,
-                                                                                      name);
+                                    FuseConvolution2dLayer<Convolution2dLayer>(optimizationViews,
+                                                                               baseLayer,
+                                                                               activationLayer,
+                                                                               activationDesc,
+                                                                               name);
                                     untouched.erase(baseLayer->GetGuid());
                                     untouched.erase(activationLayer->GetGuid());
                                 }
@@ -227,11 +227,11 @@
 
                                 if (status)
                                 {
-                                    FuseLayerWithWeightsAndBiases<DepthwiseConvolution2dLayer>(optimizationViews,
-                                                                                               baseLayer,
-                                                                                               activationLayer,
-                                                                                               activationDesc,
-                                                                                               name);
+                                    FuseDepthwiseConvolution2dLayer<DepthwiseConvolution2dLayer>(optimizationViews,
+                                                                                                 baseLayer,
+                                                                                                 activationLayer,
+                                                                                                 activationDesc,
+                                                                                                 name);
                                     untouched.erase(baseLayer->GetGuid());
                                     untouched.erase(activationLayer->GetGuid());
                                 }
@@ -250,11 +250,11 @@
 
                                 if (status)
                                 {
-                                    FuseLayerWithWeightsAndBiases<FullyConnectedLayer>(optimizationViews,
-                                                                                       baseLayer,
-                                                                                       activationLayer,
-                                                                                       activationDesc,
-                                                                                       name);
+                                    FuseFullyConnectedLayer<FullyConnectedLayer>(optimizationViews,
+                                                                                 baseLayer,
+                                                                                 activationLayer,
+                                                                                 activationDesc,
+                                                                                 name);
                                     untouched.erase(baseLayer->GetGuid());
                                     untouched.erase(activationLayer->GetGuid());
                                 }
@@ -277,12 +277,11 @@
                                 if (status)
                                 {
                                     BatchNormalizationLayer* replacementLayer =
-                                            FuseLayerWithParameters<BatchNormalizationLayer>(
-                                                    optimizationViews,
-                                                    baseLayer,
-                                                    activationLayer,
-                                                    activationDesc,
-                                                    name);
+                                        FuseBatchNormalizationLayer<BatchNormalizationLayer>(optimizationViews,
+                                                                                             baseLayer,
+                                                                                             activationLayer,
+                                                                                             activationDesc,
+                                                                                             name);
 
                                     replacementLayer->m_Beta     = std::move(baseLayer->m_Beta);
                                     replacementLayer->m_Gamma    = std::move(baseLayer->m_Gamma);
@@ -304,11 +303,11 @@
 
                                 if (status)
                                 {
-                                    FuseLayerWithoutParameters<AdditionLayer>(optimizationViews,
-                                                                              baseLayer,
-                                                                              activationLayer,
-                                                                              activationDesc,
-                                                                              name);
+                                    FuseAdditionLayer<AdditionLayer>(optimizationViews,
+                                                                     baseLayer,
+                                                                     activationLayer,
+                                                                     activationDesc,
+                                                                     name);
                                     untouched.erase(baseLayer->GetGuid());
                                     untouched.erase(activationLayer->GetGuid());
                                 }
@@ -325,11 +324,11 @@
 
                                 if (status)
                                 {
-                                    FuseLayerWithoutParameters<DivisionLayer>(optimizationViews,
-                                                                              baseLayer,
-                                                                              activationLayer,
-                                                                              activationDesc,
-                                                                              name);
+                                    FuseDivisionLayer<DivisionLayer>(optimizationViews,
+                                                                     baseLayer,
+                                                                     activationLayer,
+                                                                     activationDesc,
+                                                                     name);
                                     untouched.erase(baseLayer->GetGuid());
                                     untouched.erase(activationLayer->GetGuid());
                                 }
@@ -346,11 +345,11 @@
 
                                 if (status)
                                 {
-                                    FuseLayerWithoutParameters<MultiplicationLayer>(optimizationViews,
-                                                                                    baseLayer,
-                                                                                    activationLayer,
-                                                                                    activationDesc,
-                                                                                    name);
+                                    FuseMultiplicationLayer<MultiplicationLayer>(optimizationViews,
+                                                                                 baseLayer,
+                                                                                 activationLayer,
+                                                                                 activationDesc,
+                                                                                 name);
                                     untouched.erase(baseLayer->GetGuid());
                                     untouched.erase(activationLayer->GetGuid());
                                 }
@@ -367,11 +366,11 @@
 
                                 if (status)
                                 {
-                                    FuseLayerWithoutParameters<SubtractionLayer>(optimizationViews,
-                                                                                 baseLayer,
-                                                                                 activationLayer,
-                                                                                 activationDesc,
-                                                                                 name);
+                                    FuseSubtractionLayer<SubtractionLayer>(optimizationViews,
+                                                                           baseLayer,
+                                                                           activationLayer,
+                                                                           activationDesc,
+                                                                           name);
                                     untouched.erase(baseLayer->GetGuid());
                                     untouched.erase(activationLayer->GetGuid());
                                 }