IVGCVSW-6069 Fold PAD into Depthwise Convolution

Signed-off-by: Teresa Charlin <teresa.charlinreyes@arm.com>
Change-Id: Ib01629256309cfe17f341909d5b9bbbb09361422
diff --git a/src/armnn/Network.cpp b/src/armnn/Network.cpp
index f097e67..2c14136 100644
--- a/src/armnn/Network.cpp
+++ b/src/armnn/Network.cpp
@@ -1610,6 +1610,7 @@
                                                 TransposeAsReshape(),
                                                 OptimizeConsecutiveReshapes(),
                                                 FoldPadIntoConvolution2d(),
+                                                FoldPadIntoDepthwiseConvolution2d(),
                                                 FoldPadIntoPooling2d(),
                                                 PermuteAndBatchToSpaceAsDepthToSpace(),
                                                 TransposeAndBatchToSpaceAsDepthToSpace(),
diff --git a/src/armnn/optimizations/FoldPadIntoLayer2d.hpp b/src/armnn/optimizations/FoldPadIntoLayer2d.hpp
index 637f2b3..7318888 100644
--- a/src/armnn/optimizations/FoldPadIntoLayer2d.hpp
+++ b/src/armnn/optimizations/FoldPadIntoLayer2d.hpp
@@ -58,6 +58,13 @@
     return tensorValue == GetZeroElement(tensorInfo);
 }
 
+inline bool IsNeutralElement(const DepthwiseConvolution2dDescriptor&,
+                             const TensorInfo& tensorInfo,
+                             const float tensorValue)
+{
+    return tensorValue == GetZeroElement(tensorInfo);
+}
+
 inline bool IsNeutralElement(
     const Pooling2dDescriptor& descriptor, const TensorInfo& tensorInfo, const float tensorValue)
 {
@@ -179,6 +186,35 @@
     ~FoldPadIntoConvolution2dImpl() = default;
 };
 
+class FoldPadIntoDepthwiseConvolution2dImpl
+{
+public:
+    void Run(Graph& graph, InputSlot& connection) const
+    {
+        const auto newConv2dLayer = FoldPadIntoLayer2dImpl<DepthwiseConvolution2dLayer>(graph, connection);
+
+        if (newConv2dLayer != nullptr)
+        {
+            const auto conv2dLayer = PolymorphicDowncast<DepthwiseConvolution2dLayer*>(&connection.GetOwningLayer());
+            // Copy weights and bias to the new convolution layer
+            ARMNN_ASSERT_MSG(conv2dLayer->m_Weight != nullptr,
+                             "FoldPadIntoDepthwiseConvolution2d: Weights data should not be null.");
+            newConv2dLayer->m_Weight = std::move(conv2dLayer->m_Weight);
+
+            if (conv2dLayer->GetParameters().m_BiasEnabled)
+            {
+                ARMNN_ASSERT_MSG(conv2dLayer->m_Bias != nullptr,
+                                 "FoldPadIntoDepthwiseConvolution2d: Bias data should not be null if bias is enabled.");
+                newConv2dLayer->m_Bias = std::move(conv2dLayer->m_Bias);
+            }
+        }
+    }
+
+protected:
+    FoldPadIntoDepthwiseConvolution2dImpl() = default;
+    ~FoldPadIntoDepthwiseConvolution2dImpl() = default;
+};
+
 class FoldPadIntoPooling2dImpl
 {
 public:
@@ -195,6 +231,10 @@
 
 using FoldPadIntoConvolution2d =
     OptimizeForExclusiveConnection<PadLayer, Convolution2dLayer, pad_fold::FoldPadIntoConvolution2dImpl>;
+using FoldPadIntoDepthwiseConvolution2d =
+    OptimizeForExclusiveConnection<PadLayer,
+                                   DepthwiseConvolution2dLayer,
+                                   pad_fold::FoldPadIntoDepthwiseConvolution2dImpl>;
 using FoldPadIntoPooling2d =
     OptimizeForExclusiveConnection<PadLayer, Pooling2dLayer, pad_fold::FoldPadIntoPooling2dImpl>;
 
diff --git a/src/armnn/test/OptimizerTests.cpp b/src/armnn/test/OptimizerTests.cpp
index d0734d8..110b283 100644
--- a/src/armnn/test/OptimizerTests.cpp
+++ b/src/armnn/test/OptimizerTests.cpp
@@ -615,6 +615,87 @@
                              &IsLayerOfType<armnn::OutputLayer>));
 }
 
+BOOST_AUTO_TEST_CASE(FoldPadLayerIntoDepthwiseConvolution2dLayer)
+{
+    Graph              graph;
+    const unsigned int inputShape[]   = {1, 2, 2, 3};
+    const unsigned int paddedShape[]  = {1, 6, 6, 3};
+    const unsigned int weightsShape[] = {1, 2, 3, 3};
+    const unsigned int outputShape[]  = {1, 2, 1, 3};
+
+    armnn::TensorInfo inputInfo(4, inputShape, DataType::Float32);
+    armnn::TensorInfo paddedInfo(4, paddedShape, DataType::Float32);
+    armnn::TensorInfo outputInfo(4, outputShape, DataType::Float32);
+
+    Layer* input = graph.AddLayer<InputLayer>(0, "input");
+    input->GetOutputSlot().SetTensorInfo(inputInfo);
+
+    PadDescriptor padDescriptor({{0, 0},
+                                 {2, 2},
+                                 {2, 2},
+                                 {0, 0}});
+
+    PadLayer* padLayer = graph.AddLayer<PadLayer>(padDescriptor, "pad");
+    padLayer->GetOutputSlot().SetTensorInfo(paddedInfo);
+
+    DepthwiseConvolution2dDescriptor depthwiseConvolution2dDescriptor;
+    depthwiseConvolution2dDescriptor.m_BiasEnabled = false;
+    depthwiseConvolution2dDescriptor.m_StrideX     = 1;
+    depthwiseConvolution2dDescriptor.m_StrideY     = 1;
+    depthwiseConvolution2dDescriptor.m_DataLayout  = DataLayout::NHWC;
+
+    std::vector<float> weightsVector(18);
+    armnn::ConstTensor weights(armnn::TensorInfo(4, weightsShape, armnn::DataType::Float32), weightsVector);
+
+    auto* depthwiseConv2dLayer = graph
+        .AddLayer<DepthwiseConvolution2dLayer>(depthwiseConvolution2dDescriptor, "depthwiseConv2d");
+    depthwiseConv2dLayer->m_Weight = std::make_unique<armnn::ScopedTensorHandle>(weights);
+    depthwiseConv2dLayer->GetOutputSlot().SetTensorInfo(outputInfo);
+
+    Layer* output = graph.AddLayer<OutputLayer>(0, "output");
+
+    // Connect up layers - input -> pad -> depthwiseConv2d -> output
+    input->GetOutputSlot().Connect(padLayer->GetInputSlot(0));
+    padLayer->GetOutputSlot().Connect(depthwiseConv2dLayer->GetInputSlot(0));
+    depthwiseConv2dLayer->GetOutputSlot().Connect(output->GetInputSlot(0));
+
+    auto checkSimpleDepthwiseConv2d = [](const armnn::Layer* const layer)->bool {
+        const auto depthwiseConv2dLayer       = static_cast<const armnn::DepthwiseConvolution2dLayer*>(layer);
+        const auto depthwiseConv2dLayerParams = depthwiseConv2dLayer->GetParameters();
+        return IsLayerOfType<armnn::DepthwiseConvolution2dLayer>(layer) && (layer->GetNameStr() == "depthwiseConv2d") &&
+            (depthwiseConv2dLayerParams.m_PadLeft == 0) && (depthwiseConv2dLayerParams.m_PadRight == 0) &&
+            (depthwiseConv2dLayerParams.m_PadTop == 0) && (depthwiseConv2dLayerParams.m_PadBottom == 0) &&
+            (depthwiseConv2dLayerParams.m_BiasEnabled == false) && (depthwiseConv2dLayerParams.m_StrideX == 1) &&
+            (depthwiseConv2dLayerParams.m_StrideY == 1)
+            && (depthwiseConv2dLayerParams.m_DataLayout == DataLayout::NHWC);
+    };
+
+    BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
+                             &IsLayerOfType<armnn::InputLayer>,
+                             &IsLayerOfType<armnn::PadLayer>,
+                             checkSimpleDepthwiseConv2d,
+                             &IsLayerOfType<armnn::OutputLayer>));
+
+    armnn::Optimizer::Pass(graph, armnn::MakeOptimizations(FoldPadIntoDepthwiseConvolution2d()));
+
+    auto checkPadFoldedIntoDepthwiseConv2d = [](const armnn::Layer* const layer)->bool {
+        const auto depthwiseConv2dLayer       = static_cast<const armnn::DepthwiseConvolution2dLayer*>(layer);
+        const auto depthwiseConv2dLayerParams = depthwiseConv2dLayer->GetParameters();
+        return IsLayerOfType<armnn::DepthwiseConvolution2dLayer>(layer)
+            && (layer->GetNameStr() == "folded-pad-into-depthwiseConv2d") &&
+            (depthwiseConv2dLayerParams.m_PadLeft == 2) && (depthwiseConv2dLayerParams.m_PadRight == 2) &&
+            (depthwiseConv2dLayerParams.m_PadTop == 2) && (depthwiseConv2dLayerParams.m_PadBottom == 2) &&
+            (depthwiseConv2dLayerParams.m_BiasEnabled == false) && (depthwiseConv2dLayerParams.m_StrideX == 1) &&
+            (depthwiseConv2dLayerParams.m_StrideY == 1)
+            && (depthwiseConv2dLayerParams.m_DataLayout == DataLayout::NHWC);
+    };
+
+    BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
+                             &IsLayerOfType<armnn::InputLayer>,
+                             checkPadFoldedIntoDepthwiseConv2d,
+                             &IsLayerOfType<armnn::OutputLayer>));
+}
+
 BOOST_AUTO_TEST_CASE(FoldPadLayerIntoPooling2dLayer)
 {
     Graph graph;