IVGCVSW-5754 Change the behaviour of the AddBroadcastReshapeLayer optimisation when the input is a const tensor
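
The AddBroadcastReshapeLayer optimisation previously inserted a ReshapeLayer
in front of an elementwise layer whenever one input had fewer dimensions than
the other. When that input is produced by a ConstantLayer, the optimisation
now rewrites the constant's TensorInfo in place instead. An abridged sketch of
the effect, reusing the calls from the new unit test below (layer names,
shapes and data type are illustrative only):

    Graph graph;
    auto input    = graph.AddLayer<InputLayer>(0, "input");
    auto constant = graph.AddLayer<ConstantLayer>("constant");
    auto mul      = graph.AddLayer<MultiplicationLayer>("mul");

    const TensorInfo constInfo({ 5 }, DataType::QAsymmU8);
    uint8_t data[] = { 1, 1, 1, 1, 1 };
    constant->m_LayerOutput = std::make_unique<ScopedCpuTensorHandle>(ConstTensor(constInfo, data));

    input->GetOutputSlot().SetTensorInfo(TensorInfo({ 1, 2, 3, 5 }, DataType::QAsymmU8));
    constant->GetOutputSlot().SetTensorInfo(constInfo);

    input->GetOutputSlot().Connect(mul->GetInputSlot(0));
    constant->GetOutputSlot().Connect(mul->GetInputSlot(1));

    Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer()));

    // Before this change: a ReshapeLayer named "Reshape_for:mul-1" was inserted
    // between "constant" and "mul".
    // After this change: no ReshapeLayer is added; constant->m_LayerOutput is
    // replaced with a tensor of shape { 1, 1, 1, 5 } and the constant's output
    // slot TensorInfo is updated to match.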

Signed-off-by: Finn Williams <Finn.Williams@arm.com>
Change-Id: I8b1357bdefc45880d064d7e448af364ac8644c0d
diff --git a/src/armnn/optimizations/AddBroadcastReshapeLayer.hpp b/src/armnn/optimizations/AddBroadcastReshapeLayer.hpp
index 6bb53d0..26661cf 100644
--- a/src/armnn/optimizations/AddBroadcastReshapeLayer.hpp
+++ b/src/armnn/optimizations/AddBroadcastReshapeLayer.hpp
@@ -8,6 +8,7 @@
 
 #include <armnn/utility/IgnoreUnused.hpp>
 #include <armnn/utility/PolymorphicDowncast.hpp>
+#include <backendsCommon/CpuTensorHandle.hpp>
 
 namespace armnn
 {
@@ -65,6 +66,20 @@
             std::copy_backward (reshapedDim.begin(), reshapedDim.end(), reshapedDimensions.end());
 
             reshapeInfo.SetShape(armnn::TensorShape{ numDimensions, reshapedDimensions.data() });
+
+            // If the parent layer is a Constant layer, change the tensor info in place rather than adding a Reshape layer
+            Layer& parentLayer = layer.GetInputSlot(reshapeSlot).GetConnectedOutputSlot()->GetOwningLayer();
+            if (parentLayer.GetType() == armnn::LayerType::Constant)
+            {
+                ConstantLayer& constantLayer = static_cast<ConstantLayer&>(parentLayer);
+
+                constantLayer.m_LayerOutput = std::make_unique<ScopedCpuTensorHandle>(
+                                ConstTensor(reshapeInfo, constantLayer.m_LayerOutput->GetTensor<void>()));
+                constantLayer.GetOutputSlot().SetTensorInfo(reshapeInfo);
+
+                return;
+            }
+
             const std::string layerName = "Reshape_for:" + layer.GetNameStr() + "-" + std::to_string(reshapeSlot);
             const ReshapeDescriptor descriptor{reshapeInfo.GetShape()};
             ReshapeLayer *reshapeLayer = graph.InsertNewLayer<ReshapeLayer>(layer.GetInputSlot(reshapeSlot),
diff --git a/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp b/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp
index fe3cc31..594b172 100644
--- a/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp
+++ b/src/armnn/test/optimizations/AddBroadcastReshapeLayerTests.cpp
@@ -285,4 +285,53 @@
     BOOST_TEST(!reshapeLayer);
 }
 
+BOOST_AUTO_TEST_CASE(ReshapeParentConstLayerTest)
+{
+    Graph graph;
+    const TensorInfo info0({ 1, 2, 3, 5 }, DataType::QAsymmU8);
+    const TensorInfo info1({ 5 }, DataType::QAsymmU8);
+    const TensorInfo outputInfo({ 1, 2, 3, 5 }, DataType::QAsymmU8);
+
+    auto input = graph.AddLayer<InputLayer>(0, "input");
+    auto constant = graph.AddLayer<ConstantLayer>("constant");
+    auto mul = graph.AddLayer<MultiplicationLayer>("mul");
+    auto output = graph.AddLayer<OutputLayer>(0, "output");
+
+    uint8_t tensor[] = { 1, 1, 1, 1, 1 };
+
+    constant->m_LayerOutput = std::make_unique<ScopedCpuTensorHandle>(ConstTensor(info1, &tensor));
+
+    input->GetOutputSlot().SetTensorInfo(info0);
+    constant->GetOutputSlot().SetTensorInfo(info1);
+    mul->GetOutputSlot().SetTensorInfo(outputInfo);
+
+    input->GetOutputSlot().Connect(mul->GetInputSlot(0));
+    constant->GetOutputSlot().Connect(mul->GetInputSlot(1));
+    mul->GetOutputSlot().Connect(output->GetInputSlot(0));
+
+    BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
+                             &IsLayerOfType<InputLayer>,
+                             &IsLayerOfType<ConstantLayer>,
+                             &IsLayerOfType<MultiplicationLayer>,
+                             &IsLayerOfType<OutputLayer>));
+
+    // Run optimizer
+    armnn::Optimizer::Pass(graph, MakeOptimizations(AddBroadcastReshapeLayer()));
+
+    // Check that a broadcast Reshape layer has not been added to the graph
+    BOOST_TEST(CheckSequence(graph.cbegin(), graph.cend(),
+                             &IsLayerOfType<InputLayer>,
+                             &IsLayerOfType<ConstantLayer>,
+                             &IsLayerOfType<MultiplicationLayer>,
+                             &IsLayerOfType<OutputLayer>));
+
+    TensorShape expectedShape = TensorShape{ 1, 1, 1, 5 };
+    BOOST_TEST(constant->m_LayerOutput->GetTensorInfo().GetShape() == expectedShape);
+
+    BOOST_TEST(constant->m_LayerOutput->GetTensorInfo().GetNumDimensions() == info0.GetNumDimensions());
+
+    Layer* const reshapeLayer = GetFirstLayerWithName(graph, "Reshape_for:mul-1");
+    BOOST_TEST(!reshapeLayer);
+}
+
 BOOST_AUTO_TEST_SUITE_END()
\ No newline at end of file