IVGCVSW-4262 Add Calls to IsReshapeSupported and IsPermuteSupported

!armnn:2486

* Add IsReshapeSupported and IsPermuteSupported calls before the
  corresponding Reshape and Permute layers are added in ConvertConcatenation
* Add outputInfo parameter wherever needed for IsReshapeSupported

Signed-off-by: Kevin May <kevin.may@arm.com>
Change-Id: Ic5d142ea046161960ff2fc137bd261ebb4e6ac0c
diff --git a/1.2/HalPolicy.cpp b/1.2/HalPolicy.cpp
index e6f8acb..2cb8497 100644
--- a/1.2/HalPolicy.cpp
+++ b/1.2/HalPolicy.cpp
@@ -702,6 +702,7 @@
                                data.m_Backends,
                                isSupported,
                                input.GetTensorInfo(),
+                               outputInfo,
                                reshapeDescriptor);
 
     if (!isSupported)
@@ -1299,7 +1300,7 @@
 
     IConnectableLayer* layer = data.m_Network->AddMaximumLayer();
     assert(layer != nullptr);
-    bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data);
+    bool isReshapeSupported = BroadcastTensor(input0, input1, outInfo, layer, data);
     if (!isReshapeSupported)
     {
         return false;
@@ -1354,7 +1355,7 @@
 
     IConnectableLayer* const layer = data.m_Network->AddMinimumLayer();
     assert(layer != nullptr);
-    bool isReshapeSupported = BroadcastTensor(input0, input1, layer, data);
+    bool isReshapeSupported = BroadcastTensor(input0, input1, outputInfo, layer, data);
     if (!isReshapeSupported)
     {
         return false;
@@ -1517,7 +1518,7 @@
         return Fail("%s: AddPreluLayer failed", __func__);
     }
 
-    bool isReshapeSupported = BroadcastTensor(input, alpha, layer, data);
+    bool isReshapeSupported = BroadcastTensor(input, alpha, outputInfo, layer, data);
     if (!isReshapeSupported)
     {
         return false;
diff --git a/ConversionUtils.hpp b/ConversionUtils.hpp
index b695aa6..4e4b4d6 100644
--- a/ConversionUtils.hpp
+++ b/ConversionUtils.hpp
@@ -236,7 +236,7 @@
     return *reshapeLayer;
 }
 
-bool BroadcastTensor(LayerInputHandle& input0, LayerInputHandle& input1,
+bool BroadcastTensor(LayerInputHandle& input0, LayerInputHandle& input1, const armnn::TensorInfo& outputInfo,
                      armnn::IConnectableLayer* startLayer, ConversionData& data)
 {
     BOOST_ASSERT(startLayer != nullptr);
@@ -287,6 +287,7 @@
                                data.m_Backends,
                                isSupported,
                                reshapedInfo,
+                               outputInfo,
                                reshapeDescriptor);
     if (!isSupported)
     {
@@ -551,6 +552,41 @@
     }
 }
 
+bool CheckReshapeSupported(ConversionData& data,
+                           std::vector<LayerInputHandle>& inputs,
+                           std::vector<armnn::TensorShape>& inputShapes,
+                           const armnn::PermutationVector& mapping,
+                           const armnn::TensorInfo& outputInfo)
+{
+    if (!mapping.IsEqual(IdentityPermutation4D))
+    {
+        size_t nInputs = inputs.size();
+        for (size_t i=0; i<nInputs; ++i)
+        {
+            // check permute layer
+            armnn::PermuteDescriptor permuteDesc;
+            permuteDesc.m_DimMappings = mapping;
+
+            bool isSupported = false;
+            FORWARD_LAYER_SUPPORT_FUNC(__func__,
+                                       IsPermuteSupported,
+                                       data.m_Backends,
+                                       isSupported,
+                                       inputs[i].GetTensorInfo(),
+                                       outputInfo,
+                                       permuteDesc);
+            if (!isSupported)
+            {
+                return false;
+            }
+
+        }
+        SwizzleInputs(*data.m_Network, inputs, inputShapes, mapping);
+    }
+    return true;
+}
+
+
 bool CreateConcatPermutationParameters(const unsigned int numberOfDimensions,
                                        int32_t & concatDimension,
                                        std::pair<armnn::PermutationVector, armnn::PermutationVector> & permutationPair)
@@ -1548,7 +1584,7 @@
 
     if (endLayer != nullptr)
     {
-        bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
+        bool isReshapeSupported = BroadcastTensor(input0, input1, outputInfo, startLayer, data);
         if (!isReshapeSupported)
         {
             return false;
@@ -1733,6 +1769,22 @@
                 tensorDimensionsAdded = 2;
             }
 
+            armnn::ReshapeDescriptor reshapeDescriptor;
+            reshapeDescriptor.m_TargetShape = reshapeInfo.GetShape();
+
+            bool isSupported = false;
+            FORWARD_LAYER_SUPPORT_FUNC(__func__,
+                                       IsReshapeSupported,
+                                       data.m_Backends,
+                                       isSupported,
+                                       operandInputHandle.GetTensorInfo(),
+                                       reshapeInfo,
+                                       reshapeDescriptor);
+            if (!isSupported)
+            {
+                return false;
+            }
+
             armnn::IConnectableLayer& newReshape = AddReshapeLayer(
                     *data.m_Network,
                     operandInputHandle,
@@ -1788,7 +1840,10 @@
 
     // this is no-op for identity swizzles, otherwise it replaces both
     // the handles and shapes with the swizzled layer output handles and shapes
-    SwizzleInputs(*data.m_Network, inputHandles, inputShapes, permutationPair.first);
+    if (!CheckReshapeSupported(data, inputHandles, inputShapes, permutationPair.first, outputInfo))
+    {
+        return false;
+    }
 
     // Create an armnn concat layer descriptor - this will also perform validation on the input shapes
     armnn::OriginsDescriptor concatDescriptor;
@@ -1844,6 +1899,21 @@
 
     if (needPermute)
     {
+        armnn::PermuteDescriptor permuteDesc;
+        permuteDesc.m_DimMappings = permutationPair.second;
+
+        bool isSupported = false;
+        FORWARD_LAYER_SUPPORT_FUNC(__func__,
+                                   IsPermuteSupported,
+                                   data.m_Backends,
+                                   isSupported,
+                                   layer->GetOutputSlot(0).GetTensorInfo(),
+                                   outputInfo,
+                                   permuteDesc);
+        if (!isSupported)
+        {
+            return false;
+        }
         // Add permutation layer and connect the output to it, the permutation becomes the output layer
         armnn::IConnectableLayer& deswizzleLayer = AddPermuteLayer(*data.m_Network,
                                                                    layer->GetOutputSlot(0),
@@ -1866,6 +1936,22 @@
             afterConcatInfo.SetShape(armnn::TensorShape({ afterConcatInfo.GetShape()[2] }));
         }
 
+        armnn::ReshapeDescriptor reshapeDescriptor;
+        reshapeDescriptor.m_TargetShape = afterConcatInfo.GetShape();
+
+        bool isSupported = false;
+        FORWARD_LAYER_SUPPORT_FUNC(__func__,
+                                   IsReshapeSupported,
+                                   data.m_Backends,
+                                   isSupported,
+                                   layer->GetOutputSlot(0).GetTensorInfo(),
+                                   afterConcatInfo,
+                                   reshapeDescriptor);
+        if (!isSupported)
+        {
+            return false;
+        }
+
         layer = &AddReshapeLayer(
                 *data.m_Network,
                 layer->GetOutputSlot(0),
@@ -2312,7 +2398,7 @@
 
     if (endLayer)
     {
-        bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
+        bool isReshapeSupported = BroadcastTensor(input0, input1, outputInfo, startLayer, data);
         if (!isReshapeSupported)
         {
             return false;
@@ -2888,7 +2974,7 @@
 
     if (endLayer != nullptr)
     {
-        bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
+        bool isReshapeSupported = BroadcastTensor(input0, input1, outputInfo, startLayer, data);
         if (!isReshapeSupported)
         {
             return false;
@@ -3027,6 +3113,7 @@
                                data.m_Backends,
                                isSupported,
                                input.GetTensorInfo(),
+                               GetTensorInfoForOperand(*outputOperand),
                                reshapeDescriptor);
     if (!isSupported)
     {
@@ -3096,7 +3183,7 @@
 
     if (endLayer)
     {
-        bool isReshapeSupported = BroadcastTensor(input0, input1, startLayer, data);
+        bool isReshapeSupported = BroadcastTensor(input0, input1, outputInfo, startLayer, data);
         if (!isReshapeSupported)
         {
             return false;
@@ -3180,6 +3267,7 @@
                                data.m_Backends,
                                isSupported,
                                inputInfo,
+                               outputInfo,
                                reshapeDesc);
     if (!isSupported)
     {