IVGCVBENCH-1337 Added additional layer parameters to dot file and -v option

* Generic layer parameters now show up in dot file
* Convolution layer parameters have also been added to dot file
* ExecuteNetwork has an additional -v flag which generates a dot file if it is set

Change-Id: I210bb19b45384eb3639b7e488c7a89049fa6f18d
Signed-off-by: Andre Ghattas <andre.ghattas@arm.com>
Signed-off-by: Szilard Papp <szilard.papp@arm.com>
diff --git a/src/armnn/Layer.cpp b/src/armnn/Layer.cpp
index 528020b..1e38433 100644
--- a/src/armnn/Layer.cpp
+++ b/src/armnn/Layer.cpp
@@ -400,4 +400,23 @@
     }
     return inputShapes;
 }
+
+void Layer::SerializeLayerParameters(ParameterStringifyFunction& fn) const
+{
+    std::string layerType = GetLayerTypeAsCString(m_Type);
+    std::string backendId = std::string(m_BackendId);
+    if(!m_LayerName.empty())
+    {
+        fn("LayerName",m_LayerName);
+    }
+    if(!layerType.empty())
+    {
+        fn("LayerType",layerType);
+    }
+    if(!backendId.empty())
+    {
+        fn("BackendID",backendId);
+    }
+}
+
 } // namespace armnn
diff --git a/src/armnn/Layer.hpp b/src/armnn/Layer.hpp
index 5944ea8..c571e50 100644
--- a/src/armnn/Layer.hpp
+++ b/src/armnn/Layer.hpp
@@ -281,7 +281,7 @@
 
     /// Helper to serialize the layer parameters to string.
     /// (currently used in DotSerializer and company).
-    virtual void SerializeLayerParameters(ParameterStringifyFunction &) const {}
+    virtual void SerializeLayerParameters(ParameterStringifyFunction& fn) const;
 
     // Free up the constant source data
     virtual void ReleaseConstantData();
diff --git a/src/armnn/SerializeLayerParameters.cpp b/src/armnn/SerializeLayerParameters.cpp
index d416a28..1b0ec02 100644
--- a/src/armnn/SerializeLayerParameters.cpp
+++ b/src/armnn/SerializeLayerParameters.cpp
@@ -68,6 +68,7 @@
     }
 
     fn("BiasEnabled",(desc.m_BiasEnabled?"true":"false"));
+    fn("DataLayout",GetDataLayoutName(desc.m_DataLayout));
 }
 
 void
@@ -95,6 +96,7 @@
     }
 
     fn("BiasEnabled",(desc.m_BiasEnabled?"true":"false"));
+    fn("DataLayout",GetDataLayoutName(desc.m_DataLayout));
 }
 
 void
diff --git a/src/armnn/layers/Convolution2dLayer.cpp b/src/armnn/layers/Convolution2dLayer.cpp
index 2c7a570..4300d55 100644
--- a/src/armnn/layers/Convolution2dLayer.cpp
+++ b/src/armnn/layers/Convolution2dLayer.cpp
@@ -9,7 +9,7 @@
 #include <armnn/TypesUtils.hpp>
 #include <backendsCommon/CpuTensorHandle.hpp>
 #include <backendsCommon/WorkloadFactory.hpp>
-
+#include <string>
 #include <DataLayoutIndexed.hpp>
 
 using namespace armnnUtils;
@@ -20,6 +20,27 @@
 Convolution2dLayer::Convolution2dLayer(const Convolution2dDescriptor& param, const char* name)
     : LayerWithParameters(1, 1, LayerType::Convolution2d, param, name)
 {
+
+}
+
+void Convolution2dLayer::SerializeLayerParameters(ParameterStringifyFunction& fn) const
+{
+    // Serialize the filter-shape-derived parameters before the descriptor fields.
+    const std::vector<TensorShape>& inputShapes =
+    {
+        GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape(),
+        m_Weight->GetTensorInfo().GetShape()
+    };
+    const TensorShape filterShape = inputShapes[1];
+    DataLayoutIndexed dataLayoutIndex(m_Param.m_DataLayout);
+    unsigned int filterWidth = filterShape[dataLayoutIndex.GetWidthIndex()];
+    unsigned int filterHeight = filterShape[dataLayoutIndex.GetHeightIndex()];
+    unsigned int outChannels = filterShape[0];
+
+    fn("OutputChannels",std::to_string(outChannels));
+    fn("FilterWidth",std::to_string(filterWidth));
+    fn("FilterHeight",std::to_string(filterHeight));
+    LayerWithParameters<Convolution2dDescriptor>::SerializeLayerParameters(fn);
 }
 
 std::unique_ptr<IWorkload> Convolution2dLayer::CreateWorkload(const Graph& graph, const IWorkloadFactory& factory) const
diff --git a/src/armnn/layers/Convolution2dLayer.hpp b/src/armnn/layers/Convolution2dLayer.hpp
index 05a26da..0e85b33 100644
--- a/src/armnn/layers/Convolution2dLayer.hpp
+++ b/src/armnn/layers/Convolution2dLayer.hpp
@@ -15,6 +15,7 @@
 class Convolution2dLayer : public LayerWithParameters<Convolution2dDescriptor>
 {
 public:
+
     /// A unique pointer to store Weight values.
     std::unique_ptr<ScopedCpuTensorHandle> m_Weight;
     /// A unique pointer to store Bias values.
@@ -43,6 +44,8 @@
 
     void Accept(ILayerVisitor& visitor) const override;
 
+    void SerializeLayerParameters(ParameterStringifyFunction& fn) const override;
+
 protected:
     /// Constructor to create a Convolution2dLayer.
     /// @param [in] param Convolution2dDescriptor to configure the convolution2d operation.
diff --git a/src/armnn/layers/DepthwiseConvolution2dLayer.cpp b/src/armnn/layers/DepthwiseConvolution2dLayer.cpp
index e49c179..a50a0f6 100644
--- a/src/armnn/layers/DepthwiseConvolution2dLayer.cpp
+++ b/src/armnn/layers/DepthwiseConvolution2dLayer.cpp
@@ -10,7 +10,7 @@
 
 #include <backendsCommon/CpuTensorHandle.hpp>
 #include <backendsCommon/WorkloadFactory.hpp>
-
+#include <string>
 #include <DataLayoutIndexed.hpp>
 
 using namespace armnnUtils;
@@ -24,6 +24,28 @@
 {
 }
 
+void DepthwiseConvolution2dLayer::SerializeLayerParameters(ParameterStringifyFunction& fn) const
+{
+    const std::vector<TensorShape>& inputShapes =
+    {
+        GetInputSlot(0).GetConnection()->GetTensorInfo().GetShape(),
+        m_Weight->GetTensorInfo().GetShape()
+    };
+    const TensorShape filterShape = inputShapes[1];
+    // Depthwise weights are stored [M, I, H, W]; the indices below are layout-independent.
+    unsigned int inputChannels = filterShape[1];
+    unsigned int filterWidth = filterShape[3];
+    unsigned int filterHeight = filterShape[2];
+    unsigned int depthMultiplier = filterShape[0];
+
+    fn("FilterWidth",std::to_string(filterWidth));
+    fn("FilterHeight",std::to_string(filterHeight));
+    fn("DepthMultiplier",std::to_string(depthMultiplier));
+    fn("InputChannels",std::to_string(inputChannels));
+
+    LayerWithParameters<DepthwiseConvolution2dDescriptor>::SerializeLayerParameters(fn);
+}
+
 std::unique_ptr<IWorkload> DepthwiseConvolution2dLayer::CreateWorkload(const Graph& graph,
                                                                        const IWorkloadFactory& factory) const
 {
diff --git a/src/armnn/layers/DepthwiseConvolution2dLayer.hpp b/src/armnn/layers/DepthwiseConvolution2dLayer.hpp
index 065ba6c..f575910 100644
--- a/src/armnn/layers/DepthwiseConvolution2dLayer.hpp
+++ b/src/armnn/layers/DepthwiseConvolution2dLayer.hpp
@@ -43,6 +43,8 @@
 
     void Accept(ILayerVisitor& visitor) const override;
 
+    void SerializeLayerParameters(ParameterStringifyFunction& fn) const override;
+
 protected:
     /// Constructor to create a DepthwiseConvolution2dLayer.
     /// @param [in] param DepthwiseConvolution2dDescriptor to configure the depthwise convolution2d.
diff --git a/src/armnn/layers/LayerWithParameters.hpp b/src/armnn/layers/LayerWithParameters.hpp
index ba43d6f..cce9ca2 100644
--- a/src/armnn/layers/LayerWithParameters.hpp
+++ b/src/armnn/layers/LayerWithParameters.hpp
@@ -19,9 +19,10 @@
 
     /// Helper to serialize the layer parameters to string
     /// (currently used in DotSerializer and company).
-    void SerializeLayerParameters(ParameterStringifyFunction & fn) const
+    void SerializeLayerParameters(ParameterStringifyFunction& fn) const override
     {
         StringifyLayerParameters<Parameters>::Serialize(fn, m_Param);
+        Layer::SerializeLayerParameters(fn);
     }
 
 protected:
diff --git a/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp b/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp
index 7b6135d..cbe74b8 100644
--- a/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp
+++ b/src/backends/backendsCommon/test/OptimizedNetworkTests.cpp
@@ -51,9 +51,9 @@
         "digraph Optimized {\n"
         "    node [shape=\"record\"];\n"
         "    edge [fontsize=8 fontcolor=\"blue\" fontname=\"arial-bold\"];\n"
-        "    " << inputId << " [label=\"{Input}\"];\n"
-        "    " << addId << " [label=\"{Addition}\"];\n"
-        "    " << outputId << " [label=\"{Output}\"];\n"
+        "    " << inputId << " [label=\"{Input|LayerType : Input\\lBackendID : CpuRef\\l}\"];\n"
+        "    " << addId << " [label=\"{Addition|LayerType : Addition\\lBackendID : CpuRef\\l}\"];\n"
+        "    " << outputId << " [label=\"{Output|LayerType : Output\\lBackendID : CpuRef\\l}\"];\n"
         "    " << inputId << " -> " << addId << " [label=< [4] >];\n"
         "    " << inputId << " -> " << addId << " [label=< [4] >];\n"
         "    " << addId << " -> " << outputId << " [label=< [4] >];\n"
diff --git a/src/backends/reference/test/RefOptimizedNetworkTests.cpp b/src/backends/reference/test/RefOptimizedNetworkTests.cpp
index 68617b9..1a29e73 100644
--- a/src/backends/reference/test/RefOptimizedNetworkTests.cpp
+++ b/src/backends/reference/test/RefOptimizedNetworkTests.cpp
@@ -200,9 +200,9 @@
              "digraph Optimized {\n"
              "    node [shape=\"record\"];\n"
              "    edge [fontsize=8 fontcolor=\"blue\" fontname=\"arial-bold\"];\n"
-             "    " << inputId << " [label=\"{Input}\"];\n"
-             "    " << floorId << " [label=\"{Floor}\"];\n"
-             "    " << outputId << " [label=\"{Output}\"];\n"
+             "    " << inputId << " [label=\"{Input|LayerType : Input\\lBackendID : CpuRef\\l}\"];\n"
+             "    " << floorId << " [label=\"{Floor|LayerType : Floor\\lBackendID : CpuRef\\l}\"];\n"
+             "    " << outputId << " [label=\"{Output|LayerType : Output\\lBackendID : CpuRef\\l}\"];\n"
              "    " << inputId << " -> " << floorId << " [label=< [4] >];\n"
              "    " << floorId << " -> " << outputId << " [label=< [4] >];\n"
              "}\n";