IVGCVSW-2621 Add static quantization of Merger

Change-Id: I19f01698a6f9b361cd1737d76e6ec2020fab77a6
Signed-off-by: Jim Flynn <jim.flynn@arm.com>
diff --git a/src/armnn/QuantizerVisitor.cpp b/src/armnn/QuantizerVisitor.cpp
index 8e3265f..8806206 100644
--- a/src/armnn/QuantizerVisitor.cpp
+++ b/src/armnn/QuantizerVisitor.cpp
@@ -233,6 +233,15 @@
     SetQuantizedInputConnections(layer, newLayer);
 }
 
+void QuantizerVisitor::VisitMergerLayer(const IConnectableLayer* layer,
+                                        const OriginsDescriptor& mergerDescriptor,
+                                        const char* name)
+{
+    IConnectableLayer* newLayer = m_QuantizedNetwork->AddMergerLayer(mergerDescriptor, name);
+    RecordLayer(layer, newLayer);
+    SetQuantizedInputConnections(layer, newLayer);
+}
+
 void QuantizerVisitor::VisitSoftmaxLayer(const IConnectableLayer* layer,
                                          const SoftmaxDescriptor& softmaxDescriptor,
                                          const char* name)
diff --git a/src/armnn/QuantizerVisitor.hpp b/src/armnn/QuantizerVisitor.hpp
index 1beaf5a..ac90bfb 100644
--- a/src/armnn/QuantizerVisitor.hpp
+++ b/src/armnn/QuantizerVisitor.hpp
@@ -84,6 +84,10 @@
                             const ConstTensor& input,
                             const char* name = nullptr) override;
 
+    void VisitMergerLayer(const IConnectableLayer* layer,
+                          const OriginsDescriptor& mergerDescriptor,
+                          const char* name = nullptr) override;
+
     /// Extract the quantized network
     INetworkPtr RetrieveFinalNetwork() { return std::move(m_QuantizedNetwork); }
 
diff --git a/src/armnn/StaticRangeVisitor.cpp b/src/armnn/StaticRangeVisitor.cpp
index cc1255e..44b05ca 100644
--- a/src/armnn/StaticRangeVisitor.cpp
+++ b/src/armnn/StaticRangeVisitor.cpp
@@ -177,7 +177,24 @@
         min = std::min(min, inputValue);
         max = std::max(max, inputValue);
     }
+    SetRange(layer, 0, min, max);
+}
 
+void StaticRangeVisitor::VisitMergerLayer(const IConnectableLayer* layer,
+                                          const OriginsDescriptor& mergerDescriptor,
+                                          const char* name)
+{
+    float min = std::numeric_limits<float>::max();
+    float max = std::numeric_limits<float>::lowest();
+    for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
+    {
+        const IOutputSlot* outputSlot = layer->GetInputSlot(i).GetConnection(); // NOTE(review): assumes every merger input is connected — GetConnection() returns nullptr for an unconnected slot; confirm validation guarantees this
+        LayerGuid layerId = outputSlot->GetOwningLayerGuid();
+        unsigned int slotIndex = outputSlot->CalculateIndexOnOwner();
+        RangeTracker::MinMaxRange range = m_RangeTracker.GetRange(layerId, slotIndex);
+        min = std::min(min, range.first);
+        max = std::max(max, range.second);
+    }
     SetRange(layer, 0, min, max);
 }
 
diff --git a/src/armnn/StaticRangeVisitor.hpp b/src/armnn/StaticRangeVisitor.hpp
index 2f80dcb..7576e96 100644
--- a/src/armnn/StaticRangeVisitor.hpp
+++ b/src/armnn/StaticRangeVisitor.hpp
@@ -75,6 +75,10 @@
                             const ConstTensor& input,
                             const char* name = nullptr) override;
 
+    void VisitMergerLayer(const IConnectableLayer* layer,
+                          const OriginsDescriptor& mergerDescriptor,
+                          const char* name = nullptr) override;
+
 private:
     /// Set the range for an output slot on a layer
     void SetRange(const IConnectableLayer* layer, unsigned int outputIdx, float min, float max);
diff --git a/src/armnn/test/QuantizerTest.cpp b/src/armnn/test/QuantizerTest.cpp
index 6820e14..a46b443 100644
--- a/src/armnn/test/QuantizerTest.cpp
+++ b/src/armnn/test/QuantizerTest.cpp
@@ -293,7 +293,7 @@
         BOOST_TEST((info.GetQuantizationOffset() == 64));
 
         // Based off parent LeakyReLu [-5.f, 15.f]
-        BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 20.0f/255.0f, 0.000001f);
+        BOOST_CHECK_CLOSE(info.GetQuantizationScale(), 20.0f/g_QuantizationBase, g_TestTolerance);
     }
 };
 
@@ -977,5 +977,80 @@
     VisitLayersTopologically(quantizedNetwork.get(), validator);
 }
 
+BOOST_AUTO_TEST_CASE(QuantizeMerger)
+{
+    class TestMergerVisitor : public LayerVisitorBase<VisitorThrowingPolicy>
+    {
+    public:
+        TestMergerVisitor(float min, float max) : m_Min(min), m_Max(max) {}
+
+        virtual void VisitInputLayer(const IConnectableLayer* layer,
+                                     LayerBindingId id,
+                                     const char* name = nullptr) override
+        {}
+        virtual void VisitOutputLayer(const IConnectableLayer* layer,
+                                      LayerBindingId id,
+                                      const char* name = nullptr) override
+        {}
+        virtual void VisitMergerLayer(const IConnectableLayer* layer,
+                                      const OriginsDescriptor& mergerDescriptor,
+                                      const char* name = nullptr) override
+        {
+            std::pair<int, float> expectedValues = ComputeQAsymmParams(8, m_Min, m_Max);
+
+            TensorInfo info = layer->GetOutputSlot(0).GetTensorInfo();
+
+            BOOST_TEST((info.GetDataType() == DataType::QuantisedAsymm8));
+
+            BOOST_TEST((info.GetQuantizationOffset() == expectedValues.first));
+
+            BOOST_CHECK_CLOSE(info.GetQuantizationScale(), expectedValues.second, g_TestTolerance);
+        }
+
+    private:
+        float m_Min;
+        float m_Max;
+    };
+
+    INetworkPtr network = INetwork::Create();
+
+    IConnectableLayer* input0 = network->AddInputLayer(0);
+    IConnectableLayer* input1 = network->AddInputLayer(1);
+    IConnectableLayer* input2 = network->AddInputLayer(2);
+
+    OriginsDescriptor descriptor(3, 1);
+    IConnectableLayer* merger = network->AddMergerLayer(descriptor);
+
+    IConnectableLayer* output0 = network->AddOutputLayer(3);
+
+    // Establish connections
+    input0->GetOutputSlot(0).Connect(merger->GetInputSlot(0));
+    input1->GetOutputSlot(0).Connect(merger->GetInputSlot(1));
+    input2->GetOutputSlot(0).Connect(merger->GetInputSlot(2));
+    merger->GetOutputSlot(0).Connect(output0->GetInputSlot(0));
+
+    // Set TensorInfo
+    TensorShape shape{1U};
+    TensorInfo info(shape, DataType::Float32);
+
+    input0->GetOutputSlot(0).SetTensorInfo(info);
+    input1->GetOutputSlot(0).SetTensorInfo(info);
+    input2->GetOutputSlot(0).SetTensorInfo(info);
+    merger->GetOutputSlot(0).SetTensorInfo(info);
+
+    INetworkQuantizerPtr quantizerPtr = INetworkQuantizer::Create(network.get());
+    // Override the input ranges
+    float min = -15.5f;
+    float max = 45.3f;
+
+    quantizerPtr->OverrideInputRange(0, (min + 2.1f), (max - 3.2f));
+    quantizerPtr->OverrideInputRange(1, (min + 6.7f), max);
+    quantizerPtr->OverrideInputRange(2, min, (max - 7.8f));
+
+    auto quantizedNetwork = quantizerPtr->ExportNetwork();
+    TestMergerVisitor validator(min, max);
+    VisitLayersTopologically(quantizedNetwork.get(), validator);
+}
+
 BOOST_AUTO_TEST_SUITE_END()
 } // namespace armnn