IVGCVSW-5985 Remove deprecated code

 * Removes the deprecated AddLayer and IsLayerSupported functions
 * Marks the whole LayerVisitor class as deprecated, not just the
   constructor. This required wrapping all Accept functions in a
   no-deprecate macro because the LayerVisitor is used as a parameter
   there (see the first sketch after this list)
 * Removes usage of the deprecated LayerVisitor and replaces it
   with ExecuteStrategy. This required a few structural changes
   in the unit tests (see the second sketch after this list)
 * Adds a default implementation for IStrategy called StrategyBase
 * Changes pyarmnn to use the non-deprecated constructor for
   INetworkProperties and adds a related unit test
 * Marks usage of deprecated code in pyarmnn as deprecated. This
   required extending INetworkProperties to allow backwards
   compatibility
 * Removes deprecated functions from CpuAcc, GpuAcc and Ref backends
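
First sketch: the no-deprecate wrapping of Accept. This is only an
illustration; SoftmaxLayer stands in for the layer classes actually
touched, and the exact Visit* call differs per layer type.

    // Accept still forwards to the now-deprecated ILayerVisitor, so the
    // forwarding call itself has to be shielded from the warning.
    ARMNN_NO_DEPRECATE_WARN_BEGIN
    void SoftmaxLayer::Accept(ILayerVisitor& visitor) const
    {
        visitor.VisitSoftmaxLayer(this, GetParameters(), GetName());
    }
    ARMNN_NO_DEPRECATE_WARN_END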
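
Second sketch: a test helper that implements IStrategy instead of a
LayerVisitor. The class name and the counting logic are illustrative,
and the ExecuteStrategy signature is assumed from IStrategy.hpp rather
than taken from this patch; check it against the header before use.

    #include <armnn/IStrategy.hpp>
    #include <armnn/utility/IgnoreUnused.hpp>

    // Counts layers while a network is walked via ExecuteStrategy.
    class LayerCountingStrategy : public armnn::IStrategy
    {
    public:
        void ExecuteStrategy(const armnn::IConnectableLayer* layer,
                             const armnn::BaseDescriptor& descriptor,
                             const std::vector<armnn::ConstTensor>& constants,
                             const char* name,
                             const armnn::LayerBindingId id) override
        {
            armnn::IgnoreUnused(layer, descriptor, constants, name, id);
            ++m_LayerCount;
        }
        unsigned int m_LayerCount = 0;
    };

    // In a test, e.g.: network->ExecuteStrategy(countingStrategy);

The new StrategyBase can serve as the base class instead when default
behaviour for layers a test does not care about is wanted.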

Note: This patch breaks compatibility with backends that are not
      updated as part of it

!android-nn-driver:6325

Signed-off-by: Jan Eilers <jan.eilers@arm.com>
Change-Id: Id13b6f37a74d26eadeda2da1dc92915e725ed5a5
diff --git a/src/backends/reference/RefLayerSupport.cpp b/src/backends/reference/RefLayerSupport.cpp
index c0ede67..b80aa99 100644
--- a/src/backends/reference/RefLayerSupport.cpp
+++ b/src/backends/reference/RefLayerSupport.cpp
@@ -58,15 +58,6 @@
 
 } // anonymous namespace
 
-bool RefLayerSupport::IsAbsSupported(const TensorInfo& input, const TensorInfo& output,
-                                     Optional<std::string&> reasonIfUnsupported) const
-{
-    return IsElementwiseUnarySupported(input,
-                                       output,
-                                       ElementwiseUnaryDescriptor(UnaryOperation::Abs),
-                                       reasonIfUnsupported);
-}
-
 bool RefLayerSupport::IsActivationSupported(const TensorInfo& input,
                                             const TensorInfo& output,
                                             const ActivationDescriptor& descriptor,
@@ -565,15 +556,12 @@
     const DataType inputType = input.GetDataType();
     if (IsQuantized8BitType(inputType))
     {
-        ARMNN_NO_DEPRECATE_WARN_BEGIN
-        std::array<DataType, 4> supportedWeightTypes =
+        std::array<DataType, 3> supportedWeightTypes =
         {
             DataType::QAsymmS8,
             DataType::QAsymmU8,
-            DataType::QSymmS8,
-            DataType::QuantizedSymm8PerAxis // deprecated
+            DataType::QSymmS8
         };
-        ARMNN_NO_DEPRECATE_WARN_END
 
         supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
                                       "Reference Convolution2d: weights type not supported for quantized input.");
@@ -769,15 +757,12 @@
     const DataType inputType = input.GetDataType();
     if (IsQuantized8BitType(inputType))
     {
-        ARMNN_NO_DEPRECATE_WARN_BEGIN
-        std::array<DataType, 4> supportedWeightTypes =
+        std::array<DataType, 3> supportedWeightTypes =
                 {
                         DataType::QAsymmS8,
                         DataType::QAsymmU8,
                         DataType::QSymmS8,
-                        DataType::QuantizedSymm8PerAxis // deprecated
                 };
-        ARMNN_NO_DEPRECATE_WARN_END
 
         supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
                                        "Reference DepthwiseConvolution2d: weights type not supported for "
@@ -977,18 +962,6 @@
     return supported;
 }
 
-bool RefLayerSupport::IsEqualSupported(const TensorInfo& input0,
-                                       const TensorInfo& input1,
-                                       const TensorInfo& output,
-                                       Optional<std::string&> reasonIfUnsupported) const
-{
-    return IsComparisonSupported(input0,
-                                 input1,
-                                 output,
-                                 ComparisonDescriptor(ComparisonOperation::Equal),
-                                 reasonIfUnsupported);
-}
-
 bool RefLayerSupport::IsFakeQuantizationSupported(const TensorInfo& input,
                                                   const FakeQuantizationDescriptor& descriptor,
                                                   Optional<std::string&> reasonIfUnsupported) const
@@ -1173,18 +1146,6 @@
     return supported;
 }
 
-bool RefLayerSupport::IsGreaterSupported(const TensorInfo& input0,
-                                         const TensorInfo& input1,
-                                         const TensorInfo& output,
-                                         Optional<std::string&> reasonIfUnsupported) const
-{
-    return IsComparisonSupported(input0,
-                                 input1,
-                                 output,
-                                 ComparisonDescriptor(ComparisonOperation::Greater),
-                                 reasonIfUnsupported);
-}
-
 bool RefLayerSupport::IsInputSupported(const TensorInfo& /*input*/,
                                        Optional<std::string&> /*reasonIfUnsupported*/) const
 {
@@ -1523,14 +1484,6 @@
     return supported;
 }
 
-bool RefLayerSupport::IsMergerSupported(const std::vector<const TensorInfo*> inputs,
-                                        const TensorInfo& output,
-                                        const MergerDescriptor& descriptor,
-                                        Optional<std::string&> reasonIfUnsupported) const
-{
-    return IsConcatSupported(inputs, output, descriptor, reasonIfUnsupported);
-}
-
 bool RefLayerSupport::IsMemCopySupported(const TensorInfo &input,
                                          const TensorInfo &output,
                                          Optional<std::string &> reasonIfUnsupported) const
@@ -1897,33 +1850,6 @@
         "Reference reshape: input type not supported.");
 }
 
-bool RefLayerSupport::IsResizeBilinearSupported(const TensorInfo& input,
-                                                const TensorInfo& output,
-                                                Optional<std::string&> reasonIfUnsupported) const
-{
-    bool supported = true;
-    std::array<DataType,6> supportedTypes =
-    {
-        DataType::BFloat16,
-        DataType::Float32,
-        DataType::Float16,
-        DataType::QAsymmS8,
-        DataType::QAsymmU8,
-        DataType::QSymmS16
-    };
-
-    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
-                                  "Reference ResizeBilinear: input type not supported");
-
-    supported &= CheckSupportRule(TypeAnyOf(output, supportedTypes), reasonIfUnsupported,
-                                  "Reference ResizeBilinear: output type not supported");
-
-    supported &= CheckSupportRule(TypesAreEqual(input, output), reasonIfUnsupported,
-                                  "Reference ResizeBilinear: input and output types not matching");
-
-    return supported;
-}
-
 bool RefLayerSupport::IsResizeSupported(const TensorInfo& input,
                                         const TensorInfo& output,
                                         const ResizeDescriptor& descriptor,
@@ -1953,16 +1879,6 @@
     return supported;
 }
 
-bool RefLayerSupport::IsRsqrtSupported(const TensorInfo& input,
-                                       const TensorInfo& output,
-                                       Optional<std::string&> reasonIfUnsupported) const
-{
-    return IsElementwiseUnarySupported(input,
-                                       output,
-                                       ElementwiseUnaryDescriptor(UnaryOperation::Rsqrt),
-                                       reasonIfUnsupported);
-}
-
 bool RefLayerSupport::IsShapeSupported(const TensorInfo& input,
                                        const TensorInfo& output,
                                        Optional<std::string&> reasonIfUnsupported) const
@@ -2101,28 +2017,6 @@
 }
 
 bool RefLayerSupport::IsSplitterSupported(const TensorInfo& input,
-                                          const ViewsDescriptor& descriptor,
-                                          Optional<std::string&> reasonIfUnsupported) const
-{
-    IgnoreUnused(descriptor);
-    bool supported = true;
-    std::array<DataType,6> supportedTypes =
-    {
-        DataType::BFloat16,
-        DataType::Float32,
-        DataType::Float16,
-        DataType::QAsymmS8,
-        DataType::QAsymmU8,
-        DataType::QSymmS16
-    };
-
-    supported &= CheckSupportRule(TypeAnyOf(input, supportedTypes), reasonIfUnsupported,
-                                  "Reference splitter: input type not supported");
-
-    return supported;
-}
-
-bool RefLayerSupport::IsSplitterSupported(const TensorInfo& input,
                                           const std::vector<std::reference_wrapper<TensorInfo>>& outputs,
                                           const ViewsDescriptor& descriptor,
                                           Optional<std::string&> reasonIfUnsupported) const
@@ -2322,15 +2216,12 @@
     const DataType inputType = input.GetDataType();
     if (IsQuantized8BitType(inputType))
     {
-        ARMNN_NO_DEPRECATE_WARN_BEGIN
-        std::array<DataType, 4> supportedWeightTypes =
+        std::array<DataType, 3> supportedWeightTypes =
         {
             DataType::QAsymmS8,
             DataType::QAsymmU8,
-            DataType::QSymmS8,
-            DataType::QuantizedSymm8PerAxis //Deprecated
+            DataType::QSymmS8
         };
-        ARMNN_NO_DEPRECATE_WARN_END
 
         supported &= CheckSupportRule(TypeAnyOf(weights, supportedWeightTypes), reasonIfUnsupported,
                                       "Reference TransposeConvolution2d: weights type not supported for "