IVGCVSW-5595 Fix incorrect padding value for asymmetric quantized type

Signed-off-by: Narumol Prangnawarat <narumol.prangnawarat@arm.com>
Change-Id: I85f0c30757043f8c27c78d607f0f9dbbdd35b9fb
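
Background (illustrative note, not part of the patch): for an asymmetric quantized tensor the real pad value 0.0f does not correspond to the raw value 0 but to the zero point, since affine quantization maps q = round(r / scale) + zero_point. With the scale of -2.0 and offset of 3 used by the new fixtures below, the padded regions are therefore expected to hold 3. A minimal standalone sketch of that arithmetic, assuming the standard affine formula (the Quantize helper name here is hypothetical, not armnn API):

    #include <cmath>
    #include <cstdint>
    #include <iostream>

    // Affine quantization: q = round(r / scale) + zeroPoint.
    int32_t Quantize(float real, float scale, int32_t zeroPoint)
    {
        return static_cast<int32_t>(std::round(real / scale)) + zeroPoint;
    }

    int main()
    {
        // Parameters taken from the Uint8/Int8 fixtures: scale -2.0, zero point 3.
        std::cout << Quantize(0.0f, -2.0f, 3) << std::endl; // prints 3
    }
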
diff --git a/src/armnnTfLiteParser/test/Pad.cpp b/src/armnnTfLiteParser/test/Pad.cpp
index bdc8478..aab1536 100644
--- a/src/armnnTfLiteParser/test/Pad.cpp
+++ b/src/armnnTfLiteParser/test/Pad.cpp
@@ -14,10 +14,13 @@
 
 struct PadFixture : public ParserFlatbuffersFixture
 {
-    explicit PadFixture(const std::string & inputShape,
-                        const std::string & outputShape,
-                        const std::string & padListShape,
-                        const std::string & padListData)
+    explicit PadFixture(const std::string& inputShape,
+                        const std::string& outputShape,
+                        const std::string& padListShape,
+                        const std::string& padListData,
+                        const std::string& dataType = "FLOAT32",
+                        const std::string& scale = "1.0",
+                        const std::string& offset = "0")
     {
         m_JsonString = R"(
             {
@@ -27,26 +30,26 @@
                     "tensors": [
                         {
                             "shape": )" + inputShape + R"(,
-                            "type": "FLOAT32",
+                            "type": )" + dataType + R"(,
                             "buffer": 0,
                             "name": "inputTensor",
                             "quantization": {
                                 "min": [ 0.0 ],
                                 "max": [ 255.0 ],
-                                "scale": [ 1.0 ],
-                                "zero_point": [ 0 ],
+                                "scale": [ )" + scale + R"( ],
+                                "zero_point": [ )" + offset + R"( ],
                             }
                         },
                         {
                              "shape": )" + outputShape + R"(,
-                             "type": "FLOAT32",
+                             "type": )" + dataType + R"(,
                              "buffer": 1,
                              "name": "outputTensor",
                              "quantization": {
                                 "min": [ 0.0 ],
                                 "max": [ 255.0 ],
-                                "scale": [ 1.0 ],
-                                "zero_point": [ 0 ],
+                                "scale": [ )" + scale + R"( ],
+                                "zero_point": [ )" + offset + R"( ],
                             }
                         },
                         {
@@ -101,4 +104,40 @@
                               0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 0.0f }}});
 }
 
+struct Uint8PadFixture : public PadFixture
+{
+    Uint8PadFixture() : PadFixture("[ 2, 3 ]", "[ 4, 7 ]", "[ 2, 2 ]",
+                                   "[  1,0,0,0, 1,0,0,0, 2,0,0,0, 2,0,0,0 ]",
+                                   "UINT8", "-2.0", "3") {}
+};
+
+BOOST_FIXTURE_TEST_CASE(ParsePadUint8, Uint8PadFixture)
+{
+    RunTest<2, armnn::DataType::QAsymmU8>
+        (0,
+         {{ "inputTensor",  { 1, 2, 3, 4, 5, 6 }}},
+         {{ "outputTensor", { 3, 3, 3, 3, 3, 3, 3,
+                              3, 3, 1, 2, 3, 3, 3,
+                              3, 3, 4, 5, 6, 3, 3,
+                              3, 3, 3, 3, 3, 3, 3 }}});
+}
+
+struct Int8PadFixture : public PadFixture
+{
+    Int8PadFixture() : PadFixture("[ 2, 3 ]", "[ 4, 7 ]", "[ 2, 2 ]",
+                                  "[  1,0,0,0, 1,0,0,0, 2,0,0,0, 2,0,0,0 ]",
+                                  "INT8", "-2.0", "3") {}
+};
+
+BOOST_FIXTURE_TEST_CASE(ParsePadInt8, Int8PadFixture)
+{
+    RunTest<2, armnn::DataType::QAsymmS8>
+        (0,
+         {{ "inputTensor",  { 1, -2, 3, 4, 5, -6 }}},
+         {{ "outputTensor", { 3, 3, 3, 3, 3, 3, 3,
+                              3, 3, 1, -2, 3, 3, 3,
+                              3, 3, 4, 5, -6, 3, 3,
+                              3, 3, 3, 3, 3, 3, 3 }}});
+}
+
 BOOST_AUTO_TEST_SUITE_END()