Add framework tests for Relu1 operator legalization

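TensorFlow has no built-in relu_n1_to_1 op, so the new test emulates
it by clamping values to [-1, 1] with tf.math.minimum/maximum. As a
minimal sketch of the equivalence noted in the code comment (the
tensor values below are illustrative, not taken from the test suite):

    import tensorflow as tf

    a = tf.constant([-2.0, -0.5, 0.5, 2.0])
    # min/max clamp, as used in the Relu1 test
    via_min_max = tf.math.minimum(1.0, tf.math.maximum(-1.0, a))
    # equivalent clamp via clip_by_value
    via_clip = tf.clip_by_value(a, -1.0, 1.0)
    # both yield [-1.0, -0.5, 0.5, 1.0]
    assert bool(tf.reduce_all(via_min_max == via_clip))
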
Signed-off-by: Jerry Ge <jerry.ge@arm.com>
Change-Id: I36ea46e79a0ce42c2dbe47c816b5b5572058c0f2
diff --git a/verif/frameworks/test_builder.py b/verif/frameworks/test_builder.py
index a47cf5c..97b9085 100644
--- a/verif/frameworks/test_builder.py
+++ b/verif/frameworks/test_builder.py
@@ -76,6 +76,16 @@
         def eval(self, a):
             return tf.nn.relu(a, name=self.result_name)
 
+    class Relu1:
+        def __init__(self, name):
+            self.result_name = name
+
+        def eval(self, a):
+            # TF has no relu_n1_to_1 operator; emulate it by
+            # clamping to [-1, 1] with min and max.
+            # tf.clip_by_value(a, -1.0, 1.0) would work as well.
+            return tf.math.minimum(1.0, tf.math.maximum(-1.0, a))
+
     class Relu6:
         def __init__(self, name):
             self.result_name = name