MLBEDSW-2745 Support relus with differing scales

If a relu op has differing input and output quantization scales, it
cannot simply be passed through; we need to fuse it with a nop avgpool
so the requantization can be performed.
Also refactor the existing avgpool nop code into a common function.

Signed-off-by: Michael McGeagh <michael.mcgeagh@arm.com>
Change-Id: Iedf4513e7595ee4ee1777ba0b1eb38a8df8aed5e
diff --git a/ethosu/vela/operation.py b/ethosu/vela/operation.py
index e7fd97c..6bc5a32 100644
--- a/ethosu/vela/operation.py
+++ b/ethosu/vela/operation.py
@@ -28,6 +28,22 @@
     ReduceSum = 6
 
 
+def create_avgpool_nop(name):
+    """Create a 1x1, stride-1, VALID-padded AvgPool op that passes its input through unchanged."""
+    op = Operation("AvgPool", name)
+    op.attrs["padding"] = b"VALID"
+    op.attrs["npu_block_type"] = NpuBlockType.Pooling
+    op.attrs["stride_w"] = 1
+    op.attrs["stride_h"] = 1
+    op.attrs["filter_width"] = 1
+    op.attrs["filter_height"] = 1
+    op.attrs["strides"] = [1, 1, 1, 1]
+    op.attrs["ksize"] = [1, 1, 1, 1]
+    op.attrs["skirt"] = [0, 0, 0, 0]
+    op.attrs["explicit_padding"] = [0, 0, 0, 0]
+    return op
+
+
 class Operation:
     """Class representing a Neural Network operation. Has a name, a type,
 input and output tensors, as well as an attribute dictionary."""
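
A minimal usage sketch of create_avgpool_nop, showing how a
graph-optimiser pass might fuse a relu with differing scales into a
nop avgpool. The pass itself, the tensor attribute names
(inputs/outputs, quantization.scale_f32) and the
fused_activation_function attribute are illustrative assumptions, not
part of this patch; only create_avgpool_nop comes from this change:

    from ethosu.vela.operation import create_avgpool_nop


    def convert_relu_with_differing_scales(op):
        # Assumed convention: op.inputs[0] is the IFM and op.outputs[0]
        # the OFM, each carrying quantization parameters with a float
        # scale.
        ifm, ofm = op.inputs[0], op.outputs[0]
        if op.type != "Relu" or ifm.quantization.scale_f32 == ofm.quantization.scale_f32:
            return op
        # The 1x1, stride-1 avgpool is a nop on the data path, but
        # running it on the pooling block lets the hardware apply the
        # IFM -> OFM rescale; the relu survives as a fused activation.
        avgpool = create_avgpool_nop(op.name + "_avgpool")
        avgpool.inputs = op.inputs
        avgpool.outputs = op.outputs
        avgpool.attrs["fused_activation_function"] = op.type
        # A real pass would also have to update the producer/consumer
        # lists on the affected tensors; omitted here for brevity.
        return avgpool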