MLBEDSW-2811: Add rescaling to ReLus with different scaling

If the IFM/OFM is not 4D, reshape ops are inserted when adding
rescaling to ReLus with different scaling.
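
For context, shapes are padded to 4D with leading ones before the
reshapes are inserted. A minimal standalone sketch of that padding
(mirroring the full_shape helper used in the diff; illustrative only,
not the library code):

    def full_shape(dim, shape, fill):
        # Left-pad 'shape' with 'fill' until it has 'dim' dimensions.
        return [fill] * (dim - len(shape)) + list(shape)

    assert full_shape(4, [32, 32], 1) == [1, 1, 32, 32]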

Change-Id: I631d44fc8a51fb476b9f62ef90eda26eef3d35f3
Signed-off-by: Andreas Nevalainen <andreas.nevalainen@arm.com>
diff --git a/ethosu/vela/graph_optimiser.py b/ethosu/vela/graph_optimiser.py
index e5fbc25..6847330 100644
--- a/ethosu/vela/graph_optimiser.py
+++ b/ethosu/vela/graph_optimiser.py
@@ -657,6 +657,15 @@
             relu_fused_op.attrs["fused_activation_function"] = op.type
             # Tidy up and assign the ifm and ofm to the new op
             ifm.consumer_list.remove(op)
+
+            # If the IFM/OFM is not 4D, insert reshapes so the new op gets 4D tensors
+            if len(ifm.shape) < 4:
+                ifm_shaped = create_reshape_tensor(ifm, full_shape(4, ifm.shape, 1))
+                ifm = ifm_shaped
+            if len(ofm.shape) < 4:
+                ofm_shaped = create_reshape_tensor(ofm, full_shape(4, ofm.shape, 1), False)
+                ofm = ofm_shaped
+
             relu_fused_op.add_input_tensor(ifm)
             relu_fused_op.set_output_tensor(ofm)
             op = relu_fused_op
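
Note: a rough standalone sketch of the pattern the hunk implements,
for reviewers unfamiliar with the helpers. Tensor and
create_reshape_tensor below are simplified stand-ins, not the real
vela implementations (the real create_reshape_tensor also creates the
Reshape op and rewires producer/consumer lists):

    from dataclasses import dataclass

    @dataclass
    class Tensor:
        name: str
        shape: list

    def full_shape(dim, shape, fill):
        # Left-pad 'shape' with 'fill' until it has 'dim' dimensions.
        return [fill] * (dim - len(shape)) + list(shape)

    def create_reshape_tensor(tens, shape, ifm_reshape=True):
        # Stand-in: only models the reshaped view of the tensor.
        # (In vela, ifm_reshape picks input- vs output-side placement;
        # it is unused in this sketch.)
        return Tensor(tens.name + "_reshaped", shape)

    ifm = Tensor("ifm", [32, 32])
    if len(ifm.shape) < 4:
        ifm = create_reshape_tensor(ifm, full_shape(4, ifm.shape, 1))
    assert ifm.shape == [1, 1, 32, 32]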