MLBEDSW-2729: Add restrictions for shapeless tensors

Vela often fails when encountering operators that have inputs or
outputs with shape == []. This is only supported for elementwise ops,
where the scalar IFM2 is broadcast to the shape of IFM1.
This commit adds a restriction that places ops with shape [] tensors
on the CPU, except in the special case of broadcasting for
elementwise ops.
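
As a rough illustration of the rule being enforced (this is not the
patched Vela code; op_type, input_shapes, output_shapes and
BINARY_ELEM_WISE_OPS below are made-up names), the check amounts to:

    # Sketch of the new rule: shape [] is tolerated only on the inputs
    # of binary elementwise ops (scalar broadcast from IFM2 to IFM1);
    # everywhere else the op falls back to the CPU.
    BINARY_ELEM_WISE_OPS = {"AddAct", "SubAct", "MulAct"}  # illustrative subset

    def runs_on_npu(op_type, input_shapes, output_shapes):
        for shape in input_shapes:
            if shape == [] and op_type not in BINARY_ELEM_WISE_OPS:
                return False  # scalar input only allowed for elementwise ops
        for shape in output_shapes:
            if shape == []:
                return False  # scalar outputs are never supported
        return True

    # An elementwise Add with a scalar IFM2 stays on the NPU:
    assert runs_on_npu("AddAct", [[1, 8, 8, 16], []], [[1, 8, 8, 16]])
    # A convolution with a scalar input falls back to the CPU:
    assert not runs_on_npu("Conv2DBiasAct", [[], [1, 1, 16, 16]], [[1, 8, 8, 16]])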

Signed-off-by: Dwight Lidman <dwight.lidman@arm.com>
Change-Id: I5b0855233e3b83870209f4da00fb2dbd0184fee0
diff --git a/ethosu/vela/supported_operators.py b/ethosu/vela/supported_operators.py
index fdf0c6b..c418601 100644
--- a/ethosu/vela/supported_operators.py
+++ b/ethosu/vela/supported_operators.py
@@ -138,9 +138,23 @@
 
     def check_generic_restrictions(self, op):
         # check fully defined shapes
-        for t in op.inputs + op.outputs:
+        for t in op.inputs:
             if not t.has_fully_defined_shape():
-                print("Warning:", op, "has inputs/outputs of undefined shape, placing on CPU")
+                print("Warning:", op.type, "has input(s) of undefined shape, placing on CPU")
+                return False
+            if t.shape == [] and op.type not in self.binary_elem_wise_main_ops:
+                print("Warning:", op.type, "has input(s) of shape [].",
+                      "Scalar input or broadcasting is not supported for this operator,",
+                      "placing on CPU")
+                return False
+        for t in op.outputs:
+            if not t.has_fully_defined_shape():
+                print("Warning:", op.type, "has output(s) of undefined shape, placing on CPU")
+                return False
+            if t.shape == []:
+                print("Warning:", op.type, "has output(s) of shape [].",
+                      "Scalar input or broadcasting is not supported for this operator,",
+                      "placing on CPU")
                 return False
 
         # check data type