MLBEDSW-2813: Handle non-const weights and check shapes

- Added check for non-constant weights in supported operators (sketched below)
- Added check of ifm & ifm2 shapes (sketched below)
- Handle None tensors for CPU operators
- Handle missing attributes for the Cast operator (sketched after the diff)

Signed-off-by: Andreas Nevalainen <andreas.nevalainen@arm.com>
Change-Id: I2f16d3d44d0c6da5237550b39273cdb9cc3c7607
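
For context on the first bullet: the supported-operators check itself is not part of this diff. A minimal sketch of what such a constraint could look like is given below; the function name constraint_weights_const and the op/tensor attribute layout are assumptions for illustration, not the actual Vela code.

def constraint_weights_const(op):
    # Weights are expected at input index 1 for Conv2D, DepthwiseConv2d and
    # FullyConnected ops. A tensor whose values attribute is None carries no
    # compile-time constant data, so the op cannot be mapped to the NPU.
    weights = op.inputs[1] if len(op.inputs) > 1 else None
    if weights is None or weights.values is None:
        return False, "Op has non-constant weights"
    return True, "Op has constant weights"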
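Similarly, the ifm & ifm2 shape check could be sketched as follows; this is only an illustration, and the real rule may also allow broadcastable shapes.

def constraint_matching_ifm_shapes(op):
    # Binary elementwise ops take two input feature maps; if either is missing
    # or their shapes differ, the op is left to run on the CPU.
    ifm, ifm2 = op.inputs[0], op.inputs[1]
    if ifm is None or ifm2 is None or ifm.shape != ifm2.shape:
        return False, "ifm and ifm2 shapes do not match"
    return True, "ifm and ifm2 shapes match"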
diff --git a/ethosu/vela/tflite_writer.py b/ethosu/vela/tflite_writer.py
index cb208d7..68af487 100644
--- a/ethosu/vela/tflite_writer.py
+++ b/ethosu/vela/tflite_writer.py
@@ -90,9 +90,13 @@
                     if op.type not in self.ops_to_ignore:
                         all_ops.append(op)
                     if op.type.startswith("Conv2D") or op.type.startswith("DepthwiseConv2d"):
-                        self.tensors_to_reshape[op.inputs[1]] = (3, 0, 1, 2)
+                        # If values is None, the op has non-constant weights
+                        if op.inputs[1].values is not None:
+                            self.tensors_to_reshape[op.inputs[1]] = (3, 0, 1, 2)
                     if op.type.startswith("FullyConnected"):
-                        self.tensors_to_reshape[op.inputs[1]] = (1, 0)
+                        # If values is None, the op has non-constant weights
+                        if op.inputs[1].values is not None:
+                            self.tensors_to_reshape[op.inputs[1]] = (1, 0)
 
         self.operator_codes = list(sorted(set(op.type for op in all_ops)))
         self.operator_code_map = {}
@@ -314,7 +318,8 @@
         # e.g. due to an empty graph containing no ops
         for op in all_ops + placeholder_ops:
             for tens in op.inputs + op.outputs:
-                tensor_set.add(tens)
+                if tens is not None:
+                    tensor_set.add(tens)
 
         all_tensors = [tens for nm, idx, tens in sorted((tens.name, idx, tens) for idx, tens in enumerate(tensor_set))]
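
The Cast handling mentioned in the commit message is also outside this diff. In the TFLite flatbuffer, CastOptions (and with them the in/out data types) may be absent, so one plausible fix is to default the attributes from the tensors themselves when the reader finds them missing. The snippet below is a hypothetical sketch; the attribute names in_data_type and out_data_type and the helper itself are assumptions, not the actual reader code.

def set_cast_attributes(op):
    # CastOptions may be missing from the serialized model; fall back to the
    # dtypes of the input and output tensors so later passes always see both
    # attributes on the op.
    if "in_data_type" not in op.attrs:
        op.attrs["in_data_type"] = op.inputs[0].dtype
    if "out_data_type" not in op.attrs:
        op.attrs["out_data_type"] = op.outputs[0].dtype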