MLBEDSW-2788 Fix crash on non-constant weight tensors

Move the non-constant weight check ahead of the weight-sum check in
check_convolution_restrictions. The weight-sum check reads
weight_tensor.quant_values, which is not available when the weights are
not constant, so Vela crashed before the existing check could place the
operator on the CPU.
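
For context, a minimal standalone sketch of the failure mode and the
reordered guard; FakeWeightTensor and check_weights below are
hypothetical stand-ins for illustration, not the real Vela types:

    import numpy as np

    class FakeWeightTensor:
        """Hypothetical stand-in for Vela's Tensor, illustration only."""
        def __init__(self, values=None, quant_values=None):
            self.values = values              # None => weights are not constant
            self.quant_values = quant_values  # quantized values, None when non-constant

    def check_weights(weight_tensor):
        # The fix: reject non-constant weights before touching quant_values.
        if weight_tensor.values is None:
            print("Warning: op has non-const weights, placing on CPU")
            return False
        # Old crash site: calling .astype() on a None quant_values raised
        # AttributeError when the non-constant check still ran after this.
        quant_weights = weight_tensor.quant_values.astype(np.int64)
        return bool(quant_weights.size)

    print(check_weights(FakeWeightTensor()))  # -> False, no crash
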
Change-Id: I750ec63a0e37b38feaf4cbdcc883fdbef92bccdf
Signed-off-by: Andreas Nevalainen <andreas.nevalainen@arm.com>
diff --git a/ethosu/vela/supported_operators.py b/ethosu/vela/supported_operators.py
index b0afa2c..63eb01b 100644
--- a/ethosu/vela/supported_operators.py
+++ b/ethosu/vela/supported_operators.py
@@ -215,6 +215,11 @@
         if dilated_weight_w > 64 or dilated_weight_h > 64:
             return False
 
+        # check non-constant weights
+        if weight_tensor.values is None:
+            print("Warning:", op.type, "has non-const weights, placing on CPU")
+            return False
+
         # check weight sums over [HWI]
         zero_point = weight_tensor.quantization.zero_point
         quant_weights = weight_tensor.quant_values.astype(np.int64)
@@ -228,11 +233,6 @@
         if ifm_tensor.shape[0] != 1:
             return False
 
-        # check non const weights
-        if weight_tensor.values is None:
-            print("Warning:", op.type, "has non-const weights, placing on CPU")
-            return False
-
         return True
 
     def check_depthwise_convolution_restrictions(self, op):