MLBEDSW-3148: Refactor Operation

- op.type is now an enum (Op) instead of a string (see the sketch after this list)
- Removed unused operator codes
- Refactored some attributes, such as npu_block_type and fused_activation_function
- Refactored operator index calculation
- Refactored a number of operator sets
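
The gist of the change, as a minimal self-contained sketch (not the
actual vela API; all member names except CustomNpuOp, which appears in
the diff below, are hypothetical):

    import enum

    class Op(enum.Enum):
        # Hypothetical members for illustration; only CustomNpuOp is
        # taken from the diff below.
        CustomNpuOp = enum.auto()
        Relu = enum.auto()
        Relu6 = enum.auto()

    # Operator sets hold enum members instead of strings, so a
    # misspelled member raises AttributeError instead of silently
    # never matching.
    activation_ops = {Op.Relu, Op.Relu6}

    # Before: op_type == "NpuOp"            (a typo fails silently)
    # After:  op_type == Op.CustomNpuOp     (a typo is an error)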

Change-Id: I641f65ee375794b7aec42abc0664251ae37d78e8
Signed-off-by: Louis Verhaard <louis.verhaard@arm.com>
diff --git a/ethosu/vela/live_range.py b/ethosu/vela/live_range.py
index 9a8ee58..23026c7 100644
--- a/ethosu/vela/live_range.py
+++ b/ethosu/vela/live_range.py
@@ -18,6 +18,7 @@
 # Can work with either a pass packed subgraph or a scheduled subgraph.
 from .high_level_command_stream_generator import calc_allowed_ofm_ifm_overlap_for_cascaded_pass
 from .nn_graph import PassPlacement
+from .operation import Op
 from .tensor import MemType
 from .tensor import Tensor
 
@@ -262,7 +263,11 @@
 
         cps_primary_op = cps.passes[0].primary_op
 
-        if cps_primary_op and cps_primary_op.type == "NpuOp" and MemType.Permanent_CPU not in target_mem_type_set:
+        if (
+            cps_primary_op
+            and cps_primary_op.type == Op.CustomNpuOp
+            and MemType.Permanent_CPU not in target_mem_type_set
+        ):
             # If the primary-op is an NpuOp that means this is where an Npu subgraph
             # is called. Go into said subgraph and extract live ranges before continuing.
             # Use default allocation alignment of 16 for Npu tensors