MLBEDSW-2306 Add support for more memory configurations

Additional supported memory configurations:
- Permanent storage in DRAM
- Tensor arena in either DRAM or SRAM

Signed-off-by: Patrik Gustavsson <patrik.gustavsson@arm.com>
Change-Id: I20beb7151e306bfdba540e7c0b2a7b478b4d94e1
diff --git a/ethosu/vela/mark_tensors.py b/ethosu/vela/mark_tensors.py
index c4f2bae..705f839 100644
--- a/ethosu/vela/mark_tensors.py
+++ b/ethosu/vela/mark_tensors.py
@@ -18,6 +18,7 @@
 from . import rewrite_graph
 from . import weight_compressor
 from .errors import OperatorError
+from .tensor import MemType
 from .tensor import TensorFormat
 from .tensor import TensorPurpose
 from .tflite_mapping import custom_prefix
@@ -254,11 +255,13 @@
         else:
             assert 0, "Cannot resolve tensor purpose %s and %s for tensor %s" % (tens.purpose, purpose, tens)
         tens.mem_area = arch.tensor_storage_mem_area[tens.purpose]
+        tens.mem_type = arch.tensor_storage_mem_type[tens.purpose]
 
         if len(tens.ops) == 1 and tens.ops[0].type == "Const":
             tens.mem_area = (
                 arch.permanent_storage_mem_area
             )  # special case constants, as they must be in permanent storage
+            tens.mem_type = MemType.Permanent_NPU
 
     def rewrite_mark_tensor_purpose(op, arch):
         # find disconnected outputs and mark as parameters