# Copyright (C) 2020-2021 Arm Limited or its affiliates. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the License); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Description:
# Mark purpose and select formats for Tensors.
from .graph_optimiser_util import memory_only_ops
from .operation import CustomType
from .operation import Op
from .rewrite_graph import visit_graph_post_order
from .tensor import MemType
from .tensor import TensorFormat
from .tensor import TensorPurpose


def get_format(purpose, arch):
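    # Return the tensor format that the target architecture expects for the given tensor purpose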
    if purpose in (TensorPurpose.FeatureMap, TensorPurpose.LUT, TensorPurpose.Scratch, TensorPurpose.ScratchFast):
        fmt = arch.default_feature_map_format
    elif purpose == TensorPurpose.Weights:
        fmt = arch.default_weight_format
    elif purpose == TensorPurpose.Unknown:
        fmt = TensorFormat.Unknown
    else:
        assert 0, "unknown tensor purpose {}".format(purpose)
    return fmt


def mark_purpose(tens, arch, purpose):
    # Sets tensor's purpose, format, mem_area and mem_type
    if tens.purpose == TensorPurpose.Unknown:
        tens.purpose = purpose
    elif tens.purpose not in (purpose, TensorPurpose.LUT):
        assert 0, "Cannot resolve tensor purpose {} and {} for tensor {}".format(tens.purpose, purpose, tens)

    fmt = get_format(purpose, arch)
    tens.set_format(fmt, arch)
    tens.mem_area = arch.tensor_storage_mem_area[tens.purpose]
    tens.mem_type = arch.tensor_storage_mem_type[tens.purpose]

    if (
        len(tens.ops) == 1
        and tens.ops[0].type == Op.Const
        and purpose not in (TensorPurpose.Scratch, TensorPurpose.ScratchFast)
    ):
        tens.mem_area = arch.permanent_storage_mem_area  # special case constants, as they must be in permanent storage
        tens.mem_type = MemType.Permanent_NPU


def rewrite_mark_tensor_purpose(op, arch):
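    # Graph-visitor callback: marks the purpose of op's input tensors and of any disconnected output tensors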
    # find disconnected outputs and mark as feature maps
    for tens in op.outputs:
        if not tens.consumers():
            mark_purpose(tens, arch, TensorPurpose.FeatureMap)
    weight_tensors = op.get_weight_tensors()
    for tens in op.inputs:
        if tens is None:
            continue
        if tens.purpose != TensorPurpose.Unknown:
            purpose = tens.purpose
        elif tens in weight_tensors:
            purpose = TensorPurpose.Weights
        else:
            purpose = TensorPurpose.FeatureMap
        mark_purpose(tens, arch, purpose)
    if op.type in memory_only_ops:
        # Memory only operator input and output point to same data
        op.ofm.mem_area = op.ifm.mem_area

    if op.type == Op.Custom and op.attrs.get("custom_type") == CustomType.ExistingNpuOp:
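        # inputs[2] and inputs[3] of an ExistingNpuOp are expected to carry the pre-existing scratch tensors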
        scratch_tensor = None

        if len(op.inputs) >= 3:
            scratch_tensor = op.inputs[2]  # should be existing scratch tensor
            if scratch_tensor.name.endswith("_scratch"):
                scratch_tensor.purpose = TensorPurpose.Scratch

        if len(op.inputs) >= 4:
            scratch_fast_tensor = op.inputs[3]  # should be existing scratch fast tensor
            if scratch_fast_tensor.name.endswith("_scratch_fast"):
                scratch_fast_tensor.purpose = TensorPurpose.ScratchFast

        if scratch_tensor is None:
            op.error("Scratch tensor not found.")


def mark_tensor_purpose(nng, arch, verbose_tensor_purpose=False):
    # Sets purpose, format, mem_area and mem_type for all tensors in the graph
    for sg in nng.subgraphs:
        visit_graph_post_order(sg.output_tensors, arch, [], [rewrite_mark_tensor_purpose])
        for tens in sg.output_tensors:
            mark_purpose(tens, arch, TensorPurpose.FeatureMap)

    if verbose_tensor_purpose:
        nng.print_graph_with_tensors()

    return nng
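

# Usage sketch (an assumption about the surrounding pipeline, not something defined in this file):
# the compiler is expected to run this pass once per network, roughly as
#     nng = mark_tensor_purpose(nng, arch, verbose_tensor_purpose=False)
# after which every tensor in the graph has its purpose, format, mem_area and mem_type assigned.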