Tim Hall | 79d07d2 | 2020-04-27 18:20:16 +0100 | [diff] [blame] | 1 | # Copyright (C) 2020 Arm Limited or its affiliates. All rights reserved. |
| 2 | # |
| 3 | # SPDX-License-Identifier: Apache-2.0 |
| 4 | # |
| 5 | # Licensed under the Apache License, Version 2.0 (the License); you may |
| 6 | # not use this file except in compliance with the License. |
| 7 | # You may obtain a copy of the License at |
| 8 | # |
| 9 | # www.apache.org/licenses/LICENSE-2.0 |
| 10 | # |
| 11 | # Unless required by applicable law or agreed to in writing, software |
| 12 | # distributed under the License is distributed on an AS IS BASIS, WITHOUT |
| 13 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 14 | # See the License for the specific language governing permissions and |
| 15 | # limitations under the License. |
Tim Hall | 79d07d2 | 2020-04-27 18:20:16 +0100 | [diff] [blame] | 16 | # Description: |
Louis Verhaard | 17afa28 | 2020-10-14 08:32:41 +0200 | [diff] [blame] | 17 | # Mark purpose and select formats for Tensors. |
Tim Hall | c8310b1 | 2020-06-17 14:53:11 +0100 | [diff] [blame] | 18 | from .errors import OperatorError |
Louis Verhaard | aee5d75 | 2020-09-30 09:01:52 +0200 | [diff] [blame] | 19 | from .operation import CustomType |
| 20 | from .operation import Op |
Louis Verhaard | 17afa28 | 2020-10-14 08:32:41 +0200 | [diff] [blame] | 21 | from .rewrite_graph import visit_graph_post_order |
Patrik Gustavsson | eca2e95 | 2020-05-27 09:15:11 +0200 | [diff] [blame] | 22 | from .tensor import MemType |
Diego Russo | e8a1045 | 2020-04-21 17:39:10 +0100 | [diff] [blame] | 23 | from .tensor import TensorFormat |
| 24 | from .tensor import TensorPurpose |
Tim Hall | 79d07d2 | 2020-04-27 18:20:16 +0100 | [diff] [blame] | 25 | |
| 26 | |
def get_format(purpose, arch):
    """Return the TensorFormat the architecture prescribes for a tensor purpose."""
    if purpose == TensorPurpose.Weights:
        return arch.default_weight_format
    if purpose == TensorPurpose.Unknown:
        return TensorFormat.Unknown
    if purpose in (TensorPurpose.FeatureMap, TensorPurpose.LUT, TensorPurpose.Scratch):
        # LUT and scratch tensors are laid out the same way as feature maps
        return arch.default_feature_map_format
    assert 0, "unknown tensor purpose {}".format(purpose)
Tim Hall | 79d07d2 | 2020-04-27 18:20:16 +0100 | [diff] [blame] | 37 | |
| 38 | |
def mark_purpose(tens, arch, purpose):
    """Assign purpose, format, mem_area and mem_type to a tensor.

    An already-marked tensor may only be re-marked with the same purpose,
    except LUT tensors, which keep their LUT purpose.
    """
    current = tens.purpose
    if current == TensorPurpose.Unknown:
        tens.purpose = purpose
    elif current != purpose and current != TensorPurpose.LUT:
        assert 0, "Cannot resolve tensor purpose {} and {} for tensor {}".format(tens.purpose, purpose, tens)
    tens.set_format(get_format(purpose, arch), arch)
    tens.mem_area = arch.tensor_storage_mem_area[tens.purpose]
    tens.mem_type = arch.tensor_storage_mem_type[tens.purpose]

    producers = tens.ops
    if len(producers) == 1 and producers[0].type == Op.Const:
        # special case constants, as they must be in permanent storage
        tens.mem_area = arch.permanent_storage_mem_area
        tens.mem_type = MemType.Permanent_NPU
Tim Hall | 79d07d2 | 2020-04-27 18:20:16 +0100 | [diff] [blame] | 53 | |
| 54 | |
def rewrite_mark_tensor_purpose(op, arch):
    """Mark the purpose of every tensor attached to an operation.

    Disconnected outputs and non-weight inputs are marked as feature maps,
    weight tensors as weights; already-marked inputs keep their purpose.
    Raises OperatorError when a Custom ExistingNpuOp op lacks its scratch
    tensor (expected as the third input).
    """
    # find disconnected outputs and mark as feature maps
    for tens in op.outputs:
        if not tens.consumers():
            mark_purpose(tens, arch, TensorPurpose.FeatureMap)
    weight_tensors = op.get_weight_tensors()
    for tens in op.inputs:
        if tens.purpose != TensorPurpose.Unknown:
            purpose = tens.purpose
        elif tens in weight_tensors:
            purpose = TensorPurpose.Weights
        else:
            purpose = TensorPurpose.FeatureMap
        mark_purpose(tens, arch, purpose)
    if op.type == Op.Reshape:
        # Reshape's input and output point to same data
        op.ofm.mem_area = op.ifm.mem_area

    if op.type == Op.Custom and op.attrs.get("custom_type") == CustomType.ExistingNpuOp:
        scratch_tensor = None

        if len(op.inputs) >= 3:
            scratch_tensor = op.inputs[2]  # should be existing scratch tensor
            if scratch_tensor.name.endswith("_scratch"):
                scratch_tensor.purpose = TensorPurpose.Scratch

        if scratch_tensor is None:
            # Bug fix: the exception was previously constructed but never
            # raised, so a missing scratch tensor went undetected here.
            raise OperatorError(op, "Scratch tensor not found.")
Tim Hall | 79d07d2 | 2020-04-27 18:20:16 +0100 | [diff] [blame] | 83 | |
| 84 | |
def mark_tensor_purpose(nng, arch, verbose_tensor_purpose=False):
    """Set purpose, format, mem_area and mem_type for all tensors in the graph."""
    for sg in nng.subgraphs:
        # Walk each subgraph from its outputs and mark every reachable tensor
        visit_graph_post_order(sg.output_tensors, arch, [], [rewrite_mark_tensor_purpose])
        # Subgraph outputs are always feature maps
        for out_tens in sg.output_tensors:
            mark_purpose(out_tens, arch, TensorPurpose.FeatureMap)

    if verbose_tensor_purpose:
        nng.print_graph_with_tensors()

    return nng