Rickard Bolin | bc6ee58 | 2022-11-04 08:24:29 +0000 | [diff] [blame^] | 1 | # SPDX-FileCopyrightText: Copyright 2020-2022 Arm Limited and/or its affiliates <open-source-office@arm.com> |
Tim Hall | 79d07d2 | 2020-04-27 18:20:16 +0100 | [diff] [blame] | 2 | # |
| 3 | # SPDX-License-Identifier: Apache-2.0 |
| 4 | # |
| 5 | # Licensed under the Apache License, Version 2.0 (the License); you may |
| 6 | # not use this file except in compliance with the License. |
| 7 | # You may obtain a copy of the License at |
| 8 | # |
| 9 | # www.apache.org/licenses/LICENSE-2.0 |
| 10 | # |
| 11 | # Unless required by applicable law or agreed to in writing, software |
| 12 | # distributed under the License is distributed on an AS IS BASIS, WITHOUT |
| 13 | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 14 | # See the License for the specific language governing permissions and |
| 15 | # limitations under the License. |
Rickard Bolin | bc6ee58 | 2022-11-04 08:24:29 +0000 | [diff] [blame^] | 16 | # |
Tim Hall | 79d07d2 | 2020-04-27 18:20:16 +0100 | [diff] [blame] | 17 | # Description: |
| 18 | # Functions used to write to a TensorFlow Lite format file. Supports adding in file identifiers. |
Tim Hall | 79d07d2 | 2020-04-27 18:20:16 +0100 | [diff] [blame] | 19 | import flatbuffers |
Diego Russo | e8a1045 | 2020-04-21 17:39:10 +0100 | [diff] [blame] | 20 | import flatbuffers.number_types as N |
| 21 | import numpy as np |
| 22 | from flatbuffers import encode |
Diego Russo | ea6111a | 2020-04-14 18:41:58 +0100 | [diff] [blame] | 23 | from flatbuffers.builder import UOffsetTFlags |
| 24 | |
Michael McGeagh | 7a6f843 | 2020-12-02 15:29:22 +0000 | [diff] [blame] | 25 | from .errors import VelaError |
Diego Russo | e8a1045 | 2020-04-21 17:39:10 +0100 | [diff] [blame] | 26 | from .nn_graph import PassPlacement |
Louis Verhaard | aee5d75 | 2020-09-30 09:01:52 +0200 | [diff] [blame] | 27 | from .operation import Op |
Patrik Gustavsson | 5e26eda | 2021-06-30 09:07:16 +0200 | [diff] [blame] | 28 | from .reader_util import align_inputs_indices |
Patrik Gustavsson | eca2e95 | 2020-05-27 09:15:11 +0200 | [diff] [blame] | 29 | from .tensor import MemType |
Johan Alfvén | b9f8159 | 2022-10-31 14:39:02 +0100 | [diff] [blame] | 30 | from .tensor import shape_num_elements |
Samuel Panijel | 6f4955a | 2021-06-10 13:40:03 +0300 | [diff] [blame] | 31 | from .tensor import TensorPurpose |
Tim Hall | 79d07d2 | 2020-04-27 18:20:16 +0100 | [diff] [blame] | 32 | from .tflite import Buffer |
| 33 | from .tflite import Metadata |
Diego Russo | e8a1045 | 2020-04-21 17:39:10 +0100 | [diff] [blame] | 34 | from .tflite import Model |
| 35 | from .tflite import Operator |
| 36 | from .tflite import OperatorCode |
| 37 | from .tflite import QuantizationParameters |
| 38 | from .tflite import SubGraph |
| 39 | from .tflite import Tensor |
| 40 | from .tflite_mapping import builtin_operator_inv_map |
| 41 | from .tflite_mapping import BuiltinOperator |
Diego Russo | e8a1045 | 2020-04-21 17:39:10 +0100 | [diff] [blame] | 42 | from .tflite_mapping import datatype_inv_map |
| 43 | |
# the python flatbuffer interface is missing a method to add in file identifier. patching it in here:

# TensorFlow Lite schema version written into the serialised model header
tflite_version = 3
# 4-character flatbuffer file identifier ("TFL3") placed after the root offset
tflite_file_identifier = "TFL" + str(tflite_version)
| 49 | |
def FinishWithFileIdentifier(self, rootTable, fid):
    """Finish the flatbuffer rooted at rootTable, prepending the 4-char file identifier fid.

    Patched onto flatbuffers.Builder because the bundled python API lacks it.
    Raises VelaError if fid is not exactly 4 characters.
    """
    if fid is None or len(fid) != 4:
        raise VelaError("FileIdentifier must be 4 chars")

    byte_flags = N.Uint8Flags
    # reserve room for the identifier plus the 4-byte root offset written by Finish()
    self.Prep(self.minalign, 4 + len(fid))
    # bytes are prepended, so walk the identifier back-to-front to store it in order
    for ch in reversed(fid):
        self.head = self.head - byte_flags.bytewidth
        encode.Write(byte_flags.packer_type, self.Bytes, self.Head(), ord(ch))

    return self.Finish(rootTable)
| 62 | |
| 63 | |
# Install the patched method on the Builder class so serialise() can emit the "TFL3" identifier
flatbuffers.Builder.FinishWithFileIdentifier = FinishWithFileIdentifier
| 65 | |
| 66 | |
def make_vector(v):
    """Return v unchanged when it already has a length; otherwise wrap the scalar in a list."""
    try:
        len(v)
    except TypeError:
        # scalars (ints, floats, numpy scalars) have no len() - promote to a 1-element list
        return [v]
    return v
| 73 | |
| 74 | |
class TFLiteSerialiser:
    """Serialises an optimised nng graph into a TensorFlow Lite flatbuffer model.

    Only subgraphs placed on the Cpu are written out. Scratch and scratch_fast
    tensors are mapped to two fixed buffer indices; every other tensor gets a
    unique buffer. An "OfflineMemoryAllocation" metadata entry is appended so
    TensorFlow Lite Micro can place arena tensors at pre-computed offsets.
    """

    BUF_IDX_SCRATCH = 0  # Always assign scratch to buffer 0
    BUF_IDX_SCRATCH_FAST = 1  # Always assign scratch_fast to buffer 1
    BUF_IDX_START = 2  # Unique buffer id for every tensor in all subgraphs

    def __init__(self, nng):
        """Collect the ops/tensors of *nng* that must be serialised and prepare lookup maps."""
        self.builder = flatbuffers.Builder(0)
        self.nng = nng

        self.buf_idx = TFLiteSerialiser.BUF_IDX_START
        self.buffers_to_write = []  # have an empty array there
        self.tensor_map_all = []  # Keep track of all subgraphs
        self.tensor_map_sg = []  # Keep track of one subgraph

        # these op types are graph bookkeeping only and have no TFLite operator
        self.ops_to_ignore = (Op.Const, Op.Placeholder, Op.SubgraphInput)

        # tensor -> axis permutation to apply when writing constant weights back out
        self.tensors_to_reshape = {}

        self.subgraphs_to_write = [sg for sg in self.nng.subgraphs if sg.placement == PassPlacement.Cpu]

        all_ops = []
        for sg in self.subgraphs_to_write:
            for ps in sg.passes:
                for op in ps.ops:
                    if op.type not in self.ops_to_ignore:
                        # swap from nng input indexing to TensorFlow Lite input indexing
                        self.align_nng_inputs_to_tflite(op)
                        all_ops.append(op)
                    if op.type.is_conv2d_op() or op.type.is_depthwise_conv2d_op():
                        # If values are None op has non-constant weights
                        if op.inputs[1].values is not None:
                            self.tensors_to_reshape[op.inputs[1]] = (3, 0, 1, 2)
                    if op.type == Op.FullyConnected:
                        # If values are None op has non-constant weights
                        if op.inputs[1].values is not None:
                            self.tensors_to_reshape[op.inputs[1]] = (1, 0)

        # list of tuple(Op, string); the custom code is only used for 3rd party custom operators
        self.operator_codes = sorted(set((op.type, op.attrs.get("custom_code", "")) for op in all_ops))
        self.operator_code_map = {}

    def align_nng_inputs_to_tflite(self, op):
        """Reorder op.inputs from nng index positions to the TensorFlow Lite positions."""
        from_indices = op.type.info.indices
        _, _, to_indices = builtin_operator_inv_map[op.type]
        op.inputs = align_inputs_indices(from_indices, to_indices, op.inputs)

    def write_byte_vector(self, v, alignment=1):
        """Write *v* as a flatbuffer vector of bytes; returns the vector offset."""
        builder = self.builder
        builder.StartVector(1, len(v), alignment)
        for e in v[::-1]:  # flatbuffers vectors are built back-to-front
            builder.PrependByte(e)
        return builder.EndVector()

    def write_int_vector(self, v):
        """Write *v* as a flatbuffer vector of int32; returns the vector offset."""
        builder = self.builder
        builder.StartVector(4, len(v), 4)
        for e in v[::-1]:
            builder.PrependInt32(e)
        return builder.EndVector()

    def write_long_vector(self, v):
        """Write *v* as a flatbuffer vector of int64; returns the vector offset."""
        builder = self.builder
        builder.StartVector(8, len(v), 8)
        for e in v[::-1]:
            builder.PrependInt64(e)
        return builder.EndVector()

    def write_float_vector(self, v):
        """Write *v* as a flatbuffer vector of float32; returns the vector offset."""
        builder = self.builder
        builder.StartVector(4, len(v), 4)
        for e in v[::-1]:
            builder.PrependFloat32(e)
        return builder.EndVector()

    def write_offset_vector(self, v):
        """Write *v* (a list of table offsets) as a flatbuffer vector; returns the vector offset."""
        builder = self.builder
        builder.StartVector(4, len(v), 4)
        for e in v[::-1]:
            builder.PrependUOffsetTRelative(e)
        return builder.EndVector()

    def assign_buffers_to_tensors(self, tensors, scratch_tensor):
        """Return a dict mapping each tensor to its model buffer index.

        Arena-allocated tensors share BUF_IDX_SCRATCH, scratch_fast tensors share
        BUF_IDX_SCRATCH_FAST, everything else gets a fresh unique index.
        """
        if scratch_tensor is not None:
            scratch_tensor_mem_area = scratch_tensor.mem_area
        else:
            scratch_tensor_mem_area = None  # all tensors are initialised to MemArea.Unknown

        buffer_map = {}

        for tens in tensors:
            # Set buffer ids depending on allocation
            if tens.is_allocated_in_tensor_arena(scratch_tensor_mem_area):
                buffer_map[tens] = TFLiteSerialiser.BUF_IDX_SCRATCH
            elif tens.mem_type == MemType.Scratch_fast:
                # For Scratch_fast when not co-allocated with scratch in the TensorArena:
                buffer_map[tens] = TFLiteSerialiser.BUF_IDX_SCRATCH_FAST
            else:
                buffer_map[tens] = self.buf_idx
                self.buf_idx += 1

        # Initialize/extend buffers_to_write to a length equal to number of buffers so
        # they can be appended at the correct index during tensor serialization
        self.buffers_to_write += [None] * (self.buf_idx)

        return buffer_map

    def serialise_operator_code(self, idx, op_type, custom_code):
        """Serialise one OperatorCode table and record its lookup entry in operator_code_map."""
        builder = self.builder
        custom_code_offset = None
        if op_type == Op.Custom:
            tf_code, opt_serializer, _ = builtin_operator_inv_map[op_type]
            custom_code_offset = builder.CreateString(custom_code)
        else:
            assert (
                op_type in builtin_operator_inv_map
            ), "Vela does not contain a mapping to serialise {} operator to a TensorFlow Lite operator".format(op_type)
            tf_code, opt_serializer, _ = builtin_operator_inv_map[op_type]

        if op_type == Op.CustomNpuOp:
            assert (
                tf_code == BuiltinOperator.CUSTOM
            ), "Vela only supports serialising NpuOp operators as TensorFlow Lite Custom operators"
            custom_code_offset = builder.CreateString("ethos-u")

        # there can be multiple different types of 3rd party custom operators (i.e. non-"ethos-u" ones). therefore we
        # need to add an extra level of indirection to this particular entry in the operator_code_map to allow for the
        # correct lookup later on
        if op_type == Op.Custom:
            if op_type not in self.operator_code_map:
                self.operator_code_map[op_type] = {}
            self.operator_code_map[op_type][custom_code] = (idx, tf_code, opt_serializer)
        else:
            self.operator_code_map[op_type] = (idx, tf_code, opt_serializer)

        OperatorCode.OperatorCodeStart(builder)
        # deprecated_builtin_code saturates at 127 for codes beyond the old int8 range
        OperatorCode.OperatorCodeAddDeprecatedBuiltinCode(builder, tf_code if tf_code < 127 else 127)
        OperatorCode.OperatorCodeAddBuiltinCode(builder, tf_code)
        if custom_code_offset is not None:
            OperatorCode.OperatorCodeAddCustomCode(builder, custom_code_offset)

        return OperatorCode.OperatorCodeEnd(builder)

    def serialise_quantization_parameters(self, quant):
        """Serialise a QuantizationParameters table for *quant*; returns None when quant is None."""
        builder = self.builder

        qp = None
        min = None
        max = None
        scale = None
        zero_point = None
        if quant is not None:
            if quant.min is not None:
                min = self.write_float_vector(make_vector(quant.min))
            if quant.max is not None:
                max = self.write_float_vector(make_vector(quant.max))
            if quant.scale_f32 is not None:
                scale = self.write_float_vector(make_vector(quant.scale_f32))
            if quant.zero_point is not None:
                zero_point = self.write_long_vector(make_vector(quant.zero_point))

            QuantizationParameters.QuantizationParametersStart(builder)
            if min is not None:
                QuantizationParameters.QuantizationParametersAddMin(builder, min)
            if max is not None:
                QuantizationParameters.QuantizationParametersAddMax(builder, max)
            if scale is not None:
                QuantizationParameters.QuantizationParametersAddScale(builder, scale)
            if zero_point is not None:
                QuantizationParameters.QuantizationParametersAddZeroPoint(builder, zero_point)
            if quant.quant_dim is not None:
                QuantizationParameters.QuantizationParametersAddQuantizedDimension(builder, quant.quant_dim)
            qp = QuantizationParameters.QuantizationParametersEnd(builder)

        return qp

    def serialise_tensor(self, tens):
        """Serialise one Tensor table and stash its constant data in buffers_to_write."""
        builder = self.builder
        if shape_num_elements(tens.original_shape) != shape_num_elements(tens.shape):
            # shapes have changed size, therefore assume that the latest (modified) shape is correct
            tens_shape = tens.shape
        else:
            # shapes have not changed size, therefore the original shape is valid
            tens_shape = tens.original_shape
        values = tens.values

        if values is None:
            values = np.empty(shape=(0), dtype=np.uint8)

        if tens in self.tensors_to_reshape:
            # restore the weight layout expected by TensorFlow Lite (permutation set in __init__)
            reorder = self.tensors_to_reshape[tens]
            tens_shape = [tens_shape[idx] for idx in reorder]
            values = values.transpose(reorder)

        buf_id = self.buffer_map[tens]
        self.buffers_to_write[buf_id] = values.flatten().view(np.uint8)

        shape = self.write_int_vector(tens_shape)

        name = builder.CreateString(tens.name)
        quant = self.serialise_quantization_parameters(tens.quantization)

        Tensor.TensorStart(builder)
        Tensor.TensorAddShape(builder, shape)
        Tensor.TensorAddType(builder, datatype_inv_map[tens.dtype])
        # All tensors must have a valid backing buffer, even if it is empty.
        # Empty buffers should be kept unique for TensorFlow Lite Micro
        Tensor.TensorAddBuffer(builder, buf_id)
        Tensor.TensorAddName(builder, name)
        if quant is not None:
            Tensor.TensorAddQuantization(builder, quant)
        Tensor.TensorAddIsVariable(builder, tens.is_variable)

        res = Tensor.TensorEnd(builder)
        return res

    def serialise_operator(self, op):
        """Serialise one Operator table, including its builtin/custom options."""
        builder = self.builder

        # an input not present in the subgraph tensor map is encoded as -1 (optional/unused input)
        inputs_offset = self.write_int_vector(
            [self.tensor_map_sg[tens] if tens in self.tensor_map_sg else -1 for tens in op.inputs]
        )
        outputs_offset = self.write_int_vector(
            [self.tensor_map_sg[tens] for tens in op.outputs if tens in self.tensor_map_sg]
        )
        intermediates_offset = self.write_int_vector(
            [self.tensor_map_sg[tens] for tens in op.intermediates if tens in self.tensor_map_sg]
        )

        if op.type == Op.Custom:
            op_idx, tflop, opt_serializer = self.operator_code_map[op.type][op.attrs.get("custom_code", "")]
        else:
            op_idx, tflop, opt_serializer = self.operator_code_map[op.type]

        builtin_opt_offset = None
        custom_opt_offset = None
        if opt_serializer is not None:
            # translate nng attribute names/layouts into the TFLite option field names
            attrs = dict(op.attrs)
            if "strides" in attrs:
                attrs["stride_h"] = attrs["strides"][1]
                attrs["stride_w"] = attrs["strides"][2]
            if "ksize" in attrs:
                attrs["filter_height"] = attrs["ksize"][1]
                attrs["filter_width"] = attrs["ksize"][2]
            if "dilation" in attrs:
                attrs["dilation_h_factor"] = attrs["dilation"][1]
                attrs["dilation_w_factor"] = attrs["dilation"][2]
            if "channel_multiplier" in attrs:
                attrs["depth_multiplier"] = attrs["channel_multiplier"]
            attrs["fused_activation_function"] = op.activation.op_type if op.activation is not None else None

            builtin_opt_offset, custom_opt_offset = opt_serializer.serialize(builder, attrs)

        mutating_variable_inputs_offset = self.write_byte_vector([])
        Operator.OperatorStart(builder)
        Operator.OperatorAddOpcodeIndex(builder, op_idx)
        Operator.OperatorAddInputs(builder, inputs_offset)
        Operator.OperatorAddOutputs(builder, outputs_offset)
        Operator.OperatorAddIntermediates(builder, intermediates_offset)

        if builtin_opt_offset is not None:
            Operator.OperatorAddBuiltinOptionsType(builder, opt_serializer.builtin_opt_type)
            Operator.OperatorAddBuiltinOptions(builder, builtin_opt_offset)
        if custom_opt_offset is not None:
            Operator.OperatorAddCustomOptions(builder, custom_opt_offset)
            Operator.OperatorAddCustomOptionsFormat(builder, opt_serializer.custom_opt_format)

        Operator.OperatorAddMutatingVariableInputs(builder, mutating_variable_inputs_offset)
        return Operator.OperatorEnd(builder)

    def serialise_subgraph(self, sg, name):
        """Serialise one SubGraph table: its tensors, inputs, outputs and operators."""
        builder = self.builder
        all_ops = []
        placeholder_ops = []

        for ps in sg.passes:
            for op in ps.ops:
                if op.type not in self.ops_to_ignore:
                    all_ops.append(op)
                elif op.type == Op.Placeholder:
                    placeholder_ops.append(op)

        # Make sure all original tensors are written back, special case for Ops
        # with connected subgraphs. Even though not all inputs are used,
        # the reference kernel expects all inputs to be in the tflite file.
        # Since we traverse the graph starting with all outputs they are
        # always added but if an input is not referenced it will not be added
        # to an op.
        tensor_set = set(sg.original_inputs)

        # Add the tensors from all valid ops, as well as the tensors from placeholder ops
        # This allows us to serialise tensors which arent attached to any specific ops,
        # e.g. due to an empty graph containing no ops
        for op in all_ops + placeholder_ops:
            for tens in op.inputs + op.outputs + op.intermediates:
                if tens is not None:
                    tensor_set.add(tens)

        # deterministic ordering: sort by name, break ties with insertion index
        all_tensors = [tens for nm, idx, tens in sorted((tens.name, idx, tens) for idx, tens in enumerate(tensor_set))]

        scratch_tensors = [tens for tens in all_tensors if tens.purpose is TensorPurpose.Scratch]

        if len(scratch_tensors) == 0:
            scratch_tensor = None
        else:
            assert len(scratch_tensors) == 1, "Multiple scratch tensors"
            scratch_tensor = scratch_tensors[0]

        self.tensor_map_sg = {tens: idx for idx, tens in enumerate(all_tensors)}
        self.buffer_map = self.assign_buffers_to_tensors(all_tensors, scratch_tensor)
        self.tensor_map_all.append(self.tensor_map_sg)

        tensors_offset = self.write_offset_vector([self.serialise_tensor(tens) for tens in all_tensors])

        # Make sure the input_tensors haven't been modified
        assert all(inp in sg.original_inputs for inp in sg.input_tensors)
        inputs = [self.tensor_map_sg[tens] for tens in sg.original_inputs if tens in self.tensor_map_sg]

        inputs_offset = self.write_int_vector(inputs)
        outputs_offset = self.write_int_vector(
            [self.tensor_map_sg[tens] for tens in sg.output_tensors if tens in self.tensor_map_sg]
        )

        operators_offset = self.write_offset_vector([self.serialise_operator(op) for op in all_ops])

        SubGraph.SubGraphStart(builder)
        SubGraph.SubGraphAddTensors(builder, tensors_offset)
        SubGraph.SubGraphAddInputs(builder, inputs_offset)
        SubGraph.SubGraphAddOutputs(builder, outputs_offset)

        SubGraph.SubGraphAddOperators(builder, operators_offset)
        SubGraph.SubGraphAddName(builder, name)

        return SubGraph.SubGraphEnd(builder)

    def write_aligned_bytes(self, buf):
        """Write raw bytes into the flatbuffer, 16-byte aligned; returns the vector offset."""
        builder = self.builder
        builder.assertNotNested()
        builder.nested = True
        data = bytes(buf)
        length_bytes = UOffsetTFlags.py_type(len(data))
        builder.vectorNumElems = length_bytes
        builder.Prep(16, length_bytes)  # Reserve aligned storage
        builder.head = UOffsetTFlags.py_type(builder.Head() - length_bytes)  # Update FlatBuffer internal pointer
        builder.Bytes[builder.Head() : builder.Head() + length_bytes] = data  # Assign bytes to aligned area
        return builder.EndVector()

    def serialise_buffer(self, buf):
        """Serialise one Buffer table; *buf* may be None for an empty buffer."""
        builder = self.builder
        data = None
        if buf is not None:
            data = self.write_aligned_bytes(buf)
        Buffer.BufferStart(builder)
        if data is not None:
            Buffer.BufferAddData(builder, data)
        return Buffer.BufferEnd(builder)

    def serialise_metadata(self, metadata):
        """Serialise one Metadata table from a (name, buffer_index) tuple."""
        builder = self.builder
        name = builder.CreateString(metadata[0])

        Metadata.MetadataStart(builder)
        Metadata.MetadataAddName(builder, name)
        Metadata.MetadataAddBuffer(builder, metadata[1])

        return Metadata.MetadataEnd(builder)

    def serialise_model(self):
        """Serialise the complete Model table; returns its flatbuffer offset."""
        builder = self.builder
        operator_code_offset = self.write_offset_vector(
            [self.serialise_operator_code(idx, optype, code) for idx, (optype, code) in enumerate(self.operator_codes)]
        )

        description = builder.CreateString("Vela Optimised")

        subgraph_offset = self.write_offset_vector(
            [self.serialise_subgraph(sg, builder.CreateString(sg.name)) for sg in self.subgraphs_to_write]
        )

        # Fill the metadata buffer
        version = np.int32(0)
        subgraph_idx = np.int32(len(self.subgraphs_to_write))

        nbr_tensors_all = np.sum([len(tensor_map_sg) for tensor_map_sg in self.tensor_map_all], dtype=np.int32)

        offlineAlloc = [version, subgraph_idx, nbr_tensors_all]

        if not any([name == b"OfflineMemoryAllocation" for name, _ in self.nng.metadata]):
            for tensor_map_sg in self.tensor_map_all:
                nbr_tensors_sg = np.int32(len(tensor_map_sg))
                # An offset of -1 indicates that the tensor will be allocated online by Tensorflow Lite Micro
                offsets = [np.int32(-1)] * nbr_tensors_sg
                # Ensure that the order of the offsets match the order of the tensors
                for tens, idx in tensor_map_sg.items():
                    # Set offsets for tensor allocated in Tensor Arena or in the scratch_fast area
                    if tens.mem_type in (MemType.Scratch, MemType.Scratch_fast):
                        offsets[idx] = np.int32(tens.address) if tens.address is not None else np.int32(0)

                offlineAlloc += offsets

            self.nng.metadata.append(("OfflineMemoryAllocation", np.array(offlineAlloc)))

        metadata_list = []
        for name, buffer in self.nng.metadata:
            self.buffers_to_write.append(buffer)
            metadata_list.append((name, len(self.buffers_to_write) - 1))

        buffers_offset = self.write_offset_vector([self.serialise_buffer(buf) for buf in self.buffers_to_write])
        metadata_offset = self.write_offset_vector([self.serialise_metadata(metadata) for metadata in metadata_list])

        Model.ModelStart(builder)
        Model.ModelAddVersion(builder, tflite_version)
        Model.ModelAddOperatorCodes(builder, operator_code_offset)
        Model.ModelAddSubgraphs(builder, subgraph_offset)
        Model.ModelAddDescription(builder, description)
        Model.ModelAddBuffers(builder, buffers_offset)
        Model.ModelAddMetadata(builder, metadata_offset)
        return Model.ModelEnd(builder)

    def serialise(self):
        """Serialise the whole graph and return the finished flatbuffer as a bytearray."""
        model = self.serialise_model()

        self.builder.FinishWithFileIdentifier(model, tflite_file_identifier)

        return self.builder.Output()

    def write(self, filename):
        """Serialise the graph and write the resulting model to *filename*.

        Bug fix: the previous implementation referenced ``self.filename`` and
        ``self.serialised_buf``, neither of which is ever assigned, so calling
        it raised AttributeError and ignored the *filename* argument.
        """
        with open(filename, "wb") as f:
            f.write(self.serialise())
| 505 | |
| 506 | |
def write_tflite(nng, filename):
    """Serialise the nng graph *nng* into TensorFlow Lite format and write it to *filename*."""
    serialised = TFLiteSerialiser(nng).serialise()
    with open(filename, "wb") as f:
        f.write(serialised)