# Copyright (C) 2021 Arm Limited or its affiliates. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the License); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Description:
# Functions used to read from a TOSA format file.
17# Functions used to read from a TOSA format file.
18import os.path
19import struct
20import sys
21
22import numpy as np
23
24from .nn_graph import Graph
25from .nn_graph import Subgraph
26from .operation import Op
27from .operation import Operation
28from .reader_util import clone_and_reshape_tensor
29from .reader_util import decode_str
30from .reader_util import fixup_tensors
31from .tensor import QuantizationParameters
32from .tensor import Tensor
33from .tflite_mapping import DataType
34from .tosa.TosaGraph import TosaGraph as TG
35from .tosa_mapping import datatype_map
36from .tosa_mapping import tosa_operator_map
37from .tosa_mapping import unsupported_tosa_operators
38
39
class TosaSubgraph:
    """One TOSA basic block parsed into internal tensors and operations.

    Builds the tensor list, the operations connecting those tensors, and the
    de-duplicated subgraph input/output tensor lists from a flatbuffer block.
    """

    def __init__(self, file_path, graph, block):
        """Parse a flatbuffer basic block.

        file_path: directory of the .tosa file; used to locate .npy files
                   holding constant tensor data.
        graph:     the owning TosaGraph.
        block:     flatbuffer block object (Name/Tensors/Operators/Inputs/Outputs).
        """
        self.graph = graph
        self.name = decode_str(block.Name())

        self.tensors = []
        for idx in range(block.TensorsLength()):
            self.tensors.append(self.parse_tensor(block.Tensors(idx), file_path))

        for idx in range(block.OperatorsLength()):
            self.parse_operator(idx, block.Operators(idx))

        # Get the subgraph inputs and outputs
        self.inputs = self.get_sg_inputs_remove_duplicates(block)
        self.outputs = self.get_sg_outputs_remove_duplicates(block)
        fixup_tensors(self.inputs, self.tensors)

    def get_sg_inputs_remove_duplicates(self, block):
        """Return the block's input tensors, dropping repeated entries."""
        inputs = []
        for idx in range(block.InputsLength()):
            tens_data = block.Inputs(idx)
            self.add_not_duplicate(tens_data, inputs, "input")
        return inputs

    def get_sg_outputs_remove_duplicates(self, block):
        """Return the block's output tensors, dropping repeated entries."""
        outputs = []
        for idx in range(block.OutputsLength()):
            tens_data = block.Outputs(idx)
            self.add_not_duplicate(tens_data, outputs, "output")
        return outputs

    def add_not_duplicate(self, tens_data, tensors, warning_str):
        """Append the tensor named by `tens_data` to `tensors` unless already present."""
        name = decode_str(tens_data)
        tensor = self.get_tensor_by_name(name)
        if tensor not in tensors:
            tensors.append(tensor)
        else:
            print(f"Warning: Subgraph {warning_str} tensor ({tensor}) already seen. Removing the duplicate.")

    def get_tensor_by_name(self, name):
        """Return the already-parsed tensor with the given name, or None."""
        for tens in self.tensors:
            if tens.name == name:
                return tens
        return None

    def parse_operator(self, op_index, op_data):
        """Create an Operation from flatbuffer operator data and wire up its tensors."""
        op_code = op_data.Op()
        if op_code in unsupported_tosa_operators:
            print("Unsupported Operator", op_code)
            assert False

        op_type, attr_serializer, quant_serializer, indices = tosa_operator_map[op_code]
        inputs = []
        outputs = []
        for idx in range(op_data.InputsLength()):
            input_tens = self.get_tensor_by_name(decode_str(op_data.Inputs(idx)))
            inputs.append(input_tens)
            assert input_tens is not None

        for idx in range(op_data.OutputsLength()):
            output_tens = self.get_tensor_by_name(decode_str(op_data.Outputs(idx)))
            outputs.append(output_tens)
            assert output_tens is not None

        # Operations are named after their first output tensor, when there is one
        name = "unknown_op_name"
        if len(outputs):
            name = outputs[0].name
        op = Operation(op_type, name)
        op.type.info.indices = indices
        op.op_index = op_index
        # NOTE: `inputs` is shared with op.inputs, so the in-place tensor
        # replacements below are visible through the operation as well
        op.inputs = inputs
        op.outputs = outputs

        for out in op.outputs:
            out.ops = [op]

        # TODO Transpose_conv and conv3d
        if op.type.is_depthwise_conv2d_op() or op.type.is_conv2d_op() or op.type == Op.FullyConnected:
            # Constant weights are stored in TOSA layout; clone and transpose
            # them into the layout the rest of the compiler expects
            if inputs[1].values is not None:
                if op.type == Op.FullyConnected:
                    inputs[1] = clone_and_reshape_tensor(inputs[1], (1, 0), False)
                elif op.type.is_conv2d_op():
                    inputs[1] = clone_and_reshape_tensor(inputs[1], (1, 2, 3, 0), False)
                elif op.type.is_depthwise_conv2d_op():
                    inputs[1] = clone_and_reshape_tensor(inputs[1], (1, 2, 0, 3), False)
            if op.type.needs_bias() and len(inputs) <= op_type.info.indices.biases[0]:
                # No Bias tensor
                inputs.append(None)
            if inputs[-1] and inputs[-1].values is not None:
                # Since bias tensor is used for both bias and scale,
                # a clone with a unique equivalence_id is needed
                inputs[-1] = clone_and_reshape_tensor(inputs[-1], (0,), True)

        if attr_serializer is not None:
            op.attrs = attr_serializer.deserialize(op_data)

            # Normalize 2D attributes to the internal NHWC-style 4-element form
            if "dilation" in op.attrs:
                dilation = op.attrs["dilation"]
                if len(dilation) == 2:
                    op.attrs["dilation"] = (1, dilation[0], dilation[1], 1)
                elif len(dilation) == 3:
                    # TODO CONV3D more to be done....
                    op.attrs["dilation"] = (dilation[0], dilation[1], dilation[2], 1)
            if "kernel" in op.attrs:
                kernel = op.attrs["kernel"]
                if len(kernel) == 2:
                    op.attrs["ksize"] = (1, kernel[0], kernel[1], 1)
                else:
                    # TODO CONV3D more to be done....
                    print("Unsupported kernel dimensions: ", len(kernel))
                    assert False

        if quant_serializer is not None:
            quant_info = quant_serializer.deserialize(op_data)

            # TODO tensor zero points currently set here
            # zero points part of Rescale operation, handled in tosa_graph_optimizer
            if "input_zp" in quant_info:
                self.set_tensor_zp(op.ifm, quant_info["input_zp"])
            if "weight_zp" in quant_info:
                self.set_tensor_zp(op.weights, quant_info["weight_zp"])
            # Bug fix: key was previously misspelled "ouput_zp", so the OFM
            # zero point was never picked up from the quantization info
            if "output_zp" in quant_info:
                self.set_tensor_zp(op.ofm, quant_info["output_zp"])
            if "a_zp" in quant_info:
                self.set_tensor_zp(op.ifm, quant_info["a_zp"])
            if "b_zp" in quant_info:
                self.set_tensor_zp(op.ifm2, quant_info["b_zp"])

    def parse_tensor(self, tens_data, file_path):
        """Create a Tensor (with default quantization) from flatbuffer tensor data.

        Constant data, when present, is loaded from the referenced .npy file
        found relative to `file_path`.
        """
        name = decode_str(tens_data.Name())
        np_shape = tens_data.ShapeAsNumpy()
        shape = list(np_shape) if type(np_shape) is np.ndarray else []
        tens_dtype = tens_data.Type()
        dtype = datatype_map[tens_dtype]

        tens = Tensor(shape, dtype, name)

        # Initialize quantization parameters
        tens.quantization = QuantizationParameters()

        tens.quantization.scale_f32 = 1.0
        if dtype == DataType.uint8:
            tens.quantization.quant_min = 0
            tens.quantization.quant_max = (1 << dtype.bits) - 1
        elif dtype in (DataType.int8, DataType.int16, DataType.int32, DataType.int64):
            tens.quantization.quant_min = -(1 << (dtype.bits - 1))
            tens.quantization.quant_max = (1 << (dtype.bits - 1)) - 1

        tens.values = None
        if tens_data.NpyFilename() is not None:
            try:
                fname = decode_str(tens_data.NpyFilename())
                tens.values = np.load(os.path.join(file_path, fname))
                assert list(tens.values.shape) == tens.shape
                tens.quant_values = tens.values
            except (struct.error, TypeError, RuntimeError) as e:
                print(f'Error: Invalid npy file. Got "{e}" ')
                sys.exit(1)

        return tens

    def set_tensor_zp(self, tens, zp):
        """Set the tensor's zero point; error out if it already has a different one."""
        if tens.quantization.zero_point is None:
            tens.quantization.zero_point = zp
        elif tens.quantization.zero_point != zp:
            print("Error: Setting tensor zp not possible, tensor already has different zero point")
            assert False
207
208
class TosaGraph:
    """Reads a .tosa flatbuffer file and builds the internal Graph (nng)."""

    def __init__(self, filename, batch_size, feed_dict, output_node_names, initialisation_nodes):
        """Load and parse `filename`.

        filename:   path to the .tosa file.
        batch_size: batch size for the internal Graph; defaults to 1 when None.
        feed_dict, output_node_names: currently unused here (kept for
            interface parity with other format readers).
        initialisation_nodes: stored on the instance.
        Exits the process with an error message on parse failure.
        """

        self.op_times = {}
        if batch_size is None:
            batch_size = 1
        self.batch_size = batch_size
        # Graph name is the file's base name without extension
        self.name = os.path.splitext(os.path.basename(filename))[0]
        self.initialisation_nodes = initialisation_nodes

        with open(filename, "rb") as f:
            buf = bytearray(f.read())

        try:
            # parsing_step tracks progress so the error message below can
            # report which stage of parsing failed
            parsing_step = "parsing root"
            tosa_graph = TG.GetRootAsTosaGraph(buf, 0)

            parsing_step = "parsing version"
            self.check_version(tosa_graph)

            parsing_step = "parsing blocks length"
            file_path = os.path.dirname(filename)
            self.subgraphs = []
            for b_idx in range(tosa_graph.BlocksLength()):
                parsing_step = f"parsing block {b_idx}"
                self.subgraphs.append(TosaSubgraph(file_path, self, tosa_graph.Blocks(b_idx)))

            # Mirror the parsed TOSA blocks into the internal Graph/Subgraph
            # representation used by the rest of the compiler
            self.nng = Graph(self.name, self.batch_size)
            for tosa_sg in self.subgraphs:
                sg = Subgraph(tosa_sg.name)
                sg.original_inputs = tosa_sg.inputs  # Preserve the original input order
                sg.output_tensors = tosa_sg.outputs
                self.nng.subgraphs.append(sg)

        except (struct.error, TypeError, RuntimeError) as e:
            print(f'Error: Invalid .tosa file. Got "{e}" while {parsing_step}.')
            sys.exit(1)

    def check_version(self, tosa_graph):
        """Assert that the file's TOSA version is exactly the supported 0.22.0."""
        version = tosa_graph.Version()
        version_str = f"{version._major()}.{version._minor()}.{version._patch()}"
        if version_str != "0.22.0":
            print(f"Unsupported TOSA version: {version_str}")
            assert False
253
254
def read_tosa(filename, batch_size, feed_dict, output_node_names, initialisation_nodes):
    """Parse a .tosa file and return the refreshed internal Graph (nng)."""
    nng = TosaGraph(filename, batch_size, feed_dict, output_node_names, initialisation_nodes).nng
    nng.refresh_after_modification()
    return nng