blob: af6ff842a756abd5b519c03ecd4ee1dd3c6b9f49 [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
Finn Williams2605b232020-06-10 15:53:46 +01002// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
Kevin May43a799c2019-02-08 16:31:42 +00003// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
Matthew Benthamff130e22020-01-17 11:47:42 +00008#include <armnn/Descriptors.hpp>
Kevin May43a799c2019-02-08 16:31:42 +00009#include <armnn/Exceptions.hpp>
Matthew Benthamff130e22020-01-17 11:47:42 +000010#include <armnn/TypesUtils.hpp>
11#include <armnn/LstmParams.hpp>
12#include <armnn/QuantizedLstmParams.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000013
Matteo Martincighe011d202019-11-28 11:35:47 +000014#include <armnnUtils/Permute.hpp>
Mike Kellyc9ea45a2020-02-28 18:11:58 +000015#include <armnnUtils/Transpose.hpp>
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +010016#include <armnn/utility/Assert.hpp>
Jan Eilers8eb25602020-03-09 12:13:48 +000017#include <armnn/utility/IgnoreUnused.hpp>
Matthew Sloyan0663d662020-09-14 11:47:26 +010018#include <armnn/utility/NumericCast.hpp>
Matteo Martincighe011d202019-11-28 11:35:47 +000019
Kevin May43a799c2019-02-08 16:31:42 +000020#include <ParserHelper.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000021#include <VerificationHelpers.hpp>
22
Colm Donelan5b5c2222020-09-09 12:48:16 +010023#include <fmt/format.h>
Kevin May43a799c2019-02-08 16:31:42 +000024
Kevin May43a799c2019-02-08 16:31:42 +000025#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000026#include <algorithm>
27#include <limits>
28#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000029
30using armnn::ParseException;
31using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000032using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000033
Derek Lamberti0028d1b2019-02-20 13:57:42 +000034namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000035{
Kevin May43a799c2019-02-08 16:31:42 +000036
// Public-facade constructor: all deserialization work is delegated to the
// pimpl (DeserializerImpl), so this header-visible class stays ABI-stable.
IDeserializer::IDeserializer() : pDeserializerImpl(new DeserializerImpl()){}

IDeserializer::~IDeserializer() = default;
40
// Creates a deserializer on the heap; caller owns the pointer and must
// release it via IDeserializer::Destroy (pairs with Create() below).
IDeserializer *IDeserializer::CreateRaw()
{
    return new IDeserializer();
}
45
// Preferred factory: wraps CreateRaw() in a smart pointer whose deleter is
// IDeserializer::Destroy, so destruction happens on the library side.
IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}
50
// Counterpart to CreateRaw(): deletes an instance allocated by this library.
void IDeserializer::Destroy(IDeserializer *parser)
{
    delete parser;
}
55
// Deserializes a flatbuffers-serialized network held in memory.
// Forwards to the pimpl implementation.
armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(const std::vector<uint8_t> &binaryContent)
{
    return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
}
60
// Deserializes a flatbuffers-serialized network read from a stream.
// Forwards to the pimpl implementation.
armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(std::istream &binaryContent)
{
    return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
}
65
// Looks up the binding info (binding id + tensor info) for a named network
// input. Forwards to the pimpl implementation.
BindingPointInfo IDeserializer::GetNetworkInputBindingInfo(unsigned int layerId, const std::string &name) const
{
    return pDeserializerImpl->GetNetworkInputBindingInfo(layerId, name);
}
70
// Looks up the binding info (binding id + tensor info) for a named network
// output. Forwards to the pimpl implementation.
BindingPointInfo IDeserializer::GetNetworkOutputBindingInfo(unsigned int layerId, const std::string &name) const
{
    return pDeserializerImpl->GetNetworkOutputBindingInfo(layerId, name);
}
75
namespace
{

// Sentinel layer index: CheckLayers() skips its layer-index bounds check when
// this value is passed (used for lookups not tied to a concrete layer).
const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
80
Finn Williams85d36712021-01-26 22:30:06 +000081 void CheckGraph(const GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000082 unsigned int layersIndex,
83 const CheckLocation& location)
84{
85 if (graph->layers() == nullptr)
86 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010087 throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
88 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
89 "layers:{1} at {2}",
90 location.m_Function,
91 layersIndex,
92 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +000093 }
94 else if (layersIndex >= graph->layers()->size())
95 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010096 throw ParseException(fmt::format("{0} was called with an invalid layers index. layers:{1} at {2}",
97 location.m_Function,
98 layersIndex,
99 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000100 }
101}
102
// Validates, in order: the graph's layer table is non-null, 'layersIndex' is in
// range, and 'layerIndex' addresses a layer (unless it is VIRTUAL_LAYER_ID,
// which deliberately bypasses the layer-index check).
// Throws armnn::ParseException with the offending location on failure.
void CheckLayers(const GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
                                         "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(fmt::format("{0} was called with an invalid layers index. "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    // NOTE(review): graph->layers() returns a pointer, so layers()[layersIndex]
    // applies pointer arithmetic rather than element access; it only reads the
    // intended vector when layersIndex == 0 — presumably always the case here,
    // but worth confirming against the callers/schema.
    else if (layerIndex >= graph->layers()[layersIndex].size()
            && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(fmt::format("{0} was called with an invalid layer index. "
                                         "layers:{1} layer:{2} at {3}",
                                         location.m_Function,
                                         layersIndex,
                                         layerIndex,
                                         location.FileLine()));
    }
}
136
Finn Williams85d36712021-01-26 22:30:06 +0000137void CheckTensorPtr(TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000138 const CheckLocation& location)
139{
140 if (rawPtr == nullptr)
141 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100142 throw ParseException(fmt::format("{0} was called with a null tensor pointer. at {1}",
143 location.m_Function,
144 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000145 }
146}
147
Finn Williams85d36712021-01-26 22:30:06 +0000148void CheckConstTensorPtr(ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000149 const CheckLocation& location)
150{
151 if (rawPtr == nullptr)
152 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100153 throw ParseException(fmt::format("{0} was called with a null const tensor pointer. at {1}",
154 location.m_Function,
155 location.FileLine()));
Mike Kellya0766c32019-02-19 17:22:07 +0000156 }
157}
158
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000159void CheckConstTensorSize(const unsigned int constTensorSize,
160 const unsigned int tensorSize,
161 const CheckLocation& location)
162{
163 if (constTensorSize != tensorSize)
164 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100165 throw ParseException(fmt::format("{0} wrong number of components supplied to tensor. at:{1}",
166 location.m_Function,
167 location.FileLine()));
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000168 }
169}
170
// Convenience wrappers over the Check* helpers above: each captures the call
// site via CHECK_LOCATION() so thrown ParseExceptions report function and
// file:line of the caller.
#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
} // anonymous namespace
186
Saoirse Stewart263829c2019-02-19 15:54:14 +0000187bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
188{
189 const unsigned int actualSize = actual.GetNumDimensions();
190 if (actualSize != expected.size())
191 {
192 return false;
193 }
194
195 for (unsigned int i = 0u; i < actualSize; i++)
196 {
197 if (actual[i] != static_cast<unsigned int>(expected[i]))
198 {
199 return false;
200 }
201 }
202
203 return true;
204}
205
// Builds the layer-type -> parser dispatch table. Every slot defaults to
// ParseUnsupportedLayer; supported layer types are then registered explicitly,
// so an unregistered (new/unknown) type fails loudly at parse time.
IDeserializer::DeserializerImpl::DeserializerImpl()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &IDeserializer::DeserializerImpl::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer] = &DeserializerImpl::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer] = &DeserializerImpl::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &DeserializerImpl::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer] = &DeserializerImpl::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &DeserializerImpl::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &DeserializerImpl::ParseBatchNormalization;
    m_ParserFunctions[Layer_ComparisonLayer] = &DeserializerImpl::ParseComparison;
    m_ParserFunctions[Layer_CastLayer] = &DeserializerImpl::ParseCast;
    m_ParserFunctions[Layer_ConcatLayer] = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer] = &DeserializerImpl::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &DeserializerImpl::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthToSpaceLayer] = &DeserializerImpl::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &DeserializerImpl::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &DeserializerImpl::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &DeserializerImpl::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &DeserializerImpl::ParseDivision;
    m_ParserFunctions[Layer_ElementwiseUnaryLayer] = &DeserializerImpl::ParseElementwiseUnary;
    m_ParserFunctions[Layer_EqualLayer] = &DeserializerImpl::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &DeserializerImpl::ParseFullyConnected;
    m_ParserFunctions[Layer_FillLayer] = &DeserializerImpl::ParseFill;
    m_ParserFunctions[Layer_FloorLayer] = &DeserializerImpl::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &DeserializerImpl::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer] = &DeserializerImpl::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer] = &DeserializerImpl::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &DeserializerImpl::ParseL2Normalization;
    m_ParserFunctions[Layer_LogicalBinaryLayer] = &DeserializerImpl::ParseLogicalBinary;
    m_ParserFunctions[Layer_LogSoftmaxLayer] = &DeserializerImpl::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer] = &DeserializerImpl::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &DeserializerImpl::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &DeserializerImpl::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &DeserializerImpl::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer] = &DeserializerImpl::ParseMerge;
    // MergerLayer is the deprecated name for ConcatLayer; both map to ParseConcat.
    m_ParserFunctions[Layer_MergerLayer] = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer] = &DeserializerImpl::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &DeserializerImpl::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &DeserializerImpl::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &DeserializerImpl::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &DeserializerImpl::ParsePooling2d;
    m_ParserFunctions[Layer_PreluLayer] = &DeserializerImpl::ParsePrelu;
    m_ParserFunctions[Layer_QLstmLayer] = &DeserializerImpl::ParseQLstm;
    m_ParserFunctions[Layer_QuantizeLayer] = &DeserializerImpl::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer] = &DeserializerImpl::ParseQuantizedLstm;
    m_ParserFunctions[Layer_RankLayer] = &DeserializerImpl::ParseRank;
    m_ParserFunctions[Layer_ReduceLayer] = &DeserializerImpl::ParseReduce;
    m_ParserFunctions[Layer_ReshapeLayer] = &DeserializerImpl::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &DeserializerImpl::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer] = &DeserializerImpl::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer] = &DeserializerImpl::ParseRsqrt;
    m_ParserFunctions[Layer_ShapeLayer] = &DeserializerImpl::ParseShape;
    m_ParserFunctions[Layer_SliceLayer] = &DeserializerImpl::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer] = &DeserializerImpl::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &DeserializerImpl::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer] = &DeserializerImpl::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer] = &DeserializerImpl::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer] = &DeserializerImpl::ParseStack;
    m_ParserFunctions[Layer_StandInLayer] = &DeserializerImpl::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer] = &DeserializerImpl::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &DeserializerImpl::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer] = &DeserializerImpl::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &DeserializerImpl::ParseTransposeConvolution2d;
    m_ParserFunctions[Layer_TransposeLayer] = &DeserializerImpl::ParseTranspose;
}
274
// Returns the LayerBase of the layer at 'layerIndex', unwrapping the
// flatbuffers union to the concrete layer table first. Input/Output layers
// carry an extra Bindable wrapper, hence the double base()->base().
// Throws armnn::ParseException for Layer_NONE or an unrecognized type.
LayerBaseRawPtr IDeserializer::DeserializerImpl::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_CastLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_CastLayer()->base();
        case Layer::Layer_ComparisonLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_ElementwiseUnaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FillLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FillLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            // Bindable layer: unwrap BindableLayerBase -> LayerBase.
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_InstanceNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LogicalBinaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer()->base();
        case Layer::Layer_LogSoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            // Bindable layer: unwrap BindableLayerBase -> LayerBase.
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_RankLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
        case Layer::Layer_ReduceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReduceLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_ShapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ShapeLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StandInLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_TransposeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(fmt::format("Layer type {} not recognized", layerType));
    }
}
412
Finn Williams85d36712021-01-26 22:30:06 +0000413std::string IDeserializer::DeserializerImpl::GetLayerName(const GraphPtr& graph, unsigned int index)
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000414{
415 auto layer = GetBaseLayer(graph, index);
416 assert(layer);
417 return layer->layerName()->str();
418}
419
Finn Williams85d36712021-01-26 22:30:06 +0000420int32_t IDeserializer::DeserializerImpl::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000421{
422 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
423
424 if (layerType == Layer::Layer_InputLayer)
425 {
426 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
427 }
428 else if ( layerType == Layer::Layer_OutputLayer )
429 {
430 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
431 }
432 return 0;
433}
434
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000435armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000436{
437 switch (dataLayout)
438 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000439 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000440 return armnn::DataLayout::NHWC;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000441 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000442 default:
443 return armnn::DataLayout::NCHW;
444 }
445}
446
// Maps the serialized activation-function enum onto the armnn one.
// Unknown values silently fall back to Sigmoid (the default branch).
armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        case armnnSerializer::ActivationFunction_Elu:
            return armnn::ActivationFunction::Elu;
        case armnnSerializer::ActivationFunction_HardSwish:
            return armnn::ActivationFunction::HardSwish;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}
477
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100478armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
479{
480 switch (function)
481 {
482 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
483 return armnn::ArgMinMaxFunction::Max;
484 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
485 default:
486 return armnn::ArgMinMaxFunction::Min;
487 }
488}
489
// Maps the serialized comparison-operation enum onto the armnn one.
// Unknown values fall back to NotEqual (the default branch).
armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
            return armnn::ComparisonOperation::Equal;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
            return armnn::ComparisonOperation::Greater;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
            return armnn::ComparisonOperation::GreaterOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
            return armnn::ComparisonOperation::Less;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
            return armnn::ComparisonOperation::LessOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
        default:
            return armnn::ComparisonOperation::NotEqual;
    }
}
509
// Maps the serialized reduce-operation enum onto the armnn one.
// Unknown values fall back to Sum (the default branch).
armnn::ReduceOperation ToReduceOperation(armnnSerializer::ReduceOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::ReduceOperation::ReduceOperation_Sum:
            return armnn::ReduceOperation::Sum;
        case armnnSerializer::ReduceOperation::ReduceOperation_Max:
            return armnn::ReduceOperation::Max;
        case armnnSerializer::ReduceOperation::ReduceOperation_Mean:
            return armnn::ReduceOperation::Mean;
        case armnnSerializer::ReduceOperation::ReduceOperation_Min:
            return armnn::ReduceOperation::Min;
        default:
            return armnn::ReduceOperation::Sum;
    }
}
526
James Conroyaba90cd2020-11-06 16:28:18 +0000527armnn::LogicalBinaryOperation ToLogicalBinaryOperation(armnnSerializer::LogicalBinaryOperation operation)
528{
529 switch (operation)
530 {
531 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalAnd:
532 return armnn::LogicalBinaryOperation::LogicalAnd;
533 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalOr:
534 return armnn::LogicalBinaryOperation::LogicalOr;
535 default:
536 throw armnn::InvalidArgumentException("Logical Binary operation unknown");
537 }
538}
539
// Maps the serialized elementwise-unary enum onto the armnn one.
// Unrecognized values throw armnn::InvalidArgumentException (no fallback).
armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
            return armnn::UnaryOperation::Abs;
        case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
            return armnn::UnaryOperation::Rsqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
            return armnn::UnaryOperation::Sqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
            return armnn::UnaryOperation::Exp;
        case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
            return armnn::UnaryOperation::Neg;
        case armnnSerializer::UnaryOperation::UnaryOperation_LogicalNot:
            return armnn::UnaryOperation::LogicalNot;
        case armnnSerializer::UnaryOperation::UnaryOperation_Log:
            return armnn::UnaryOperation::Log;
        case armnnSerializer::UnaryOperation::UnaryOperation_Sin:
            return armnn::UnaryOperation::Sin;
        default:
            throw armnn::InvalidArgumentException("Unary operation unknown");
    }
}
564
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100565armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
566{
567 switch (method)
568 {
569 case armnnSerializer::ResizeMethod_NearestNeighbor:
570 return armnn::ResizeMethod::NearestNeighbor;
571 case armnnSerializer::ResizeMethod_Bilinear:
Aron Virginas-Tar3c9b2702019-10-31 13:45:16 +0000572 return armnn::ResizeMethod::Bilinear;
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100573 default:
574 return armnn::ResizeMethod::NearestNeighbor;
575 }
576}
577
/// Converts a serialized (flatbuffer) tensor description into an armnn::TensorInfo.
/// Handles scalar and "not specified" dimensionalities, the legacy
/// dimensionSpecificity encoding, and per-axis quantization parameters.
/// @throws ParseException for unsupported data types.
armnn::TensorInfo ToTensorInfo(TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    // Translate the serialized data type; the deprecated Quantised* names map
    // onto their modern QAsymmU8 / QSymmS16 equivalents.
    switch (tensorPtr->dataType())
    {
        case DataType_QAsymmS8:
            type = armnn::DataType::QAsymmS8;
            break;
        case DataType_QSymmS8:
            type = armnn::DataType::QSymmS8;
            break;
        case DataType_QuantisedAsymm8:
        case DataType_QAsymmU8:
            type = armnn::DataType::QAsymmU8;
            break;
        case DataType_QSymmS16:
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QSymmS16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Signed64:
            type = armnn::DataType::Signed64;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
                                             tensorPtr->dataType(),
                                             EnumNameDataType(tensorPtr->dataType()),
                                             location.AsString()));
        }
    }

    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    // Scalar and NotSpecified tensors carry no dimension data; return early.
    if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::Scalar))
    {
        return armnn::TensorInfo(TensorShape{armnn::Dimensionality::Scalar},
                                 type,
                                 quantizationScale,
                                 quantizationOffset);
    }
    else if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::NotSpecified))
    {
        armnn::TensorInfo result(TensorShape{Dimensionality::NotSpecified},
                                 type,
                                 quantizationScale,
                                 quantizationOffset);
        return result;
    }

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
    bool dimensionsSpecificity[armnn::MaxNumOfTensorDimensions];
    std::fill_n(dimensionsSpecificity, armnn::MaxNumOfTensorDimensions, true);
    // For backwards compatibility check if the dimensionSpecificity vector is present first.
    // The default is to have dimensionSpecificity set to all true's anyway.
    if (tensorPtr->dimensionSpecificity() != nullptr)
    {
        auto dimensionSpecificity = tensorPtr->dimensionSpecificity();
        // Note: 'size' is rebound here to the specificity vector's length, and that
        // value (not dimensions->size()) drives the TensorShape construction below.
        size = dimensionSpecificity->size();
        for (unsigned int i = 0; i < size; ++i)
        {
            dimensionsSpecificity[i] = dimensionSpecificity->Get(i);
        }
    }
    // Construct a TensorShape
    TensorShape shape(size, outputDims.data(), dimensionsSpecificity);

    // Per-axis quantization: a non-null scales vector takes precedence over the
    // scalar scale/offset pair.
    auto quantizationScales = tensorPtr->quantizationScales();
    if (quantizationScales)
    {
        unsigned int quantizationScalesSize = quantizationScales->size();
        std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
        unsigned int quantizationDim = tensorPtr->quantizationDim();
        armnn::TensorInfo result(shape,
                                 type,
                                 scales,
                                 quantizationDim);
        return result;
    }

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(shape,
                             type,
                             quantizationScale,
                             quantizationOffset);

    return result;
}
683
Finn Williams85d36712021-01-26 22:30:06 +0000684armnn::ConstTensor ToConstTensor(ConstTensorRawPtr constTensorPtr)
Mike Kellya0766c32019-02-19 17:22:07 +0000685{
686 CHECK_CONST_TENSOR_PTR(constTensorPtr);
687 armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());
688
689 switch (constTensorPtr->data_type())
690 {
691 case ConstTensorData_ByteData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000692 {
693 auto byteData = constTensorPtr->data_as_ByteData()->data();
694 CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
695 return armnn::ConstTensor(tensorInfo, byteData->data());
696 }
Mike Kellya0766c32019-02-19 17:22:07 +0000697 case ConstTensorData_ShortData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000698 {
699 auto shortData = constTensorPtr->data_as_ShortData()->data();
700 CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
701 return armnn::ConstTensor(tensorInfo, shortData->data());
702 }
Mike Kellya0766c32019-02-19 17:22:07 +0000703 case ConstTensorData_IntData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000704 {
705 auto intData = constTensorPtr->data_as_IntData()->data();
706 CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
707 return armnn::ConstTensor(tensorInfo, intData->data());
708 }
Mike Kellya0766c32019-02-19 17:22:07 +0000709 case ConstTensorData_LongData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000710 {
711 auto longData = constTensorPtr->data_as_LongData()->data();
712 CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
713 return armnn::ConstTensor(tensorInfo, longData->data());
714 }
Mike Kellya0766c32019-02-19 17:22:07 +0000715 default:
716 {
717 CheckLocation location = CHECK_LOCATION();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100718 throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
719 constTensorPtr->data_type(),
720 EnumNameConstTensorData(constTensorPtr->data_type()),
721 location.AsString()));
Mike Kellya0766c32019-02-19 17:22:07 +0000722 }
723 }
724}
725
Finn Williams85d36712021-01-26 22:30:06 +0000726TensorRawPtrVector IDeserializer::DeserializerImpl::GetInputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000727{
728 CHECK_LAYERS(graphPtr, 0, layerIndex);
729 auto layer = GetBaseLayer(graphPtr, layerIndex);
730 const auto& numInputs = layer->inputSlots()->size();
731
732 TensorRawPtrVector result(numInputs);
733
734 for (unsigned int i=0; i<numInputs; ++i)
735 {
736 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
737 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
738 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
739 }
740 return result;
741}
742
Finn Williams85d36712021-01-26 22:30:06 +0000743TensorRawPtrVector IDeserializer::DeserializerImpl::GetOutputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000744{
745 CHECK_LAYERS(graphPtr, 0, layerIndex);
746 auto layer = GetBaseLayer(graphPtr, layerIndex);
747 const auto& numOutputs = layer->outputSlots()->size();
748
749 TensorRawPtrVector result(numOutputs);
750
751 for (unsigned int i=0; i<numOutputs; ++i)
752 {
753 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
754 }
755 return result;
756}
757
Finn Williams85d36712021-01-26 22:30:06 +0000758void IDeserializer::DeserializerImpl::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000759{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000760 CHECK_LAYERS(graph, 0, layerIndex);
761 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100762 throw ParseException(fmt::format("Layer not supported. layerIndex: {0} "
763 "layerName: {1} / {2}",
764 layerIndex,
765 layerName,
766 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000767}
768
Finn Williams85d36712021-01-26 22:30:06 +0000769void IDeserializer::DeserializerImpl::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000770{
771 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000772 m_InputBindings.clear();
773 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000774}
775
Kevin May43a799c2019-02-08 16:31:42 +0000776
Finn Williams85d36712021-01-26 22:30:06 +0000777INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000778{
779 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000780 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
781 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000782}
783
Finn Williams85d36712021-01-26 22:30:06 +0000784armnn::INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000785{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000786 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000787 std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
788 GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
789 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000790}
791
Finn Williams85d36712021-01-26 22:30:06 +0000792GraphPtr IDeserializer::DeserializerImpl::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000793{
794 if (binaryContent == nullptr)
795 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100796 throw InvalidArgumentException(fmt::format("Invalid (null) binary content {}",
797 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000798 }
799 flatbuffers::Verifier verifier(binaryContent, len);
800 if (verifier.VerifyBuffer<SerializedGraph>() == false)
801 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100802 throw ParseException(fmt::format("Buffer doesn't conform to the expected Armnn "
803 "flatbuffers format. size:{0} {1}",
804 len,
805 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000806 }
807 return GetSerializedGraph(binaryContent);
808}
809
/// Builds an armnn INetwork from a verified SerializedGraph:
/// 1) creates every non-boundary layer via its registered parser function,
/// 2) creates input/output layers (recording binding info),
/// 3) wires up the connections recorded by the Register*SlotOfConnection helpers.
/// @return the constructed network; ownership moves to the caller.
INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    ARMNN_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    // Pass 1: input/output layers are skipped here; SetupInput/OutputLayers below
    // creates them so that binding information can be captured at the same time.
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            // Only connect slots that have at least one registered consumer.
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    // m_Network is moved out, leaving this parser without a network until the next parse.
    return std::move(m_Network);
}
850
Finn Williams85d36712021-01-26 22:30:06 +0000851BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000852 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000853{
Jan Eilers8eb25602020-03-09 12:13:48 +0000854 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000855 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000856 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000857 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000858 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000859 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000860 }
861 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100862 throw ParseException(fmt::format("No input binding found for layer:{0} / {1}",
863 name,
864 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000865}
866
Finn Williams85d36712021-01-26 22:30:06 +0000867BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000868 const std::string& name) const
869{
Jan Eilers8eb25602020-03-09 12:13:48 +0000870 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000871 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000872 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000873 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000874 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000875 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000876 }
877 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100878 throw ParseException(fmt::format("No output binding found for layer:{0} / {1}",
879 name,
880 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000881}
882
Finn Williams85d36712021-01-26 22:30:06 +0000883unsigned int IDeserializer::DeserializerImpl::GetInputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000884{
885 for (unsigned int i = 0; i < graph->layers()->size(); i++)
886 {
887 auto layer = graph->layers()->Get(i);
888 if (layer->layer_type() == Layer::Layer_InputLayer)
889 {
890 auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
891 if (layerBindingId == targetId)
892 {
893 return i;
894 }
895 }
896 }
897 throw ParseException("Input layer with given layerBindingId not found");
898}
899
Finn Williams85d36712021-01-26 22:30:06 +0000900unsigned int IDeserializer::DeserializerImpl::GetOutputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000901{
902 for (unsigned int i = 0; i < graph->layers()->size(); i++)
903 {
904 auto layer = graph->layers()->Get(i);
905 if (layer->layer_type() == Layer::Layer_OutputLayer)
906 {
907 auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
908 if (layerBindingId == targetId)
909 {
910 return i;
911 }
912 }
913 }
914 throw ParseException("Output layer with given layerBindingId not found");
915}
916
Finn Williams85d36712021-01-26 22:30:06 +0000917unsigned int IDeserializer::DeserializerImpl::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100918{
919 for (unsigned int i = 0; i < graph->layers()->size(); i++)
920 {
921 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
922 if (layer->index() == targetIndex)
923 {
924 return i;
925 }
926 }
927 throw ParseException("Layer with given index not found");
928}
929
Finn Williams85d36712021-01-26 22:30:06 +0000930IDeserializer::DeserializerImpl::FeatureVersions IDeserializer::DeserializerImpl::GetFeatureVersions(GraphPtr graph)
Tee Jungaa920c52019-11-05 10:48:25 +0000931{
Finn Williams85d36712021-01-26 22:30:06 +0000932 IDeserializer::DeserializerImpl::FeatureVersions versions;
Tee Jungaa920c52019-11-05 10:48:25 +0000933
934 if (graph->featureVersions())
935 {
936 versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
Jan Eilers53ef7952021-06-02 12:01:25 +0100937 versions.m_WeightsLayoutScheme = graph->featureVersions()->weightsLayoutScheme();
Tee Jungaa920c52019-11-05 10:48:25 +0000938 }
939
940 return versions;
941}
942
/// Creates an armnn InputLayer for each entry in the graph's inputIds list,
/// registers its output slots for later wiring, and records the
/// (name, BindingPointInfo) pair in m_InputBindings.
void IDeserializer::DeserializerImpl::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numInputs = graph->inputIds()->size();
    m_InputBindings.clear();
    m_InputBindings.reserve(numInputs);

    for (unsigned int i = 0; i < numInputs; i++)
    {
        unsigned int inputLayerIndex = 0xFFFFFFFF;
        // Binding-id scheme 0 (legacy files): inputIds hold the layer "index" property.
        // Newer schemes: inputIds hold the layerBindingId stored on the InputLayer.
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            const unsigned int inputId = armnn::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
            inputLayerIndex = GetLayerIndexInVector(graph, inputId);
        }
        else
        {
            const int inputId = graph->inputIds()->Get(i);
            inputLayerIndex = GetInputLayerInVector(graph, inputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");

        IConnectableLayer* inputLayer =
            m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());

        // An input layer has exactly one output slot; its tensor info describes the
        // network input itself.
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
        inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
        RegisterOutputSlots(graph, inputLayerIndex, inputLayer);

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
981
/// Creates an armnn OutputLayer for each entry in the graph's outputIds list,
/// registers its input slot for later wiring, and records the
/// (name, BindingPointInfo) pair in m_OutputBindings. The binding's tensor info
/// is taken from the source layer's output slot that feeds this output.
void IDeserializer::DeserializerImpl::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numOutputs = graph->outputIds()->size();
    m_OutputBindings.clear();
    m_OutputBindings.reserve(numOutputs);

    for (unsigned int i = 0; i < numOutputs; i++)
    {
        unsigned int outputLayerIndex = 0xFFFFFFFF;
        // Binding-id scheme 0 (legacy files): outputIds hold the layer "index" property.
        // Newer schemes: outputIds hold the layerBindingId stored on the OutputLayer.
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            const unsigned int outputId = armnn::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
            outputLayerIndex = GetLayerIndexInVector(graph, outputId);
        }
        else
        {
            const int outputId = graph->outputIds()->Get(i);
            outputLayerIndex = GetOutputLayerInVector(graph, outputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Output has no name.");

        IConnectableLayer* outputLayer =
            m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());

        RegisterInputSlots(graph, outputLayerIndex, outputLayer);
        unsigned int sourceLayerIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
        // NOTE(review): routing outputSlotIndex() through GetLayerIndexInVector looks
        // suspicious — that helper maps a layer index to a vector position, not a slot
        // index. It is a no-op when layers are serialized in index order; confirm intent.
        unsigned int outputSlotIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->outputSlotIndex());
        LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(
            sourceBaseLayer->outputSlots()->Get(outputSlotIndex)->tensorInfo());
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
1024
Finn Williams85d36712021-01-26 22:30:06 +00001025void IDeserializer::DeserializerImpl::RegisterOutputSlots(GraphPtr graph,
Derek Lamberti8ddae332019-02-21 16:29:43 +00001026 uint32_t layerIndex,
1027 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +00001028{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001029 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001030 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001031 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
1032 if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
Kevin May43a799c2019-02-08 16:31:42 +00001033 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001034 throw ParseException(fmt::format("The number of outputslots ({0}) does not match the number expected ({1})"
1035 " for layer index: {2} {3}",
1036 baseLayer->outputSlots()->size(),
1037 layer->GetNumOutputSlots(),
1038 layerIndex,
1039 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +00001040 }
1041
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001042 for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +00001043 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001044 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
1045 armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
1046 // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
1047 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
Kevin May43a799c2019-02-08 16:31:42 +00001048 }
1049}
1050
Finn Williams85d36712021-01-26 22:30:06 +00001051void IDeserializer::DeserializerImpl::RegisterInputSlots(GraphPtr graph,
Derek Lamberti8ddae332019-02-21 16:29:43 +00001052 uint32_t layerIndex,
1053 armnn::IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +00001054{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001055 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001056 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001057 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
1058 if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
Kevin May43a799c2019-02-08 16:31:42 +00001059 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001060 throw ParseException(fmt::format("The number of inputslots ({0}) does not match the number expected ({1})"
1061 " for layer index:{2} {3}",
1062 baseLayer->inputSlots()->size(),
1063 layer->GetNumInputSlots(),
1064 layerIndex,
1065 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +00001066 }
1067
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001068 for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +00001069 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001070 auto fbInputSlot = baseLayer->inputSlots()->Get(i);
1071 auto fbConnection = fbInputSlot->connection();
1072 armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
1073 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
Kevin May43a799c2019-02-08 16:31:42 +00001074 }
1075}
1076
Finn Williams85d36712021-01-26 22:30:06 +00001077void IDeserializer::DeserializerImpl::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001078 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001079 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +00001080{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001081 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +00001082 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001083 m_GraphConnections[sourceLayerIndex] = Connections();
1084 }
1085
1086 Connections& connections = m_GraphConnections[sourceLayerIndex];
1087 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
1088 {
1089 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +00001090 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001091 else
1092 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001093 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001094 }
1095}
Kevin May43a799c2019-02-08 16:31:42 +00001096
Finn Williams85d36712021-01-26 22:30:06 +00001097void IDeserializer::DeserializerImpl::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001098 uint32_t outputSlotIndex,
1099 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001100{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001101 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1102 {
1103 m_GraphConnections[sourceLayerIndex] = Connections();
1104 }
1105
1106 Connections& connections = m_GraphConnections[sourceLayerIndex];
1107 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1108 {
1109 throw ParseException("Same output slot index processed twice");
1110 }
1111
1112 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +00001113}
1114
Finn Williams85d36712021-01-26 22:30:06 +00001115void IDeserializer::DeserializerImpl::ParseAbs(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001116{
1117 CHECK_LAYERS(graph, 0, layerIndex);
1118 auto inputs = GetInputs(graph, layerIndex);
1119 CHECK_LOCATION();
1120 CHECK_VALID_SIZE(inputs.size(), 1);
1121
1122 auto outputs = GetOutputs(graph, layerIndex);
1123 CHECK_VALID_SIZE(outputs.size(), 1);
1124
1125 auto layerName = GetLayerName(graph, layerIndex);
1126
josh minor4a3c6102020-01-06 16:40:46 -06001127 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
1128 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001129 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1130 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1131
1132 RegisterInputSlots(graph, layerIndex, layer);
1133 RegisterOutputSlots(graph, layerIndex, layer);
1134}
1135
Finn Williams85d36712021-01-26 22:30:06 +00001136void IDeserializer::DeserializerImpl::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +00001137{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001138 CHECK_LAYERS(graph, 0, layerIndex);
1139 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001140 CHECK_LOCATION();
1141 CHECK_VALID_SIZE(inputs.size(), 1);
1142
Derek Lamberti8ddae332019-02-21 16:29:43 +00001143 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001144 CHECK_VALID_SIZE(outputs.size(), 1);
1145
Derek Lamberti8ddae332019-02-21 16:29:43 +00001146 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001147 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001148 auto serializerDescriptor = serializerLayer->descriptor();
1149
1150 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001151 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +00001152 descriptor.m_A = serializerDescriptor->a();
1153 descriptor.m_B = serializerDescriptor->b();
1154
1155 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
1156 layerName.c_str());
1157 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1158 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1159
Derek Lamberti8ddae332019-02-21 16:29:43 +00001160 RegisterInputSlots(graph, layerIndex, layer);
1161 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +00001162}
1163
Finn Williams85d36712021-01-26 22:30:06 +00001164void IDeserializer::DeserializerImpl::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001165{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001166 CHECK_LAYERS(graph, 0, layerIndex);
1167 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001168 CHECK_LOCATION();
1169 CHECK_VALID_SIZE(inputs.size(), 2);
1170
Derek Lamberti8ddae332019-02-21 16:29:43 +00001171 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001172 CHECK_VALID_SIZE(outputs.size(), 1);
1173
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001174 auto layerName = GetLayerName(graph, layerIndex);
1175 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001176
1177 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1178 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1179
Derek Lamberti8ddae332019-02-21 16:29:43 +00001180 RegisterInputSlots(graph, layerIndex, layer);
1181 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001182}
1183
Finn Williams85d36712021-01-26 22:30:06 +00001184void IDeserializer::DeserializerImpl::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001185{
1186 CHECK_LAYERS(graph, 0, layerIndex);
1187 auto inputs = GetInputs(graph, layerIndex);
1188 CHECK_LOCATION();
1189 CHECK_VALID_SIZE(inputs.size(), 1);
1190
1191 auto outputs = GetOutputs(graph, layerIndex);
1192 CHECK_VALID_SIZE(outputs.size(), 1);
1193
1194 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1195 auto serializerDescriptor = serializerLayer->descriptor();
1196
1197 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001198 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001199 descriptor.m_Axis = serializerDescriptor->axis();
1200 auto layerName = GetLayerName(graph, layerIndex);
1201 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1202
1203 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1204 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1205
1206 RegisterInputSlots(graph, layerIndex, layer);
1207 RegisterOutputSlots(graph, layerIndex, layer);
1208}
1209
Finn Williams85d36712021-01-26 22:30:06 +00001210void IDeserializer::DeserializerImpl::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001211{
1212 CHECK_LAYERS(graph, 0, layerIndex);
1213
Finn Williams85d36712021-01-26 22:30:06 +00001214 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001215 CHECK_VALID_SIZE(inputs.size(), 1);
1216
Finn Williams85d36712021-01-26 22:30:06 +00001217 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001218 CHECK_VALID_SIZE(outputs.size(), 1);
1219
1220 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1221 auto flatBufferCrops = flatBufferDescriptor->crops();
1222 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1223
1224 if (flatBufferCrops->Length() % 2 != 0)
1225 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001226 throw ParseException(fmt::format("The size of crops must be divisible by 2 {}", CHECK_LOCATION().AsString()));
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001227 }
1228
1229 std::vector<std::pair<unsigned int, unsigned int>> crops;
1230 crops.reserve(flatBufferCrops->Length() / 2);
1231 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
1232 {
1233 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1234 }
1235
1236 armnn::BatchToSpaceNdDescriptor descriptor;
1237 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1238 descriptor.m_BlockShape =
1239 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1240 descriptor.m_Crops = crops;
1241
1242 auto layerName = GetLayerName(graph, layerIndex);
1243 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1244
1245 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1246 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1247
1248 RegisterInputSlots(graph, layerIndex, layer);
1249 RegisterOutputSlots(graph, layerIndex, layer);
1250}
1251
Finn Williams85d36712021-01-26 22:30:06 +00001252void IDeserializer::DeserializerImpl::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
ruoyan018e7fa232019-02-28 15:09:07 +00001253{
1254 CHECK_LAYERS(graph, 0, layerIndex);
1255
1256 auto inputs = GetInputs(graph, layerIndex);
1257 CHECK_VALID_SIZE(inputs.size(), 1);
1258
1259 auto outputs = GetOutputs(graph, layerIndex);
1260 CHECK_VALID_SIZE(outputs.size(), 1);
1261 auto outputInfo = ToTensorInfo(outputs[0]);
1262
ruoyan015c7ab052019-03-04 14:48:02 +00001263 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001264
1265 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1266 auto serializerDescriptor = serializerLayer->descriptor();
1267
1268 armnn::BatchNormalizationDescriptor descriptor;
1269 descriptor.m_Eps = serializerDescriptor->eps();
1270 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1271
1272 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1273 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1274 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1275 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1276
1277 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1278 mean,
1279 variance,
1280 beta,
1281 gamma,
1282 layerName.c_str());
1283 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1284
1285 RegisterInputSlots(graph, layerIndex, layer);
1286 RegisterOutputSlots(graph, layerIndex, layer);
1287}
1288
mathad01b392e982021-04-07 12:07:30 +01001289void IDeserializer::DeserializerImpl::ParseCast(GraphPtr graph, unsigned int layerIndex)
1290{
1291 CHECK_LAYERS(graph, 0, layerIndex);
1292 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1293 CHECK_LOCATION();
1294 CHECK_VALID_SIZE(inputs.size(), 1);
1295
1296 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1297 CHECK_VALID_SIZE(outputs.size(), 1);
1298
1299 auto layerName = GetLayerName(graph, layerIndex);
1300
1301 IConnectableLayer* layer = m_Network->AddCastLayer(layerName.c_str());
1302
1303 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1304 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1305
1306 RegisterInputSlots(graph, layerIndex, layer);
1307 RegisterOutputSlots(graph, layerIndex, layer);
1308}
1309
Finn Williams85d36712021-01-26 22:30:06 +00001310void IDeserializer::DeserializerImpl::ParseConstant(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy76277882019-02-26 08:29:54 +00001311{
1312 CHECK_LAYERS(graph, 0, layerIndex);
1313 CHECK_LOCATION();
1314
1315 auto outputs = GetOutputs(graph, layerIndex);
1316 CHECK_VALID_SIZE(outputs.size(), 1);
1317
1318 auto layerName = GetLayerName(graph, layerIndex);
1319
1320 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
1321 auto serializerInput = serializerLayer->input();
1322
1323 armnn::ConstTensor input = ToConstTensor(serializerInput);
1324
1325 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
1326
1327 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1328 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1329
1330 RegisterOutputSlots(graph, layerIndex, layer);
1331}
1332
Finn Williams85d36712021-01-26 22:30:06 +00001333void IDeserializer::DeserializerImpl::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001334{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001335 CHECK_LAYERS(graph, 0, layerIndex);
1336 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001337 CHECK_LOCATION();
1338 CHECK_VALID_SIZE(inputs.size(), 1);
1339
Derek Lamberti8ddae332019-02-21 16:29:43 +00001340 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001341 CHECK_VALID_SIZE(outputs.size(), 1);
1342
Derek Lamberti8ddae332019-02-21 16:29:43 +00001343 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001344 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001345 auto serializerDescriptor = serializerLayer->descriptor();
1346
1347 armnn::Convolution2dDescriptor descriptor;
1348 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1349 descriptor.m_PadRight = serializerDescriptor->padRight();
1350 descriptor.m_PadTop = serializerDescriptor->padTop();
1351 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1352 descriptor.m_StrideX = serializerDescriptor->strideX();
1353 descriptor.m_StrideY = serializerDescriptor->strideY();;
Matthew Benthamacad04e2019-05-13 10:02:45 +01001354 descriptor.m_DilationX = serializerDescriptor->dilationX();
1355 descriptor.m_DilationY = serializerDescriptor->dilationY();;
Mike Kellya0766c32019-02-19 17:22:07 +00001356 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1357 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1358
1359 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1360 armnn::ConstTensor biases;
1361
Matteo Martincighfc598e12019-05-14 10:36:13 +01001362 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001363 if (descriptor.m_BiasEnabled)
1364 {
1365 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001366 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001367 }
1368 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1369 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001370 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001371 layerName.c_str());
1372 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1373 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1374
Derek Lamberti8ddae332019-02-21 16:29:43 +00001375 RegisterInputSlots(graph, layerIndex, layer);
1376 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001377}
1378
Finn Williams85d36712021-01-26 22:30:06 +00001379void IDeserializer::DeserializerImpl::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001380{
1381 CHECK_LAYERS(graph, 0, layerIndex);
1382
1383 auto inputs = GetInputs(graph, layerIndex);
1384 CHECK_VALID_SIZE(inputs.size(), 1);
1385
1386 auto outputs = GetOutputs(graph, layerIndex);
1387 CHECK_VALID_SIZE(outputs.size(), 1);
1388
1389 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1390
1391 armnn::DepthToSpaceDescriptor descriptor;
1392 descriptor.m_BlockSize = fbDescriptor->blockSize();
1393 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1394
1395 auto layerName = GetLayerName(graph, layerIndex);
1396 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1397
1398 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1399 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1400
1401 RegisterInputSlots(graph, layerIndex, layer);
1402 RegisterOutputSlots(graph, layerIndex, layer);
1403}
1404
/// Deserializes a DepthwiseConvolution2dLayer and adds it to m_Network.
/// Handles two weight layouts: files written with the current scheme are used
/// as-is, while older files (feature version m_WeightsLayoutScheme <= 0) have
/// their weight tensor permuted and reshaped into the new layout first.
void IDeserializer::DeserializerImpl::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    // Rebuild the ArmNN descriptor field-by-field from the serialized one.
    armnn::DepthwiseConvolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_DilationX = serializerDescriptor->dilationX();
    descriptor.m_DilationY = serializerDescriptor->dilationY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    // Set on one of the two branches below; both branches assign it.
    IConnectableLayer* layer;

    // Bias is only deserialized when the descriptor says it is enabled.
    armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
    if (descriptor.m_BiasEnabled)
    {
        armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
        optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
    }

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    // The data layout for weights in ArmNN used to be [M,I,H,W] but now it's changed to [1,H,W,I*M]
    // When reading older flatbuffer files we need to add a permutation to get to the new layout.
    if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
    {
        // Permute weights [ H, W, M, I ] --> [ 1, H, W, I*M ]
        // Step1: [ M, I, H, W ] --> [ H, W, I, M]
        PermutationVector permutationVector = { 3, 2, 0, 1 };
        armnn::TensorInfo weightsInfo = weights.GetInfo();
        // Scratch buffer to receive the permuted weight bytes; sized from the
        // original tensor (permutation does not change the byte count).
        std::unique_ptr<unsigned char[]> permuteBuffer(new unsigned char[weightsInfo.GetNumBytes()]);
        weightsInfo = armnnUtils::Permuted(weightsInfo, permutationVector);
        armnnUtils::Permute(weightsInfo.GetShape(), permutationVector,
                            weights.GetMemoryArea(), permuteBuffer.get(),
                            GetDataTypeSize(weightsInfo.GetDataType()));

        // Step2: Reshape [ H, W, I, M] --> [ 1, H, W, I*M ]
        // Only the shape metadata changes here; the permuted data is reused as-is.
        auto weightsShape = weightsInfo.GetShape();
        weightsInfo.SetShape({1,
                              weightsShape[0],
                              weightsShape[1],
                              weightsShape[2]*weightsShape[3]});

        armnn::ConstTensor weightsPermuted(weightsInfo, permuteBuffer.get());

        layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                          weightsPermuted,
                                                          optionalBiases,
                                                          layerName.c_str());
    }
    else
    {
        // Current layout scheme: weights can be handed over unchanged.
        layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                          weights,
                                                          optionalBiases,
                                                          layerName.c_str());
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1483
/// Deserializes a DetectionPostProcessLayer: copies every descriptor field
/// from the flatbuffer, reads the constant anchors tensor, and adds the layer
/// (2 inputs, 4 outputs) to m_Network.
void IDeserializer::DeserializerImpl::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    // This layer always produces exactly four outputs.
    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    // One-to-one field mapping from the serialized descriptor to the ArmNN one.
    armnn::DetectionPostProcessDescriptor descriptor;
    descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
    descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
    descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
    descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
    descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
    descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
    descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
    descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
    descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
    descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
    descriptor.m_ScaleH = flatBufferDescriptor->scaleH();

    // Anchors are stored as a constant tensor alongside the descriptor.
    armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());

    IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
                                                                       anchors,
                                                                       layerName.c_str());

    // Attach the serialized tensor info to each of the four output slots.
    for (unsigned int i = 0; i < 4; i++)
    {
        layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1525
Finn Williams85d36712021-01-26 22:30:06 +00001526void IDeserializer::DeserializerImpl::ParseDivision(GraphPtr graph, unsigned int layerIndex)
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001527{
1528 CHECK_LAYERS(graph, 0, layerIndex);
1529 auto inputs = GetInputs(graph, layerIndex);
1530 CHECK_LOCATION();
1531 CHECK_VALID_SIZE(inputs.size(), 2);
1532
1533 auto outputs = GetOutputs(graph, layerIndex);
1534 CHECK_VALID_SIZE(outputs.size(), 1);
1535
1536 auto layerName = GetLayerName(graph, layerIndex);
1537 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1538
1539 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1540 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1541
1542 RegisterInputSlots(graph, layerIndex, layer);
1543 RegisterOutputSlots(graph, layerIndex, layer);
1544}
1545
Finn Williams85d36712021-01-26 22:30:06 +00001546void IDeserializer::DeserializerImpl::ParseEqual(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001547{
1548 CHECK_LAYERS(graph, 0, layerIndex);
1549 auto inputs = GetInputs(graph, layerIndex);
1550 CHECK_LOCATION();
1551 CHECK_VALID_SIZE(inputs.size(), 2);
1552
1553 auto outputs = GetOutputs(graph, layerIndex);
1554 CHECK_VALID_SIZE(outputs.size(), 1);
1555
1556 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001557 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1558 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001559
1560 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1561 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1562
1563 RegisterInputSlots(graph, layerIndex, layer);
1564 RegisterOutputSlots(graph, layerIndex, layer);
1565}
1566
Finn Williams85d36712021-01-26 22:30:06 +00001567void IDeserializer::DeserializerImpl::ParseFill(GraphPtr graph, unsigned int layerIndex)
Keith Davis300ad562020-06-04 16:34:23 +01001568{
1569 CHECK_LAYERS(graph, 0, layerIndex);
1570 auto inputs = GetInputs(graph, layerIndex);
1571 CHECK_LOCATION();
1572 CHECK_VALID_SIZE(inputs.size(), 1);
1573
1574 auto outputs = GetOutputs(graph, layerIndex);
1575 CHECK_VALID_SIZE(outputs.size(), 1);
1576
1577 auto layerName = GetLayerName(graph, layerIndex);
1578 armnn::FillDescriptor descriptor(1.0f);
1579 IConnectableLayer* layer = m_Network->AddFillLayer(descriptor, layerName.c_str());
1580
1581 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1582 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1583
1584 RegisterInputSlots(graph, layerIndex, layer);
1585 RegisterOutputSlots(graph, layerIndex, layer);
1586}
1587
Finn Williams85d36712021-01-26 22:30:06 +00001588void IDeserializer::DeserializerImpl::ParseGreater(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001589{
1590 CHECK_LAYERS(graph, 0, layerIndex);
1591 auto inputs = GetInputs(graph, layerIndex);
1592 CHECK_LOCATION();
1593 CHECK_VALID_SIZE(inputs.size(), 2);
1594
1595 auto outputs = GetOutputs(graph, layerIndex);
1596 CHECK_VALID_SIZE(outputs.size(), 1);
1597
1598 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001599 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1600 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001601
1602 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1603 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1604
1605 RegisterInputSlots(graph, layerIndex, layer);
1606 RegisterOutputSlots(graph, layerIndex, layer);
1607}
1608
Finn Williams85d36712021-01-26 22:30:06 +00001609void IDeserializer::DeserializerImpl::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001610{
1611 CHECK_LAYERS(graph, 0, layerIndex);
1612
1613 auto inputs = GetInputs(graph, layerIndex);
1614 CHECK_VALID_SIZE(inputs.size(), 1);
1615
1616 auto outputs = GetOutputs(graph, layerIndex);
1617 CHECK_VALID_SIZE(outputs.size(), 1);
1618
1619 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1620 auto fbDescriptor = fbLayer->descriptor();
1621
1622 armnn::InstanceNormalizationDescriptor descriptor;
1623 descriptor.m_Gamma = fbDescriptor->gamma();
1624 descriptor.m_Beta = fbDescriptor->beta();
1625 descriptor.m_Eps = fbDescriptor->eps();
1626 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1627
1628 const std::string layerName = GetLayerName(graph, layerIndex);
1629 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1630
1631 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1632 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1633
1634 RegisterInputSlots(graph, layerIndex, layer);
1635 RegisterOutputSlots(graph, layerIndex, layer);
1636}
1637
Finn Williams85d36712021-01-26 22:30:06 +00001638void IDeserializer::DeserializerImpl::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001639{
1640 CHECK_LAYERS(graph, 0, layerIndex);
1641
1642 auto inputs = GetInputs(graph, layerIndex);
1643 CHECK_VALID_SIZE(inputs.size(), 1);
1644
1645 auto outputs = GetOutputs(graph, layerIndex);
1646 CHECK_VALID_SIZE(outputs.size(), 1);
1647 auto outputInfo = ToTensorInfo(outputs[0]);
1648
1649 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1650 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1651
1652 auto layerName = GetLayerName(graph, layerIndex);
1653 armnn::L2NormalizationDescriptor descriptor;
1654 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001655 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001656
1657 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1658 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1659
1660 RegisterInputSlots(graph, layerIndex, layer);
1661 RegisterOutputSlots(graph, layerIndex, layer);
1662}
1663
Finn Williams85d36712021-01-26 22:30:06 +00001664void IDeserializer::DeserializerImpl::ParseLogicalBinary(GraphPtr graph, unsigned int layerIndex)
James Conroyaba90cd2020-11-06 16:28:18 +00001665{
1666 CHECK_LAYERS(graph, 0, layerIndex);
1667 CHECK_LOCATION();
1668
1669 auto inputs = GetInputs(graph, layerIndex);
1670 CHECK_VALID_SIZE(inputs.size(), 2);
1671
1672 auto outputs = GetOutputs(graph, layerIndex);
1673 CHECK_VALID_SIZE(outputs.size(), 1);
1674
1675 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer();
1676 auto fbDescriptor = fbLayer->descriptor();
1677
1678 armnn::LogicalBinaryDescriptor descriptor;
1679 descriptor.m_Operation = ToLogicalBinaryOperation(fbDescriptor->operation());
1680
1681 const std::string& layerName = GetLayerName(graph, layerIndex);
1682 IConnectableLayer* layer = m_Network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1683
1684 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1685 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1686
1687 RegisterInputSlots(graph, layerIndex, layer);
1688 RegisterOutputSlots(graph, layerIndex, layer);
1689}
1690
Finn Williams85d36712021-01-26 22:30:06 +00001691void IDeserializer::DeserializerImpl::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan26257852019-10-14 13:00:47 +01001692{
1693 CHECK_LAYERS(graph, 0, layerIndex);
1694
Finn Williams85d36712021-01-26 22:30:06 +00001695 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001696 CHECK_VALID_SIZE(inputs.size(), 1);
1697
Finn Williams85d36712021-01-26 22:30:06 +00001698 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001699 CHECK_VALID_SIZE(outputs.size(), 1);
1700
1701 armnn::LogSoftmaxDescriptor descriptor;
1702 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1703 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1704 auto layerName = GetLayerName(graph, layerIndex);
1705
1706 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1707
1708 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1709 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1710
1711 RegisterInputSlots(graph, layerIndex, layer);
1712 RegisterOutputSlots(graph, layerIndex, layer);
1713}
1714
Finn Williams85d36712021-01-26 22:30:06 +00001715void IDeserializer::DeserializerImpl::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001716{
1717 CHECK_LAYERS(graph, 0, layerIndex);
1718 auto inputs = GetInputs(graph, layerIndex);
1719 CHECK_LOCATION();
1720 CHECK_VALID_SIZE(inputs.size(), 2);
1721
1722 auto outputs = GetOutputs(graph, layerIndex);
1723 CHECK_VALID_SIZE(outputs.size(), 1);
1724
1725 auto layerName = GetLayerName(graph, layerIndex);
1726 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1727
1728 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1729 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1730
1731 RegisterInputSlots(graph, layerIndex, layer);
1732 RegisterOutputSlots(graph, layerIndex, layer);
1733}
1734
Finn Williams85d36712021-01-26 22:30:06 +00001735void IDeserializer::DeserializerImpl::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001736{
1737 CHECK_LAYERS(graph, 0, layerIndex);
1738 auto inputs = GetInputs(graph, layerIndex);
1739 CHECK_LOCATION();
1740 CHECK_VALID_SIZE(inputs.size(), 2);
1741
1742 auto outputs = GetOutputs(graph, layerIndex);
1743 CHECK_VALID_SIZE(outputs.size(), 1);
1744
1745 auto layerName = GetLayerName(graph, layerIndex);
1746 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1747
1748 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1749 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1750
1751 RegisterInputSlots(graph, layerIndex, layer);
1752 RegisterOutputSlots(graph, layerIndex, layer);
1753}
1754
Jim Flynne242f2d2019-05-22 14:24:13 +01001755const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1756 unsigned int layerIndex)
1757{
1758 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1759
1760 switch (layerType)
1761 {
1762 case Layer::Layer_ConcatLayer:
1763 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1764 case Layer::Layer_MergerLayer:
1765 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1766 default:
1767 throw armnn::Exception("unknown layer type, should be concat or merger");
1768 }
1769}
1770
Finn Williams85d36712021-01-26 22:30:06 +00001771void IDeserializer::DeserializerImpl::ParseComparison(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01001772{
1773 CHECK_LAYERS(graph, 0, layerIndex);
1774 CHECK_LOCATION();
1775
1776 auto inputs = GetInputs(graph, layerIndex);
1777 CHECK_VALID_SIZE(inputs.size(), 2);
1778
1779 auto outputs = GetOutputs(graph, layerIndex);
1780 CHECK_VALID_SIZE(outputs.size(), 1);
1781
1782 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
1783 auto fbDescriptor = fbLayer->descriptor();
1784
1785 armnn::ComparisonDescriptor descriptor;
1786 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
1787
1788 const std::string& layerName = GetLayerName(graph, layerIndex);
1789 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1790
1791 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1792 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1793
1794 RegisterInputSlots(graph, layerIndex, layer);
1795 RegisterOutputSlots(graph, layerIndex, layer);
1796}
1797
Finn Williams85d36712021-01-26 22:30:06 +00001798void IDeserializer::DeserializerImpl::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
josh minor4a3c6102020-01-06 16:40:46 -06001799{
1800 CHECK_LAYERS(graph, 0, layerIndex);
1801 CHECK_LOCATION();
1802
1803 auto inputs = GetInputs(graph, layerIndex);
1804 CHECK_VALID_SIZE(inputs.size(), 1);
1805
1806 auto outputs = GetOutputs(graph, layerIndex);
1807 CHECK_VALID_SIZE(outputs.size(), 1);
1808
1809 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
1810 auto fbDescriptor = fbLayer->descriptor();
1811
1812 armnn::ElementwiseUnaryDescriptor descriptor;
1813 descriptor.m_Operation = ToUnaryOperation(fbDescriptor->operation());
1814
1815 const std::string& layerName = GetLayerName(graph, layerIndex);
1816 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
1817
1818 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1819 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1820
1821 RegisterInputSlots(graph, layerIndex, layer);
1822 RegisterOutputSlots(graph, layerIndex, layer);
1823}
1824
Finn Williams85d36712021-01-26 22:30:06 +00001825void IDeserializer::DeserializerImpl::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001826{
1827 CHECK_LAYERS(graph, 0, layerIndex);
1828 CHECK_LOCATION();
1829
1830 auto outputs = GetOutputs(graph, layerIndex);
1831 CHECK_VALID_SIZE(outputs.size(), 1);
1832
Jim Flynnac25a1b2019-02-28 10:40:49 +00001833 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001834 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1835 unsigned int numViews = originsDescriptor->numViews();
1836 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001837
1838 // can now check the number of inputs == number of views
1839 auto inputs = GetInputs(graph, layerIndex);
1840 CHECK_VALID_SIZE(inputs.size(), numViews);
1841
1842 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001843 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001844 for (unsigned int v = 0; v < numViews; ++v)
1845 {
1846 auto originPtr = originsPtr->Get(v);
1847 for (unsigned int d = 0; d < numDimensions; ++d)
1848 {
1849 uint32_t value = originPtr->data()->Get(d);
1850 descriptor.SetViewOriginCoord(v, d, value);
1851 }
1852 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001853 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001854
Jim Flynn906f9462019-05-10 13:55:21 +01001855 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001856 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1857 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1858
1859 RegisterInputSlots(graph, layerIndex, layer);
1860 RegisterOutputSlots(graph, layerIndex, layer);
1861}
1862
Finn Williams85d36712021-01-26 22:30:06 +00001863void IDeserializer::DeserializerImpl::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001864{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001865 CHECK_LAYERS(graph, 0, layerIndex);
1866 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001867 CHECK_LOCATION();
1868 CHECK_VALID_SIZE(inputs.size(), 2);
1869
Derek Lamberti8ddae332019-02-21 16:29:43 +00001870 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001871 CHECK_VALID_SIZE(outputs.size(), 1);
1872
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001873 auto layerName = GetLayerName(graph, layerIndex);
1874 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001875
1876 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1877 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1878
Derek Lamberti8ddae332019-02-21 16:29:43 +00001879 RegisterInputSlots(graph, layerIndex, layer);
1880 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001881}
1882
Finn Williams85d36712021-01-26 22:30:06 +00001883void IDeserializer::DeserializerImpl::ParseFloor(GraphPtr graph, unsigned int layerIndex)
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001884{
1885 CHECK_LAYERS(graph, 0, layerIndex);
1886 CHECK_LOCATION();
1887
1888 auto inputs = GetInputs(graph, layerIndex);
1889 CHECK_VALID_SIZE(inputs.size(), 1);
1890
1891 auto outputs = GetOutputs(graph, layerIndex);
1892 CHECK_VALID_SIZE(outputs.size(), 1);
1893
1894 auto layerName = GetLayerName(graph, layerIndex);
1895
1896 armnn::IConnectableLayer* layer;
1897
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001898 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001899
1900 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1901 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1902
1903 RegisterInputSlots(graph, layerIndex, layer);
1904 RegisterOutputSlots(graph, layerIndex, layer);
1905}
1906
void IDeserializer::DeserializerImpl::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
{
    // Deserializes a FullyConnected layer. Depending on the serialized
    // descriptor, weights (and biases) are either embedded in the layer as
    // constant tensors or supplied through additional input slots.
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
    fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
    fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
    fullyConnectedDescriptor.m_ConstantWeights = flatBufferDescriptor->constantWeights();
    // Non-constant weights arrive as extra network inputs: slot 1 carries the
    // weights and, when biases are enabled, slot 2 carries the biases.
    uint32_t numInputs = 1;
    if (!fullyConnectedDescriptor.m_ConstantWeights)
    {
        numInputs = 2;
        if (fullyConnectedDescriptor.m_BiasEnabled)
        {
            numInputs = 3;
        }
    }
    CHECK_VALID_SIZE(inputs.size(), numInputs);

    // Constant weights/biases are read directly out of the flatbuffer layer.
    armnn::Optional <armnn::ConstTensor> optionalWeights = armnn::EmptyOptional();
    armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
    if (fullyConnectedDescriptor.m_ConstantWeights)
    {
        armnn::ConstTensor weightsTensorData = ToConstTensor(flatBufferLayer->weights());
        optionalWeights = armnn::Optional<armnn::ConstTensor>(weightsTensorData);

        if (flatBufferDescriptor->biasEnabled())
        {
            armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
            optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
        }
    }

    armnn::IConnectableLayer* layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                                       optionalWeights,
                                                                       optionalBiases,
                                                                       layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1960
Finn Williams85d36712021-01-26 22:30:06 +00001961void IDeserializer::DeserializerImpl::ParsePad(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001962{
1963 CHECK_LAYERS(graph, 0, layerIndex);
1964
Finn Williams85d36712021-01-26 22:30:06 +00001965 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001966 CHECK_VALID_SIZE(inputs.size(), 1);
1967
Finn Williams85d36712021-01-26 22:30:06 +00001968 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001969 CHECK_VALID_SIZE(outputs.size(), 1);
1970
1971 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1972 auto flatBufferPadList = flatBufferDescriptor->padList();
David Monahan34757812019-06-19 11:47:21 +01001973 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001974
1975 if (flatBufferPadList->Length() % 2 != 0)
1976 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001977 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
1978 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001979 }
1980
1981 std::vector<std::pair<unsigned int, unsigned int>> padList;
1982 padList.reserve(flatBufferPadList->Length() / 2);
1983 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1984 {
1985 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1986 }
1987
David Monahan34757812019-06-19 11:47:21 +01001988 armnn::PadDescriptor descriptor(padList, padValue);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001989
1990 auto layerName = GetLayerName(graph, layerIndex);
1991 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1992
1993 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1994 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1995
1996 RegisterInputSlots(graph, layerIndex, layer);
1997 RegisterOutputSlots(graph, layerIndex, layer);
1998}
1999
Finn Williams85d36712021-01-26 22:30:06 +00002000void IDeserializer::DeserializerImpl::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002001{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002002 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002003
2004 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00002005 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002006
Derek Lamberti8ddae332019-02-21 16:29:43 +00002007 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002008 CHECK_VALID_SIZE(inputs.size(), 1);
2009
Derek Lamberti8ddae332019-02-21 16:29:43 +00002010 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002011 CHECK_VALID_SIZE(outputs.size(), 1);
2012 auto outputInfo = ToTensorInfo(outputs[0]);
2013
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002014 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002015 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
2016
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002017 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002018 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2019
Derek Lamberti8ddae332019-02-21 16:29:43 +00002020 RegisterInputSlots(graph, layerIndex, layer);
2021 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002022}
2023
armnn::Pooling2dDescriptor IDeserializer::DeserializerImpl::GetPoolingDescriptor(PoolingDescriptor pooling2dDesc,
                                                                                 unsigned int layerIndex)
{
    // Maps a serialized Pooling2d descriptor onto armnn::Pooling2dDescriptor.
    // layerIndex is accepted only to keep a uniform helper signature; it is
    // not needed for the translation itself.
    IgnoreUnused(layerIndex);
    armnn::Pooling2dDescriptor desc;

    // Pooling algorithm: Average / Max / L2.
    switch (pooling2dDesc->poolType())
    {
        case PoolingAlgorithm_Average:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Average;
            break;
        }
        case PoolingAlgorithm_Max:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Max;
            break;
        }
        case PoolingAlgorithm_L2:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::L2;
            break;
        }
        default:
        {
            // Unknown enum value: assert in debug builds; in release builds the
            // descriptor keeps its default pool type.
            ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
        }
    }

    // How a non-integral output extent is rounded.
    switch (pooling2dDesc->outputShapeRounding())
    {
        case OutputShapeRounding_Floor:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
            break;
        }
        case OutputShapeRounding_Ceiling:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
        }
    }

    // Whether padded elements are excluded from, or counted in, the pool.
    switch (pooling2dDesc->paddingMethod())
    {
        case PaddingMethod_Exclude:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
            break;
        }
        case PaddingMethod_IgnoreValue:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported padding method");
        }
    }

    // Tensor data layout for the pooling operation.
    switch (pooling2dDesc->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    // Scalar geometry fields copy across one-to-one.
    desc.m_PadRight   = pooling2dDesc->padRight();
    desc.m_PadLeft    = pooling2dDesc->padLeft();
    desc.m_PadBottom  = pooling2dDesc->padBottom();
    desc.m_PadTop     = pooling2dDesc->padTop();
    desc.m_StrideX    = pooling2dDesc->strideX();
    desc.m_StrideY    = pooling2dDesc->strideY();
    desc.m_PoolWidth  = pooling2dDesc->poolWidth();
    desc.m_PoolHeight = pooling2dDesc->poolHeight();

    return desc;
}
2118
Finn Williams85d36712021-01-26 22:30:06 +00002119
2120
2121void IDeserializer::DeserializerImpl::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002122{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002123 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002124
Derek Lamberti8ddae332019-02-21 16:29:43 +00002125 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00002126 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002127 CHECK_VALID_SIZE(inputs.size(), 1);
2128
Derek Lamberti8ddae332019-02-21 16:29:43 +00002129 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002130 CHECK_VALID_SIZE(outputs.size(), 1);
2131 auto outputInfo = ToTensorInfo(outputs[0]);
2132
2133 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002134 auto layerName = GetLayerName(graph, layerIndex);
2135 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002136 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2137
Derek Lamberti8ddae332019-02-21 16:29:43 +00002138 RegisterInputSlots(graph, layerIndex, layer);
2139 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002140}
2141
Finn Williams85d36712021-01-26 22:30:06 +00002142void IDeserializer::DeserializerImpl::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
Derek Lamberti87acb272019-03-27 16:51:31 +00002143{
2144 CHECK_LAYERS(graph, 0, layerIndex);
2145
2146 auto inputs = GetInputs(graph, layerIndex);
2147 CHECK_VALID_SIZE(inputs.size(), 1);
2148
2149 auto outputs = GetOutputs(graph, layerIndex);
2150 CHECK_VALID_SIZE(outputs.size(), 1);
2151 auto outputInfo = ToTensorInfo(outputs[0]);
2152
2153 auto layerName = GetLayerName(graph, layerIndex);
2154 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
2155 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2156
2157 RegisterInputSlots(graph, layerIndex, layer);
2158 RegisterOutputSlots(graph, layerIndex, layer);
2159}
2160
Finn Williams85d36712021-01-26 22:30:06 +00002161armnn::TensorInfo IDeserializer::DeserializerImpl::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00002162 const std::vector<uint32_t>& targetDimsIn)
2163{
2164 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
2165 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
2166
2167 if (stretchDim != targetDimsIn.end())
2168 {
2169 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
2170 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002171 throw ParseException(fmt::format("At most one component of shape can be -1 {}",
2172 CHECK_LOCATION().AsString()));
Saoirse Stewart263829c2019-02-19 15:54:14 +00002173 }
2174
2175 auto targetNumElements =
Matthew Sloyan0663d662020-09-14 11:47:26 +01002176 armnn::numeric_cast<unsigned int>(
Saoirse Stewart263829c2019-02-19 15:54:14 +00002177 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
2178
2179 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
2180 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
2181 }
2182
2183 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
2184
2185 armnn::TensorInfo reshapeInfo = inputTensorInfo;
2186 reshapeInfo.SetShape(outputShape);
2187
2188 return reshapeInfo;
2189}
2190
Finn Williams85d36712021-01-26 22:30:06 +00002191void IDeserializer::DeserializerImpl::ParseRank(GraphPtr graph, unsigned int layerIndex)
Finn Williams2605b232020-06-10 15:53:46 +01002192{
2193 CHECK_LAYERS(graph, 0, layerIndex);
2194
Finn Williams85d36712021-01-26 22:30:06 +00002195 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002196 CHECK_VALID_SIZE(inputs.size(), 1);
2197
Finn Williams85d36712021-01-26 22:30:06 +00002198 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002199 CHECK_VALID_SIZE(outputs.size(), 1);
2200
2201 auto layerName = GetLayerName(graph, layerIndex);
2202 IConnectableLayer* layer = m_Network->AddRankLayer( layerName.c_str());
2203
2204 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2205 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2206
2207 RegisterInputSlots(graph, layerIndex, layer);
2208 RegisterOutputSlots(graph, layerIndex, layer);
2209}
2210
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002211void IDeserializer::DeserializerImpl::ParseReduce(GraphPtr graph, unsigned int layerIndex)
2212{
2213 CHECK_LAYERS(graph, 0, layerIndex);
2214 CHECK_LOCATION();
2215
2216 auto inputs = GetInputs(graph, layerIndex);
2217 CHECK_VALID_SIZE(inputs.size(), 1);
2218
2219 auto outputs = GetOutputs(graph, layerIndex);
2220 CHECK_VALID_SIZE(outputs.size(), 1);
2221
2222 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ReduceLayer();
2223 auto fbDescriptor = fbLayer->descriptor();
2224 auto flatBufferAxis = fbDescriptor->axis();
2225
2226 armnn::ReduceDescriptor descriptor;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002227 descriptor.m_KeepDims = fbDescriptor->keepDims();
2228 descriptor.m_vAxis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2229 descriptor.m_ReduceOperation = ToReduceOperation(fbDescriptor->reduceOperation());
2230
2231 const std::string& layerName = GetLayerName(graph, layerIndex);
2232 IConnectableLayer* layer = m_Network->AddReduceLayer(descriptor, layerName.c_str());
2233
2234 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2235 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2236
2237 RegisterInputSlots(graph, layerIndex, layer);
2238 RegisterOutputSlots(graph, layerIndex, layer);
2239}
2240
void IDeserializer::DeserializerImpl::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    // Deserializes a Reshape layer. The serialized target shape may contain a
    // single -1 wildcard, which OutputShapeOfReshape resolves against the
    // input's element count; the resolved shape is then validated against the
    // serialized output tensor's dimensions.
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    armnn::TensorInfo reshapeOutputTensorInfo = DeserializerImpl::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    // NOTE(review): the consistency check only runs when the layer has more
    // than one input — presumably the extra input supplies an explicit shape
    // tensor; confirm against the serializer before relying on this.
    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    // The layer's output info is the computed reshape info (input dtype and
    // quantization, new shape), not the serialized output info verbatim.
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2283
Finn Williams85d36712021-01-26 22:30:06 +00002284void IDeserializer::DeserializerImpl::ParseResize(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002285{
2286 CHECK_LAYERS(graph, 0, layerIndex);
2287
Finn Williams85d36712021-01-26 22:30:06 +00002288 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002289 CHECK_VALID_SIZE(inputs.size(), 1);
2290
Finn Williams85d36712021-01-26 22:30:06 +00002291 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002292 CHECK_VALID_SIZE(outputs.size(), 1);
2293
2294 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2295
2296 armnn::ResizeDescriptor descriptor;
2297 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
2298 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
2299 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
2300 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002301 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2302 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002303
2304 auto layerName = GetLayerName(graph, layerIndex);
2305 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2306
2307 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2308 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2309
2310 RegisterInputSlots(graph, layerIndex, layer);
2311 RegisterOutputSlots(graph, layerIndex, layer);
2312}
2313
Finn Williams85d36712021-01-26 22:30:06 +00002314void IDeserializer::DeserializerImpl::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002315{
2316 CHECK_LAYERS(graph, 0, layerIndex);
2317
Finn Williams85d36712021-01-26 22:30:06 +00002318 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002319 CHECK_VALID_SIZE(inputs.size(), 1);
2320
Finn Williams85d36712021-01-26 22:30:06 +00002321 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002322 CHECK_VALID_SIZE(outputs.size(), 1);
2323
2324 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2325
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002326 armnn::ResizeDescriptor descriptor;
2327 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002328 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002329 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
2330 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002331 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2332 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002333
2334 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002335 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002336
2337 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2338 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2339
2340 RegisterInputSlots(graph, layerIndex, layer);
2341 RegisterOutputSlots(graph, layerIndex, layer);
2342}
2343
Keith Davis3ae3f972021-05-21 16:33:48 +01002344void IDeserializer::DeserializerImpl::ParseShape(GraphPtr graph, unsigned int layerIndex)
2345{
2346 CHECK_LAYERS(graph, 0, layerIndex);
2347
2348 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2349 CHECK_VALID_SIZE(inputs.size(), 1);
2350
2351 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2352 CHECK_VALID_SIZE(outputs.size(), 1);
2353
2354 auto layerName = GetLayerName(graph, layerIndex);
2355 IConnectableLayer* layer = m_Network->AddShapeLayer( layerName.c_str());
2356
2357 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2358 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2359
2360 RegisterInputSlots(graph, layerIndex, layer);
2361 RegisterOutputSlots(graph, layerIndex, layer);
2362}
2363
Finn Williams85d36712021-01-26 22:30:06 +00002364void IDeserializer::DeserializerImpl::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002365{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002366 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002367
Finn Williams85d36712021-01-26 22:30:06 +00002368 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002369 CHECK_VALID_SIZE(inputs.size(), 1);
2370
Finn Williams85d36712021-01-26 22:30:06 +00002371 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002372 CHECK_VALID_SIZE(outputs.size(), 1);
2373
2374 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00002375 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002376 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002377
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002378 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2379
2380 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2381 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2382
Derek Lamberti8ddae332019-02-21 16:29:43 +00002383 RegisterInputSlots(graph, layerIndex, layer);
2384 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00002385}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002386
Finn Williams85d36712021-01-26 22:30:06 +00002387void IDeserializer::DeserializerImpl::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002388{
2389 CHECK_LAYERS(graph, 0, layerIndex);
2390
Finn Williams85d36712021-01-26 22:30:06 +00002391 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002392 CHECK_VALID_SIZE(inputs.size(), 1);
2393
Finn Williams85d36712021-01-26 22:30:06 +00002394 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002395 CHECK_VALID_SIZE(outputs.size(), 1);
2396
2397 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2398 auto flatBufferPadList = flatBufferDescriptor->padList();
2399 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
2400
2401 if (flatBufferPadList->Length() % 2 != 0)
2402 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002403 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2404 CHECK_LOCATION().AsString()));
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002405 }
2406
2407 std::vector<std::pair<unsigned int, unsigned int>> padList;
2408 padList.reserve(flatBufferPadList->Length() / 2);
2409 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
2410 {
2411 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2412 }
2413
2414 armnn::SpaceToBatchNdDescriptor descriptor;
2415 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2416 descriptor.m_BlockShape =
2417 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2418 descriptor.m_PadList = padList;
2419
2420 auto layerName = GetLayerName(graph, layerIndex);
2421 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2422
2423 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2424 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2425
2426 RegisterInputSlots(graph, layerIndex, layer);
2427 RegisterOutputSlots(graph, layerIndex, layer);
2428}
2429
Finn Williams85d36712021-01-26 22:30:06 +00002430void IDeserializer::DeserializerImpl::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002431{
2432 CHECK_LAYERS(graph, 0, layerIndex);
2433
Finn Williams85d36712021-01-26 22:30:06 +00002434 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002435 CHECK_VALID_SIZE(inputs.size(), 1);
2436
Finn Williams85d36712021-01-26 22:30:06 +00002437 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002438 CHECK_VALID_SIZE(outputs.size(), 1);
2439
2440 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2441
2442 armnn::SpaceToDepthDescriptor descriptor;
2443 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2444 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2445
2446 auto layerName = GetLayerName(graph, layerIndex);
2447 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2448
2449 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2450 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2451
2452 RegisterInputSlots(graph, layerIndex, layer);
2453 RegisterOutputSlots(graph, layerIndex, layer);
2454}
2455
Finn Williams85d36712021-01-26 22:30:06 +00002456armnn::NormalizationDescriptor IDeserializer::DeserializerImpl::GetNormalizationDescriptor(
2457 NormalizationDescriptorPtr normalizationDescriptor,
Nina Drozd57728782019-02-27 10:53:27 +00002458 unsigned int layerIndex)
2459{
Jan Eilers8eb25602020-03-09 12:13:48 +00002460 IgnoreUnused(layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002461 armnn::NormalizationDescriptor desc;
2462
2463 switch (normalizationDescriptor->normChannelType())
2464 {
2465 case NormalizationAlgorithmChannel_Across:
2466 {
2467 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
2468 break;
2469 }
2470 case NormalizationAlgorithmChannel_Within:
2471 {
2472 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
2473 break;
2474 }
2475 default:
2476 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002477 ARMNN_ASSERT_MSG(false, "Unsupported normalization channel type");
Nina Drozd57728782019-02-27 10:53:27 +00002478 }
2479 }
2480
2481 switch (normalizationDescriptor->normMethodType())
2482 {
2483 case NormalizationAlgorithmMethod_LocalBrightness:
2484 {
2485 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
2486 break;
2487 }
2488 case NormalizationAlgorithmMethod_LocalContrast:
2489 {
2490 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
2491 break;
2492 }
2493 default:
2494 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002495 ARMNN_ASSERT_MSG(false, "Unsupported normalization method type");
Nina Drozd57728782019-02-27 10:53:27 +00002496 }
2497 }
2498
2499 switch (normalizationDescriptor->dataLayout())
2500 {
2501 case DataLayout_NCHW:
2502 {
2503 desc.m_DataLayout = armnn::DataLayout::NCHW;
2504 break;
2505 }
2506 case DataLayout_NHWC:
2507 {
2508 desc.m_DataLayout = armnn::DataLayout::NHWC;
2509 break;
2510 }
2511 default:
2512 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002513 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Nina Drozd57728782019-02-27 10:53:27 +00002514 }
2515 }
2516
2517 desc.m_Alpha = normalizationDescriptor->alpha();
2518 desc.m_Beta = normalizationDescriptor->beta();
2519 desc.m_K = normalizationDescriptor->k();
2520 desc.m_NormSize = normalizationDescriptor->normSize();
2521
2522 return desc;
2523}
2524
Finn Williams85d36712021-01-26 22:30:06 +00002525void IDeserializer::DeserializerImpl::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
Nina Drozd57728782019-02-27 10:53:27 +00002526{
2527 CHECK_LAYERS(graph, 0, layerIndex);
2528
2529 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2530
Finn Williams85d36712021-01-26 22:30:06 +00002531 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002532 CHECK_VALID_SIZE(inputs.size(), 1);
2533
Finn Williams85d36712021-01-26 22:30:06 +00002534 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002535 CHECK_VALID_SIZE(outputs.size(), 1);
2536
2537 auto outputInfo = ToTensorInfo(outputs[0]);
2538
2539 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2540 auto layerName = GetLayerName(graph, layerIndex);
2541
2542 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2543 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2544
2545 RegisterInputSlots(graph, layerIndex, layer);
2546 RegisterOutputSlots(graph, layerIndex, layer);
2547}
2548
Finn Williams85d36712021-01-26 22:30:06 +00002549void IDeserializer::DeserializerImpl::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan8b42a382019-03-01 14:24:49 +00002550{
2551 CHECK_LAYERS(graph, 0, layerIndex);
2552 auto inputs = GetInputs(graph, layerIndex);
2553 CHECK_LOCATION();
2554 CHECK_VALID_SIZE(inputs.size(), 1);
2555
2556 auto outputs = GetOutputs(graph, layerIndex);
2557 CHECK_VALID_SIZE(outputs.size(), 1);
2558
2559 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagan8b42a382019-03-01 14:24:49 +00002560
josh minor4a3c6102020-01-06 16:40:46 -06002561 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
2562 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
Sadik Armagan8b42a382019-03-01 14:24:49 +00002563 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2564 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2565
2566 RegisterInputSlots(graph, layerIndex, layer);
2567 RegisterOutputSlots(graph, layerIndex, layer);
2568}
2569
Finn Williams85d36712021-01-26 22:30:06 +00002570void IDeserializer::DeserializerImpl::ParseSlice(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002571{
2572 CHECK_LAYERS(graph, 0, layerIndex);
2573
2574 auto inputs = GetInputs(graph, layerIndex);
2575 CHECK_VALID_SIZE(inputs.size(), 1);
2576
2577 auto outputs = GetOutputs(graph, layerIndex);
2578 CHECK_VALID_SIZE(outputs.size(), 1);
2579
2580 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2581
2582 auto fbBegin = fbDescriptor->begin();
2583 auto fbSize = fbDescriptor->size();
2584
2585 if (fbBegin->Length() != fbSize->Length())
2586 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002587 throw ParseException(fmt::format("Begin and size descriptors must have the same length {}",
2588 CHECK_LOCATION().AsString()));
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002589 }
2590
2591 armnn::SliceDescriptor descriptor;
2592 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
2593 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
2594
2595 auto layerName = GetLayerName(graph, layerIndex);
2596 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2597
2598 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2599 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2600
2601 RegisterInputSlots(graph, layerIndex, layer);
2602 RegisterOutputSlots(graph, layerIndex, layer);
2603}
2604
Finn Williams85d36712021-01-26 22:30:06 +00002605void IDeserializer::DeserializerImpl::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002606{
2607 CHECK_LAYERS(graph, 0, layerIndex);
2608
Finn Williams85d36712021-01-26 22:30:06 +00002609 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002610 CHECK_VALID_SIZE(inputs.size(), 1);
2611
Finn Williams85d36712021-01-26 22:30:06 +00002612 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002613 CHECK_VALID_SIZE(outputs.size(), 1);
2614
2615 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2616
2617 auto flatBufferBegin = flatBufferDescriptor->begin();
2618 auto flatBufferEnd = flatBufferDescriptor->end();
2619 auto flatBufferStride = flatBufferDescriptor->stride();
2620
2621 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
2622 flatBufferBegin->Length() == flatBufferStride->Length()))
2623 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002624 throw ParseException(fmt::format("The size of the begin, end, and stride must be equal {}",
2625 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002626 }
2627
2628 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2629 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2630 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
2631
2632 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
2633 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2634 descriptor.m_EndMask = flatBufferDescriptor->endMask();
2635 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
2636 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
2637 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
2638 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2639
2640 auto layerName = GetLayerName(graph, layerIndex);
2641 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
2642
2643 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2644 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2645
2646 RegisterInputSlots(graph, layerIndex, layer);
2647 RegisterOutputSlots(graph, layerIndex, layer);
2648}
2649
Finn Williams85d36712021-01-26 22:30:06 +00002650void IDeserializer::DeserializerImpl::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
Conor Kennedyda1f9752019-03-01 14:37:12 +00002651{
2652 CHECK_LAYERS(graph, 0, layerIndex);
2653 auto inputs = GetInputs(graph, layerIndex);
2654 CHECK_LOCATION();
2655 CHECK_VALID_SIZE(inputs.size(), 2);
2656
2657 auto outputs = GetOutputs(graph, layerIndex);
2658 CHECK_VALID_SIZE(outputs.size(), 1);
2659
2660 auto layerName = GetLayerName(graph, layerIndex);
2661 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
2662
2663 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2664 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2665
2666 RegisterInputSlots(graph, layerIndex, layer);
2667 RegisterOutputSlots(graph, layerIndex, layer);
2668}
2669
Finn Williams85d36712021-01-26 22:30:06 +00002670void IDeserializer::DeserializerImpl::ParseGather(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002671{
2672 CHECK_LAYERS(graph, 0, layerIndex);
2673
Finn Williams85d36712021-01-26 22:30:06 +00002674 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002675 CHECK_VALID_SIZE(inputs.size(), 2);
2676
Finn Williams85d36712021-01-26 22:30:06 +00002677 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002678 CHECK_VALID_SIZE(outputs.size(), 1);
2679
Teresa Charlin52664732020-06-29 16:27:03 +01002680 armnn::GatherDescriptor descriptor;
2681 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_GatherLayer()->descriptor()->axis();
2682
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002683 auto layerName = GetLayerName(graph, layerIndex);
Teresa Charlin52664732020-06-29 16:27:03 +01002684 IConnectableLayer* layer = m_Network->AddGatherLayer(descriptor, layerName.c_str());
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002685
2686 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002687 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2688
2689 RegisterInputSlots(graph, layerIndex, layer);
2690 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002691}
2692
Finn Williams85d36712021-01-26 22:30:06 +00002693void IDeserializer::DeserializerImpl::ParseMean(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002694{
2695 CHECK_LAYERS(graph, 0, layerIndex);
2696
Finn Williams85d36712021-01-26 22:30:06 +00002697 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002698 CHECK_VALID_SIZE(inputs.size(), 1);
2699
Finn Williams85d36712021-01-26 22:30:06 +00002700 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002701 CHECK_VALID_SIZE(outputs.size(), 1);
2702
2703 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
2704 auto flatBufferAxis = flatBufferDescriptor->axis();
2705 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
2706
2707 armnn::MeanDescriptor descriptor;
2708 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2709 descriptor.m_KeepDims = flatBufferKeepDims;
2710
2711 auto layerName = GetLayerName(graph, layerIndex);
2712 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
2713
2714 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2715 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2716
2717 RegisterInputSlots(graph, layerIndex, layer);
2718 RegisterOutputSlots(graph, layerIndex, layer);
2719}
2720
Finn Williams85d36712021-01-26 22:30:06 +00002721void IDeserializer::DeserializerImpl::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
Jim Flynn18ce3382019-03-08 11:08:30 +00002722{
2723 CHECK_LAYERS(graph, 0, layerIndex);
2724
Finn Williams85d36712021-01-26 22:30:06 +00002725 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00002726 CHECK_VALID_SIZE(inputs.size(), 1);
2727
Finn Williams85d36712021-01-26 22:30:06 +00002728 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00002729
2730 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
2731 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
2732 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
2733 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
2734 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
2735 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
2736
2737 // Check numViews and numDimensions corresponds to the ones already serialized ...
2738 // numViews == flatBufferViewSizes.size();
2739 // foreach: numDimensions == flatBufferViewSizes[x].size();
2740
2741 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
2742 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2743 {
2744 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
2745 {
2746 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
2747 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
2748 }
2749 }
2750
2751 auto layerName = GetLayerName(graph, layerIndex);
2752 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
2753
2754 // I could have as many outputs as views ...
2755 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2756 {
2757 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
2758 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
2759 }
2760
2761 RegisterInputSlots(graph, layerIndex, layer);
2762 RegisterOutputSlots(graph, layerIndex, layer);
2763}
2764
Finn Williams85d36712021-01-26 22:30:06 +00002765armnn::LstmDescriptor IDeserializer::DeserializerImpl::GetLstmDescriptor(LstmDescriptorPtr lstmDescriptor)
Jim Flynn11af3752019-03-19 17:22:29 +00002766{
2767 armnn::LstmDescriptor desc;
2768
2769 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
2770 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
2771 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
2772 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
2773 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
2774 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01002775 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00002776
2777 return desc;
2778}
2779
// Deserializes an Lstm layer: rebuilds the descriptor and the set of constant
// weight/bias tensors, then wires them into AddLstmLayer. Note the
// LstmInputParams struct only borrows pointers, so every ConstTensor local
// declared below must stay in scope until AddLstmLayer has been called.
void IDeserializer::DeserializerImpl::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters: always present regardless of the feature flags.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Optional input-gate parameters: only serialized when CIFG is disabled.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Optional projection parameters.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Optional peephole parameters.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Optional layer-normalization parameters; the input norm weights are
    // additionally gated on CIFG being disabled (no input gate otherwise).
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (lstmDescriptor.m_LayerNormEnabled)
    {
        if (!lstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    // Four output slots, each with its own serialized tensor info.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2895
Finn Williams85d36712021-01-26 22:30:06 +00002896armnn::QLstmDescriptor IDeserializer::DeserializerImpl::GetQLstmDescriptor(QLstmDescriptorPtr qLstmDescriptor)
James Conroy8d333182020-05-13 10:27:58 +01002897{
2898 armnn::QLstmDescriptor desc;
2899
2900 desc.m_CifgEnabled = qLstmDescriptor->cifgEnabled();
2901 desc.m_PeepholeEnabled = qLstmDescriptor->peepholeEnabled();
2902 desc.m_ProjectionEnabled = qLstmDescriptor->projectionEnabled();
2903 desc.m_LayerNormEnabled = qLstmDescriptor->layerNormEnabled();
2904
2905 desc.m_CellClip = qLstmDescriptor->cellClip();
2906 desc.m_ProjectionClip = qLstmDescriptor->projectionClip();
2907
2908 desc.m_InputIntermediateScale = qLstmDescriptor->inputIntermediateScale();
2909 desc.m_ForgetIntermediateScale = qLstmDescriptor->forgetIntermediateScale();
2910 desc.m_CellIntermediateScale = qLstmDescriptor->cellIntermediateScale();
2911 desc.m_OutputIntermediateScale = qLstmDescriptor->outputIntermediateScale();
2912
2913 desc.m_HiddenStateScale = qLstmDescriptor->hiddenStateScale();
2914 desc.m_HiddenStateZeroPoint = qLstmDescriptor->hiddenStateZeroPoint();
2915
2916 return desc;
2917}
2918
// Deserializes a QLstm (quantized LSTM) layer. LstmInputParams only borrows
// pointers, so every ConstTensor local below must remain in scope until
// AddQLstmLayer has been called.
void IDeserializer::DeserializerImpl::ParseQLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 3);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto qLstmDescriptor = GetQLstmDescriptor(flatBufferDescriptor);
    armnn::LstmInputParams qLstmInputParams;

    // Mandatory params
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    qLstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    qLstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    qLstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    qLstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    qLstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    qLstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    qLstmInputParams.m_ForgetGateBias = &forgetGateBias;
    qLstmInputParams.m_CellBias = &cellBias;
    qLstmInputParams.m_OutputGateBias = &outputGateBias;

    // Optional CIFG params: only serialized when CIFG is disabled (input gate present).
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor inputGateBias;

    if (!qLstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        qLstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        qLstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        qLstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Optional projection params
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;

    if (qLstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        qLstmInputParams.m_ProjectionWeights = &projectionWeights;
        qLstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Optional peephole params: cell-to-input additionally requires CIFG disabled.
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;

    if (qLstmDescriptor.m_PeepholeEnabled)
    {
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
            qLstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        }

        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        qLstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        qLstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Optional layer norm params: input norm weights also gated on CIFG disabled.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;

    if (qLstmDescriptor.m_LayerNormEnabled)
    {
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            qLstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }

        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        qLstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        qLstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        qLstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());

    // Three output slots: output state, cell state, and the layer output.
    armnn::TensorInfo outputStateOutInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputStateOutInfo);

    armnn::TensorInfo cellStateOutInfo = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(cellStateOutInfo);

    armnn::TensorInfo outputInfo = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3044
// Deserializes a QuantizedLstm layer. All twelve weight/bias tensors are
// mandatory for this layer type. QuantizedLstmInputParams only borrows
// pointers, so the ConstTensor locals must stay in scope until
// AddQuantizedLstmLayer has been called.
void IDeserializer::DeserializerImpl::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    armnn::QuantizedLstmInputParams lstmInputParams;

    // Materialize every constant tensor from the flatbuffer.
    armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    // Wire the (borrowed) pointers into the params struct.
    lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_InputGateBias = &inputGateBias;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());

    // Two output slots, each with its own serialized tensor info.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3098
Finn Williams85d36712021-01-26 22:30:06 +00003099void IDeserializer::DeserializerImpl::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003100{
3101 CHECK_LAYERS(graph, 0, layerIndex);
3102
Finn Williams85d36712021-01-26 22:30:06 +00003103 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003104 CHECK_VALID_SIZE(inputs.size(), 1);
3105
Finn Williams85d36712021-01-26 22:30:06 +00003106 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003107 CHECK_VALID_SIZE(outputs.size(), 1);
3108
3109 const std::string layerName = GetLayerName(graph, layerIndex);
3110 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
3111
3112 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3113 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3114
3115 RegisterInputSlots(graph, layerIndex, layer);
3116 RegisterOutputSlots(graph, layerIndex, layer);
3117}
3118
Finn Williams85d36712021-01-26 22:30:06 +00003119void IDeserializer::DeserializerImpl::ParseMerge(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003120{
3121 CHECK_LAYERS(graph, 0, layerIndex);
3122
Finn Williams85d36712021-01-26 22:30:06 +00003123 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003124 CHECK_VALID_SIZE(inputs.size(), 2);
3125
Finn Williams85d36712021-01-26 22:30:06 +00003126 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003127 CHECK_VALID_SIZE(outputs.size(), 1);
3128
3129 const std::string layerName = GetLayerName(graph, layerIndex);
3130 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
3131
3132 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3133 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3134
3135 RegisterInputSlots(graph, layerIndex, layer);
3136 RegisterOutputSlots(graph, layerIndex, layer);
3137}
3138
Finn Williams85d36712021-01-26 22:30:06 +00003139void IDeserializer::DeserializerImpl::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganeff363d2019-04-05 15:25:46 +01003140{
3141 CHECK_LAYERS(graph, 0, layerIndex);
3142 auto inputs = GetInputs(graph, layerIndex);
3143 CHECK_LOCATION();
3144 CHECK_VALID_SIZE(inputs.size(), 2);
3145
3146 auto outputs = GetOutputs(graph, layerIndex);
3147 CHECK_VALID_SIZE(outputs.size(), 2);
3148
3149 auto layerName = GetLayerName(graph, layerIndex);
3150 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
3151
3152 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
3153 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
3154
3155 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
3156 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
3157
3158 RegisterInputSlots(graph, layerIndex, layer);
3159 RegisterOutputSlots(graph, layerIndex, layer);
3160}
3161
Finn Williams85d36712021-01-26 22:30:06 +00003162void IDeserializer::DeserializerImpl::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01003163{
3164 CHECK_LAYERS(graph, 0, layerIndex);
3165 auto inputs = GetInputs(graph, layerIndex);
3166 CHECK_LOCATION();
3167 CHECK_VALID_SIZE(inputs.size(), 2);
3168
3169 auto outputs = GetOutputs(graph, layerIndex);
3170 CHECK_VALID_SIZE(outputs.size(), 1);
3171
3172 auto layerName = GetLayerName(graph, layerIndex);
3173 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
3174
3175 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3176 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3177
3178 RegisterInputSlots(graph, layerIndex, layer);
3179 RegisterOutputSlots(graph, layerIndex, layer);
3180}
3181
Finn Williams85d36712021-01-26 22:30:06 +00003182void IDeserializer::DeserializerImpl::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
Mike Kellyc9ea45a2020-02-28 18:11:58 +00003183{
3184 CHECK_LAYERS(graph, 0, layerIndex);
3185
3186 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
3187
3188 auto inputs = GetInputs(graph, layerIndex);
3189 CHECK_VALID_SIZE(inputs.size(), 1);
3190
3191 auto outputs = GetOutputs(graph, layerIndex);
3192 CHECK_VALID_SIZE(outputs.size(), 1);
3193 auto outputInfo = ToTensorInfo(outputs[0]);
3194
3195 auto layerName = GetLayerName(graph, layerIndex);
3196 const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
3197
3198 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
3199 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
3200
3201 RegisterInputSlots(graph, layerIndex, layer);
3202 RegisterOutputSlots(graph, layerIndex, layer);
3203}
3204
Finn Williams85d36712021-01-26 22:30:06 +00003205void IDeserializer::DeserializerImpl::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01003206{
3207 CHECK_LAYERS(graph, 0, layerIndex);
3208
3209 auto inputs = GetInputs(graph, layerIndex);
3210 CHECK_VALID_SIZE(inputs.size(), 1);
3211
3212 auto outputs = GetOutputs(graph, layerIndex);
3213 CHECK_VALID_SIZE(outputs.size(), 1);
3214
3215 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
3216 auto layerName = GetLayerName(graph, layerIndex);
3217 auto serializerDescriptor = serializerLayer->descriptor();
3218
3219 armnn::TransposeConvolution2dDescriptor descriptor;
3220 descriptor.m_PadLeft = serializerDescriptor->padLeft();
3221 descriptor.m_PadRight = serializerDescriptor->padRight();
3222 descriptor.m_PadTop = serializerDescriptor->padTop();
3223 descriptor.m_PadBottom = serializerDescriptor->padBottom();
3224 descriptor.m_StrideX = serializerDescriptor->strideX();
3225 descriptor.m_StrideY = serializerDescriptor->strideY();;
3226 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
3227 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
3228
3229 // weights & biases
3230 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
3231 armnn::Optional<armnn::ConstTensor> optionalBiases;
3232 if (descriptor.m_BiasEnabled)
3233 {
3234 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
3235 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
3236 }
3237
3238 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
3239 weights,
3240 optionalBiases,
3241 layerName.c_str());
3242
3243 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3244 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3245
3246 RegisterInputSlots(graph, layerIndex, layer);
3247 RegisterOutputSlots(graph, layerIndex, layer);
3248}
3249
Finn Williams85d36712021-01-26 22:30:06 +00003250void IDeserializer::DeserializerImpl::ParseStack(GraphPtr graph, unsigned int layerIndex)
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003251{
3252 CHECK_LAYERS(graph, 0, layerIndex);
3253 auto inputs = GetInputs(graph, layerIndex);
3254
3255 auto outputs = GetOutputs(graph, layerIndex);
3256 CHECK_VALID_SIZE(outputs.size(), 1);
3257
3258 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
3259 unsigned int axis = flatBufferDescriptor->axis();
3260 unsigned int numInputs = flatBufferDescriptor->numInputs();
3261 CHECK_VALID_SIZE(inputs.size(), numInputs);
3262
3263 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
3264 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
3265 flatBufferInputShape->begin() + flatBufferInputShape->size());
3266
3267 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
3268 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
3269
3270 for (unsigned int i=0; i<inputs.size(); ++i)
3271 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01003272 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003273 if (descriptor.m_InputShape != inputShape)
3274 {
3275 std::stringstream ss;
3276 ss << "Shape of input "
3277 << i
3278 << " "
3279 << inputShape
3280 << " does not equal defined input shape "
3281 << descriptor.m_InputShape
3282 << ": "
3283 << CHECK_LOCATION().AsString();
3284 throw ParseException(ss.str());
3285 }
3286 }
3287
3288 auto layerName = GetLayerName(graph, layerIndex);
3289 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
3290
3291 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3292 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3293
3294 RegisterInputSlots(graph, layerIndex, layer);
3295 RegisterOutputSlots(graph, layerIndex, layer);
3296}
3297
Finn Williams85d36712021-01-26 22:30:06 +00003298void IDeserializer::DeserializerImpl::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar85121a22019-10-23 10:41:35 +01003299{
3300 CHECK_LAYERS(graph, 0, layerIndex);
3301
3302 auto inputs = GetInputs(graph, layerIndex);
3303 auto outputs = GetOutputs(graph, layerIndex);
3304
3305 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
3306 auto fbDescriptor = fbLayer->descriptor();
3307
3308 armnn::StandInDescriptor descriptor;
3309 descriptor.m_NumInputs = fbDescriptor->numInputs();
3310 descriptor.m_NumOutputs = fbDescriptor->numOutputs();
3311
3312 CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
3313 CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);
3314
3315 const std::string layerName = GetLayerName(graph, layerIndex);
3316 armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());
3317
3318 for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
3319 {
3320 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
3321 layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
3322 }
3323
3324 RegisterInputSlots(graph, layerIndex, layer);
3325 RegisterOutputSlots(graph, layerIndex, layer);
3326}
3327
Derek Lamberti0028d1b2019-02-20 13:57:42 +00003328} // namespace armnnDeserializer