blob: a5114ecfcaa7c4efbe98d9e6e947f963796505d2 [file] [log] [blame]
//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#include "Deserializer.hpp"

#include <armnn/Descriptors.hpp>
#include <armnn/Exceptions.hpp>
#include <armnn/TypesUtils.hpp>
#include <armnn/LstmParams.hpp>
#include <armnn/QuantizedLstmParams.hpp>

#include <armnnUtils/Permute.hpp>
#include <armnnUtils/Transpose.hpp>
#include <armnn/utility/Assert.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/NumericCast.hpp>

#include <ParserHelper.hpp>
#include <VerificationHelpers.hpp>

#include <fmt/format.h>

#include <algorithm>
#include <fstream>
#include <limits>
#include <memory>
#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000029
30using armnn::ParseException;
31using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000032using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000033
Derek Lamberti0028d1b2019-02-20 13:57:42 +000034namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000035{
Kevin May43a799c2019-02-08 16:31:42 +000036
Finn Williams85d36712021-01-26 22:30:06 +000037IDeserializer::IDeserializer() : pDeserializerImpl(new DeserializerImpl()){}
38
39IDeserializer::~IDeserializer() = default;
40
41IDeserializer *IDeserializer::CreateRaw()
42{
43 return new IDeserializer();
44}
45
46IDeserializerPtr IDeserializer::Create()
47{
48 return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
49}
50
// Counterpart to CreateRaw(): releases a deserializer instance.
// Used as the custom deleter inside IDeserializerPtr (see Create()).
void IDeserializer::Destroy(IDeserializer *parser)
{
    delete parser;
}
55
// Deserialize an in-memory flatbuffer blob into an INetwork.
// Pure forwarder to the pimpl implementation.
armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(const std::vector<uint8_t> &binaryContent)
{
    return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
}
60
// Deserialize a flatbuffer read from a stream into an INetwork.
// Pure forwarder to the pimpl implementation.
armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(std::istream &binaryContent)
{
    return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
}
65
// Look up the binding info (binding id + TensorInfo) for a named network
// input. Pure forwarder to the pimpl implementation.
BindingPointInfo IDeserializer::GetNetworkInputBindingInfo(unsigned int layerId, const std::string &name) const
{
    return pDeserializerImpl->GetNetworkInputBindingInfo(layerId, name);
}
70
// Look up the binding info (binding id + TensorInfo) for a named network
// output. Pure forwarder to the pimpl implementation.
BindingPointInfo IDeserializer::GetNetworkOutputBindingInfo(unsigned int layerId, const std::string &name) const
{
    return pDeserializerImpl->GetNetworkOutputBindingInfo(layerId, name);
}
75
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000076namespace
77{
Kevin May43a799c2019-02-08 16:31:42 +000078
// Sentinel layer index for "virtual" layers (e.g. binding points) that do not
// occupy a real slot in the serialized layer list; CheckLayers() exempts it.
const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
80
Finn Williams85d36712021-01-26 22:30:06 +000081 void CheckGraph(const GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000082 unsigned int layersIndex,
83 const CheckLocation& location)
84{
85 if (graph->layers() == nullptr)
86 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010087 throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
88 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
89 "layers:{1} at {2}",
90 location.m_Function,
91 layersIndex,
92 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +000093 }
94 else if (layersIndex >= graph->layers()->size())
95 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010096 throw ParseException(fmt::format("{0} was called with an invalid layers index. layers:{1} at {2}",
97 location.m_Function,
98 layersIndex,
99 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000100 }
101}
102
// Validate graph, layers-table index, and layer index before dereferencing.
// Throws armnn::ParseException describing the calling site on any failure.
// VIRTUAL_LAYER_ID is exempt from the layer-index bound check.
void CheckLayers(const GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
                                         "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(fmt::format("{0} was called with an invalid layers index. "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    // NOTE(review): 'graph->layers()[layersIndex]' performs pointer arithmetic
    // on the flatbuffers vector pointer rather than element indexing; it only
    // equals 'graph->layers()->size()' when layersIndex == 0. Confirm intent
    // before relying on layersIndex > 0 here.
    else if (layerIndex >= graph->layers()[layersIndex].size()
            && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(fmt::format("{0} was called with an invalid layer index. "
                                         "layers:{1} layer:{2} at {3}",
                                         location.m_Function,
                                         layersIndex,
                                         layerIndex,
                                         location.FileLine()));
    }
}
136
Finn Williams85d36712021-01-26 22:30:06 +0000137void CheckTensorPtr(TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000138 const CheckLocation& location)
139{
140 if (rawPtr == nullptr)
141 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100142 throw ParseException(fmt::format("{0} was called with a null tensor pointer. at {1}",
143 location.m_Function,
144 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000145 }
146}
147
Finn Williams85d36712021-01-26 22:30:06 +0000148void CheckConstTensorPtr(ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000149 const CheckLocation& location)
150{
151 if (rawPtr == nullptr)
152 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100153 throw ParseException(fmt::format("{0} was called with a null const tensor pointer. at {1}",
154 location.m_Function,
155 location.FileLine()));
Mike Kellya0766c32019-02-19 17:22:07 +0000156 }
157}
158
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000159void CheckConstTensorSize(const unsigned int constTensorSize,
160 const unsigned int tensorSize,
161 const CheckLocation& location)
162{
163 if (constTensorSize != tensorSize)
164 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100165 throw ParseException(fmt::format("{0} wrong number of components supplied to tensor. at:{1}",
166 location.m_Function,
167 location.FileLine()));
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000168 }
169}
170
// Convenience wrappers around the validation helpers above; each captures the
// call site via CHECK_LOCATION() so thrown ParseExceptions carry file/line.
#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
} // anonymous namespace
186
Saoirse Stewart263829c2019-02-19 15:54:14 +0000187bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
188{
189 const unsigned int actualSize = actual.GetNumDimensions();
190 if (actualSize != expected.size())
191 {
192 return false;
193 }
194
195 for (unsigned int i = 0u; i < actualSize; i++)
196 {
197 if (actual[i] != static_cast<unsigned int>(expected[i]))
198 {
199 return false;
200 }
201 }
202
203 return true;
204}
205
// DeserializerImpl constructor: builds the dispatch table mapping every
// serialized Layer enum value to its Parse* member function. Slots are
// pre-filled with ParseUnsupportedLayer so any enum value not explicitly
// registered below fails loudly rather than dereferencing an empty function.
IDeserializer::DeserializerImpl::DeserializerImpl()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &IDeserializer::DeserializerImpl::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer] = &DeserializerImpl::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer] = &DeserializerImpl::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &DeserializerImpl::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer] = &DeserializerImpl::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &DeserializerImpl::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &DeserializerImpl::ParseBatchNormalization;
    m_ParserFunctions[Layer_CastLayer] = &DeserializerImpl::ParseCast;
    m_ParserFunctions[Layer_ChannelShuffleLayer] = &DeserializerImpl::ParseChannelShuffle;
    m_ParserFunctions[Layer_ComparisonLayer] = &DeserializerImpl::ParseComparison;
    m_ParserFunctions[Layer_ConcatLayer] = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer] = &DeserializerImpl::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &DeserializerImpl::ParseConvolution2d;
    m_ParserFunctions[Layer_Convolution3dLayer] = &DeserializerImpl::ParseConvolution3d;
    m_ParserFunctions[Layer_DepthToSpaceLayer] = &DeserializerImpl::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &DeserializerImpl::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &DeserializerImpl::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &DeserializerImpl::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &DeserializerImpl::ParseDivision;
    m_ParserFunctions[Layer_ElementwiseUnaryLayer] = &DeserializerImpl::ParseElementwiseUnary;
    m_ParserFunctions[Layer_EqualLayer] = &DeserializerImpl::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &DeserializerImpl::ParseFullyConnected;
    m_ParserFunctions[Layer_FillLayer] = &DeserializerImpl::ParseFill;
    m_ParserFunctions[Layer_FloorLayer] = &DeserializerImpl::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &DeserializerImpl::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer] = &DeserializerImpl::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer] = &DeserializerImpl::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &DeserializerImpl::ParseL2Normalization;
    m_ParserFunctions[Layer_LogicalBinaryLayer] = &DeserializerImpl::ParseLogicalBinary;
    m_ParserFunctions[Layer_LogSoftmaxLayer] = &DeserializerImpl::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer] = &DeserializerImpl::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &DeserializerImpl::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &DeserializerImpl::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &DeserializerImpl::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer] = &DeserializerImpl::ParseMerge;
    // Deprecated MergerLayer is routed to the Concat parser.
    m_ParserFunctions[Layer_MergerLayer] = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer] = &DeserializerImpl::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &DeserializerImpl::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &DeserializerImpl::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &DeserializerImpl::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &DeserializerImpl::ParsePooling2d;
    m_ParserFunctions[Layer_Pooling3dLayer] = &DeserializerImpl::ParsePooling3d;
    m_ParserFunctions[Layer_PreluLayer] = &DeserializerImpl::ParsePrelu;
    m_ParserFunctions[Layer_QLstmLayer] = &DeserializerImpl::ParseQLstm;
    m_ParserFunctions[Layer_QuantizeLayer] = &DeserializerImpl::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer] = &DeserializerImpl::ParseQuantizedLstm;
    m_ParserFunctions[Layer_RankLayer] = &DeserializerImpl::ParseRank;
    m_ParserFunctions[Layer_ReduceLayer] = &DeserializerImpl::ParseReduce;
    m_ParserFunctions[Layer_ReshapeLayer] = &DeserializerImpl::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &DeserializerImpl::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer] = &DeserializerImpl::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer] = &DeserializerImpl::ParseRsqrt;
    m_ParserFunctions[Layer_ShapeLayer] = &DeserializerImpl::ParseShape;
    m_ParserFunctions[Layer_SliceLayer] = &DeserializerImpl::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer] = &DeserializerImpl::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &DeserializerImpl::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer] = &DeserializerImpl::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer] = &DeserializerImpl::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer] = &DeserializerImpl::ParseStack;
    m_ParserFunctions[Layer_StandInLayer] = &DeserializerImpl::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer] = &DeserializerImpl::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &DeserializerImpl::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer] = &DeserializerImpl::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &DeserializerImpl::ParseTransposeConvolution2d;
    m_ParserFunctions[Layer_TransposeLayer] = &DeserializerImpl::ParseTranspose;
    m_ParserFunctions[Layer_UnidirectionalSequenceLstmLayer] = &DeserializerImpl::ParseUnidirectionalSequenceLstm;
}
278
// Resolve the LayerBase record of the layer at 'layerIndex'. The serialized
// layer is a flatbuffers union, so each concrete type must be unwrapped via
// its own layer_as_* accessor before reaching the common base(). Input and
// Output layers nest an extra BindableLayerBase, hence base()->base().
// Throws armnn::ParseException for Layer_NONE or any unrecognized enum value.
LayerBaseRawPtr IDeserializer::DeserializerImpl::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_CastLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_CastLayer()->base();
        case Layer::Layer_ChannelShuffleLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->base();
        case Layer::Layer_ComparisonLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_Convolution3dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution3dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_ElementwiseUnaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FillLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FillLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            // Bindable layer: extra base() to reach the common LayerBase.
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_InstanceNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LogicalBinaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer()->base();
        case Layer::Layer_LogSoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            // Bindable layer: extra base() to reach the common LayerBase.
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_Pooling3dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_RankLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
        case Layer::Layer_ReduceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReduceLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_ShapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ShapeLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StandInLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_TransposeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
        case Layer::Layer_UnidirectionalSequenceLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(fmt::format("Layer type {} not recognized", layerType));
    }
}
424
Finn Williams85d36712021-01-26 22:30:06 +0000425std::string IDeserializer::DeserializerImpl::GetLayerName(const GraphPtr& graph, unsigned int index)
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000426{
427 auto layer = GetBaseLayer(graph, index);
428 assert(layer);
429 return layer->layerName()->str();
430}
431
Finn Williams85d36712021-01-26 22:30:06 +0000432int32_t IDeserializer::DeserializerImpl::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000433{
434 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
435
436 if (layerType == Layer::Layer_InputLayer)
437 {
438 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
439 }
440 else if ( layerType == Layer::Layer_OutputLayer )
441 {
442 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
443 }
444 return 0;
445}
446
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000447armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000448{
449 switch (dataLayout)
450 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000451 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000452 return armnn::DataLayout::NHWC;
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100453 case armnnSerializer::DataLayout::DataLayout_NDHWC:
454 return armnn::DataLayout::NDHWC;
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +0100455 case armnnSerializer::DataLayout::DataLayout_NCDHW:
456 return armnn::DataLayout::NCDHW;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000457 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000458 default:
459 return armnn::DataLayout::NCHW;
460 }
461}
462
// Map the serialized ActivationFunction enum onto armnn::ActivationFunction.
// Unrecognized values fall back to Sigmoid (the default branch below).
armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        case armnnSerializer::ActivationFunction_Elu:
            return armnn::ActivationFunction::Elu;
        case armnnSerializer::ActivationFunction_HardSwish:
            return armnn::ActivationFunction::HardSwish;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}
493
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100494armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
495{
496 switch (function)
497 {
498 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
499 return armnn::ArgMinMaxFunction::Max;
500 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
501 default:
502 return armnn::ArgMinMaxFunction::Min;
503 }
504}
505
// Map the serialized ComparisonOperation enum onto armnn::ComparisonOperation.
// Unrecognized values fall back to NotEqual (the default branch below).
armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
            return armnn::ComparisonOperation::Equal;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
            return armnn::ComparisonOperation::Greater;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
            return armnn::ComparisonOperation::GreaterOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
            return armnn::ComparisonOperation::Less;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
            return armnn::ComparisonOperation::LessOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
        default:
            return armnn::ComparisonOperation::NotEqual;
    }
}
525
// Map the serialized ReduceOperation enum onto armnn::ReduceOperation.
// Unrecognized values fall back to Sum (the default branch below).
armnn::ReduceOperation ToReduceOperation(armnnSerializer::ReduceOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::ReduceOperation::ReduceOperation_Sum:
            return armnn::ReduceOperation::Sum;
        case armnnSerializer::ReduceOperation::ReduceOperation_Max:
            return armnn::ReduceOperation::Max;
        case armnnSerializer::ReduceOperation::ReduceOperation_Mean:
            return armnn::ReduceOperation::Mean;
        case armnnSerializer::ReduceOperation::ReduceOperation_Min:
            return armnn::ReduceOperation::Min;
        case armnnSerializer::ReduceOperation::ReduceOperation_Prod:
            return armnn::ReduceOperation::Prod;
        default:
            return armnn::ReduceOperation::Sum;
    }
}
544
James Conroyaba90cd2020-11-06 16:28:18 +0000545armnn::LogicalBinaryOperation ToLogicalBinaryOperation(armnnSerializer::LogicalBinaryOperation operation)
546{
547 switch (operation)
548 {
549 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalAnd:
550 return armnn::LogicalBinaryOperation::LogicalAnd;
551 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalOr:
552 return armnn::LogicalBinaryOperation::LogicalOr;
553 default:
554 throw armnn::InvalidArgumentException("Logical Binary operation unknown");
555 }
556}
557
josh minor4a3c6102020-01-06 16:40:46 -0600558armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
559{
560 switch (operation)
561 {
562 case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
563 return armnn::UnaryOperation::Abs;
564 case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
565 return armnn::UnaryOperation::Rsqrt;
566 case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
567 return armnn::UnaryOperation::Sqrt;
568 case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
569 return armnn::UnaryOperation::Exp;
570 case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
571 return armnn::UnaryOperation::Neg;
James Conroyaba90cd2020-11-06 16:28:18 +0000572 case armnnSerializer::UnaryOperation::UnaryOperation_LogicalNot:
573 return armnn::UnaryOperation::LogicalNot;
Teresa Charlin50de4fa2021-05-31 18:47:33 +0100574 case armnnSerializer::UnaryOperation::UnaryOperation_Log:
575 return armnn::UnaryOperation::Log;
576 case armnnSerializer::UnaryOperation::UnaryOperation_Sin:
577 return armnn::UnaryOperation::Sin;
josh minor4a3c6102020-01-06 16:40:46 -0600578 default:
579 throw armnn::InvalidArgumentException("Unary operation unknown");
580 }
581}
582
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +0100583armnn::PaddingMode ToPaddingMode(armnnSerializer::PaddingMode paddingMode)
584{
585 switch (paddingMode)
586 {
587 case armnnSerializer::PaddingMode::PaddingMode_Reflect:
588 return armnn::PaddingMode::Reflect;
589 case armnnSerializer::PaddingMode::PaddingMode_Symmetric:
590 return armnn::PaddingMode::Symmetric;
591 default:
592 return armnn::PaddingMode::Constant;
593 }
594}
595
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100596armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
597{
598 switch (method)
599 {
600 case armnnSerializer::ResizeMethod_NearestNeighbor:
601 return armnn::ResizeMethod::NearestNeighbor;
602 case armnnSerializer::ResizeMethod_Bilinear:
Aron Virginas-Tar3c9b2702019-10-31 13:45:16 +0000603 return armnn::ResizeMethod::Bilinear;
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100604 default:
605 return armnn::ResizeMethod::NearestNeighbor;
606 }
607}
608
/// Build an armnn::TensorInfo from a serialized tensor description.
/// Maps the flatbuffer data type (including deprecated aliases), copies the
/// quantization parameters, and reconstructs the shape - handling the special
/// Scalar and NotSpecified dimensionalities before the general N-D case.
/// @throws ParseException for an unsupported data type.
armnn::TensorInfo ToTensorInfo(TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QAsymmS8:
            type = armnn::DataType::QAsymmS8;
            break;
        case DataType_QSymmS8:
            type = armnn::DataType::QSymmS8;
            break;
        // Deprecated name kept for backward compatibility with older files.
        case DataType_QuantisedAsymm8:
        case DataType_QAsymmU8:
            type = armnn::DataType::QAsymmU8;
            break;
        case DataType_QSymmS16:
        // Deprecated name kept for backward compatibility with older files.
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QSymmS16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Signed64:
            type = armnn::DataType::Signed64;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
                                             tensorPtr->dataType(),
                                             EnumNameDataType(tensorPtr->dataType()),
                                             location.AsString()));
        }
    }

    // Per-tensor quantization parameters (ignored later if per-axis scales exist).
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    // Scalar and NotSpecified tensors have no dimension list to copy; return early.
    if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::Scalar))
    {
        return armnn::TensorInfo(TensorShape{armnn::Dimensionality::Scalar},
                                 type,
                                 quantizationScale,
                                 quantizationOffset);
    }
    else if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::NotSpecified))
    {
        armnn::TensorInfo result(TensorShape{Dimensionality::NotSpecified},
                                 type,
                                 quantizationScale,
                                 quantizationOffset);
        return result;
    }

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
    bool dimensionsSpecificity[armnn::MaxNumOfTensorDimensions];
    std::fill_n(dimensionsSpecificity, armnn::MaxNumOfTensorDimensions, true);
    // For backwards compatibility check if the dimensionSpecificity vector is present first.
    // The default is to have dimensionSpecificity set to all true's anyway.
    if (tensorPtr->dimensionSpecificity() != nullptr)
    {
        auto dimensionSpecificity = tensorPtr->dimensionSpecificity();
        // NOTE(review): 'size' is overwritten with the specificity count here and then
        // used as the rank below - presumably the serializer guarantees it matches
        // dimensions->size() and stays <= MaxNumOfTensorDimensions; worth confirming.
        size = dimensionSpecificity->size();
        for (unsigned int i = 0; i < size; ++i)
        {
            dimensionsSpecificity[i] = dimensionSpecificity->Get(i);
        }
    }
    // Construct a TensorShape
    TensorShape shape(size, outputDims.data(), dimensionsSpecificity);

    // Per-axis quantization: if a scales vector is present it takes precedence
    // over the scalar scale/offset pair.
    auto quantizationScales = tensorPtr->quantizationScales();
    if (quantizationScales)
    {
        unsigned int quantizationScalesSize = quantizationScales->size();
        std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
        unsigned int quantizationDim = tensorPtr->quantizationDim();
        armnn::TensorInfo result(shape,
                                 type,
                                 scales,
                                 quantizationDim);
        return result;
    }

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(shape,
                             type,
                             quantizationScale,
                             quantizationOffset);

    return result;
}
714
Finn Williams85d36712021-01-26 22:30:06 +0000715armnn::ConstTensor ToConstTensor(ConstTensorRawPtr constTensorPtr)
Mike Kellya0766c32019-02-19 17:22:07 +0000716{
717 CHECK_CONST_TENSOR_PTR(constTensorPtr);
718 armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());
Matthew Sloyan81beae32021-07-13 19:46:11 +0100719 tensorInfo.SetConstant();
Mike Kellya0766c32019-02-19 17:22:07 +0000720
721 switch (constTensorPtr->data_type())
722 {
723 case ConstTensorData_ByteData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000724 {
725 auto byteData = constTensorPtr->data_as_ByteData()->data();
726 CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
727 return armnn::ConstTensor(tensorInfo, byteData->data());
728 }
Mike Kellya0766c32019-02-19 17:22:07 +0000729 case ConstTensorData_ShortData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000730 {
731 auto shortData = constTensorPtr->data_as_ShortData()->data();
732 CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
733 return armnn::ConstTensor(tensorInfo, shortData->data());
734 }
Mike Kellya0766c32019-02-19 17:22:07 +0000735 case ConstTensorData_IntData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000736 {
737 auto intData = constTensorPtr->data_as_IntData()->data();
738 CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
739 return armnn::ConstTensor(tensorInfo, intData->data());
740 }
Mike Kellya0766c32019-02-19 17:22:07 +0000741 case ConstTensorData_LongData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000742 {
743 auto longData = constTensorPtr->data_as_LongData()->data();
744 CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
745 return armnn::ConstTensor(tensorInfo, longData->data());
746 }
Mike Kellya0766c32019-02-19 17:22:07 +0000747 default:
748 {
749 CheckLocation location = CHECK_LOCATION();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100750 throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
751 constTensorPtr->data_type(),
752 EnumNameConstTensorData(constTensorPtr->data_type()),
753 location.AsString()));
Mike Kellya0766c32019-02-19 17:22:07 +0000754 }
755 }
756}
757
Finn Williams85d36712021-01-26 22:30:06 +0000758TensorRawPtrVector IDeserializer::DeserializerImpl::GetInputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000759{
760 CHECK_LAYERS(graphPtr, 0, layerIndex);
761 auto layer = GetBaseLayer(graphPtr, layerIndex);
762 const auto& numInputs = layer->inputSlots()->size();
763
764 TensorRawPtrVector result(numInputs);
765
766 for (unsigned int i=0; i<numInputs; ++i)
767 {
768 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
769 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
770 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
771 }
772 return result;
773}
774
Finn Williams85d36712021-01-26 22:30:06 +0000775TensorRawPtrVector IDeserializer::DeserializerImpl::GetOutputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000776{
777 CHECK_LAYERS(graphPtr, 0, layerIndex);
778 auto layer = GetBaseLayer(graphPtr, layerIndex);
779 const auto& numOutputs = layer->outputSlots()->size();
780
781 TensorRawPtrVector result(numOutputs);
782
783 for (unsigned int i=0; i<numOutputs; ++i)
784 {
785 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
786 }
787 return result;
788}
789
Finn Williams85d36712021-01-26 22:30:06 +0000790void IDeserializer::DeserializerImpl::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000791{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000792 CHECK_LAYERS(graph, 0, layerIndex);
793 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100794 throw ParseException(fmt::format("Layer not supported. layerIndex: {0} "
795 "layerName: {1} / {2}",
796 layerIndex,
797 layerName,
798 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000799}
800
Finn Williams85d36712021-01-26 22:30:06 +0000801void IDeserializer::DeserializerImpl::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000802{
803 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000804 m_InputBindings.clear();
805 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000806}
807
Kevin May43a799c2019-02-08 16:31:42 +0000808
Finn Williams85d36712021-01-26 22:30:06 +0000809INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000810{
811 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000812 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
813 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000814}
815
Finn Williams85d36712021-01-26 22:30:06 +0000816armnn::INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000817{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000818 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000819 std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
820 GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
821 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000822}
823
Finn Williams85d36712021-01-26 22:30:06 +0000824GraphPtr IDeserializer::DeserializerImpl::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000825{
826 if (binaryContent == nullptr)
827 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100828 throw InvalidArgumentException(fmt::format("Invalid (null) binary content {}",
829 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000830 }
831 flatbuffers::Verifier verifier(binaryContent, len);
832 if (verifier.VerifyBuffer<SerializedGraph>() == false)
833 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100834 throw ParseException(fmt::format("Buffer doesn't conform to the expected Armnn "
835 "flatbuffers format. size:{0} {1}",
836 len,
837 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000838 }
839 return GetSerializedGraph(binaryContent);
840}
841
/// Translate a verified serialized graph into an armnn::INetwork.
/// Pass 1 dispatches every non-Input/Output layer to its registered parser,
/// pass 2 creates the bound input/output layers, and pass 3 wires up the
/// connections each parser recorded in m_GraphConnections.
INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    ARMNN_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        // Input/Output layers are created in SetupInputLayers/SetupOutputLayers below.
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function (member-function pointer
            // indexed by the flatbuffer layer type)
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            // An output slot may feed several input slots; connect each one.
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    // Hand ownership of the built network to the caller; m_Network is left empty.
    return std::move(m_Network);
}
882
Finn Williams85d36712021-01-26 22:30:06 +0000883BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000884 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000885{
Jan Eilers8eb25602020-03-09 12:13:48 +0000886 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000887 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000888 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000889 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000890 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000891 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000892 }
893 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100894 throw ParseException(fmt::format("No input binding found for layer:{0} / {1}",
895 name,
896 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000897}
898
Finn Williams85d36712021-01-26 22:30:06 +0000899BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000900 const std::string& name) const
901{
Jan Eilers8eb25602020-03-09 12:13:48 +0000902 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000903 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000904 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000905 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000906 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000907 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000908 }
909 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100910 throw ParseException(fmt::format("No output binding found for layer:{0} / {1}",
911 name,
912 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000913}
914
Finn Williams85d36712021-01-26 22:30:06 +0000915unsigned int IDeserializer::DeserializerImpl::GetInputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000916{
917 for (unsigned int i = 0; i < graph->layers()->size(); i++)
918 {
919 auto layer = graph->layers()->Get(i);
920 if (layer->layer_type() == Layer::Layer_InputLayer)
921 {
922 auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
923 if (layerBindingId == targetId)
924 {
925 return i;
926 }
927 }
928 }
929 throw ParseException("Input layer with given layerBindingId not found");
930}
931
Finn Williams85d36712021-01-26 22:30:06 +0000932unsigned int IDeserializer::DeserializerImpl::GetOutputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000933{
934 for (unsigned int i = 0; i < graph->layers()->size(); i++)
935 {
936 auto layer = graph->layers()->Get(i);
937 if (layer->layer_type() == Layer::Layer_OutputLayer)
938 {
939 auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
940 if (layerBindingId == targetId)
941 {
942 return i;
943 }
944 }
945 }
946 throw ParseException("Output layer with given layerBindingId not found");
947}
948
Finn Williams85d36712021-01-26 22:30:06 +0000949unsigned int IDeserializer::DeserializerImpl::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100950{
951 for (unsigned int i = 0; i < graph->layers()->size(); i++)
952 {
953 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
954 if (layer->index() == targetIndex)
955 {
956 return i;
957 }
958 }
959 throw ParseException("Layer with given index not found");
960}
961
Finn Williams85d36712021-01-26 22:30:06 +0000962IDeserializer::DeserializerImpl::FeatureVersions IDeserializer::DeserializerImpl::GetFeatureVersions(GraphPtr graph)
Tee Jungaa920c52019-11-05 10:48:25 +0000963{
Finn Williams85d36712021-01-26 22:30:06 +0000964 IDeserializer::DeserializerImpl::FeatureVersions versions;
Tee Jungaa920c52019-11-05 10:48:25 +0000965
966 if (graph->featureVersions())
967 {
968 versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
Jan Eilers53ef7952021-06-02 12:01:25 +0100969 versions.m_WeightsLayoutScheme = graph->featureVersions()->weightsLayoutScheme();
Matthew Sloyan81beae32021-07-13 19:46:11 +0100970 versions.m_ConstTensorsAsInputs = graph->featureVersions()->constantTensorsAsInputs();
Tee Jungaa920c52019-11-05 10:48:25 +0000971 }
972
973 return versions;
974}
975
/// Create an armnn InputLayer for every serialized input id, record its
/// binding info, and register its output slots for later connection wiring.
void IDeserializer::DeserializerImpl::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numInputs = graph->inputIds()->size();
    m_InputBindings.clear();
    m_InputBindings.reserve(numInputs);

    for (unsigned int i = 0; i < numInputs; i++)
    {
        unsigned int inputLayerIndex = 0xFFFFFFFF;
        // Scheme 0 (legacy files): inputIds hold layer indices directly.
        // Newer scheme: inputIds hold layerBindingIds that must be searched for.
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            const unsigned int inputId = armnn::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
            inputLayerIndex = GetLayerIndexInVector(graph, inputId);
        }
        else
        {
            const int inputId = graph->inputIds()->Get(i);
            inputLayerIndex = GetInputLayerInVector(graph, inputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");

        IConnectableLayer* inputLayer =
            m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());

        // Inputs have exactly one output slot; propagate its tensor info and
        // register it so downstream layers can connect to it.
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
        inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
        RegisterOutputSlots(graph, inputLayerIndex, inputLayer);

        // Remember name -> (bindingId, tensorInfo) for GetNetworkInputBindingInfo.
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
1014
/// Create an armnn OutputLayer for every serialized output id, record its
/// binding info (tensor info is taken from the slot that feeds the output),
/// and register its input slot for later connection wiring.
void IDeserializer::DeserializerImpl::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numOutputs = graph->outputIds()->size();
    m_OutputBindings.clear();
    m_OutputBindings.reserve(numOutputs);

    for (unsigned int i = 0; i < numOutputs; i++)
    {
        unsigned int outputLayerIndex = 0xFFFFFFFF;
        // Scheme 0 (legacy files): outputIds hold layer indices directly.
        // Newer scheme: outputIds hold layerBindingIds that must be searched for.
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            const unsigned int outputId = armnn::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
            outputLayerIndex = GetLayerIndexInVector(graph, outputId);
        }
        else
        {
            const int outputId = graph->outputIds()->Get(i);
            outputLayerIndex = GetOutputLayerInVector(graph, outputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Output has no name.");

        IConnectableLayer* outputLayer =
            m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());

        RegisterInputSlots(graph, outputLayerIndex, outputLayer);
        // Resolve the layer and slot that feed this output so the binding can
        // expose the correct tensor info.
        unsigned int sourceLayerIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
        // NOTE(review): outputSlotIndex is resolved via GetLayerIndexInVector,
        // i.e. a slot index is looked up in the layer-index table - presumably
        // intentional given the serialized numbering, but worth confirming.
        unsigned int outputSlotIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->outputSlotIndex());
        LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(
            sourceBaseLayer->outputSlots()->Get(outputSlotIndex)->tensorInfo());
        // Remember name -> (bindingId, tensorInfo) for GetNetworkOutputBindingInfo.
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
1057
Finn Williams85d36712021-01-26 22:30:06 +00001058void IDeserializer::DeserializerImpl::RegisterOutputSlots(GraphPtr graph,
Derek Lamberti8ddae332019-02-21 16:29:43 +00001059 uint32_t layerIndex,
1060 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +00001061{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001062 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001063 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001064 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
1065 if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
Kevin May43a799c2019-02-08 16:31:42 +00001066 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001067 throw ParseException(fmt::format("The number of outputslots ({0}) does not match the number expected ({1})"
1068 " for layer index: {2} {3}",
1069 baseLayer->outputSlots()->size(),
1070 layer->GetNumOutputSlots(),
1071 layerIndex,
1072 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +00001073 }
1074
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001075 for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +00001076 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001077 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
1078 armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
1079 // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
1080 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
Kevin May43a799c2019-02-08 16:31:42 +00001081 }
1082}
1083
Finn Williams85d36712021-01-26 22:30:06 +00001084void IDeserializer::DeserializerImpl::RegisterInputSlots(GraphPtr graph,
Matthew Sloyan81beae32021-07-13 19:46:11 +01001085 uint32_t layerIndex,
1086 armnn::IConnectableLayer* layer,
1087 std::vector<unsigned int> ignoreSlots)
Kevin May43a799c2019-02-08 16:31:42 +00001088{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001089 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001090 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001091 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
Matthew Sloyan81beae32021-07-13 19:46:11 +01001092
1093 if (baseLayer->inputSlots()->size() != (layer->GetNumInputSlots() - ignoreSlots.size()))
Kevin May43a799c2019-02-08 16:31:42 +00001094 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001095 throw ParseException(fmt::format("The number of inputslots ({0}) does not match the number expected ({1})"
1096 " for layer index:{2} {3}",
1097 baseLayer->inputSlots()->size(),
1098 layer->GetNumInputSlots(),
1099 layerIndex,
1100 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +00001101 }
1102
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001103 for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +00001104 {
Matthew Sloyan81beae32021-07-13 19:46:11 +01001105 // Check if slot should be ignored.
1106 if (std::find(ignoreSlots.begin(), ignoreSlots.end(), i) == ignoreSlots.end())
1107 {
1108 auto fbInputSlot = baseLayer->inputSlots()->Get(i);
1109 auto fbConnection = fbInputSlot->connection();
1110 armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
1111 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
1112 }
Kevin May43a799c2019-02-08 16:31:42 +00001113 }
1114}
1115
Finn Williams85d36712021-01-26 22:30:06 +00001116void IDeserializer::DeserializerImpl::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001117 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001118 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +00001119{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001120 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +00001121 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001122 m_GraphConnections[sourceLayerIndex] = Connections();
1123 }
1124
1125 Connections& connections = m_GraphConnections[sourceLayerIndex];
1126 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
1127 {
1128 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +00001129 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001130 else
1131 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001132 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001133 }
1134}
Kevin May43a799c2019-02-08 16:31:42 +00001135
Finn Williams85d36712021-01-26 22:30:06 +00001136void IDeserializer::DeserializerImpl::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001137 uint32_t outputSlotIndex,
1138 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001139{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001140 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1141 {
1142 m_GraphConnections[sourceLayerIndex] = Connections();
1143 }
1144
1145 Connections& connections = m_GraphConnections[sourceLayerIndex];
1146 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1147 {
1148 throw ParseException("Same output slot index processed twice");
1149 }
1150
1151 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +00001152}
1153
Finn Williams85d36712021-01-26 22:30:06 +00001154void IDeserializer::DeserializerImpl::ParseAbs(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001155{
1156 CHECK_LAYERS(graph, 0, layerIndex);
1157 auto inputs = GetInputs(graph, layerIndex);
1158 CHECK_LOCATION();
1159 CHECK_VALID_SIZE(inputs.size(), 1);
1160
1161 auto outputs = GetOutputs(graph, layerIndex);
1162 CHECK_VALID_SIZE(outputs.size(), 1);
1163
1164 auto layerName = GetLayerName(graph, layerIndex);
1165
josh minor4a3c6102020-01-06 16:40:46 -06001166 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
1167 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001168 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1169 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1170
1171 RegisterInputSlots(graph, layerIndex, layer);
1172 RegisterOutputSlots(graph, layerIndex, layer);
1173}
1174
Finn Williams85d36712021-01-26 22:30:06 +00001175void IDeserializer::DeserializerImpl::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +00001176{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001177 CHECK_LAYERS(graph, 0, layerIndex);
1178 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001179 CHECK_LOCATION();
1180 CHECK_VALID_SIZE(inputs.size(), 1);
1181
Derek Lamberti8ddae332019-02-21 16:29:43 +00001182 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001183 CHECK_VALID_SIZE(outputs.size(), 1);
1184
Derek Lamberti8ddae332019-02-21 16:29:43 +00001185 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001186 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001187 auto serializerDescriptor = serializerLayer->descriptor();
1188
1189 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001190 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +00001191 descriptor.m_A = serializerDescriptor->a();
1192 descriptor.m_B = serializerDescriptor->b();
1193
1194 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
1195 layerName.c_str());
1196 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1197 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1198
Derek Lamberti8ddae332019-02-21 16:29:43 +00001199 RegisterInputSlots(graph, layerIndex, layer);
1200 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +00001201}
1202
Finn Williams85d36712021-01-26 22:30:06 +00001203void IDeserializer::DeserializerImpl::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001204{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001205 CHECK_LAYERS(graph, 0, layerIndex);
1206 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001207 CHECK_LOCATION();
1208 CHECK_VALID_SIZE(inputs.size(), 2);
1209
Derek Lamberti8ddae332019-02-21 16:29:43 +00001210 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001211 CHECK_VALID_SIZE(outputs.size(), 1);
1212
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001213 auto layerName = GetLayerName(graph, layerIndex);
1214 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001215
1216 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1217 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1218
Derek Lamberti8ddae332019-02-21 16:29:43 +00001219 RegisterInputSlots(graph, layerIndex, layer);
1220 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001221}
1222
Finn Williams85d36712021-01-26 22:30:06 +00001223void IDeserializer::DeserializerImpl::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001224{
1225 CHECK_LAYERS(graph, 0, layerIndex);
1226 auto inputs = GetInputs(graph, layerIndex);
1227 CHECK_LOCATION();
1228 CHECK_VALID_SIZE(inputs.size(), 1);
1229
1230 auto outputs = GetOutputs(graph, layerIndex);
1231 CHECK_VALID_SIZE(outputs.size(), 1);
1232
1233 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1234 auto serializerDescriptor = serializerLayer->descriptor();
1235
1236 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001237 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001238 descriptor.m_Axis = serializerDescriptor->axis();
1239 auto layerName = GetLayerName(graph, layerIndex);
1240 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1241
1242 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1243 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1244
1245 RegisterInputSlots(graph, layerIndex, layer);
1246 RegisterOutputSlots(graph, layerIndex, layer);
1247}
1248
Finn Williams85d36712021-01-26 22:30:06 +00001249void IDeserializer::DeserializerImpl::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001250{
1251 CHECK_LAYERS(graph, 0, layerIndex);
1252
Finn Williams85d36712021-01-26 22:30:06 +00001253 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001254 CHECK_VALID_SIZE(inputs.size(), 1);
1255
Finn Williams85d36712021-01-26 22:30:06 +00001256 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001257 CHECK_VALID_SIZE(outputs.size(), 1);
1258
1259 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1260 auto flatBufferCrops = flatBufferDescriptor->crops();
1261 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1262
1263 if (flatBufferCrops->Length() % 2 != 0)
1264 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001265 throw ParseException(fmt::format("The size of crops must be divisible by 2 {}", CHECK_LOCATION().AsString()));
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001266 }
1267
1268 std::vector<std::pair<unsigned int, unsigned int>> crops;
1269 crops.reserve(flatBufferCrops->Length() / 2);
1270 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
1271 {
1272 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1273 }
1274
1275 armnn::BatchToSpaceNdDescriptor descriptor;
1276 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1277 descriptor.m_BlockShape =
1278 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1279 descriptor.m_Crops = crops;
1280
1281 auto layerName = GetLayerName(graph, layerIndex);
1282 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1283
1284 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1285 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1286
1287 RegisterInputSlots(graph, layerIndex, layer);
1288 RegisterOutputSlots(graph, layerIndex, layer);
1289}
1290
Finn Williams85d36712021-01-26 22:30:06 +00001291void IDeserializer::DeserializerImpl::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
ruoyan018e7fa232019-02-28 15:09:07 +00001292{
1293 CHECK_LAYERS(graph, 0, layerIndex);
1294
1295 auto inputs = GetInputs(graph, layerIndex);
1296 CHECK_VALID_SIZE(inputs.size(), 1);
1297
1298 auto outputs = GetOutputs(graph, layerIndex);
1299 CHECK_VALID_SIZE(outputs.size(), 1);
1300 auto outputInfo = ToTensorInfo(outputs[0]);
1301
ruoyan015c7ab052019-03-04 14:48:02 +00001302 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001303
1304 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1305 auto serializerDescriptor = serializerLayer->descriptor();
1306
1307 armnn::BatchNormalizationDescriptor descriptor;
1308 descriptor.m_Eps = serializerDescriptor->eps();
1309 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1310
1311 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1312 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1313 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1314 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1315
1316 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1317 mean,
1318 variance,
1319 beta,
1320 gamma,
1321 layerName.c_str());
1322 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1323
1324 RegisterInputSlots(graph, layerIndex, layer);
1325 RegisterOutputSlots(graph, layerIndex, layer);
1326}
1327
mathad01b392e982021-04-07 12:07:30 +01001328void IDeserializer::DeserializerImpl::ParseCast(GraphPtr graph, unsigned int layerIndex)
1329{
1330 CHECK_LAYERS(graph, 0, layerIndex);
1331 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1332 CHECK_LOCATION();
1333 CHECK_VALID_SIZE(inputs.size(), 1);
1334
1335 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1336 CHECK_VALID_SIZE(outputs.size(), 1);
1337
1338 auto layerName = GetLayerName(graph, layerIndex);
1339
1340 IConnectableLayer* layer = m_Network->AddCastLayer(layerName.c_str());
1341
1342 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1343 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1344
1345 RegisterInputSlots(graph, layerIndex, layer);
1346 RegisterOutputSlots(graph, layerIndex, layer);
1347}
1348
Finn Williams85d36712021-01-26 22:30:06 +00001349void IDeserializer::DeserializerImpl::ParseConstant(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy76277882019-02-26 08:29:54 +00001350{
1351 CHECK_LAYERS(graph, 0, layerIndex);
1352 CHECK_LOCATION();
1353
1354 auto outputs = GetOutputs(graph, layerIndex);
1355 CHECK_VALID_SIZE(outputs.size(), 1);
1356
1357 auto layerName = GetLayerName(graph, layerIndex);
1358
1359 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
1360 auto serializerInput = serializerLayer->input();
1361
1362 armnn::ConstTensor input = ToConstTensor(serializerInput);
1363
1364 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
1365
1366 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1367 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1368
1369 RegisterOutputSlots(graph, layerIndex, layer);
1370}
1371
Finn Williams85d36712021-01-26 22:30:06 +00001372void IDeserializer::DeserializerImpl::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001373{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001374 CHECK_LAYERS(graph, 0, layerIndex);
1375 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001376 CHECK_LOCATION();
1377 CHECK_VALID_SIZE(inputs.size(), 1);
1378
Derek Lamberti8ddae332019-02-21 16:29:43 +00001379 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001380 CHECK_VALID_SIZE(outputs.size(), 1);
1381
Derek Lamberti8ddae332019-02-21 16:29:43 +00001382 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001383 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001384 auto serializerDescriptor = serializerLayer->descriptor();
1385
1386 armnn::Convolution2dDescriptor descriptor;
1387 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1388 descriptor.m_PadRight = serializerDescriptor->padRight();
1389 descriptor.m_PadTop = serializerDescriptor->padTop();
1390 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1391 descriptor.m_StrideX = serializerDescriptor->strideX();
1392 descriptor.m_StrideY = serializerDescriptor->strideY();;
Matthew Benthamacad04e2019-05-13 10:02:45 +01001393 descriptor.m_DilationX = serializerDescriptor->dilationX();
1394 descriptor.m_DilationY = serializerDescriptor->dilationY();;
Mike Kellya0766c32019-02-19 17:22:07 +00001395 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1396 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1397
1398 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1399 armnn::ConstTensor biases;
1400
Matteo Martincighfc598e12019-05-14 10:36:13 +01001401 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001402 if (descriptor.m_BiasEnabled)
1403 {
1404 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001405 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001406 }
1407 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1408 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001409 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001410 layerName.c_str());
1411 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1412 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1413
Derek Lamberti8ddae332019-02-21 16:29:43 +00001414 RegisterInputSlots(graph, layerIndex, layer);
1415 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001416}
1417
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001418void IDeserializer::DeserializerImpl::ParseConvolution3d(GraphPtr graph, unsigned int layerIndex)
1419{
1420 CHECK_LAYERS(graph, 0, layerIndex);
1421 auto inputs = GetInputs(graph, layerIndex);
1422 CHECK_LOCATION();
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001423
1424 auto outputs = GetOutputs(graph, layerIndex);
1425 CHECK_VALID_SIZE(outputs.size(), 1);
1426
1427 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution3dLayer();
1428 auto layerName = GetLayerName(graph, layerIndex);
1429 auto serializerDescriptor = serializerLayer->descriptor();
1430
1431 armnn::Convolution3dDescriptor descriptor;
1432 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1433 descriptor.m_PadRight = serializerDescriptor->padRight();
1434 descriptor.m_PadTop = serializerDescriptor->padTop();
1435 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1436 descriptor.m_PadFront = serializerDescriptor->padFront();
1437 descriptor.m_PadBack = serializerDescriptor->padBack();
1438 descriptor.m_StrideX = serializerDescriptor->strideX();
1439 descriptor.m_StrideY = serializerDescriptor->strideY();
1440 descriptor.m_StrideZ = serializerDescriptor->strideZ();
1441 descriptor.m_DilationX = serializerDescriptor->dilationX();
1442 descriptor.m_DilationY = serializerDescriptor->dilationY();
1443 descriptor.m_DilationZ = serializerDescriptor->dilationZ();
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001444 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001445 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1446
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001447 uint32_t numInputs = descriptor.GetNumInputs();
1448 CHECK_VALID_SIZE(inputs.size(), numInputs);
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001449
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001450 IConnectableLayer* layer = m_Network->AddConvolution3dLayer(descriptor, layerName.c_str());
1451
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001452 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1453 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1454
1455 RegisterInputSlots(graph, layerIndex, layer);
1456 RegisterOutputSlots(graph, layerIndex, layer);
1457}
1458
Finn Williams85d36712021-01-26 22:30:06 +00001459void IDeserializer::DeserializerImpl::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001460{
1461 CHECK_LAYERS(graph, 0, layerIndex);
1462
1463 auto inputs = GetInputs(graph, layerIndex);
1464 CHECK_VALID_SIZE(inputs.size(), 1);
1465
1466 auto outputs = GetOutputs(graph, layerIndex);
1467 CHECK_VALID_SIZE(outputs.size(), 1);
1468
1469 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1470
1471 armnn::DepthToSpaceDescriptor descriptor;
1472 descriptor.m_BlockSize = fbDescriptor->blockSize();
1473 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1474
1475 auto layerName = GetLayerName(graph, layerIndex);
1476 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1477
1478 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1479 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1480
1481 RegisterInputSlots(graph, layerIndex, layer);
1482 RegisterOutputSlots(graph, layerIndex, layer);
1483}
1484
Finn Williams85d36712021-01-26 22:30:06 +00001485void IDeserializer::DeserializerImpl::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001486{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001487 CHECK_LAYERS(graph, 0, layerIndex);
1488 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001489 CHECK_LOCATION();
1490 CHECK_VALID_SIZE(inputs.size(), 1);
1491
Derek Lamberti8ddae332019-02-21 16:29:43 +00001492 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001493 CHECK_VALID_SIZE(outputs.size(), 1);
1494
Derek Lamberti8ddae332019-02-21 16:29:43 +00001495 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001496 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001497 auto serializerDescriptor = serializerLayer->descriptor();
1498
1499 armnn::DepthwiseConvolution2dDescriptor descriptor;
1500 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1501 descriptor.m_PadRight = serializerDescriptor->padRight();
1502 descriptor.m_PadTop = serializerDescriptor->padTop();
1503 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1504 descriptor.m_StrideX = serializerDescriptor->strideX();
Aron Virginas-Tar5e1b0cf2019-06-21 14:20:11 +01001505 descriptor.m_StrideY = serializerDescriptor->strideY();
1506 descriptor.m_DilationX = serializerDescriptor->dilationX();
1507 descriptor.m_DilationY = serializerDescriptor->dilationY();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001508 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1509 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1510
Jan Eilers53ef7952021-06-02 12:01:25 +01001511 IConnectableLayer* layer;
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001512
Matteo Martincighfc598e12019-05-14 10:36:13 +01001513 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001514 if (descriptor.m_BiasEnabled)
1515 {
Jan Eilers53ef7952021-06-02 12:01:25 +01001516 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001517 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001518 }
Jan Eilers53ef7952021-06-02 12:01:25 +01001519
1520 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1521 // The data layout for weights in ArmNN used to be [M,I,H,W] but now it's changed to [1,H,W,I*M]
1522 // When reading older flatbuffer files we need to add a permutation to get to the new layout.
1523 if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
1524 {
1525 // Permute weights [ H, W, M, I ] --> [ 1, H, W, I*M ]
1526 // Step1: [ M, I, H, W ] --> [ H, W, I, M]
1527 PermutationVector permutationVector = { 3, 2, 0, 1 };
1528 armnn::TensorInfo weightsInfo = weights.GetInfo();
1529 std::unique_ptr<unsigned char[]> permuteBuffer(new unsigned char[weightsInfo.GetNumBytes()]);
1530 weightsInfo = armnnUtils::Permuted(weightsInfo, permutationVector);
1531 armnnUtils::Permute(weightsInfo.GetShape(), permutationVector,
1532 weights.GetMemoryArea(), permuteBuffer.get(),
1533 GetDataTypeSize(weightsInfo.GetDataType()));
1534
1535 // Step2: Reshape [ H, W, I, M] --> [ 1, H, W, I*M ]
1536 auto weightsShape = weightsInfo.GetShape();
1537 weightsInfo.SetShape({1,
1538 weightsShape[0],
1539 weightsShape[1],
1540 weightsShape[2]*weightsShape[3]});
1541
1542 armnn::ConstTensor weightsPermuted(weightsInfo, permuteBuffer.get());
1543
1544 layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1545 weightsPermuted,
1546 optionalBiases,
1547 layerName.c_str());
1548 }
1549 else
1550 {
1551 layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1552 weights,
1553 optionalBiases,
1554 layerName.c_str());
1555 }
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001556
1557 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1558 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1559
Derek Lamberti8ddae332019-02-21 16:29:43 +00001560 RegisterInputSlots(graph, layerIndex, layer);
1561 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001562}
1563
Finn Williams85d36712021-01-26 22:30:06 +00001564void IDeserializer::DeserializerImpl::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001565{
1566 CHECK_LAYERS(graph, 0, layerIndex);
1567 auto inputs = GetInputs(graph, layerIndex);
1568 CHECK_LOCATION();
1569 CHECK_VALID_SIZE(inputs.size(), 2);
1570
1571 auto outputs = GetOutputs(graph, layerIndex);
1572 CHECK_VALID_SIZE(outputs.size(), 4);
1573
1574 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1575 auto layerName = GetLayerName(graph, layerIndex);
1576 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1577
1578 armnn::DetectionPostProcessDescriptor descriptor;
1579 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1580 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1581 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1582 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1583 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1584 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1585 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1586 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1587 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1588 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1589 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1590
1591 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1592
1593 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1594 anchors,
1595 layerName.c_str());
1596
1597 for (unsigned int i = 0; i < 4; i++)
1598 {
1599 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1600 }
1601
1602 RegisterInputSlots(graph, layerIndex, layer);
1603 RegisterOutputSlots(graph, layerIndex, layer);
1604}
1605
Finn Williams85d36712021-01-26 22:30:06 +00001606void IDeserializer::DeserializerImpl::ParseDivision(GraphPtr graph, unsigned int layerIndex)
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001607{
1608 CHECK_LAYERS(graph, 0, layerIndex);
1609 auto inputs = GetInputs(graph, layerIndex);
1610 CHECK_LOCATION();
1611 CHECK_VALID_SIZE(inputs.size(), 2);
1612
1613 auto outputs = GetOutputs(graph, layerIndex);
1614 CHECK_VALID_SIZE(outputs.size(), 1);
1615
1616 auto layerName = GetLayerName(graph, layerIndex);
1617 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1618
1619 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1620 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1621
1622 RegisterInputSlots(graph, layerIndex, layer);
1623 RegisterOutputSlots(graph, layerIndex, layer);
1624}
1625
Finn Williams85d36712021-01-26 22:30:06 +00001626void IDeserializer::DeserializerImpl::ParseEqual(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001627{
1628 CHECK_LAYERS(graph, 0, layerIndex);
1629 auto inputs = GetInputs(graph, layerIndex);
1630 CHECK_LOCATION();
1631 CHECK_VALID_SIZE(inputs.size(), 2);
1632
1633 auto outputs = GetOutputs(graph, layerIndex);
1634 CHECK_VALID_SIZE(outputs.size(), 1);
1635
1636 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001637 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1638 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001639
1640 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1641 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1642
1643 RegisterInputSlots(graph, layerIndex, layer);
1644 RegisterOutputSlots(graph, layerIndex, layer);
1645}
1646
Finn Williams85d36712021-01-26 22:30:06 +00001647void IDeserializer::DeserializerImpl::ParseFill(GraphPtr graph, unsigned int layerIndex)
Keith Davis300ad562020-06-04 16:34:23 +01001648{
1649 CHECK_LAYERS(graph, 0, layerIndex);
1650 auto inputs = GetInputs(graph, layerIndex);
1651 CHECK_LOCATION();
1652 CHECK_VALID_SIZE(inputs.size(), 1);
1653
1654 auto outputs = GetOutputs(graph, layerIndex);
1655 CHECK_VALID_SIZE(outputs.size(), 1);
1656
1657 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armaganfd0cae32021-11-08 17:18:31 +00001658 armnn::FillDescriptor descriptor;
1659 descriptor.m_Value = graph->layers()->Get(layerIndex)->layer_as_FillLayer()->descriptor()->value();
Keith Davis300ad562020-06-04 16:34:23 +01001660 IConnectableLayer* layer = m_Network->AddFillLayer(descriptor, layerName.c_str());
1661
1662 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1663 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1664
1665 RegisterInputSlots(graph, layerIndex, layer);
1666 RegisterOutputSlots(graph, layerIndex, layer);
1667}
1668
Finn Williams85d36712021-01-26 22:30:06 +00001669void IDeserializer::DeserializerImpl::ParseGreater(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001670{
1671 CHECK_LAYERS(graph, 0, layerIndex);
1672 auto inputs = GetInputs(graph, layerIndex);
1673 CHECK_LOCATION();
1674 CHECK_VALID_SIZE(inputs.size(), 2);
1675
1676 auto outputs = GetOutputs(graph, layerIndex);
1677 CHECK_VALID_SIZE(outputs.size(), 1);
1678
1679 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001680 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1681 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001682
1683 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1684 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1685
1686 RegisterInputSlots(graph, layerIndex, layer);
1687 RegisterOutputSlots(graph, layerIndex, layer);
1688}
1689
Finn Williams85d36712021-01-26 22:30:06 +00001690void IDeserializer::DeserializerImpl::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001691{
1692 CHECK_LAYERS(graph, 0, layerIndex);
1693
1694 auto inputs = GetInputs(graph, layerIndex);
1695 CHECK_VALID_SIZE(inputs.size(), 1);
1696
1697 auto outputs = GetOutputs(graph, layerIndex);
1698 CHECK_VALID_SIZE(outputs.size(), 1);
1699
1700 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1701 auto fbDescriptor = fbLayer->descriptor();
1702
1703 armnn::InstanceNormalizationDescriptor descriptor;
1704 descriptor.m_Gamma = fbDescriptor->gamma();
1705 descriptor.m_Beta = fbDescriptor->beta();
1706 descriptor.m_Eps = fbDescriptor->eps();
1707 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1708
1709 const std::string layerName = GetLayerName(graph, layerIndex);
1710 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1711
1712 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1713 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1714
1715 RegisterInputSlots(graph, layerIndex, layer);
1716 RegisterOutputSlots(graph, layerIndex, layer);
1717}
1718
Finn Williams85d36712021-01-26 22:30:06 +00001719void IDeserializer::DeserializerImpl::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001720{
1721 CHECK_LAYERS(graph, 0, layerIndex);
1722
1723 auto inputs = GetInputs(graph, layerIndex);
1724 CHECK_VALID_SIZE(inputs.size(), 1);
1725
1726 auto outputs = GetOutputs(graph, layerIndex);
1727 CHECK_VALID_SIZE(outputs.size(), 1);
1728 auto outputInfo = ToTensorInfo(outputs[0]);
1729
1730 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1731 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1732
1733 auto layerName = GetLayerName(graph, layerIndex);
1734 armnn::L2NormalizationDescriptor descriptor;
1735 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001736 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001737
1738 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1739 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1740
1741 RegisterInputSlots(graph, layerIndex, layer);
1742 RegisterOutputSlots(graph, layerIndex, layer);
1743}
1744
Finn Williams85d36712021-01-26 22:30:06 +00001745void IDeserializer::DeserializerImpl::ParseLogicalBinary(GraphPtr graph, unsigned int layerIndex)
James Conroyaba90cd2020-11-06 16:28:18 +00001746{
1747 CHECK_LAYERS(graph, 0, layerIndex);
1748 CHECK_LOCATION();
1749
1750 auto inputs = GetInputs(graph, layerIndex);
1751 CHECK_VALID_SIZE(inputs.size(), 2);
1752
1753 auto outputs = GetOutputs(graph, layerIndex);
1754 CHECK_VALID_SIZE(outputs.size(), 1);
1755
1756 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer();
1757 auto fbDescriptor = fbLayer->descriptor();
1758
1759 armnn::LogicalBinaryDescriptor descriptor;
1760 descriptor.m_Operation = ToLogicalBinaryOperation(fbDescriptor->operation());
1761
1762 const std::string& layerName = GetLayerName(graph, layerIndex);
1763 IConnectableLayer* layer = m_Network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1764
1765 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1766 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1767
1768 RegisterInputSlots(graph, layerIndex, layer);
1769 RegisterOutputSlots(graph, layerIndex, layer);
1770}
1771
Finn Williams85d36712021-01-26 22:30:06 +00001772void IDeserializer::DeserializerImpl::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan26257852019-10-14 13:00:47 +01001773{
1774 CHECK_LAYERS(graph, 0, layerIndex);
1775
Finn Williams85d36712021-01-26 22:30:06 +00001776 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001777 CHECK_VALID_SIZE(inputs.size(), 1);
1778
Finn Williams85d36712021-01-26 22:30:06 +00001779 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001780 CHECK_VALID_SIZE(outputs.size(), 1);
1781
1782 armnn::LogSoftmaxDescriptor descriptor;
1783 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1784 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1785 auto layerName = GetLayerName(graph, layerIndex);
1786
1787 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1788
1789 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1790 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1791
1792 RegisterInputSlots(graph, layerIndex, layer);
1793 RegisterOutputSlots(graph, layerIndex, layer);
1794}
1795
Finn Williams85d36712021-01-26 22:30:06 +00001796void IDeserializer::DeserializerImpl::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001797{
1798 CHECK_LAYERS(graph, 0, layerIndex);
1799 auto inputs = GetInputs(graph, layerIndex);
1800 CHECK_LOCATION();
1801 CHECK_VALID_SIZE(inputs.size(), 2);
1802
1803 auto outputs = GetOutputs(graph, layerIndex);
1804 CHECK_VALID_SIZE(outputs.size(), 1);
1805
1806 auto layerName = GetLayerName(graph, layerIndex);
1807 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1808
1809 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1810 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1811
1812 RegisterInputSlots(graph, layerIndex, layer);
1813 RegisterOutputSlots(graph, layerIndex, layer);
1814}
1815
Finn Williams85d36712021-01-26 22:30:06 +00001816void IDeserializer::DeserializerImpl::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001817{
1818 CHECK_LAYERS(graph, 0, layerIndex);
1819 auto inputs = GetInputs(graph, layerIndex);
1820 CHECK_LOCATION();
1821 CHECK_VALID_SIZE(inputs.size(), 2);
1822
1823 auto outputs = GetOutputs(graph, layerIndex);
1824 CHECK_VALID_SIZE(outputs.size(), 1);
1825
1826 auto layerName = GetLayerName(graph, layerIndex);
1827 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1828
1829 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1830 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1831
1832 RegisterInputSlots(graph, layerIndex, layer);
1833 RegisterOutputSlots(graph, layerIndex, layer);
1834}
1835
Jim Flynne242f2d2019-05-22 14:24:13 +01001836const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1837 unsigned int layerIndex)
1838{
1839 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1840
1841 switch (layerType)
1842 {
1843 case Layer::Layer_ConcatLayer:
1844 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1845 case Layer::Layer_MergerLayer:
1846 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1847 default:
1848 throw armnn::Exception("unknown layer type, should be concat or merger");
1849 }
1850}
Simon Obute51f67772021-09-03 15:50:13 +01001851void IDeserializer::DeserializerImpl::ParseChannelShuffle(GraphPtr graph, unsigned int layerIndex)
1852{
1853 CHECK_LAYERS(graph, 0, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001854
Simon Obute51f67772021-09-03 15:50:13 +01001855 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1856 CHECK_VALID_SIZE(inputs.size(), 1);
1857
1858 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1859 CHECK_VALID_SIZE(outputs.size(), 1);
1860
1861 armnn::ChannelShuffleDescriptor descriptor;
1862 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->axis();
1863 descriptor.m_NumGroups =
1864 graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->numGroups();
1865
1866 auto layerName = GetLayerName(graph, layerIndex);
1867 IConnectableLayer* layer = m_Network->AddChannelShuffleLayer(descriptor, layerName.c_str());
1868
1869 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1870 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1871
1872 RegisterInputSlots(graph, layerIndex, layer);
1873 RegisterOutputSlots(graph, layerIndex, layer);
1874}
Finn Williams85d36712021-01-26 22:30:06 +00001875void IDeserializer::DeserializerImpl::ParseComparison(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01001876{
1877 CHECK_LAYERS(graph, 0, layerIndex);
1878 CHECK_LOCATION();
1879
1880 auto inputs = GetInputs(graph, layerIndex);
1881 CHECK_VALID_SIZE(inputs.size(), 2);
1882
1883 auto outputs = GetOutputs(graph, layerIndex);
1884 CHECK_VALID_SIZE(outputs.size(), 1);
1885
1886 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
1887 auto fbDescriptor = fbLayer->descriptor();
1888
1889 armnn::ComparisonDescriptor descriptor;
1890 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
1891
1892 const std::string& layerName = GetLayerName(graph, layerIndex);
1893 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1894
1895 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1896 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1897
1898 RegisterInputSlots(graph, layerIndex, layer);
1899 RegisterOutputSlots(graph, layerIndex, layer);
1900}
1901
Finn Williams85d36712021-01-26 22:30:06 +00001902void IDeserializer::DeserializerImpl::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
josh minor4a3c6102020-01-06 16:40:46 -06001903{
1904 CHECK_LAYERS(graph, 0, layerIndex);
1905 CHECK_LOCATION();
1906
1907 auto inputs = GetInputs(graph, layerIndex);
1908 CHECK_VALID_SIZE(inputs.size(), 1);
1909
1910 auto outputs = GetOutputs(graph, layerIndex);
1911 CHECK_VALID_SIZE(outputs.size(), 1);
1912
1913 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
1914 auto fbDescriptor = fbLayer->descriptor();
1915
1916 armnn::ElementwiseUnaryDescriptor descriptor;
1917 descriptor.m_Operation = ToUnaryOperation(fbDescriptor->operation());
1918
1919 const std::string& layerName = GetLayerName(graph, layerIndex);
1920 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
1921
1922 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1923 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1924
1925 RegisterInputSlots(graph, layerIndex, layer);
1926 RegisterOutputSlots(graph, layerIndex, layer);
1927}
1928
Finn Williams85d36712021-01-26 22:30:06 +00001929void IDeserializer::DeserializerImpl::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001930{
1931 CHECK_LAYERS(graph, 0, layerIndex);
1932 CHECK_LOCATION();
1933
1934 auto outputs = GetOutputs(graph, layerIndex);
1935 CHECK_VALID_SIZE(outputs.size(), 1);
1936
Jim Flynnac25a1b2019-02-28 10:40:49 +00001937 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001938 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1939 unsigned int numViews = originsDescriptor->numViews();
1940 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001941
1942 // can now check the number of inputs == number of views
1943 auto inputs = GetInputs(graph, layerIndex);
1944 CHECK_VALID_SIZE(inputs.size(), numViews);
1945
1946 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001947 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001948 for (unsigned int v = 0; v < numViews; ++v)
1949 {
1950 auto originPtr = originsPtr->Get(v);
1951 for (unsigned int d = 0; d < numDimensions; ++d)
1952 {
1953 uint32_t value = originPtr->data()->Get(d);
1954 descriptor.SetViewOriginCoord(v, d, value);
1955 }
1956 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001957 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001958
Jim Flynn906f9462019-05-10 13:55:21 +01001959 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001960 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1961 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1962
1963 RegisterInputSlots(graph, layerIndex, layer);
1964 RegisterOutputSlots(graph, layerIndex, layer);
1965}
1966
Finn Williams85d36712021-01-26 22:30:06 +00001967void IDeserializer::DeserializerImpl::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001968{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001969 CHECK_LAYERS(graph, 0, layerIndex);
1970 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001971 CHECK_LOCATION();
1972 CHECK_VALID_SIZE(inputs.size(), 2);
1973
Derek Lamberti8ddae332019-02-21 16:29:43 +00001974 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001975 CHECK_VALID_SIZE(outputs.size(), 1);
1976
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001977 auto layerName = GetLayerName(graph, layerIndex);
1978 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001979
1980 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1981 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1982
Derek Lamberti8ddae332019-02-21 16:29:43 +00001983 RegisterInputSlots(graph, layerIndex, layer);
1984 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001985}
1986
Finn Williams85d36712021-01-26 22:30:06 +00001987void IDeserializer::DeserializerImpl::ParseFloor(GraphPtr graph, unsigned int layerIndex)
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001988{
1989 CHECK_LAYERS(graph, 0, layerIndex);
1990 CHECK_LOCATION();
1991
1992 auto inputs = GetInputs(graph, layerIndex);
1993 CHECK_VALID_SIZE(inputs.size(), 1);
1994
1995 auto outputs = GetOutputs(graph, layerIndex);
1996 CHECK_VALID_SIZE(outputs.size(), 1);
1997
1998 auto layerName = GetLayerName(graph, layerIndex);
1999
2000 armnn::IConnectableLayer* layer;
2001
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00002002 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00002003
2004 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2005 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2006
2007 RegisterInputSlots(graph, layerIndex, layer);
2008 RegisterOutputSlots(graph, layerIndex, layer);
2009}
2010
// Deserializes a FullyConnected layer and adds it to the network.
//
// Two serialization formats are supported, selected by the graph's
// ConstTensorsAsInputs feature version:
//   * Legacy (<= 0): weights/bias are stored as members of the serialized layer.
//     They are re-created here as ConstantLayers wired into input slots 1 and 2,
//     and those slots are excluded from normal input registration via ignoreSlots.
//   * Current (> 0): weights/bias arrive as ordinary layer inputs, so the input
//     count must match the descriptor's expected number of inputs.
void IDeserializer::DeserializerImpl::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    // Rebuild the armnn descriptor from its serialized counterpart.
    armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
    fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
    fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
    fullyConnectedDescriptor.m_ConstantWeights = flatBufferDescriptor->constantWeights();

    armnn::IConnectableLayer* layer;
    // Input slots that will be connected here (to ConstantLayers) and must not
    // be re-registered from the serialized connection list.
    std::vector<unsigned int> ignoreSlots {};

    // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
    // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
    if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
    {
        // If the model stores weights and biases as members of the layer we have to read them from there
        // but add them to their own ConstantLayer for compatibility
        CHECK_VALID_SIZE(inputs.size(), 1);
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  layerName.c_str());

        // Slot 1 carries the weights, re-hosted in a ConstantLayer.
        armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
        auto weightsLayer = m_Network->AddConstantLayer(weightsTensor);
        weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
        weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsTensor.GetInfo());
        ignoreSlots.emplace_back(1u);

        if (fullyConnectedDescriptor.m_BiasEnabled)
        {
            // Slot 2 carries the (optional) bias, also re-hosted in a ConstantLayer.
            armnn::ConstTensor biasTensor = ToConstTensor(flatBufferLayer->biases());
            auto biasLayer = m_Network->AddConstantLayer(biasTensor);
            biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
            biasLayer->GetOutputSlot(0).SetTensorInfo(biasTensor.GetInfo());
            ignoreSlots.emplace_back(2u);
        }
    }
    else
    {
        // Current format: weights/bias are regular inputs, so just validate the count.
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  layerName.c_str());
        uint32_t numInputs = fullyConnectedDescriptor.GetNumInputs();
        CHECK_VALID_SIZE(inputs.size(), numInputs);
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2071
Finn Williams85d36712021-01-26 22:30:06 +00002072void IDeserializer::DeserializerImpl::ParsePad(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002073{
2074 CHECK_LAYERS(graph, 0, layerIndex);
2075
Finn Williams85d36712021-01-26 22:30:06 +00002076 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002077 CHECK_VALID_SIZE(inputs.size(), 1);
2078
Finn Williams85d36712021-01-26 22:30:06 +00002079 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002080 CHECK_VALID_SIZE(outputs.size(), 1);
2081
2082 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
2083 auto flatBufferPadList = flatBufferDescriptor->padList();
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +01002084 auto paddingMode = flatBufferDescriptor->paddingMode();
David Monahan34757812019-06-19 11:47:21 +01002085 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002086
2087 if (flatBufferPadList->Length() % 2 != 0)
2088 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002089 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2090 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002091 }
2092
2093 std::vector<std::pair<unsigned int, unsigned int>> padList;
2094 padList.reserve(flatBufferPadList->Length() / 2);
2095 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
2096 {
2097 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2098 }
2099
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +01002100 armnn::PadDescriptor descriptor(padList, padValue, ToPaddingMode(paddingMode));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002101
2102 auto layerName = GetLayerName(graph, layerIndex);
2103 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
2104
2105 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2106 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2107
2108 RegisterInputSlots(graph, layerIndex, layer);
2109 RegisterOutputSlots(graph, layerIndex, layer);
2110}
2111
Finn Williams85d36712021-01-26 22:30:06 +00002112void IDeserializer::DeserializerImpl::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002113{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002114 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002115
2116 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00002117 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002118
Derek Lamberti8ddae332019-02-21 16:29:43 +00002119 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002120 CHECK_VALID_SIZE(inputs.size(), 1);
2121
Derek Lamberti8ddae332019-02-21 16:29:43 +00002122 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002123 CHECK_VALID_SIZE(outputs.size(), 1);
2124 auto outputInfo = ToTensorInfo(outputs[0]);
2125
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002126 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002127 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
2128
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002129 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002130 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2131
Derek Lamberti8ddae332019-02-21 16:29:43 +00002132 RegisterInputSlots(graph, layerIndex, layer);
2133 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002134}
2135
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002136armnn::Pooling2dDescriptor IDeserializer::DeserializerImpl::GetPooling2dDescriptor(Pooling2dDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002137 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002138{
Jan Eilers8eb25602020-03-09 12:13:48 +00002139 IgnoreUnused(layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002140 armnn::Pooling2dDescriptor desc;
2141
2142 switch (pooling2dDesc->poolType())
2143 {
2144 case PoolingAlgorithm_Average:
2145 {
2146 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002147 break;
2148 }
2149 case PoolingAlgorithm_Max:
2150 {
2151 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002152 break;
2153 }
Teresa Charlin4c600de2021-03-11 21:59:43 +00002154 case PoolingAlgorithm_L2:
2155 {
2156 desc.m_PoolType = armnn::PoolingAlgorithm::L2;
2157 break;
2158 }
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002159 default:
2160 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002161 ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002162 }
2163 }
2164
2165 switch (pooling2dDesc->outputShapeRounding())
2166 {
2167 case OutputShapeRounding_Floor:
2168 {
2169 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2170 break;
2171 }
2172 case OutputShapeRounding_Ceiling:
2173 {
2174 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
2175 break;
2176 }
2177 default:
2178 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002179 ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002180 }
2181 }
2182
2183 switch (pooling2dDesc->paddingMethod())
2184 {
2185 case PaddingMethod_Exclude:
2186 {
2187 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2188 break;
2189 }
2190 case PaddingMethod_IgnoreValue:
2191 {
2192 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
2193 break;
2194 }
2195 default:
2196 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002197 ARMNN_ASSERT_MSG(false, "Unsupported padding method");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002198 }
2199 }
2200
2201 switch (pooling2dDesc->dataLayout())
2202 {
2203 case DataLayout_NCHW:
2204 {
2205 desc.m_DataLayout = armnn::DataLayout::NCHW;
2206 break;
2207 }
2208 case DataLayout_NHWC:
2209 {
2210 desc.m_DataLayout = armnn::DataLayout::NHWC;
2211 break;
2212 }
2213 default:
2214 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002215 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002216 }
2217 }
2218
2219 desc.m_PadRight = pooling2dDesc->padRight();
2220 desc.m_PadLeft = pooling2dDesc->padLeft();
2221 desc.m_PadBottom = pooling2dDesc->padBottom();
2222 desc.m_PadTop = pooling2dDesc->padTop();
2223 desc.m_StrideX = pooling2dDesc->strideX();
2224 desc.m_StrideY = pooling2dDesc->strideY();
2225 desc.m_PoolWidth = pooling2dDesc->poolWidth();
2226 desc.m_PoolHeight = pooling2dDesc->poolHeight();
2227
2228 return desc;
2229}
2230
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002231armnn::Pooling3dDescriptor IDeserializer::DeserializerImpl::GetPooling3dDescriptor(Pooling3dDescriptor pooling3dDesc,
2232 unsigned int layerIndex)
2233{
2234 IgnoreUnused(layerIndex);
2235 armnn::Pooling3dDescriptor desc;
Finn Williams85d36712021-01-26 22:30:06 +00002236
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002237 switch (pooling3dDesc->poolType())
2238 {
2239 case PoolingAlgorithm_Average:
2240 {
2241 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
2242 break;
2243 }
2244 case PoolingAlgorithm_Max:
2245 {
2246 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
2247 break;
2248 }
2249 case PoolingAlgorithm_L2:
2250 {
2251 desc.m_PoolType = armnn::PoolingAlgorithm::L2;
2252 break;
2253 }
2254 default:
2255 {
2256 ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
2257 }
2258 }
2259
2260 switch (pooling3dDesc->outputShapeRounding())
2261 {
2262 case OutputShapeRounding_Floor:
2263 {
2264 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2265 break;
2266 }
2267 case OutputShapeRounding_Ceiling:
2268 {
2269 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
2270 break;
2271 }
2272 default:
2273 {
2274 ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
2275 }
2276 }
2277
2278 switch (pooling3dDesc->paddingMethod())
2279 {
2280 case PaddingMethod_Exclude:
2281 {
2282 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2283 break;
2284 }
2285 case PaddingMethod_IgnoreValue:
2286 {
2287 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
2288 break;
2289 }
2290 default:
2291 {
2292 ARMNN_ASSERT_MSG(false, "Unsupported padding method");
2293 }
2294 }
2295
2296 switch (pooling3dDesc->dataLayout())
2297 {
2298 case DataLayout_NCDHW:
2299 {
2300 desc.m_DataLayout = armnn::DataLayout::NCDHW;
2301 break;
2302 }
2303 case DataLayout_NDHWC:
2304 {
2305 desc.m_DataLayout = armnn::DataLayout::NDHWC;
2306 break;
2307 }
2308 default:
2309 {
2310 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
2311 }
2312 }
2313
2314 desc.m_PadRight = pooling3dDesc->padRight();
2315 desc.m_PadLeft = pooling3dDesc->padLeft();
2316 desc.m_PadBottom = pooling3dDesc->padBottom();
2317 desc.m_PadTop = pooling3dDesc->padTop();
2318 desc.m_PadFront = pooling3dDesc->padFront();
2319 desc.m_PadBack = pooling3dDesc->padBack();
2320 desc.m_StrideX = pooling3dDesc->strideX();
2321 desc.m_StrideY = pooling3dDesc->strideY();
2322 desc.m_StrideZ = pooling3dDesc->strideZ();
2323 desc.m_PoolWidth = pooling3dDesc->poolWidth();
2324 desc.m_PoolHeight = pooling3dDesc->poolHeight();
2325 desc.m_PoolDepth = pooling3dDesc->poolDepth();
2326
2327 return desc;
2328}
Finn Williams85d36712021-01-26 22:30:06 +00002329
2330void IDeserializer::DeserializerImpl::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002331{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002332 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002333
Derek Lamberti8ddae332019-02-21 16:29:43 +00002334 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00002335 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002336 CHECK_VALID_SIZE(inputs.size(), 1);
2337
Derek Lamberti8ddae332019-02-21 16:29:43 +00002338 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002339 CHECK_VALID_SIZE(outputs.size(), 1);
2340 auto outputInfo = ToTensorInfo(outputs[0]);
2341
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002342 auto pooling2dDescriptor = GetPooling2dDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002343 auto layerName = GetLayerName(graph, layerIndex);
2344 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002345 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2346
Derek Lamberti8ddae332019-02-21 16:29:43 +00002347 RegisterInputSlots(graph, layerIndex, layer);
2348 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002349}
2350
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002351void IDeserializer::DeserializerImpl::ParsePooling3d(GraphPtr graph, unsigned int layerIndex)
2352{
2353 CHECK_LAYERS(graph, 0, layerIndex);
2354
2355 auto pooling3dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->descriptor();
2356 auto inputs = GetInputs(graph, layerIndex);
2357 CHECK_VALID_SIZE(inputs.size(), 1);
2358
2359 auto outputs = GetOutputs(graph, layerIndex);
2360 CHECK_VALID_SIZE(outputs.size(), 1);
2361 auto outputInfo = ToTensorInfo(outputs[0]);
2362
2363 auto pooling3dDescriptor = GetPooling3dDescriptor(pooling3dDes, layerIndex);
2364 auto layerName = GetLayerName(graph, layerIndex);
2365 IConnectableLayer* layer = m_Network->AddPooling3dLayer(pooling3dDescriptor, layerName.c_str());
2366 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2367
2368 RegisterInputSlots(graph, layerIndex, layer);
2369 RegisterOutputSlots(graph, layerIndex, layer);
2370}
2371
Finn Williams85d36712021-01-26 22:30:06 +00002372void IDeserializer::DeserializerImpl::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
Derek Lamberti87acb272019-03-27 16:51:31 +00002373{
2374 CHECK_LAYERS(graph, 0, layerIndex);
2375
2376 auto inputs = GetInputs(graph, layerIndex);
2377 CHECK_VALID_SIZE(inputs.size(), 1);
2378
2379 auto outputs = GetOutputs(graph, layerIndex);
2380 CHECK_VALID_SIZE(outputs.size(), 1);
2381 auto outputInfo = ToTensorInfo(outputs[0]);
2382
2383 auto layerName = GetLayerName(graph, layerIndex);
2384 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
2385 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2386
2387 RegisterInputSlots(graph, layerIndex, layer);
2388 RegisterOutputSlots(graph, layerIndex, layer);
2389}
2390
Finn Williams85d36712021-01-26 22:30:06 +00002391armnn::TensorInfo IDeserializer::DeserializerImpl::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00002392 const std::vector<uint32_t>& targetDimsIn)
2393{
2394 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
2395 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
2396
2397 if (stretchDim != targetDimsIn.end())
2398 {
2399 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
2400 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002401 throw ParseException(fmt::format("At most one component of shape can be -1 {}",
2402 CHECK_LOCATION().AsString()));
Saoirse Stewart263829c2019-02-19 15:54:14 +00002403 }
2404
2405 auto targetNumElements =
Matthew Sloyan0663d662020-09-14 11:47:26 +01002406 armnn::numeric_cast<unsigned int>(
Saoirse Stewart263829c2019-02-19 15:54:14 +00002407 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
2408
2409 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
2410 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
2411 }
2412
2413 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
2414
2415 armnn::TensorInfo reshapeInfo = inputTensorInfo;
2416 reshapeInfo.SetShape(outputShape);
2417
2418 return reshapeInfo;
2419}
2420
Finn Williams85d36712021-01-26 22:30:06 +00002421void IDeserializer::DeserializerImpl::ParseRank(GraphPtr graph, unsigned int layerIndex)
Finn Williams2605b232020-06-10 15:53:46 +01002422{
2423 CHECK_LAYERS(graph, 0, layerIndex);
2424
Finn Williams85d36712021-01-26 22:30:06 +00002425 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002426 CHECK_VALID_SIZE(inputs.size(), 1);
2427
Finn Williams85d36712021-01-26 22:30:06 +00002428 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002429 CHECK_VALID_SIZE(outputs.size(), 1);
2430
2431 auto layerName = GetLayerName(graph, layerIndex);
2432 IConnectableLayer* layer = m_Network->AddRankLayer( layerName.c_str());
2433
2434 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2435 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2436
2437 RegisterInputSlots(graph, layerIndex, layer);
2438 RegisterOutputSlots(graph, layerIndex, layer);
2439}
2440
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002441void IDeserializer::DeserializerImpl::ParseReduce(GraphPtr graph, unsigned int layerIndex)
2442{
2443 CHECK_LAYERS(graph, 0, layerIndex);
2444 CHECK_LOCATION();
2445
2446 auto inputs = GetInputs(graph, layerIndex);
2447 CHECK_VALID_SIZE(inputs.size(), 1);
2448
2449 auto outputs = GetOutputs(graph, layerIndex);
2450 CHECK_VALID_SIZE(outputs.size(), 1);
2451
2452 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ReduceLayer();
2453 auto fbDescriptor = fbLayer->descriptor();
2454 auto flatBufferAxis = fbDescriptor->axis();
2455
2456 armnn::ReduceDescriptor descriptor;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002457 descriptor.m_KeepDims = fbDescriptor->keepDims();
2458 descriptor.m_vAxis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2459 descriptor.m_ReduceOperation = ToReduceOperation(fbDescriptor->reduceOperation());
2460
2461 const std::string& layerName = GetLayerName(graph, layerIndex);
2462 IConnectableLayer* layer = m_Network->AddReduceLayer(descriptor, layerName.c_str());
2463
2464 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2465 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2466
2467 RegisterInputSlots(graph, layerIndex, layer);
2468 RegisterOutputSlots(graph, layerIndex, layer);
2469}
2470
Finn Williams85d36712021-01-26 22:30:06 +00002471void IDeserializer::DeserializerImpl::ParseReshape(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart263829c2019-02-19 15:54:14 +00002472{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002473 CHECK_LAYERS(graph, 0, layerIndex);
2474 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002475
Derek Lamberti8ddae332019-02-21 16:29:43 +00002476 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002477 CHECK_VALID_SIZE(outputs.size(), 1);
2478
2479 armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
2480 armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);
2481
Derek Lamberti8ddae332019-02-21 16:29:43 +00002482 const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
Saoirse Stewart263829c2019-02-19 15:54:14 +00002483 std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());
2484
Finn Williams85d36712021-01-26 22:30:06 +00002485 armnn::TensorInfo reshapeOutputTensorInfo = DeserializerImpl::OutputShapeOfReshape(inputTensorInfo, outputDims);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002486 const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
2487
2488 const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
2489 outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());
2490
2491 if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
2492 {
2493 std::stringstream ss;
2494 ss << "New shape defined in reshape parameters "
2495 << reshapeOutputTensorShape
2496 << " does not equal output shape "
2497 << actualOutputTensorInfo.GetShape()
2498 << ": "
2499 << CHECK_LOCATION().AsString();
2500 throw ParseException(ss.str());
2501 }
2502
2503 armnn::ReshapeDescriptor reshapeDesc;
2504 reshapeDesc.m_TargetShape = reshapeOutputTensorShape;
2505
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002506 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002507 IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
2508 layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);
2509
Derek Lamberti8ddae332019-02-21 16:29:43 +00002510 RegisterInputSlots(graph, layerIndex, layer);
2511 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002512}
2513
Finn Williams85d36712021-01-26 22:30:06 +00002514void IDeserializer::DeserializerImpl::ParseResize(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002515{
2516 CHECK_LAYERS(graph, 0, layerIndex);
2517
Finn Williams85d36712021-01-26 22:30:06 +00002518 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002519 CHECK_VALID_SIZE(inputs.size(), 1);
2520
Finn Williams85d36712021-01-26 22:30:06 +00002521 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002522 CHECK_VALID_SIZE(outputs.size(), 1);
2523
2524 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2525
2526 armnn::ResizeDescriptor descriptor;
2527 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
2528 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
2529 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
2530 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002531 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2532 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002533
2534 auto layerName = GetLayerName(graph, layerIndex);
2535 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2536
2537 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2538 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2539
2540 RegisterInputSlots(graph, layerIndex, layer);
2541 RegisterOutputSlots(graph, layerIndex, layer);
2542}
2543

/// @note The ResizeBilinear operation was deprecated and removed in favor of the Resize operation.
///       This function is kept for backwards compatibility.
Finn Williams85d36712021-01-26 22:30:06 +00002547void IDeserializer::DeserializerImpl::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002548{
2549 CHECK_LAYERS(graph, 0, layerIndex);
2550
Finn Williams85d36712021-01-26 22:30:06 +00002551 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002552 CHECK_VALID_SIZE(inputs.size(), 1);
2553
Finn Williams85d36712021-01-26 22:30:06 +00002554 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002555 CHECK_VALID_SIZE(outputs.size(), 1);
2556
2557 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2558
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002559 armnn::ResizeDescriptor descriptor;
2560 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002561 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002562 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
2563 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002564 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2565 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002566
2567 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002568 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002569
2570 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2571 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2572
2573 RegisterInputSlots(graph, layerIndex, layer);
2574 RegisterOutputSlots(graph, layerIndex, layer);
2575}
2576
Keith Davis3ae3f972021-05-21 16:33:48 +01002577void IDeserializer::DeserializerImpl::ParseShape(GraphPtr graph, unsigned int layerIndex)
2578{
2579 CHECK_LAYERS(graph, 0, layerIndex);
2580
2581 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2582 CHECK_VALID_SIZE(inputs.size(), 1);
2583
2584 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2585 CHECK_VALID_SIZE(outputs.size(), 1);
2586
2587 auto layerName = GetLayerName(graph, layerIndex);
2588 IConnectableLayer* layer = m_Network->AddShapeLayer( layerName.c_str());
2589
2590 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2591 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2592
2593 RegisterInputSlots(graph, layerIndex, layer);
2594 RegisterOutputSlots(graph, layerIndex, layer);
2595}
2596
Finn Williams85d36712021-01-26 22:30:06 +00002597void IDeserializer::DeserializerImpl::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002598{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002599 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002600
Finn Williams85d36712021-01-26 22:30:06 +00002601 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002602 CHECK_VALID_SIZE(inputs.size(), 1);
2603
Finn Williams85d36712021-01-26 22:30:06 +00002604 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002605 CHECK_VALID_SIZE(outputs.size(), 1);
2606
2607 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00002608 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Sadik Armaganfd0cae32021-11-08 17:18:31 +00002609 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->axis();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002610 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002611
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002612 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2613
2614 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2615 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2616
Derek Lamberti8ddae332019-02-21 16:29:43 +00002617 RegisterInputSlots(graph, layerIndex, layer);
2618 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00002619}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002620
Finn Williams85d36712021-01-26 22:30:06 +00002621void IDeserializer::DeserializerImpl::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002622{
2623 CHECK_LAYERS(graph, 0, layerIndex);
2624
Finn Williams85d36712021-01-26 22:30:06 +00002625 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002626 CHECK_VALID_SIZE(inputs.size(), 1);
2627
Finn Williams85d36712021-01-26 22:30:06 +00002628 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002629 CHECK_VALID_SIZE(outputs.size(), 1);
2630
2631 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2632 auto flatBufferPadList = flatBufferDescriptor->padList();
2633 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
2634
2635 if (flatBufferPadList->Length() % 2 != 0)
2636 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002637 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2638 CHECK_LOCATION().AsString()));
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002639 }
2640
2641 std::vector<std::pair<unsigned int, unsigned int>> padList;
2642 padList.reserve(flatBufferPadList->Length() / 2);
2643 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
2644 {
2645 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2646 }
2647
2648 armnn::SpaceToBatchNdDescriptor descriptor;
2649 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2650 descriptor.m_BlockShape =
2651 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2652 descriptor.m_PadList = padList;
2653
2654 auto layerName = GetLayerName(graph, layerIndex);
2655 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2656
2657 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2658 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2659
2660 RegisterInputSlots(graph, layerIndex, layer);
2661 RegisterOutputSlots(graph, layerIndex, layer);
2662}
2663
Finn Williams85d36712021-01-26 22:30:06 +00002664void IDeserializer::DeserializerImpl::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002665{
2666 CHECK_LAYERS(graph, 0, layerIndex);
2667
Finn Williams85d36712021-01-26 22:30:06 +00002668 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002669 CHECK_VALID_SIZE(inputs.size(), 1);
2670
Finn Williams85d36712021-01-26 22:30:06 +00002671 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002672 CHECK_VALID_SIZE(outputs.size(), 1);
2673
2674 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2675
2676 armnn::SpaceToDepthDescriptor descriptor;
2677 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2678 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2679
2680 auto layerName = GetLayerName(graph, layerIndex);
2681 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2682
2683 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2684 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2685
2686 RegisterInputSlots(graph, layerIndex, layer);
2687 RegisterOutputSlots(graph, layerIndex, layer);
2688}
2689
/// Translates a serialized NormalizationDescriptor into its armnn equivalent.
/// @param normalizationDescriptor Flatbuffer descriptor to translate.
/// @param layerIndex              Unused; kept for a uniform helper signature.
/// @return The populated armnn::NormalizationDescriptor.
/// NOTE(review): the default cases only assert; in release builds (where
/// ARMNN_ASSERT_MSG compiles out) an unknown enum value silently leaves the
/// corresponding descriptor field at its default — confirm this is intended.
armnn::NormalizationDescriptor IDeserializer::DeserializerImpl::GetNormalizationDescriptor(
    NormalizationDescriptorPtr normalizationDescriptor,
    unsigned int layerIndex)
{
    IgnoreUnused(layerIndex);
    armnn::NormalizationDescriptor desc;

    // Map the serialized channel-normalization type.
    switch (normalizationDescriptor->normChannelType())
    {
        case NormalizationAlgorithmChannel_Across:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
            break;
        }
        case NormalizationAlgorithmChannel_Within:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported normalization channel type");
        }
    }

    // Map the serialized normalization method.
    switch (normalizationDescriptor->normMethodType())
    {
        case NormalizationAlgorithmMethod_LocalBrightness:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
            break;
        }
        case NormalizationAlgorithmMethod_LocalContrast:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported normalization method type");
        }
    }

    // Map the serialized data layout.
    switch (normalizationDescriptor->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    // Scalar parameters copy across directly.
    desc.m_Alpha = normalizationDescriptor->alpha();
    desc.m_Beta = normalizationDescriptor->beta();
    desc.m_K = normalizationDescriptor->k();
    desc.m_NormSize = normalizationDescriptor->normSize();

    return desc;
}
2758
Finn Williams85d36712021-01-26 22:30:06 +00002759void IDeserializer::DeserializerImpl::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
Nina Drozd57728782019-02-27 10:53:27 +00002760{
2761 CHECK_LAYERS(graph, 0, layerIndex);
2762
2763 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2764
Finn Williams85d36712021-01-26 22:30:06 +00002765 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002766 CHECK_VALID_SIZE(inputs.size(), 1);
2767
Finn Williams85d36712021-01-26 22:30:06 +00002768 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002769 CHECK_VALID_SIZE(outputs.size(), 1);
2770
2771 auto outputInfo = ToTensorInfo(outputs[0]);
2772
2773 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2774 auto layerName = GetLayerName(graph, layerIndex);
2775
2776 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2777 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2778
2779 RegisterInputSlots(graph, layerIndex, layer);
2780 RegisterOutputSlots(graph, layerIndex, layer);
2781}
2782
Finn Williams85d36712021-01-26 22:30:06 +00002783void IDeserializer::DeserializerImpl::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan8b42a382019-03-01 14:24:49 +00002784{
2785 CHECK_LAYERS(graph, 0, layerIndex);
2786 auto inputs = GetInputs(graph, layerIndex);
2787 CHECK_LOCATION();
2788 CHECK_VALID_SIZE(inputs.size(), 1);
2789
2790 auto outputs = GetOutputs(graph, layerIndex);
2791 CHECK_VALID_SIZE(outputs.size(), 1);
2792
2793 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagan8b42a382019-03-01 14:24:49 +00002794
josh minor4a3c6102020-01-06 16:40:46 -06002795 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
2796 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
Sadik Armagan8b42a382019-03-01 14:24:49 +00002797 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2798 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2799
2800 RegisterInputSlots(graph, layerIndex, layer);
2801 RegisterOutputSlots(graph, layerIndex, layer);
2802}
2803
Finn Williams85d36712021-01-26 22:30:06 +00002804void IDeserializer::DeserializerImpl::ParseSlice(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002805{
2806 CHECK_LAYERS(graph, 0, layerIndex);
2807
2808 auto inputs = GetInputs(graph, layerIndex);
2809 CHECK_VALID_SIZE(inputs.size(), 1);
2810
2811 auto outputs = GetOutputs(graph, layerIndex);
2812 CHECK_VALID_SIZE(outputs.size(), 1);
2813
2814 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2815
2816 auto fbBegin = fbDescriptor->begin();
2817 auto fbSize = fbDescriptor->size();
2818
2819 if (fbBegin->Length() != fbSize->Length())
2820 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002821 throw ParseException(fmt::format("Begin and size descriptors must have the same length {}",
2822 CHECK_LOCATION().AsString()));
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002823 }
2824
2825 armnn::SliceDescriptor descriptor;
2826 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
2827 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
2828
2829 auto layerName = GetLayerName(graph, layerIndex);
2830 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2831
2832 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2833 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2834
2835 RegisterInputSlots(graph, layerIndex, layer);
2836 RegisterOutputSlots(graph, layerIndex, layer);
2837}
2838
Finn Williams85d36712021-01-26 22:30:06 +00002839void IDeserializer::DeserializerImpl::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002840{
2841 CHECK_LAYERS(graph, 0, layerIndex);
2842
Finn Williams85d36712021-01-26 22:30:06 +00002843 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002844 CHECK_VALID_SIZE(inputs.size(), 1);
2845
Finn Williams85d36712021-01-26 22:30:06 +00002846 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002847 CHECK_VALID_SIZE(outputs.size(), 1);
2848
2849 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2850
2851 auto flatBufferBegin = flatBufferDescriptor->begin();
2852 auto flatBufferEnd = flatBufferDescriptor->end();
2853 auto flatBufferStride = flatBufferDescriptor->stride();
2854
2855 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
2856 flatBufferBegin->Length() == flatBufferStride->Length()))
2857 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002858 throw ParseException(fmt::format("The size of the begin, end, and stride must be equal {}",
2859 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002860 }
2861
2862 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2863 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2864 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
2865
2866 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
2867 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2868 descriptor.m_EndMask = flatBufferDescriptor->endMask();
2869 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
2870 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
2871 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
2872 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2873
2874 auto layerName = GetLayerName(graph, layerIndex);
2875 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
2876
2877 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2878 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2879
2880 RegisterInputSlots(graph, layerIndex, layer);
2881 RegisterOutputSlots(graph, layerIndex, layer);
2882}
2883
Finn Williams85d36712021-01-26 22:30:06 +00002884void IDeserializer::DeserializerImpl::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
Conor Kennedyda1f9752019-03-01 14:37:12 +00002885{
2886 CHECK_LAYERS(graph, 0, layerIndex);
2887 auto inputs = GetInputs(graph, layerIndex);
2888 CHECK_LOCATION();
2889 CHECK_VALID_SIZE(inputs.size(), 2);
2890
2891 auto outputs = GetOutputs(graph, layerIndex);
2892 CHECK_VALID_SIZE(outputs.size(), 1);
2893
2894 auto layerName = GetLayerName(graph, layerIndex);
2895 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
2896
2897 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2898 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2899
2900 RegisterInputSlots(graph, layerIndex, layer);
2901 RegisterOutputSlots(graph, layerIndex, layer);
2902}
2903
Finn Williams85d36712021-01-26 22:30:06 +00002904void IDeserializer::DeserializerImpl::ParseGather(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002905{
2906 CHECK_LAYERS(graph, 0, layerIndex);
2907
Finn Williams85d36712021-01-26 22:30:06 +00002908 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002909 CHECK_VALID_SIZE(inputs.size(), 2);
2910
Finn Williams85d36712021-01-26 22:30:06 +00002911 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002912 CHECK_VALID_SIZE(outputs.size(), 1);
2913
Teresa Charlin52664732020-06-29 16:27:03 +01002914 armnn::GatherDescriptor descriptor;
2915 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_GatherLayer()->descriptor()->axis();
2916
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002917 auto layerName = GetLayerName(graph, layerIndex);
Teresa Charlin52664732020-06-29 16:27:03 +01002918 IConnectableLayer* layer = m_Network->AddGatherLayer(descriptor, layerName.c_str());
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002919
2920 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002921 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2922
2923 RegisterInputSlots(graph, layerIndex, layer);
2924 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002925}
2926
Finn Williams85d36712021-01-26 22:30:06 +00002927void IDeserializer::DeserializerImpl::ParseMean(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002928{
2929 CHECK_LAYERS(graph, 0, layerIndex);
2930
Finn Williams85d36712021-01-26 22:30:06 +00002931 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002932 CHECK_VALID_SIZE(inputs.size(), 1);
2933
Finn Williams85d36712021-01-26 22:30:06 +00002934 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002935 CHECK_VALID_SIZE(outputs.size(), 1);
2936
2937 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
2938 auto flatBufferAxis = flatBufferDescriptor->axis();
2939 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
2940
2941 armnn::MeanDescriptor descriptor;
2942 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2943 descriptor.m_KeepDims = flatBufferKeepDims;
2944
2945 auto layerName = GetLayerName(graph, layerIndex);
2946 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
2947
2948 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2949 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2950
2951 RegisterInputSlots(graph, layerIndex, layer);
2952 RegisterOutputSlots(graph, layerIndex, layer);
2953}
2954
/// Deserializes a Splitter layer.
/// Rebuilds the ViewsDescriptor (per-view sizes and origin coordinates) from the
/// serialized origins descriptor and registers one output slot per view.
void IDeserializer::DeserializerImpl::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    // NOTE(review): outputs.size() is not validated against numViews below;
    // the loop indexes outputs[vIdx] up to numViews - confirm the serializer
    // guarantees they always agree.
    TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);

    auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
    auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
    auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
    auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
    uint32_t numViews = flatBufferOriginsDescriptor->numViews();
    uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();

    // Check numViews and numDimensions corresponds to the ones already serialized ...
    // numViews == flatBufferViewSizes.size();
    // foreach: numDimensions == flatBufferViewSizes[x].size();

    // Copy every view's size and origin coordinate into the armnn descriptor.
    armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
    for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
    {
        for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
        {
            viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
            viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
        }
    }

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());

    // I could have as many outputs as views ...
    for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
    {
        armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
        layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2998
Finn Williams85d36712021-01-26 22:30:06 +00002999armnn::LstmDescriptor IDeserializer::DeserializerImpl::GetLstmDescriptor(LstmDescriptorPtr lstmDescriptor)
Jim Flynn11af3752019-03-19 17:22:29 +00003000{
3001 armnn::LstmDescriptor desc;
3002
3003 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
3004 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
3005 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
3006 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
3007 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
3008 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01003009 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00003010
3011 return desc;
3012}
3013
/// Deserializes an LSTM layer.
/// Restores the descriptor plus all weight/bias tensors into an LstmInputParams
/// structure. Optional parameter groups (CIFG, projection, peephole, layer norm)
/// are only deserialized when the corresponding descriptor flag enables them.
/// The four outputs are: scratch buffer, output state, cell state, and output.
/// NOTE: LstmInputParams holds raw pointers, so every ConstTensor below must
/// remain a live local until AddLstmLayer has been called.
void IDeserializer::DeserializerImpl::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters: present for every LSTM configuration.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate parameters: only present when CIFG (coupled input-forget gate)
    // is disabled, i.e. when there is a separate input gate.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Projection parameters: only present when the projection layer is enabled.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Peephole parameters: only present when peephole connections are enabled.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Layer-normalization parameters: only present when layer norm is enabled;
    // the input-gate norm weights additionally require CIFG to be disabled.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (lstmDescriptor.m_LayerNormEnabled)
    {
        if (!lstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    // Four outputs: scratch buffer, output state (h), cell state (c), and output.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3129
Finn Williams85d36712021-01-26 22:30:06 +00003130armnn::QLstmDescriptor IDeserializer::DeserializerImpl::GetQLstmDescriptor(QLstmDescriptorPtr qLstmDescriptor)
James Conroy8d333182020-05-13 10:27:58 +01003131{
3132 armnn::QLstmDescriptor desc;
3133
3134 desc.m_CifgEnabled = qLstmDescriptor->cifgEnabled();
3135 desc.m_PeepholeEnabled = qLstmDescriptor->peepholeEnabled();
3136 desc.m_ProjectionEnabled = qLstmDescriptor->projectionEnabled();
3137 desc.m_LayerNormEnabled = qLstmDescriptor->layerNormEnabled();
3138
3139 desc.m_CellClip = qLstmDescriptor->cellClip();
3140 desc.m_ProjectionClip = qLstmDescriptor->projectionClip();
3141
3142 desc.m_InputIntermediateScale = qLstmDescriptor->inputIntermediateScale();
3143 desc.m_ForgetIntermediateScale = qLstmDescriptor->forgetIntermediateScale();
3144 desc.m_CellIntermediateScale = qLstmDescriptor->cellIntermediateScale();
3145 desc.m_OutputIntermediateScale = qLstmDescriptor->outputIntermediateScale();
3146
3147 desc.m_HiddenStateScale = qLstmDescriptor->hiddenStateScale();
3148 desc.m_HiddenStateZeroPoint = qLstmDescriptor->hiddenStateZeroPoint();
3149
3150 return desc;
3151}
3152
Finn Williams85d36712021-01-26 22:30:06 +00003153void IDeserializer::DeserializerImpl::ParseQLstm(GraphPtr graph, unsigned int layerIndex)
James Conroy8d333182020-05-13 10:27:58 +01003154{
3155 CHECK_LAYERS(graph, 0, layerIndex);
3156
3157 auto inputs = GetInputs(graph, layerIndex);
3158 CHECK_VALID_SIZE(inputs.size(), 3);
3159
3160 auto outputs = GetOutputs(graph, layerIndex);
3161 CHECK_VALID_SIZE(outputs.size(), 3);
3162
3163 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
3164 auto layerName = GetLayerName(graph, layerIndex);
3165 auto flatBufferDescriptor = flatBufferLayer->descriptor();
3166 auto flatBufferInputParams = flatBufferLayer->inputParams();
3167
3168 auto qLstmDescriptor = GetQLstmDescriptor(flatBufferDescriptor);
3169 armnn::LstmInputParams qLstmInputParams;
3170
3171 // Mandatory params
3172 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
3173 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
3174 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
3175 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
3176 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
3177 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
3178 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
3179 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
3180 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
3181
3182 qLstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
3183 qLstmInputParams.m_InputToCellWeights = &inputToCellWeights;
3184 qLstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
3185 qLstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
3186 qLstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
3187 qLstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
3188 qLstmInputParams.m_ForgetGateBias = &forgetGateBias;
3189 qLstmInputParams.m_CellBias = &cellBias;
3190 qLstmInputParams.m_OutputGateBias = &outputGateBias;
3191
3192 // Optional CIFG params
3193 armnn::ConstTensor inputToInputWeights;
3194 armnn::ConstTensor recurrentToInputWeights;
3195 armnn::ConstTensor inputGateBias;
3196
3197 if (!qLstmDescriptor.m_CifgEnabled)
3198 {
3199 inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
3200 recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
3201 inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
3202
3203 qLstmInputParams.m_InputToInputWeights = &inputToInputWeights;
3204 qLstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
3205 qLstmInputParams.m_InputGateBias = &inputGateBias;
3206 }
3207
3208 // Optional projection params
3209 armnn::ConstTensor projectionWeights;
3210 armnn::ConstTensor projectionBias;
3211
3212 if (qLstmDescriptor.m_ProjectionEnabled)
3213 {
3214 projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
3215 projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());
3216
3217 qLstmInputParams.m_ProjectionWeights = &projectionWeights;
3218 qLstmInputParams.m_ProjectionBias = &projectionBias;
3219 }
3220
3221 // Optional peephole params
3222 armnn::ConstTensor cellToInputWeights;
3223 armnn::ConstTensor cellToForgetWeights;
3224 armnn::ConstTensor cellToOutputWeights;
3225
3226 if (qLstmDescriptor.m_PeepholeEnabled)
3227 {
3228 if (!qLstmDescriptor.m_CifgEnabled)
3229 {
3230 cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
3231 qLstmInputParams.m_CellToInputWeights = &cellToInputWeights;
3232 }
3233
3234 cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
3235 cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());
3236
3237 qLstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
3238 qLstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
3239 }
3240
3241 // Optional layer norm params
3242 armnn::ConstTensor inputLayerNormWeights;
3243 armnn::ConstTensor forgetLayerNormWeights;
3244 armnn::ConstTensor cellLayerNormWeights;
3245 armnn::ConstTensor outputLayerNormWeights;
3246
3247 if (qLstmDescriptor.m_LayerNormEnabled)
3248 {
3249 if (!qLstmDescriptor.m_CifgEnabled)
3250 {
3251 inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
3252 qLstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
3253 }
3254
3255 forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
3256 cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
3257 outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
3258
3259 qLstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
3260 qLstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
3261 qLstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
3262 }
3263
3264 IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());
3265
3266 armnn::TensorInfo outputStateOutInfo = ToTensorInfo(outputs[0]);
3267 layer->GetOutputSlot(0).SetTensorInfo(outputStateOutInfo);
3268
3269 armnn::TensorInfo cellStateOutInfo = ToTensorInfo(outputs[1]);
3270 layer->GetOutputSlot(1).SetTensorInfo(cellStateOutInfo);
3271
3272 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[2]);
3273 layer->GetOutputSlot(2).SetTensorInfo(outputInfo);
3274
3275 RegisterInputSlots(graph, layerIndex, layer);
3276 RegisterOutputSlots(graph, layerIndex, layer);
3277}
3278
// Deserializes a QuantizedLstm layer from the flatbuffer graph and adds it to
// m_Network. Unlike QLstm, this layer has no descriptor and no optional
// tensors: all 12 weights/biases are mandatory. The layer has 3 inputs and
// 2 outputs (cell state and output state).
void IDeserializer::DeserializerImpl::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    // lstmInputParams stores raw pointers to the ConstTensor locals below, which
    // stay alive until AddQuantizedLstmLayer is called.
    armnn::QuantizedLstmInputParams lstmInputParams;

    armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_InputGateBias = &inputGateBias;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3332
Finn Williams85d36712021-01-26 22:30:06 +00003333void IDeserializer::DeserializerImpl::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003334{
3335 CHECK_LAYERS(graph, 0, layerIndex);
3336
Finn Williams85d36712021-01-26 22:30:06 +00003337 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003338 CHECK_VALID_SIZE(inputs.size(), 1);
3339
Finn Williams85d36712021-01-26 22:30:06 +00003340 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003341 CHECK_VALID_SIZE(outputs.size(), 1);
3342
3343 const std::string layerName = GetLayerName(graph, layerIndex);
3344 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
3345
3346 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3347 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3348
3349 RegisterInputSlots(graph, layerIndex, layer);
3350 RegisterOutputSlots(graph, layerIndex, layer);
3351}
3352
Finn Williams85d36712021-01-26 22:30:06 +00003353void IDeserializer::DeserializerImpl::ParseMerge(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003354{
3355 CHECK_LAYERS(graph, 0, layerIndex);
3356
Finn Williams85d36712021-01-26 22:30:06 +00003357 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003358 CHECK_VALID_SIZE(inputs.size(), 2);
3359
Finn Williams85d36712021-01-26 22:30:06 +00003360 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003361 CHECK_VALID_SIZE(outputs.size(), 1);
3362
3363 const std::string layerName = GetLayerName(graph, layerIndex);
3364 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
3365
3366 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3367 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3368
3369 RegisterInputSlots(graph, layerIndex, layer);
3370 RegisterOutputSlots(graph, layerIndex, layer);
3371}
3372
Finn Williams85d36712021-01-26 22:30:06 +00003373void IDeserializer::DeserializerImpl::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganeff363d2019-04-05 15:25:46 +01003374{
3375 CHECK_LAYERS(graph, 0, layerIndex);
3376 auto inputs = GetInputs(graph, layerIndex);
3377 CHECK_LOCATION();
3378 CHECK_VALID_SIZE(inputs.size(), 2);
3379
3380 auto outputs = GetOutputs(graph, layerIndex);
3381 CHECK_VALID_SIZE(outputs.size(), 2);
3382
3383 auto layerName = GetLayerName(graph, layerIndex);
3384 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
3385
3386 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
3387 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
3388
3389 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
3390 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
3391
3392 RegisterInputSlots(graph, layerIndex, layer);
3393 RegisterOutputSlots(graph, layerIndex, layer);
3394}
3395
Finn Williams85d36712021-01-26 22:30:06 +00003396void IDeserializer::DeserializerImpl::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01003397{
3398 CHECK_LAYERS(graph, 0, layerIndex);
3399 auto inputs = GetInputs(graph, layerIndex);
3400 CHECK_LOCATION();
3401 CHECK_VALID_SIZE(inputs.size(), 2);
3402
3403 auto outputs = GetOutputs(graph, layerIndex);
3404 CHECK_VALID_SIZE(outputs.size(), 1);
3405
3406 auto layerName = GetLayerName(graph, layerIndex);
3407 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
3408
3409 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3410 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3411
3412 RegisterInputSlots(graph, layerIndex, layer);
3413 RegisterOutputSlots(graph, layerIndex, layer);
3414}
3415
Finn Williams85d36712021-01-26 22:30:06 +00003416void IDeserializer::DeserializerImpl::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
Mike Kellyc9ea45a2020-02-28 18:11:58 +00003417{
3418 CHECK_LAYERS(graph, 0, layerIndex);
3419
3420 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
3421
3422 auto inputs = GetInputs(graph, layerIndex);
3423 CHECK_VALID_SIZE(inputs.size(), 1);
3424
3425 auto outputs = GetOutputs(graph, layerIndex);
3426 CHECK_VALID_SIZE(outputs.size(), 1);
3427 auto outputInfo = ToTensorInfo(outputs[0]);
3428
3429 auto layerName = GetLayerName(graph, layerIndex);
3430 const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
3431
3432 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
3433 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
3434
3435 RegisterInputSlots(graph, layerIndex, layer);
3436 RegisterOutputSlots(graph, layerIndex, layer);
3437}
3438
Finn Williams85d36712021-01-26 22:30:06 +00003439void IDeserializer::DeserializerImpl::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01003440{
3441 CHECK_LAYERS(graph, 0, layerIndex);
3442
3443 auto inputs = GetInputs(graph, layerIndex);
3444 CHECK_VALID_SIZE(inputs.size(), 1);
3445
3446 auto outputs = GetOutputs(graph, layerIndex);
3447 CHECK_VALID_SIZE(outputs.size(), 1);
3448
3449 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
3450 auto layerName = GetLayerName(graph, layerIndex);
3451 auto serializerDescriptor = serializerLayer->descriptor();
3452
3453 armnn::TransposeConvolution2dDescriptor descriptor;
3454 descriptor.m_PadLeft = serializerDescriptor->padLeft();
3455 descriptor.m_PadRight = serializerDescriptor->padRight();
3456 descriptor.m_PadTop = serializerDescriptor->padTop();
3457 descriptor.m_PadBottom = serializerDescriptor->padBottom();
3458 descriptor.m_StrideX = serializerDescriptor->strideX();
3459 descriptor.m_StrideY = serializerDescriptor->strideY();;
3460 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
3461 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
3462
3463 // weights & biases
3464 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
3465 armnn::Optional<armnn::ConstTensor> optionalBiases;
3466 if (descriptor.m_BiasEnabled)
3467 {
3468 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
3469 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
3470 }
3471
3472 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
3473 weights,
3474 optionalBiases,
3475 layerName.c_str());
3476
3477 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3478 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3479
3480 RegisterInputSlots(graph, layerIndex, layer);
3481 RegisterOutputSlots(graph, layerIndex, layer);
3482}
3483
Finn Williams85d36712021-01-26 22:30:06 +00003484void IDeserializer::DeserializerImpl::ParseStack(GraphPtr graph, unsigned int layerIndex)
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003485{
3486 CHECK_LAYERS(graph, 0, layerIndex);
3487 auto inputs = GetInputs(graph, layerIndex);
3488
3489 auto outputs = GetOutputs(graph, layerIndex);
3490 CHECK_VALID_SIZE(outputs.size(), 1);
3491
3492 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
3493 unsigned int axis = flatBufferDescriptor->axis();
3494 unsigned int numInputs = flatBufferDescriptor->numInputs();
3495 CHECK_VALID_SIZE(inputs.size(), numInputs);
3496
3497 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
3498 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
3499 flatBufferInputShape->begin() + flatBufferInputShape->size());
3500
3501 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
3502 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
3503
3504 for (unsigned int i=0; i<inputs.size(); ++i)
3505 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01003506 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003507 if (descriptor.m_InputShape != inputShape)
3508 {
3509 std::stringstream ss;
3510 ss << "Shape of input "
3511 << i
3512 << " "
3513 << inputShape
3514 << " does not equal defined input shape "
3515 << descriptor.m_InputShape
3516 << ": "
3517 << CHECK_LOCATION().AsString();
3518 throw ParseException(ss.str());
3519 }
3520 }
3521
3522 auto layerName = GetLayerName(graph, layerIndex);
3523 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
3524
3525 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3526 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3527
3528 RegisterInputSlots(graph, layerIndex, layer);
3529 RegisterOutputSlots(graph, layerIndex, layer);
3530}
3531
Finn Williams85d36712021-01-26 22:30:06 +00003532void IDeserializer::DeserializerImpl::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar85121a22019-10-23 10:41:35 +01003533{
3534 CHECK_LAYERS(graph, 0, layerIndex);
3535
3536 auto inputs = GetInputs(graph, layerIndex);
3537 auto outputs = GetOutputs(graph, layerIndex);
3538
3539 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
3540 auto fbDescriptor = fbLayer->descriptor();
3541
3542 armnn::StandInDescriptor descriptor;
3543 descriptor.m_NumInputs = fbDescriptor->numInputs();
3544 descriptor.m_NumOutputs = fbDescriptor->numOutputs();
3545
3546 CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
3547 CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);
3548
3549 const std::string layerName = GetLayerName(graph, layerIndex);
3550 armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());
3551
3552 for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
3553 {
3554 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
3555 layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
3556 }
3557
3558 RegisterInputSlots(graph, layerIndex, layer);
3559 RegisterOutputSlots(graph, layerIndex, layer);
3560}
3561
Narumol Prangnawarata0162e12021-07-23 14:47:49 +01003562armnn::UnidirectionalSequenceLstmDescriptor IDeserializer::DeserializerImpl::GetUnidirectionalSequenceLstmDescriptor(
3563 UnidirectionalSequenceLstmDescriptorPtr descriptor)
3564{
3565 armnn::UnidirectionalSequenceLstmDescriptor desc;
3566
3567 desc.m_ActivationFunc = descriptor->activationFunc();
3568 desc.m_ClippingThresCell = descriptor->clippingThresCell();
3569 desc.m_ClippingThresProj = descriptor->clippingThresProj();
3570 desc.m_CifgEnabled = descriptor->cifgEnabled();
3571 desc.m_PeepholeEnabled = descriptor->peepholeEnabled();
3572 desc.m_ProjectionEnabled = descriptor->projectionEnabled();
3573 desc.m_LayerNormEnabled = descriptor->layerNormEnabled();
3574 desc.m_TimeMajor = descriptor->timeMajor();
3575
3576 return desc;
3577}
3578
// Deserializes a UnidirectionalSequenceLstm layer and adds it to m_Network.
// Reads the descriptor and the mandatory weights/biases, then conditionally
// reads the CIFG, projection, peephole and layer-norm tensor sets depending on
// the descriptor flags, and wires up the layer's 3 inputs and single output.
void IDeserializer::DeserializerImpl::ParseUnidirectionalSequenceLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto descriptor = GetUnidirectionalSequenceLstmDescriptor(flatBufferDescriptor);

    // lstmInputParams stores raw pointers to the ConstTensor locals below; all
    // of them are declared at function scope so they stay alive until
    // AddUnidirectionalSequenceLstmLayer is called further down.
    armnn::LstmInputParams lstmInputParams;

    // Mandatory params.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Optional CIFG params (only serialized when CIFG is disabled, i.e. the
    // input gate exists).
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!descriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;

        // Cell-to-input peephole weights require both the input gate (CIFG
        // disabled) and peephole to be enabled.
        if (descriptor.m_PeepholeEnabled)
        {
            cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
            lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        }
    }

    // Optional projection params.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (descriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Optional peephole params.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (descriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Optional layer norm params.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (descriptor.m_LayerNormEnabled)
    {
        // Input-gate layer norm weights only exist when CIFG is disabled.
        if (!descriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddUnidirectionalSequenceLstmLayer(descriptor,
                                                                             lstmInputParams,
                                                                             layerName.c_str());

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3691
Derek Lamberti0028d1b2019-02-20 13:57:42 +00003692} // namespace armnnDeserializer