//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "Deserializer.hpp"

#include <armnn/Descriptors.hpp>
#include <armnn/Exceptions.hpp>
#include <armnn/TypesUtils.hpp>
#include <armnn/LstmParams.hpp>
#include <armnn/QuantizedLstmParams.hpp>

#include <armnnUtils/Permute.hpp>
#include <armnnUtils/Transpose.hpp>
#include <armnn/utility/Assert.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/NumericCast.hpp>

#include <ParserHelper.hpp>
#include <VerificationHelpers.hpp>

#include <fmt/format.h>

#include <fstream>
#include <algorithm>
#include <limits>
#include <numeric>

using armnn::ParseException;
using namespace armnn;
using namespace armnnSerializer;

namespace armnnDeserializer
{

IDeserializer::IDeserializer() : pDeserializerImpl(new DeserializerImpl()){}

IDeserializer::~IDeserializer() = default;

IDeserializer *IDeserializer::CreateRaw()
{
    return new IDeserializer();
}

IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

void IDeserializer::Destroy(IDeserializer *parser)
{
    delete parser;
}

56armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(const std::vector<uint8_t> &binaryContent)
57{
58 return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
59}
60
61armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(std::istream &binaryContent)
62{
63 return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
64}
65
66BindingPointInfo IDeserializer::GetNetworkInputBindingInfo(unsigned int layerId, const std::string &name) const
67{
68 return pDeserializerImpl->GetNetworkInputBindingInfo(layerId, name);
69}
70
71BindingPointInfo IDeserializer::GetNetworkOutputBindingInfo(unsigned int layerId, const std::string &name) const
72{
73 return pDeserializerImpl->GetNetworkOutputBindingInfo(layerId, name);
74}
75
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000076namespace
77{
Kevin May43a799c2019-02-08 16:31:42 +000078
79const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
80
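// Validation helpers used throughout the parser: they throw ParseException with the
// calling function and file/line information when the flatbuffer graph, a layer index
// or a (const) tensor pointer is invalid. They are normally invoked through the
// CHECK_* macros defined below.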
void CheckGraph(const GraphPtr& graph,
                unsigned int layersIndex,
                const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
                                         "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(fmt::format("{0} was called with an invalid layers index. layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
}

void CheckLayers(const GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
                                         "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(fmt::format("{0} was called with an invalid layers index. "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    else if (layerIndex >= graph->layers()[layersIndex].size()
             && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(fmt::format("{0} was called with an invalid layer index. "
                                         "layers:{1} layer:{2} at {3}",
                                         location.m_Function,
                                         layersIndex,
                                         layerIndex,
                                         location.FileLine()));
    }
}

void CheckTensorPtr(TensorRawPtr rawPtr,
                    const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with a null tensor pointer. at {1}",
                                         location.m_Function,
                                         location.FileLine()));
    }
}

void CheckConstTensorPtr(ConstTensorRawPtr rawPtr,
                         const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with a null const tensor pointer. at {1}",
                                         location.m_Function,
                                         location.FileLine()));
    }
}

void CheckConstTensorSize(const unsigned int constTensorSize,
                          const unsigned int tensorSize,
                          const CheckLocation& location)
{
    if (constTensorSize != tensorSize)
    {
        throw ParseException(fmt::format("{0} wrong number of components supplied to tensor. at:{1}",
                                         location.m_Function,
                                         location.FileLine()));
    }
}

#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
}

bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
{
    const unsigned int actualSize = actual.GetNumDimensions();
    if (actualSize != expected.size())
    {
        return false;
    }

    for (unsigned int i = 0u; i < actualSize; i++)
    {
        if (actual[i] != static_cast<unsigned int>(expected[i]))
        {
            return false;
        }
    }

    return true;
}

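// DeserializerImpl keeps a lookup table indexed by the flatbuffer Layer enum: every
// supported layer type maps to its Parse* member function, and unregistered entries
// fall back to ParseUnsupportedLayer. Note that the deprecated Layer_MergerLayer is
// deliberately routed to ParseConcat.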
IDeserializer::DeserializerImpl::DeserializerImpl()
: m_Network(nullptr, nullptr),
// May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &IDeserializer::DeserializerImpl::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer] = &DeserializerImpl::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer] = &DeserializerImpl::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &DeserializerImpl::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer] = &DeserializerImpl::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &DeserializerImpl::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &DeserializerImpl::ParseBatchNormalization;
    m_ParserFunctions[Layer_ComparisonLayer] = &DeserializerImpl::ParseComparison;
    m_ParserFunctions[Layer_CastLayer] = &DeserializerImpl::ParseCast;
    m_ParserFunctions[Layer_ConcatLayer] = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer] = &DeserializerImpl::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &DeserializerImpl::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthToSpaceLayer] = &DeserializerImpl::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &DeserializerImpl::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &DeserializerImpl::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &DeserializerImpl::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &DeserializerImpl::ParseDivision;
    m_ParserFunctions[Layer_ElementwiseUnaryLayer] = &DeserializerImpl::ParseElementwiseUnary;
    m_ParserFunctions[Layer_EqualLayer] = &DeserializerImpl::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &DeserializerImpl::ParseFullyConnected;
    m_ParserFunctions[Layer_FillLayer] = &DeserializerImpl::ParseFill;
    m_ParserFunctions[Layer_FloorLayer] = &DeserializerImpl::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &DeserializerImpl::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer] = &DeserializerImpl::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer] = &DeserializerImpl::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &DeserializerImpl::ParseL2Normalization;
    m_ParserFunctions[Layer_LogicalBinaryLayer] = &DeserializerImpl::ParseLogicalBinary;
    m_ParserFunctions[Layer_LogSoftmaxLayer] = &DeserializerImpl::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer] = &DeserializerImpl::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &DeserializerImpl::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &DeserializerImpl::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &DeserializerImpl::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer] = &DeserializerImpl::ParseMerge;
    m_ParserFunctions[Layer_MergerLayer] = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer] = &DeserializerImpl::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &DeserializerImpl::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &DeserializerImpl::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &DeserializerImpl::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &DeserializerImpl::ParsePooling2d;
    m_ParserFunctions[Layer_PreluLayer] = &DeserializerImpl::ParsePrelu;
    m_ParserFunctions[Layer_QLstmLayer] = &DeserializerImpl::ParseQLstm;
    m_ParserFunctions[Layer_QuantizeLayer] = &DeserializerImpl::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer] = &DeserializerImpl::ParseQuantizedLstm;
    m_ParserFunctions[Layer_RankLayer] = &DeserializerImpl::ParseRank;
    m_ParserFunctions[Layer_ReduceLayer] = &DeserializerImpl::ParseReduce;
    m_ParserFunctions[Layer_ReshapeLayer] = &DeserializerImpl::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &DeserializerImpl::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer] = &DeserializerImpl::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer] = &DeserializerImpl::ParseRsqrt;
    m_ParserFunctions[Layer_SliceLayer] = &DeserializerImpl::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer] = &DeserializerImpl::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &DeserializerImpl::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer] = &DeserializerImpl::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer] = &DeserializerImpl::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer] = &DeserializerImpl::ParseStack;
    m_ParserFunctions[Layer_StandInLayer] = &DeserializerImpl::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer] = &DeserializerImpl::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &DeserializerImpl::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer] = &DeserializerImpl::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &DeserializerImpl::ParseTransposeConvolution2d;
    m_ParserFunctions[Layer_TransposeLayer] = &DeserializerImpl::ParseTranspose;
}

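// GetBaseLayer resolves the flatbuffer Layer union: each concrete layer table embeds a
// LayerBase ("base()"), while input and output layers add one extra level of
// indirection ("base()->base()"), which is why those two cases differ below.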
LayerBaseRawPtr IDeserializer::DeserializerImpl::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_CastLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_CastLayer()->base();
        case Layer::Layer_ComparisonLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_ElementwiseUnaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FillLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FillLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_InstanceNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LogicalBinaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer()->base();
        case Layer::Layer_LogSoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_RankLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
        case Layer::Layer_ReduceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReduceLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StandInLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_TransposeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(fmt::format("Layer type {} not recognized", layerType));
    }
}

std::string IDeserializer::DeserializerImpl::GetLayerName(const GraphPtr& graph, unsigned int index)
{
    auto layer = GetBaseLayer(graph, index);
    assert(layer);
    return layer->layerName()->str();
}

int32_t IDeserializer::DeserializerImpl::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    if (layerType == Layer::Layer_InputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
    }
    else if (layerType == Layer::Layer_OutputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
    }
    return 0;
}

armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
{
    switch (dataLayout)
    {
        case armnnSerializer::DataLayout::DataLayout_NHWC:
            return armnn::DataLayout::NHWC;
        case armnnSerializer::DataLayout::DataLayout_NCHW:
        default:
            return armnn::DataLayout::NCHW;
    }
}

armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        case armnnSerializer::ActivationFunction_Elu:
            return armnn::ActivationFunction::Elu;
        case armnnSerializer::ActivationFunction_HardSwish:
            return armnn::ActivationFunction::HardSwish;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}

armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
{
    switch (function)
    {
        case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
            return armnn::ArgMinMaxFunction::Max;
        case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
        default:
            return armnn::ArgMinMaxFunction::Min;
    }
}

armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
            return armnn::ComparisonOperation::Equal;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
            return armnn::ComparisonOperation::Greater;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
            return armnn::ComparisonOperation::GreaterOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
            return armnn::ComparisonOperation::Less;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
            return armnn::ComparisonOperation::LessOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
        default:
            return armnn::ComparisonOperation::NotEqual;
    }
}

armnn::ReduceOperation ToReduceOperation(armnnSerializer::ReduceOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::ReduceOperation::ReduceOperation_Sum:
            return armnn::ReduceOperation::Sum;
        case armnnSerializer::ReduceOperation::ReduceOperation_Max:
            return armnn::ReduceOperation::Max;
        case armnnSerializer::ReduceOperation::ReduceOperation_Mean:
            return armnn::ReduceOperation::Mean;
        case armnnSerializer::ReduceOperation::ReduceOperation_Min:
            return armnn::ReduceOperation::Min;
        default:
            return armnn::ReduceOperation::Sum;
    }
}

armnn::LogicalBinaryOperation ToLogicalBinaryOperation(armnnSerializer::LogicalBinaryOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalAnd:
            return armnn::LogicalBinaryOperation::LogicalAnd;
        case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalOr:
            return armnn::LogicalBinaryOperation::LogicalOr;
        default:
            throw armnn::InvalidArgumentException("Logical Binary operation unknown");
    }
}

armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
            return armnn::UnaryOperation::Abs;
        case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
            return armnn::UnaryOperation::Rsqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
            return armnn::UnaryOperation::Sqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
            return armnn::UnaryOperation::Exp;
        case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
            return armnn::UnaryOperation::Neg;
        case armnnSerializer::UnaryOperation::UnaryOperation_LogicalNot:
            return armnn::UnaryOperation::LogicalNot;
        case armnnSerializer::UnaryOperation::UnaryOperation_Log:
            return armnn::UnaryOperation::Log;
        case armnnSerializer::UnaryOperation::UnaryOperation_Sin:
            return armnn::UnaryOperation::Sin;
        default:
            throw armnn::InvalidArgumentException("Unary operation unknown");
    }
}

armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
{
    switch (method)
    {
        case armnnSerializer::ResizeMethod_NearestNeighbor:
            return armnn::ResizeMethod::NearestNeighbor;
        case armnnSerializer::ResizeMethod_Bilinear:
            return armnn::ResizeMethod::Bilinear;
        default:
            return armnn::ResizeMethod::NearestNeighbor;
    }
}

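// ToTensorInfo converts a serialized TensorInfo into an armnn::TensorInfo. It maps the
// data type (including the deprecated QuantisedAsymm8/QuantisedSymm16 aliases), handles
// Scalar and NotSpecified dimensionalities, restores per-dimension specificity for
// backwards compatibility, and prefers per-axis quantization scales when present.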
armnn::TensorInfo ToTensorInfo(TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QAsymmS8:
            type = armnn::DataType::QAsymmS8;
            break;
        case DataType_QSymmS8:
            type = armnn::DataType::QSymmS8;
            break;
        case DataType_QuantisedAsymm8:
        case DataType_QAsymmU8:
            type = armnn::DataType::QAsymmU8;
            break;
        case DataType_QSymmS16:
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QSymmS16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Signed64:
            type = armnn::DataType::Signed64;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
                                             tensorPtr->dataType(),
                                             EnumNameDataType(tensorPtr->dataType()),
                                             location.AsString()));
        }
    }

    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::Scalar))
    {
        return armnn::TensorInfo(TensorShape{armnn::Dimensionality::Scalar},
                                 type,
                                 quantizationScale,
                                 quantizationOffset);
    }
    else if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::NotSpecified))
    {
        armnn::TensorInfo result(TensorShape{Dimensionality::NotSpecified},
                                 type,
                                 quantizationScale,
                                 quantizationOffset);
        return result;
    }

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
    bool dimensionsSpecificity[armnn::MaxNumOfTensorDimensions];
    std::fill_n(dimensionsSpecificity, armnn::MaxNumOfTensorDimensions, true);
    // For backwards compatibility check if the dimensionSpecificity vector is present first.
    // The default is to have dimensionSpecificity set to all true anyway.
    if (tensorPtr->dimensionSpecificity() != nullptr)
    {
        auto dimensionSpecificity = tensorPtr->dimensionSpecificity();
        size = dimensionSpecificity->size();
        for (unsigned int i = 0; i < size; ++i)
        {
            dimensionsSpecificity[i] = dimensionSpecificity->Get(i);
        }
    }
    // Construct a TensorShape
    TensorShape shape(size, outputDims.data(), dimensionsSpecificity);

    auto quantizationScales = tensorPtr->quantizationScales();
    if (quantizationScales)
    {
        unsigned int quantizationScalesSize = quantizationScales->size();
        std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
        unsigned int quantizationDim = tensorPtr->quantizationDim();
        armnn::TensorInfo result(shape,
                                 type,
                                 scales,
                                 quantizationDim);
        return result;
    }

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(shape,
                             type,
                             quantizationScale,
                             quantizationOffset);

    return result;
}

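// ToConstTensor wraps the serialized constant data (byte/short/int/long payloads) in an
// armnn::ConstTensor after checking that the element count matches the tensor info.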
armnn::ConstTensor ToConstTensor(ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
                                             constTensorPtr->data_type(),
                                             EnumNameConstTensorData(constTensorPtr->data_type()),
                                             location.AsString()));
        }
    }
}

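// GetInputs and GetOutputs collect the raw tensor info pointers for a layer. Inputs are
// resolved by following each input slot's connection back to the producing layer's
// output slot; outputs are read directly from the layer's own output slots.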
TensorRawPtrVector IDeserializer::DeserializerImpl::GetInputs(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
                       (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}

TensorRawPtrVector IDeserializer::DeserializerImpl::GetOutputs(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numOutputs = layer->outputSlots()->size();

    TensorRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        result[i] = layer->outputSlots()->Get(i)->tensorInfo();
    }
    return result;
}

void IDeserializer::DeserializerImpl::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(fmt::format("Layer not supported. layerIndex: {0} "
                                     "layerName: {1} / {2}",
                                     layerIndex,
                                     layerName,
                                     CHECK_LOCATION().AsString()));
}

void IDeserializer::DeserializerImpl::ResetParser()
{
    m_Network = armnn::INetworkPtr(nullptr, nullptr);
    m_InputBindings.clear();
    m_OutputBindings.clear();
}

INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph(graph);
}

armnn::INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
    return CreateNetworkFromGraph(graph);
}

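// LoadGraphFromBinary runs the flatbuffers verifier over the raw buffer before any field
// is accessed, so malformed or truncated files fail with a ParseException instead of
// undefined behaviour.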
GraphPtr IDeserializer::DeserializerImpl::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(fmt::format("Invalid (null) binary content {}",
                                                   CHECK_LOCATION().AsString()));
    }
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(fmt::format("Buffer doesn't conform to the expected Armnn "
                                         "flatbuffers format. size:{0} {1}",
                                         len,
                                         CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}

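// CreateNetworkFromGraph builds the INetwork in two passes: first every non-input/output
// layer is handed to its registered Parse* function (which creates the layer and records
// its slots), then the recorded output slots are connected to the input slots that
// reference them. Input and output layers themselves are added by SetupInputLayers and
// SetupOutputLayers.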
INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    ARMNN_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    return std::move(m_Network);
}

BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkInputBindingInfo(unsigned int layerIndex,
                                                                             const std::string& name) const
{
    IgnoreUnused(layerIndex);
    for (auto inputBinding : m_InputBindings)
    {
        if (inputBinding.first == name)
        {
            return inputBinding.second;
        }
    }
    throw ParseException(fmt::format("No input binding found for layer:{0} / {1}",
                                     name,
                                     CHECK_LOCATION().AsString()));
}

BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkOutputBindingInfo(unsigned int layerIndex,
                                                                              const std::string& name) const
{
    IgnoreUnused(layerIndex);
    for (auto outputBinding : m_OutputBindings)
    {
        if (outputBinding.first == name)
        {
            return outputBinding.second;
        }
    }
    throw ParseException(fmt::format("No output binding found for layer:{0} / {1}",
                                     name,
                                     CHECK_LOCATION().AsString()));
}

unsigned int IDeserializer::DeserializerImpl::GetInputLayerInVector(GraphPtr graph, int targetId)
{
    for (unsigned int i = 0; i < graph->layers()->size(); i++)
    {
        auto layer = graph->layers()->Get(i);
        if (layer->layer_type() == Layer::Layer_InputLayer)
        {
            auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
            if (layerBindingId == targetId)
            {
                return i;
            }
        }
    }
    throw ParseException("Input layer with given layerBindingId not found");
}

unsigned int IDeserializer::DeserializerImpl::GetOutputLayerInVector(GraphPtr graph, int targetId)
{
    for (unsigned int i = 0; i < graph->layers()->size(); i++)
    {
        auto layer = graph->layers()->Get(i);
        if (layer->layer_type() == Layer::Layer_OutputLayer)
        {
            auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
            if (layerBindingId == targetId)
            {
                return i;
            }
        }
    }
    throw ParseException("Output layer with given layerBindingId not found");
}

unsigned int IDeserializer::DeserializerImpl::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
{
    for (unsigned int i = 0; i < graph->layers()->size(); i++)
    {
        LayerBaseRawPtr layer = GetBaseLayer(graph, i);
        if (layer->index() == targetIndex)
        {
            return i;
        }
    }
    throw ParseException("Layer with given index not found");
}

IDeserializer::DeserializerImpl::FeatureVersions IDeserializer::DeserializerImpl::GetFeatureVersions(GraphPtr graph)
{
    IDeserializer::DeserializerImpl::FeatureVersions versions;

    if (graph->featureVersions())
    {
        versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
        versions.m_WeightsLayoutScheme = graph->featureVersions()->weightsLayoutScheme();
    }

    return versions;
}

Kevin May43a799c2019-02-08 16:31:42 +0000941{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000942 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100943 const unsigned int numInputs = graph->inputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000944 m_InputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100945 m_InputBindings.reserve(numInputs);
946
947 for (unsigned int i = 0; i < numInputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000948 {
Tee Jungaa920c52019-11-05 10:48:25 +0000949 unsigned int inputLayerIndex = 0xFFFFFFFF;
950 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
951 {
Matthew Sloyan0663d662020-09-14 11:47:26 +0100952 const unsigned int inputId = armnn::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
Tee Jungaa920c52019-11-05 10:48:25 +0000953 inputLayerIndex = GetLayerIndexInVector(graph, inputId);
954 }
955 else
956 {
957 const int inputId = graph->inputIds()->Get(i);
958 inputLayerIndex = GetInputLayerInVector(graph, inputId);
959 }
960
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100961 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000962
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100963 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
964 LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100965 ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Kevin May43a799c2019-02-08 16:31:42 +0000966
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100967 IConnectableLayer* inputLayer =
968 m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000969
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100970 const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
971 inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
972 RegisterOutputSlots(graph, inputLayerIndex, inputLayer);
973
Derek Lamberti8ddae332019-02-21 16:29:43 +0000974 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100975 m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000976 }
977}
978
Finn Williams85d36712021-01-26 22:30:06 +0000979void IDeserializer::DeserializerImpl::SetupOutputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000980{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000981 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100982 const unsigned int numOutputs = graph->outputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000983 m_OutputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100984 m_OutputBindings.reserve(numOutputs);
985
986 for (unsigned int i = 0; i < numOutputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000987 {
Tee Jungaa920c52019-11-05 10:48:25 +0000988 unsigned int outputLayerIndex = 0xFFFFFFFF;
989 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
990 {
Matthew Sloyan0663d662020-09-14 11:47:26 +0100991 const unsigned int outputId = armnn::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
Tee Jungaa920c52019-11-05 10:48:25 +0000992 outputLayerIndex = GetLayerIndexInVector(graph, outputId);
993 }
994 else
995 {
996 const int outputId = graph->outputIds()->Get(i);
997 outputLayerIndex = GetOutputLayerInVector(graph, outputId);
998 }
999
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001000 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001001
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001002 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
1003 LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
Colm Donelan30aa3712021-04-07 17:28:01 +01001004 ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Output has no name.");
Derek Lamberti8ddae332019-02-21 16:29:43 +00001005
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001006 IConnectableLayer* outputLayer =
1007 m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +00001008
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001009 RegisterInputSlots(graph, outputLayerIndex, outputLayer);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001010 unsigned int sourceLayerIndex =
1011 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
Colm Donelan30aa3712021-04-07 17:28:01 +01001012 unsigned int outputSlotIndex =
1013 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->outputSlotIndex());
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001014 LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
Colm Donelan30aa3712021-04-07 17:28:01 +01001015 const armnn::TensorInfo& tensorInfo = ToTensorInfo(
1016 sourceBaseLayer->outputSlots()->Get(outputSlotIndex)->tensorInfo());
Derek Lamberti8ddae332019-02-21 16:29:43 +00001017 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001018 m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +00001019 }
1020}
1021
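// RegisterOutputSlots/RegisterInputSlots validate that the layer created by the parser
// exposes the same number of slots as the serialized layer, then record each slot in
// m_GraphConnections keyed by the serialized layer index so the connection pass in
// CreateNetworkFromGraph can wire them up later.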
Finn Williams85d36712021-01-26 22:30:06 +00001022void IDeserializer::DeserializerImpl::RegisterOutputSlots(GraphPtr graph,
Derek Lamberti8ddae332019-02-21 16:29:43 +00001023 uint32_t layerIndex,
1024 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +00001025{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001026 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001027 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001028 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
1029 if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
Kevin May43a799c2019-02-08 16:31:42 +00001030 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001031 throw ParseException(fmt::format("The number of outputslots ({0}) does not match the number expected ({1})"
1032 " for layer index: {2} {3}",
1033 baseLayer->outputSlots()->size(),
1034 layer->GetNumOutputSlots(),
1035 layerIndex,
1036 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +00001037 }
1038
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001039 for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +00001040 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001041 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
1042 armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
1043 // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
1044 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
Kevin May43a799c2019-02-08 16:31:42 +00001045 }
1046}
1047
Finn Williams85d36712021-01-26 22:30:06 +00001048void IDeserializer::DeserializerImpl::RegisterInputSlots(GraphPtr graph,
Derek Lamberti8ddae332019-02-21 16:29:43 +00001049 uint32_t layerIndex,
1050 armnn::IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +00001051{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001052 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001053 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001054 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
1055 if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
Kevin May43a799c2019-02-08 16:31:42 +00001056 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001057        throw ParseException(fmt::format("The number of input slots ({0}) does not match the number expected ({1})"
1058                                         " for layer index: {2} {3}",
1059 baseLayer->inputSlots()->size(),
1060 layer->GetNumInputSlots(),
1061 layerIndex,
1062 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +00001063 }
1064
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001065 for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +00001066 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001067 auto fbInputSlot = baseLayer->inputSlots()->Get(i);
1068 auto fbConnection = fbInputSlot->connection();
1069 armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
1070 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
Kevin May43a799c2019-02-08 16:31:42 +00001071 }
1072}
1073
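// Records that 'inputSlot' consumes output slot 'outputSlotIndex' of the layer at 'sourceLayerIndex'.
// A single output slot may feed several input slots, so the consumers are kept in a vector.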
Finn Williams85d36712021-01-26 22:30:06 +00001074void IDeserializer::DeserializerImpl::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001075 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001076 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +00001077{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001078 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +00001079 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001080 m_GraphConnections[sourceLayerIndex] = Connections();
1081 }
1082
1083 Connections& connections = m_GraphConnections[sourceLayerIndex];
1084 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
1085 {
1086 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +00001087 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001088 else
1089 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001090 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001091 }
1092}
Kevin May43a799c2019-02-08 16:31:42 +00001093
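// Records the producing output slot for (sourceLayerIndex, outputSlotIndex); each output slot may
// only be registered once, so a duplicate registration indicates a malformed graph.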
Finn Williams85d36712021-01-26 22:30:06 +00001094void IDeserializer::DeserializerImpl::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001095 uint32_t outputSlotIndex,
1096 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001097{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001098 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1099 {
1100 m_GraphConnections[sourceLayerIndex] = Connections();
1101 }
1102
1103 Connections& connections = m_GraphConnections[sourceLayerIndex];
1104 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1105 {
1106 throw ParseException("Same output slot index processed twice");
1107 }
1108
1109 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +00001110}
1111
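// The standalone Abs layer is deserialized as an ElementwiseUnary layer with the Abs operation.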
Finn Williams85d36712021-01-26 22:30:06 +00001112void IDeserializer::DeserializerImpl::ParseAbs(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001113{
1114 CHECK_LAYERS(graph, 0, layerIndex);
1115 auto inputs = GetInputs(graph, layerIndex);
1116 CHECK_LOCATION();
1117 CHECK_VALID_SIZE(inputs.size(), 1);
1118
1119 auto outputs = GetOutputs(graph, layerIndex);
1120 CHECK_VALID_SIZE(outputs.size(), 1);
1121
1122 auto layerName = GetLayerName(graph, layerIndex);
1123
josh minor4a3c6102020-01-06 16:40:46 -06001124 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
1125 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001126 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1127 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1128
1129 RegisterInputSlots(graph, layerIndex, layer);
1130 RegisterOutputSlots(graph, layerIndex, layer);
1131}
1132
Finn Williams85d36712021-01-26 22:30:06 +00001133void IDeserializer::DeserializerImpl::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +00001134{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001135 CHECK_LAYERS(graph, 0, layerIndex);
1136 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001137 CHECK_LOCATION();
1138 CHECK_VALID_SIZE(inputs.size(), 1);
1139
Derek Lamberti8ddae332019-02-21 16:29:43 +00001140 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001141 CHECK_VALID_SIZE(outputs.size(), 1);
1142
Derek Lamberti8ddae332019-02-21 16:29:43 +00001143 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001144 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001145 auto serializerDescriptor = serializerLayer->descriptor();
1146
1147 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001148 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +00001149 descriptor.m_A = serializerDescriptor->a();
1150 descriptor.m_B = serializerDescriptor->b();
1151
1152 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
1153 layerName.c_str());
1154 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1155 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1156
Derek Lamberti8ddae332019-02-21 16:29:43 +00001157 RegisterInputSlots(graph, layerIndex, layer);
1158 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +00001159}
1160
Finn Williams85d36712021-01-26 22:30:06 +00001161void IDeserializer::DeserializerImpl::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001162{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001163 CHECK_LAYERS(graph, 0, layerIndex);
1164 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001165 CHECK_LOCATION();
1166 CHECK_VALID_SIZE(inputs.size(), 2);
1167
Derek Lamberti8ddae332019-02-21 16:29:43 +00001168 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001169 CHECK_VALID_SIZE(outputs.size(), 1);
1170
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001171 auto layerName = GetLayerName(graph, layerIndex);
1172 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001173
1174 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1175 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1176
Derek Lamberti8ddae332019-02-21 16:29:43 +00001177 RegisterInputSlots(graph, layerIndex, layer);
1178 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001179}
1180
Finn Williams85d36712021-01-26 22:30:06 +00001181void IDeserializer::DeserializerImpl::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001182{
1183 CHECK_LAYERS(graph, 0, layerIndex);
1184 auto inputs = GetInputs(graph, layerIndex);
1185 CHECK_LOCATION();
1186 CHECK_VALID_SIZE(inputs.size(), 1);
1187
1188 auto outputs = GetOutputs(graph, layerIndex);
1189 CHECK_VALID_SIZE(outputs.size(), 1);
1190
1191 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1192 auto serializerDescriptor = serializerLayer->descriptor();
1193
1194 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001195 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001196 descriptor.m_Axis = serializerDescriptor->axis();
1197 auto layerName = GetLayerName(graph, layerIndex);
1198 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1199
1200 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1201 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1202
1203 RegisterInputSlots(graph, layerIndex, layer);
1204 RegisterOutputSlots(graph, layerIndex, layer);
1205}
1206
Finn Williams85d36712021-01-26 22:30:06 +00001207void IDeserializer::DeserializerImpl::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001208{
1209 CHECK_LAYERS(graph, 0, layerIndex);
1210
Finn Williams85d36712021-01-26 22:30:06 +00001211 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001212 CHECK_VALID_SIZE(inputs.size(), 1);
1213
Finn Williams85d36712021-01-26 22:30:06 +00001214 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001215 CHECK_VALID_SIZE(outputs.size(), 1);
1216
1217 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1218 auto flatBufferCrops = flatBufferDescriptor->crops();
1219 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1220
1221 if (flatBufferCrops->Length() % 2 != 0)
1222 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001223 throw ParseException(fmt::format("The size of crops must be divisible by 2 {}", CHECK_LOCATION().AsString()));
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001224 }
1225
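    // The flattened crops list stores begin/end pairs per spatial dimension,
    // e.g. { 1, 2, 3, 4 } becomes {{1, 2}, {3, 4}}.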
1226 std::vector<std::pair<unsigned int, unsigned int>> crops;
1227 crops.reserve(flatBufferCrops->Length() / 2);
1228 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
1229 {
1230 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1231 }
1232
1233 armnn::BatchToSpaceNdDescriptor descriptor;
1234 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1235 descriptor.m_BlockShape =
1236 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1237 descriptor.m_Crops = crops;
1238
1239 auto layerName = GetLayerName(graph, layerIndex);
1240 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1241
1242 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1243 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1244
1245 RegisterInputSlots(graph, layerIndex, layer);
1246 RegisterOutputSlots(graph, layerIndex, layer);
1247}
1248
Finn Williams85d36712021-01-26 22:30:06 +00001249void IDeserializer::DeserializerImpl::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
ruoyan018e7fa232019-02-28 15:09:07 +00001250{
1251 CHECK_LAYERS(graph, 0, layerIndex);
1252
1253 auto inputs = GetInputs(graph, layerIndex);
1254 CHECK_VALID_SIZE(inputs.size(), 1);
1255
1256 auto outputs = GetOutputs(graph, layerIndex);
1257 CHECK_VALID_SIZE(outputs.size(), 1);
1258 auto outputInfo = ToTensorInfo(outputs[0]);
1259
ruoyan015c7ab052019-03-04 14:48:02 +00001260 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001261
1262 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1263 auto serializerDescriptor = serializerLayer->descriptor();
1264
1265 armnn::BatchNormalizationDescriptor descriptor;
1266 descriptor.m_Eps = serializerDescriptor->eps();
1267 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1268
1269 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1270 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1271 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1272 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1273
1274 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1275 mean,
1276 variance,
1277 beta,
1278 gamma,
1279 layerName.c_str());
1280 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1281
1282 RegisterInputSlots(graph, layerIndex, layer);
1283 RegisterOutputSlots(graph, layerIndex, layer);
1284}
1285
mathad01b392e982021-04-07 12:07:30 +01001286void IDeserializer::DeserializerImpl::ParseCast(GraphPtr graph, unsigned int layerIndex)
1287{
1288 CHECK_LAYERS(graph, 0, layerIndex);
1289 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1290 CHECK_LOCATION();
1291 CHECK_VALID_SIZE(inputs.size(), 1);
1292
1293 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1294 CHECK_VALID_SIZE(outputs.size(), 1);
1295
1296 auto layerName = GetLayerName(graph, layerIndex);
1297
1298 IConnectableLayer* layer = m_Network->AddCastLayer(layerName.c_str());
1299
1300 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1301 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1302
1303 RegisterInputSlots(graph, layerIndex, layer);
1304 RegisterOutputSlots(graph, layerIndex, layer);
1305}
1306
Finn Williams85d36712021-01-26 22:30:06 +00001307void IDeserializer::DeserializerImpl::ParseConstant(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy76277882019-02-26 08:29:54 +00001308{
1309 CHECK_LAYERS(graph, 0, layerIndex);
1310 CHECK_LOCATION();
1311
1312 auto outputs = GetOutputs(graph, layerIndex);
1313 CHECK_VALID_SIZE(outputs.size(), 1);
1314
1315 auto layerName = GetLayerName(graph, layerIndex);
1316
1317 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
1318 auto serializerInput = serializerLayer->input();
1319
1320 armnn::ConstTensor input = ToConstTensor(serializerInput);
1321
1322 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
1323
1324 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1325 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1326
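    // Constant layers have no input slots, so only the output slots are registered.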
1327 RegisterOutputSlots(graph, layerIndex, layer);
1328}
1329
Finn Williams85d36712021-01-26 22:30:06 +00001330void IDeserializer::DeserializerImpl::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001331{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001332 CHECK_LAYERS(graph, 0, layerIndex);
1333 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001334 CHECK_LOCATION();
1335 CHECK_VALID_SIZE(inputs.size(), 1);
1336
Derek Lamberti8ddae332019-02-21 16:29:43 +00001337 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001338 CHECK_VALID_SIZE(outputs.size(), 1);
1339
Derek Lamberti8ddae332019-02-21 16:29:43 +00001340 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001341 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001342 auto serializerDescriptor = serializerLayer->descriptor();
1343
1344 armnn::Convolution2dDescriptor descriptor;
1345 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1346 descriptor.m_PadRight = serializerDescriptor->padRight();
1347 descriptor.m_PadTop = serializerDescriptor->padTop();
1348 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1349 descriptor.m_StrideX = serializerDescriptor->strideX();
1350     descriptor.m_StrideY = serializerDescriptor->strideY();
Matthew Benthamacad04e2019-05-13 10:02:45 +01001351    descriptor.m_DilationX = serializerDescriptor->dilationX();
1352     descriptor.m_DilationY = serializerDescriptor->dilationY();
Mike Kellya0766c32019-02-19 17:22:07 +00001353    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
1354 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1355
1356 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1357 armnn::ConstTensor biases;
1358
Matteo Martincighfc598e12019-05-14 10:36:13 +01001359 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001360 if (descriptor.m_BiasEnabled)
1361 {
1362 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001363 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001364 }
1365 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1366 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001367 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001368 layerName.c_str());
1369 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1370 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1371
Derek Lamberti8ddae332019-02-21 16:29:43 +00001372 RegisterInputSlots(graph, layerIndex, layer);
1373 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001374}
1375
Finn Williams85d36712021-01-26 22:30:06 +00001376void IDeserializer::DeserializerImpl::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001377{
1378 CHECK_LAYERS(graph, 0, layerIndex);
1379
1380 auto inputs = GetInputs(graph, layerIndex);
1381 CHECK_VALID_SIZE(inputs.size(), 1);
1382
1383 auto outputs = GetOutputs(graph, layerIndex);
1384 CHECK_VALID_SIZE(outputs.size(), 1);
1385
1386 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1387
1388 armnn::DepthToSpaceDescriptor descriptor;
1389 descriptor.m_BlockSize = fbDescriptor->blockSize();
1390 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1391
1392 auto layerName = GetLayerName(graph, layerIndex);
1393 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1394
1395 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1396 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1397
1398 RegisterInputSlots(graph, layerIndex, layer);
1399 RegisterOutputSlots(graph, layerIndex, layer);
1400}
1401
Finn Williams85d36712021-01-26 22:30:06 +00001402void IDeserializer::DeserializerImpl::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001403{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001404 CHECK_LAYERS(graph, 0, layerIndex);
1405 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001406 CHECK_LOCATION();
1407 CHECK_VALID_SIZE(inputs.size(), 1);
1408
Derek Lamberti8ddae332019-02-21 16:29:43 +00001409 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001410 CHECK_VALID_SIZE(outputs.size(), 1);
1411
Derek Lamberti8ddae332019-02-21 16:29:43 +00001412 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001413 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001414 auto serializerDescriptor = serializerLayer->descriptor();
1415
1416 armnn::DepthwiseConvolution2dDescriptor descriptor;
1417 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1418 descriptor.m_PadRight = serializerDescriptor->padRight();
1419 descriptor.m_PadTop = serializerDescriptor->padTop();
1420 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1421 descriptor.m_StrideX = serializerDescriptor->strideX();
Aron Virginas-Tar5e1b0cf2019-06-21 14:20:11 +01001422 descriptor.m_StrideY = serializerDescriptor->strideY();
1423 descriptor.m_DilationX = serializerDescriptor->dilationX();
1424 descriptor.m_DilationY = serializerDescriptor->dilationY();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001425    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
1426 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1427
Jan Eilers53ef7952021-06-02 12:01:25 +01001428 IConnectableLayer* layer;
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001429
Matteo Martincighfc598e12019-05-14 10:36:13 +01001430 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001431 if (descriptor.m_BiasEnabled)
1432 {
Jan Eilers53ef7952021-06-02 12:01:25 +01001433 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001434 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001435 }
Jan Eilers53ef7952021-06-02 12:01:25 +01001436
1437 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1438 // The data layout for weights in ArmNN used to be [M,I,H,W] but now it's changed to [1,H,W,I*M]
1439 // When reading older flatbuffer files we need to add a permutation to get to the new layout.
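    // For example, a legacy [ M=2, I=3, H=5, W=5 ] weights tensor ends up as [ 1, 5, 5, 6 ]
    // after the permute and reshape below.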
1440 if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
1441 {
1442         // Permute weights [ M, I, H, W ] --> [ 1, H, W, I*M ]
1443 // Step1: [ M, I, H, W ] --> [ H, W, I, M]
1444 PermutationVector permutationVector = { 3, 2, 0, 1 };
1445 armnn::TensorInfo weightsInfo = weights.GetInfo();
1446 std::unique_ptr<unsigned char[]> permuteBuffer(new unsigned char[weightsInfo.GetNumBytes()]);
1447 weightsInfo = armnnUtils::Permuted(weightsInfo, permutationVector);
1448 armnnUtils::Permute(weightsInfo.GetShape(), permutationVector,
1449 weights.GetMemoryArea(), permuteBuffer.get(),
1450 GetDataTypeSize(weightsInfo.GetDataType()));
1451
1452 // Step2: Reshape [ H, W, I, M] --> [ 1, H, W, I*M ]
1453 auto weightsShape = weightsInfo.GetShape();
1454 weightsInfo.SetShape({1,
1455 weightsShape[0],
1456 weightsShape[1],
1457 weightsShape[2]*weightsShape[3]});
1458
1459 armnn::ConstTensor weightsPermuted(weightsInfo, permuteBuffer.get());
1460
1461 layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1462 weightsPermuted,
1463 optionalBiases,
1464 layerName.c_str());
1465 }
1466 else
1467 {
1468 layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1469 weights,
1470 optionalBiases,
1471 layerName.c_str());
1472 }
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001473
1474 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1475 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1476
Derek Lamberti8ddae332019-02-21 16:29:43 +00001477 RegisterInputSlots(graph, layerIndex, layer);
1478 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001479}
1480
Finn Williams85d36712021-01-26 22:30:06 +00001481void IDeserializer::DeserializerImpl::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001482{
1483 CHECK_LAYERS(graph, 0, layerIndex);
1484 auto inputs = GetInputs(graph, layerIndex);
1485 CHECK_LOCATION();
1486 CHECK_VALID_SIZE(inputs.size(), 2);
1487
1488 auto outputs = GetOutputs(graph, layerIndex);
1489 CHECK_VALID_SIZE(outputs.size(), 4);
1490
1491 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1492 auto layerName = GetLayerName(graph, layerIndex);
1493 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1494
1495 armnn::DetectionPostProcessDescriptor descriptor;
1496 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1497 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1498 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1499 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1500 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1501 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1502 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1503 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1504 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1505 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1506 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1507
1508 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1509
1510 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1511 anchors,
1512 layerName.c_str());
1513
1514 for (unsigned int i = 0; i < 4; i++)
1515 {
1516 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1517 }
1518
1519 RegisterInputSlots(graph, layerIndex, layer);
1520 RegisterOutputSlots(graph, layerIndex, layer);
1521}
1522
Finn Williams85d36712021-01-26 22:30:06 +00001523void IDeserializer::DeserializerImpl::ParseDivision(GraphPtr graph, unsigned int layerIndex)
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001524{
1525 CHECK_LAYERS(graph, 0, layerIndex);
1526 auto inputs = GetInputs(graph, layerIndex);
1527 CHECK_LOCATION();
1528 CHECK_VALID_SIZE(inputs.size(), 2);
1529
1530 auto outputs = GetOutputs(graph, layerIndex);
1531 CHECK_VALID_SIZE(outputs.size(), 1);
1532
1533 auto layerName = GetLayerName(graph, layerIndex);
1534 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1535
1536 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1537 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1538
1539 RegisterInputSlots(graph, layerIndex, layer);
1540 RegisterOutputSlots(graph, layerIndex, layer);
1541}
1542
Finn Williams85d36712021-01-26 22:30:06 +00001543void IDeserializer::DeserializerImpl::ParseEqual(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001544{
1545 CHECK_LAYERS(graph, 0, layerIndex);
1546 auto inputs = GetInputs(graph, layerIndex);
1547 CHECK_LOCATION();
1548 CHECK_VALID_SIZE(inputs.size(), 2);
1549
1550 auto outputs = GetOutputs(graph, layerIndex);
1551 CHECK_VALID_SIZE(outputs.size(), 1);
1552
1553 auto layerName = GetLayerName(graph, layerIndex);
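    // The standalone Equal layer is deserialized as a Comparison layer with the Equal operation.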
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001554 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1555 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001556
1557 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1558 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1559
1560 RegisterInputSlots(graph, layerIndex, layer);
1561 RegisterOutputSlots(graph, layerIndex, layer);
1562}
1563
Finn Williams85d36712021-01-26 22:30:06 +00001564void IDeserializer::DeserializerImpl::ParseFill(GraphPtr graph, unsigned int layerIndex)
Keith Davis300ad562020-06-04 16:34:23 +01001565{
1566 CHECK_LAYERS(graph, 0, layerIndex);
1567 auto inputs = GetInputs(graph, layerIndex);
1568 CHECK_LOCATION();
1569 CHECK_VALID_SIZE(inputs.size(), 1);
1570
1571 auto outputs = GetOutputs(graph, layerIndex);
1572 CHECK_VALID_SIZE(outputs.size(), 1);
1573
1574 auto layerName = GetLayerName(graph, layerIndex);
1575 armnn::FillDescriptor descriptor(1.0f);
1576 IConnectableLayer* layer = m_Network->AddFillLayer(descriptor, layerName.c_str());
1577
1578 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1579 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1580
1581 RegisterInputSlots(graph, layerIndex, layer);
1582 RegisterOutputSlots(graph, layerIndex, layer);
1583}
1584
Finn Williams85d36712021-01-26 22:30:06 +00001585void IDeserializer::DeserializerImpl::ParseGreater(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001586{
1587 CHECK_LAYERS(graph, 0, layerIndex);
1588 auto inputs = GetInputs(graph, layerIndex);
1589 CHECK_LOCATION();
1590 CHECK_VALID_SIZE(inputs.size(), 2);
1591
1592 auto outputs = GetOutputs(graph, layerIndex);
1593 CHECK_VALID_SIZE(outputs.size(), 1);
1594
1595 auto layerName = GetLayerName(graph, layerIndex);
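    // The standalone Greater layer is deserialized as a Comparison layer with the Greater operation.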
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001596 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1597 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001598
1599 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1600 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1601
1602 RegisterInputSlots(graph, layerIndex, layer);
1603 RegisterOutputSlots(graph, layerIndex, layer);
1604}
1605
Finn Williams85d36712021-01-26 22:30:06 +00001606void IDeserializer::DeserializerImpl::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001607{
1608 CHECK_LAYERS(graph, 0, layerIndex);
1609
1610 auto inputs = GetInputs(graph, layerIndex);
1611 CHECK_VALID_SIZE(inputs.size(), 1);
1612
1613 auto outputs = GetOutputs(graph, layerIndex);
1614 CHECK_VALID_SIZE(outputs.size(), 1);
1615
1616 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1617 auto fbDescriptor = fbLayer->descriptor();
1618
1619 armnn::InstanceNormalizationDescriptor descriptor;
1620 descriptor.m_Gamma = fbDescriptor->gamma();
1621 descriptor.m_Beta = fbDescriptor->beta();
1622 descriptor.m_Eps = fbDescriptor->eps();
1623 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1624
1625 const std::string layerName = GetLayerName(graph, layerIndex);
1626 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1627
1628 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1629 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1630
1631 RegisterInputSlots(graph, layerIndex, layer);
1632 RegisterOutputSlots(graph, layerIndex, layer);
1633}
1634
Finn Williams85d36712021-01-26 22:30:06 +00001635void IDeserializer::DeserializerImpl::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001636{
1637 CHECK_LAYERS(graph, 0, layerIndex);
1638
1639 auto inputs = GetInputs(graph, layerIndex);
1640 CHECK_VALID_SIZE(inputs.size(), 1);
1641
1642 auto outputs = GetOutputs(graph, layerIndex);
1643 CHECK_VALID_SIZE(outputs.size(), 1);
1644 auto outputInfo = ToTensorInfo(outputs[0]);
1645
1646 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1647 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1648
1649 auto layerName = GetLayerName(graph, layerIndex);
1650 armnn::L2NormalizationDescriptor descriptor;
1651 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001652 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001653
1654 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1655 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1656
1657 RegisterInputSlots(graph, layerIndex, layer);
1658 RegisterOutputSlots(graph, layerIndex, layer);
1659}
1660
Finn Williams85d36712021-01-26 22:30:06 +00001661void IDeserializer::DeserializerImpl::ParseLogicalBinary(GraphPtr graph, unsigned int layerIndex)
James Conroyaba90cd2020-11-06 16:28:18 +00001662{
1663 CHECK_LAYERS(graph, 0, layerIndex);
1664 CHECK_LOCATION();
1665
1666 auto inputs = GetInputs(graph, layerIndex);
1667 CHECK_VALID_SIZE(inputs.size(), 2);
1668
1669 auto outputs = GetOutputs(graph, layerIndex);
1670 CHECK_VALID_SIZE(outputs.size(), 1);
1671
1672 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer();
1673 auto fbDescriptor = fbLayer->descriptor();
1674
1675 armnn::LogicalBinaryDescriptor descriptor;
1676 descriptor.m_Operation = ToLogicalBinaryOperation(fbDescriptor->operation());
1677
1678 const std::string& layerName = GetLayerName(graph, layerIndex);
1679 IConnectableLayer* layer = m_Network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1680
1681 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1682 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1683
1684 RegisterInputSlots(graph, layerIndex, layer);
1685 RegisterOutputSlots(graph, layerIndex, layer);
1686}
1687
Finn Williams85d36712021-01-26 22:30:06 +00001688void IDeserializer::DeserializerImpl::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan26257852019-10-14 13:00:47 +01001689{
1690 CHECK_LAYERS(graph, 0, layerIndex);
1691
Finn Williams85d36712021-01-26 22:30:06 +00001692 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001693 CHECK_VALID_SIZE(inputs.size(), 1);
1694
Finn Williams85d36712021-01-26 22:30:06 +00001695 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001696 CHECK_VALID_SIZE(outputs.size(), 1);
1697
1698 armnn::LogSoftmaxDescriptor descriptor;
1699 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1700 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1701 auto layerName = GetLayerName(graph, layerIndex);
1702
1703 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1704
1705 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1706 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1707
1708 RegisterInputSlots(graph, layerIndex, layer);
1709 RegisterOutputSlots(graph, layerIndex, layer);
1710}
1711
Finn Williams85d36712021-01-26 22:30:06 +00001712void IDeserializer::DeserializerImpl::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001713{
1714 CHECK_LAYERS(graph, 0, layerIndex);
1715 auto inputs = GetInputs(graph, layerIndex);
1716 CHECK_LOCATION();
1717 CHECK_VALID_SIZE(inputs.size(), 2);
1718
1719 auto outputs = GetOutputs(graph, layerIndex);
1720 CHECK_VALID_SIZE(outputs.size(), 1);
1721
1722 auto layerName = GetLayerName(graph, layerIndex);
1723 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1724
1725 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1726 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1727
1728 RegisterInputSlots(graph, layerIndex, layer);
1729 RegisterOutputSlots(graph, layerIndex, layer);
1730}
1731
Finn Williams85d36712021-01-26 22:30:06 +00001732void IDeserializer::DeserializerImpl::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001733{
1734 CHECK_LAYERS(graph, 0, layerIndex);
1735 auto inputs = GetInputs(graph, layerIndex);
1736 CHECK_LOCATION();
1737 CHECK_VALID_SIZE(inputs.size(), 2);
1738
1739 auto outputs = GetOutputs(graph, layerIndex);
1740 CHECK_VALID_SIZE(outputs.size(), 1);
1741
1742 auto layerName = GetLayerName(graph, layerIndex);
1743 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1744
1745 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1746 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1747
1748 RegisterInputSlots(graph, layerIndex, layer);
1749 RegisterOutputSlots(graph, layerIndex, layer);
1750}
1751
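// Concatenation may be stored as either a ConcatLayer or, in older files, a MergerLayer;
// both carry the same OriginsDescriptor.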
Jim Flynne242f2d2019-05-22 14:24:13 +01001752const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1753 unsigned int layerIndex)
1754{
1755 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1756
1757 switch (layerType)
1758 {
1759 case Layer::Layer_ConcatLayer:
1760 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1761 case Layer::Layer_MergerLayer:
1762 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1763 default:
1764 throw armnn::Exception("unknown layer type, should be concat or merger");
1765 }
1766}
1767
Finn Williams85d36712021-01-26 22:30:06 +00001768void IDeserializer::DeserializerImpl::ParseComparison(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01001769{
1770 CHECK_LAYERS(graph, 0, layerIndex);
1771 CHECK_LOCATION();
1772
1773 auto inputs = GetInputs(graph, layerIndex);
1774 CHECK_VALID_SIZE(inputs.size(), 2);
1775
1776 auto outputs = GetOutputs(graph, layerIndex);
1777 CHECK_VALID_SIZE(outputs.size(), 1);
1778
1779 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
1780 auto fbDescriptor = fbLayer->descriptor();
1781
1782 armnn::ComparisonDescriptor descriptor;
1783 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
1784
1785 const std::string& layerName = GetLayerName(graph, layerIndex);
1786 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1787
1788 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1789 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1790
1791 RegisterInputSlots(graph, layerIndex, layer);
1792 RegisterOutputSlots(graph, layerIndex, layer);
1793}
1794
Finn Williams85d36712021-01-26 22:30:06 +00001795void IDeserializer::DeserializerImpl::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
josh minor4a3c6102020-01-06 16:40:46 -06001796{
1797 CHECK_LAYERS(graph, 0, layerIndex);
1798 CHECK_LOCATION();
1799
1800 auto inputs = GetInputs(graph, layerIndex);
1801 CHECK_VALID_SIZE(inputs.size(), 1);
1802
1803 auto outputs = GetOutputs(graph, layerIndex);
1804 CHECK_VALID_SIZE(outputs.size(), 1);
1805
1806 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
1807 auto fbDescriptor = fbLayer->descriptor();
1808
1809 armnn::ElementwiseUnaryDescriptor descriptor;
1810 descriptor.m_Operation = ToUnaryOperation(fbDescriptor->operation());
1811
1812 const std::string& layerName = GetLayerName(graph, layerIndex);
1813 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
1814
1815 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1816 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1817
1818 RegisterInputSlots(graph, layerIndex, layer);
1819 RegisterOutputSlots(graph, layerIndex, layer);
1820}
1821
Finn Williams85d36712021-01-26 22:30:06 +00001822void IDeserializer::DeserializerImpl::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001823{
1824 CHECK_LAYERS(graph, 0, layerIndex);
1825 CHECK_LOCATION();
1826
1827 auto outputs = GetOutputs(graph, layerIndex);
1828 CHECK_VALID_SIZE(outputs.size(), 1);
1829
Jim Flynnac25a1b2019-02-28 10:40:49 +00001830 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001831 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1832 unsigned int numViews = originsDescriptor->numViews();
1833 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001834
1835 // can now check the number of inputs == number of views
1836 auto inputs = GetInputs(graph, layerIndex);
1837 CHECK_VALID_SIZE(inputs.size(), numViews);
1838
1839 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001840 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001841 for (unsigned int v = 0; v < numViews; ++v)
1842 {
1843 auto originPtr = originsPtr->Get(v);
1844 for (unsigned int d = 0; d < numDimensions; ++d)
1845 {
1846 uint32_t value = originPtr->data()->Get(d);
1847 descriptor.SetViewOriginCoord(v, d, value);
1848 }
1849 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001850 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001851
Jim Flynn906f9462019-05-10 13:55:21 +01001852 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001853 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1854 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1855
1856 RegisterInputSlots(graph, layerIndex, layer);
1857 RegisterOutputSlots(graph, layerIndex, layer);
1858}
1859
Finn Williams85d36712021-01-26 22:30:06 +00001860void IDeserializer::DeserializerImpl::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001861{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001862 CHECK_LAYERS(graph, 0, layerIndex);
1863 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001864 CHECK_LOCATION();
1865 CHECK_VALID_SIZE(inputs.size(), 2);
1866
Derek Lamberti8ddae332019-02-21 16:29:43 +00001867 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001868 CHECK_VALID_SIZE(outputs.size(), 1);
1869
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001870 auto layerName = GetLayerName(graph, layerIndex);
1871 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001872
1873 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1874 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1875
Derek Lamberti8ddae332019-02-21 16:29:43 +00001876 RegisterInputSlots(graph, layerIndex, layer);
1877 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001878}
1879
Finn Williams85d36712021-01-26 22:30:06 +00001880void IDeserializer::DeserializerImpl::ParseFloor(GraphPtr graph, unsigned int layerIndex)
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001881{
1882 CHECK_LAYERS(graph, 0, layerIndex);
1883 CHECK_LOCATION();
1884
1885 auto inputs = GetInputs(graph, layerIndex);
1886 CHECK_VALID_SIZE(inputs.size(), 1);
1887
1888 auto outputs = GetOutputs(graph, layerIndex);
1889 CHECK_VALID_SIZE(outputs.size(), 1);
1890
1891 auto layerName = GetLayerName(graph, layerIndex);
1892
1893 armnn::IConnectableLayer* layer;
1894
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001895 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001896
1897 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1898 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1899
1900 RegisterInputSlots(graph, layerIndex, layer);
1901 RegisterOutputSlots(graph, layerIndex, layer);
1902}
1903
Finn Williams85d36712021-01-26 22:30:06 +00001904void IDeserializer::DeserializerImpl::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001905{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001906 CHECK_LAYERS(graph, 0, layerIndex);
1907 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001908 CHECK_LOCATION();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001909
Derek Lamberti8ddae332019-02-21 16:29:43 +00001910 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001911 CHECK_VALID_SIZE(outputs.size(), 1);
1912
Derek Lamberti8ddae332019-02-21 16:29:43 +00001913 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001914 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001915 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1916
1917 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1918 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1919 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001920 fullyConnectedDescriptor.m_ConstantWeights = flatBufferDescriptor->constantWeights();
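    // When the weights are not constant they arrive as extra network inputs rather than as
    // embedded ConstTensors, so the layer expects 2 inputs, or 3 when bias is enabled.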
1921 uint32_t numInputs = 1;
1922 if (!fullyConnectedDescriptor.m_ConstantWeights)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001923 {
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001924 numInputs = 2;
1925 if (fullyConnectedDescriptor.m_BiasEnabled)
1926 {
1927 numInputs = 3;
1928 }
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001929 }
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001930 CHECK_VALID_SIZE(inputs.size(), numInputs);
1931
1932 armnn::Optional <armnn::ConstTensor> optionalWeights = armnn::EmptyOptional();
1933 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
1934 if (fullyConnectedDescriptor.m_ConstantWeights)
1935 {
1936 armnn::ConstTensor weightsTensorData = ToConstTensor(flatBufferLayer->weights());
1937 optionalWeights = armnn::Optional<armnn::ConstTensor>(weightsTensorData);
1938
1939 if (flatBufferDescriptor->biasEnabled())
1940 {
1941 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
1942 optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
1943 }
1944 }
1945
1946 armnn::IConnectableLayer* layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1947 optionalWeights,
1948 optionalBiases,
1949 layerName.c_str());
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001950
1951 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1952 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1953
Derek Lamberti8ddae332019-02-21 16:29:43 +00001954 RegisterInputSlots(graph, layerIndex, layer);
1955 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001956}
1957
Finn Williams85d36712021-01-26 22:30:06 +00001958void IDeserializer::DeserializerImpl::ParsePad(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001959{
1960 CHECK_LAYERS(graph, 0, layerIndex);
1961
Finn Williams85d36712021-01-26 22:30:06 +00001962 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001963 CHECK_VALID_SIZE(inputs.size(), 1);
1964
Finn Williams85d36712021-01-26 22:30:06 +00001965 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001966 CHECK_VALID_SIZE(outputs.size(), 1);
1967
1968 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1969 auto flatBufferPadList = flatBufferDescriptor->padList();
David Monahan34757812019-06-19 11:47:21 +01001970 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001971
1972 if (flatBufferPadList->Length() % 2 != 0)
1973 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001974 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
1975 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001976 }
1977
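    // The flattened pad list stores (front, back) padding pairs per dimension,
    // e.g. { 0, 0, 2, 2 } pads the second dimension by two elements on each side.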
1978 std::vector<std::pair<unsigned int, unsigned int>> padList;
1979 padList.reserve(flatBufferPadList->Length() / 2);
1980 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1981 {
1982 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1983 }
1984
David Monahan34757812019-06-19 11:47:21 +01001985 armnn::PadDescriptor descriptor(padList, padValue);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001986
1987 auto layerName = GetLayerName(graph, layerIndex);
1988 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1989
1990 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1991 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1992
1993 RegisterInputSlots(graph, layerIndex, layer);
1994 RegisterOutputSlots(graph, layerIndex, layer);
1995}
1996
Finn Williams85d36712021-01-26 22:30:06 +00001997void IDeserializer::DeserializerImpl::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001998{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001999 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002000
2001 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00002002 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002003
Derek Lamberti8ddae332019-02-21 16:29:43 +00002004 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002005 CHECK_VALID_SIZE(inputs.size(), 1);
2006
Derek Lamberti8ddae332019-02-21 16:29:43 +00002007 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002008 CHECK_VALID_SIZE(outputs.size(), 1);
2009 auto outputInfo = ToTensorInfo(outputs[0]);
2010
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002011 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002012 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
2013
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002014 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002015 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2016
Derek Lamberti8ddae332019-02-21 16:29:43 +00002017 RegisterInputSlots(graph, layerIndex, layer);
2018 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002019}
2020
Finn Williams85d36712021-01-26 22:30:06 +00002021armnn::Pooling2dDescriptor IDeserializer::DeserializerImpl::GetPoolingDescriptor(PoolingDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002022 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002023{
Jan Eilers8eb25602020-03-09 12:13:48 +00002024 IgnoreUnused(layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002025 armnn::Pooling2dDescriptor desc;
2026
2027 switch (pooling2dDesc->poolType())
2028 {
2029 case PoolingAlgorithm_Average:
2030 {
2031 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002032 break;
2033 }
2034 case PoolingAlgorithm_Max:
2035 {
2036 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002037 break;
2038 }
Teresa Charlin4c600de2021-03-11 21:59:43 +00002039 case PoolingAlgorithm_L2:
2040 {
2041 desc.m_PoolType = armnn::PoolingAlgorithm::L2;
2042 break;
2043 }
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002044 default:
2045 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002046 ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002047 }
2048 }
2049
2050 switch (pooling2dDesc->outputShapeRounding())
2051 {
2052 case OutputShapeRounding_Floor:
2053 {
2054 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2055 break;
2056 }
2057 case OutputShapeRounding_Ceiling:
2058 {
2059 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
2060 break;
2061 }
2062 default:
2063 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002064 ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002065 }
2066 }
2067
2068 switch (pooling2dDesc->paddingMethod())
2069 {
2070 case PaddingMethod_Exclude:
2071 {
2072 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2073 break;
2074 }
2075 case PaddingMethod_IgnoreValue:
2076 {
2077 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
2078 break;
2079 }
2080 default:
2081 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002082 ARMNN_ASSERT_MSG(false, "Unsupported padding method");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002083 }
2084 }
2085
2086 switch (pooling2dDesc->dataLayout())
2087 {
2088 case DataLayout_NCHW:
2089 {
2090 desc.m_DataLayout = armnn::DataLayout::NCHW;
2091 break;
2092 }
2093 case DataLayout_NHWC:
2094 {
2095 desc.m_DataLayout = armnn::DataLayout::NHWC;
2096 break;
2097 }
2098 default:
2099 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002100 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002101 }
2102 }
2103
2104 desc.m_PadRight = pooling2dDesc->padRight();
2105 desc.m_PadLeft = pooling2dDesc->padLeft();
2106 desc.m_PadBottom = pooling2dDesc->padBottom();
2107 desc.m_PadTop = pooling2dDesc->padTop();
2108 desc.m_StrideX = pooling2dDesc->strideX();
2109 desc.m_StrideY = pooling2dDesc->strideY();
2110 desc.m_PoolWidth = pooling2dDesc->poolWidth();
2111 desc.m_PoolHeight = pooling2dDesc->poolHeight();
2112
2113 return desc;
2114}
2115
Finn Williams85d36712021-01-26 22:30:06 +00002116
2117
2118void IDeserializer::DeserializerImpl::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002119{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002120 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002121
Derek Lamberti8ddae332019-02-21 16:29:43 +00002122 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00002123 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002124 CHECK_VALID_SIZE(inputs.size(), 1);
2125
Derek Lamberti8ddae332019-02-21 16:29:43 +00002126 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002127 CHECK_VALID_SIZE(outputs.size(), 1);
2128 auto outputInfo = ToTensorInfo(outputs[0]);
2129
2130 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002131 auto layerName = GetLayerName(graph, layerIndex);
2132 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002133 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2134
Derek Lamberti8ddae332019-02-21 16:29:43 +00002135 RegisterInputSlots(graph, layerIndex, layer);
2136 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002137}
2138
Finn Williams85d36712021-01-26 22:30:06 +00002139void IDeserializer::DeserializerImpl::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
Derek Lamberti87acb272019-03-27 16:51:31 +00002140{
2141 CHECK_LAYERS(graph, 0, layerIndex);
2142
2143 auto inputs = GetInputs(graph, layerIndex);
2144 CHECK_VALID_SIZE(inputs.size(), 1);
2145
2146 auto outputs = GetOutputs(graph, layerIndex);
2147 CHECK_VALID_SIZE(outputs.size(), 1);
2148 auto outputInfo = ToTensorInfo(outputs[0]);
2149
2150 auto layerName = GetLayerName(graph, layerIndex);
2151 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
2152 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2153
2154 RegisterInputSlots(graph, layerIndex, layer);
2155 RegisterOutputSlots(graph, layerIndex, layer);
2156}
2157
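// Resolves a reshape target shape that may contain a single -1 (stretch) dimension.
// For example, an input with 24 elements and a target of { 2, -1, 4 } yields the shape [ 2, 3, 4 ].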
Finn Williams85d36712021-01-26 22:30:06 +00002158armnn::TensorInfo IDeserializer::DeserializerImpl::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00002159 const std::vector<uint32_t>& targetDimsIn)
2160{
2161 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
2162 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
2163
2164 if (stretchDim != targetDimsIn.end())
2165 {
2166 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
2167 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002168 throw ParseException(fmt::format("At most one component of shape can be -1 {}",
2169 CHECK_LOCATION().AsString()));
Saoirse Stewart263829c2019-02-19 15:54:14 +00002170 }
2171
2172 auto targetNumElements =
Matthew Sloyan0663d662020-09-14 11:47:26 +01002173 armnn::numeric_cast<unsigned int>(
Saoirse Stewart263829c2019-02-19 15:54:14 +00002174 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
2175
2176 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
2177 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
2178 }
2179
2180 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
2181
2182 armnn::TensorInfo reshapeInfo = inputTensorInfo;
2183 reshapeInfo.SetShape(outputShape);
2184
2185 return reshapeInfo;
2186}
2187
Finn Williams85d36712021-01-26 22:30:06 +00002188void IDeserializer::DeserializerImpl::ParseRank(GraphPtr graph, unsigned int layerIndex)
Finn Williams2605b232020-06-10 15:53:46 +01002189{
2190 CHECK_LAYERS(graph, 0, layerIndex);
2191
Finn Williams85d36712021-01-26 22:30:06 +00002192 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002193 CHECK_VALID_SIZE(inputs.size(), 1);
2194
Finn Williams85d36712021-01-26 22:30:06 +00002195 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002196 CHECK_VALID_SIZE(outputs.size(), 1);
2197
2198 auto layerName = GetLayerName(graph, layerIndex);
2199 IConnectableLayer* layer = m_Network->AddRankLayer(layerName.c_str());
2200
2201 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2202 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2203
2204 RegisterInputSlots(graph, layerIndex, layer);
2205 RegisterOutputSlots(graph, layerIndex, layer);
2206}
2207
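// Deserializes a Reduce layer: the axis list, keepDims flag and reduce
// operation are read from the flatbuffer ReduceLayer descriptor.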
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002208void IDeserializer::DeserializerImpl::ParseReduce(GraphPtr graph, unsigned int layerIndex)
2209{
2210 CHECK_LAYERS(graph, 0, layerIndex);
2211 CHECK_LOCATION();
2212
2213 auto inputs = GetInputs(graph, layerIndex);
2214 CHECK_VALID_SIZE(inputs.size(), 1);
2215
2216 auto outputs = GetOutputs(graph, layerIndex);
2217 CHECK_VALID_SIZE(outputs.size(), 1);
2218
2219 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ReduceLayer();
2220 auto fbDescriptor = fbLayer->descriptor();
2221 auto flatBufferAxis = fbDescriptor->axis();
2222
2223 armnn::ReduceDescriptor descriptor;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002224 descriptor.m_KeepDims = fbDescriptor->keepDims();
2225 descriptor.m_vAxis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2226 descriptor.m_ReduceOperation = ToReduceOperation(fbDescriptor->reduceOperation());
2227
2228 const std::string& layerName = GetLayerName(graph, layerIndex);
2229 IConnectableLayer* layer = m_Network->AddReduceLayer(descriptor, layerName.c_str());
2230
2231 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2232 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2233
2234 RegisterInputSlots(graph, layerIndex, layer);
2235 RegisterOutputSlots(graph, layerIndex, layer);
2236}
2237
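// Deserializes a Reshape layer. The target shape from the descriptor is
// resolved through OutputShapeOfReshape; when the layer carries more than
// one input, the resolved shape is also validated against the serialized
// output shape and a mismatch throws a ParseException.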
Finn Williams85d36712021-01-26 22:30:06 +00002238void IDeserializer::DeserializerImpl::ParseReshape(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart263829c2019-02-19 15:54:14 +00002239{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002240 CHECK_LAYERS(graph, 0, layerIndex);
2241 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002242
Derek Lamberti8ddae332019-02-21 16:29:43 +00002243 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002244 CHECK_VALID_SIZE(outputs.size(), 1);
2245
2246 armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
2247 armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);
2248
Derek Lamberti8ddae332019-02-21 16:29:43 +00002249 const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
Saoirse Stewart263829c2019-02-19 15:54:14 +00002250 std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());
2251
Finn Williams85d36712021-01-26 22:30:06 +00002252 armnn::TensorInfo reshapeOutputTensorInfo = DeserializerImpl::OutputShapeOfReshape(inputTensorInfo, outputDims);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002253 const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
2254
2255 const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
2256 outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());
2257
2258 if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
2259 {
2260 std::stringstream ss;
2261 ss << "New shape defined in reshape parameters "
2262 << reshapeOutputTensorShape
2263 << " does not equal output shape "
2264 << actualOutputTensorInfo.GetShape()
2265 << ": "
2266 << CHECK_LOCATION().AsString();
2267 throw ParseException(ss.str());
2268 }
2269
2270 armnn::ReshapeDescriptor reshapeDesc;
2271 reshapeDesc.m_TargetShape = reshapeOutputTensorShape;
2272
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002273 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002274 IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
2275 layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);
2276
Derek Lamberti8ddae332019-02-21 16:29:43 +00002277 RegisterInputSlots(graph, layerIndex, layer);
2278 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002279}
2280
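// Deserializes a Resize layer, restoring target width/height, resize method,
// data layout and the alignCorners / halfPixelCenters flags from the
// flatbuffer descriptor.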
Finn Williams85d36712021-01-26 22:30:06 +00002281void IDeserializer::DeserializerImpl::ParseResize(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002282{
2283 CHECK_LAYERS(graph, 0, layerIndex);
2284
Finn Williams85d36712021-01-26 22:30:06 +00002285 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002286 CHECK_VALID_SIZE(inputs.size(), 1);
2287
Finn Williams85d36712021-01-26 22:30:06 +00002288 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002289 CHECK_VALID_SIZE(outputs.size(), 1);
2290
2291 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2292
2293 armnn::ResizeDescriptor descriptor;
2294 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
2295 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
2296 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
2297 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002298 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2299 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002300
2301 auto layerName = GetLayerName(graph, layerIndex);
2302 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2303
2304 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2305 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2306
2307 RegisterInputSlots(graph, layerIndex, layer);
2308 RegisterOutputSlots(graph, layerIndex, layer);
2309}
2310
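// Deserializes a ResizeBilinear layer by mapping it onto a Resize layer with
// m_Method fixed to ResizeMethod::Bilinear, so both layer types converge on
// the same armnn representation.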
Finn Williams85d36712021-01-26 22:30:06 +00002311void IDeserializer::DeserializerImpl::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002312{
2313 CHECK_LAYERS(graph, 0, layerIndex);
2314
Finn Williams85d36712021-01-26 22:30:06 +00002315 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002316 CHECK_VALID_SIZE(inputs.size(), 1);
2317
Finn Williams85d36712021-01-26 22:30:06 +00002318 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002319 CHECK_VALID_SIZE(outputs.size(), 1);
2320
2321 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2322
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002323 armnn::ResizeDescriptor descriptor;
2324 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002325 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002326 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
2327 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002328 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2329 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002330
2331 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002332 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002333
2334 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2335 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2336
2337 RegisterInputSlots(graph, layerIndex, layer);
2338 RegisterOutputSlots(graph, layerIndex, layer);
2339}
2340
Finn Williams85d36712021-01-26 22:30:06 +00002341void IDeserializer::DeserializerImpl::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002342{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002343 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002344
Finn Williams85d36712021-01-26 22:30:06 +00002345 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002346 CHECK_VALID_SIZE(inputs.size(), 1);
2347
Finn Williams85d36712021-01-26 22:30:06 +00002348 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002349 CHECK_VALID_SIZE(outputs.size(), 1);
2350
2351 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00002352 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002353 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002354
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002355 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2356
2357 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2358 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2359
Derek Lamberti8ddae332019-02-21 16:29:43 +00002360 RegisterInputSlots(graph, layerIndex, layer);
2361 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00002362}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002363
Finn Williams85d36712021-01-26 22:30:06 +00002364void IDeserializer::DeserializerImpl::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002365{
2366 CHECK_LAYERS(graph, 0, layerIndex);
2367
Finn Williams85d36712021-01-26 22:30:06 +00002368 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002369 CHECK_VALID_SIZE(inputs.size(), 1);
2370
Finn Williams85d36712021-01-26 22:30:06 +00002371 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002372 CHECK_VALID_SIZE(outputs.size(), 1);
2373
2374 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2375 auto flatBufferPadList = flatBufferDescriptor->padList();
2376 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
2377
2378 if (flatBufferPadList->Length() % 2 != 0)
2379 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002380 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2381 CHECK_LOCATION().AsString()));
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002382 }
2383
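    // The serialized pad list is a flat sequence [before_0, after_0, before_1, after_1, ...],
    // folded here into (before, after) pairs.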
2384 std::vector<std::pair<unsigned int, unsigned int>> padList;
2385 padList.reserve(flatBufferPadList->Length() / 2);
2386 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
2387 {
2388 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2389 }
2390
2391 armnn::SpaceToBatchNdDescriptor descriptor;
2392 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2393 descriptor.m_BlockShape =
2394 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2395 descriptor.m_PadList = padList;
2396
2397 auto layerName = GetLayerName(graph, layerIndex);
2398 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2399
2400 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2401 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2402
2403 RegisterInputSlots(graph, layerIndex, layer);
2404 RegisterOutputSlots(graph, layerIndex, layer);
2405}
2406
Finn Williams85d36712021-01-26 22:30:06 +00002407void IDeserializer::DeserializerImpl::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002408{
2409 CHECK_LAYERS(graph, 0, layerIndex);
2410
Finn Williams85d36712021-01-26 22:30:06 +00002411 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002412 CHECK_VALID_SIZE(inputs.size(), 1);
2413
Finn Williams85d36712021-01-26 22:30:06 +00002414 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002415 CHECK_VALID_SIZE(outputs.size(), 1);
2416
2417 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2418
2419 armnn::SpaceToDepthDescriptor descriptor;
2420 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2421 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2422
2423 auto layerName = GetLayerName(graph, layerIndex);
2424 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2425
2426 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2427 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2428
2429 RegisterInputSlots(graph, layerIndex, layer);
2430 RegisterOutputSlots(graph, layerIndex, layer);
2431}
2432
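// Translates the flatbuffer normalization descriptor into an
// armnn::NormalizationDescriptor, mapping the channel type, method and data
// layout enums; unrecognised values trigger ARMNN_ASSERT_MSG.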
Finn Williams85d36712021-01-26 22:30:06 +00002433armnn::NormalizationDescriptor IDeserializer::DeserializerImpl::GetNormalizationDescriptor(
2434 NormalizationDescriptorPtr normalizationDescriptor,
Nina Drozd57728782019-02-27 10:53:27 +00002435 unsigned int layerIndex)
2436{
Jan Eilers8eb25602020-03-09 12:13:48 +00002437 IgnoreUnused(layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002438 armnn::NormalizationDescriptor desc;
2439
2440 switch (normalizationDescriptor->normChannelType())
2441 {
2442 case NormalizationAlgorithmChannel_Across:
2443 {
2444 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
2445 break;
2446 }
2447 case NormalizationAlgorithmChannel_Within:
2448 {
2449 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
2450 break;
2451 }
2452 default:
2453 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002454 ARMNN_ASSERT_MSG(false, "Unsupported normalization channel type");
Nina Drozd57728782019-02-27 10:53:27 +00002455 }
2456 }
2457
2458 switch (normalizationDescriptor->normMethodType())
2459 {
2460 case NormalizationAlgorithmMethod_LocalBrightness:
2461 {
2462 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
2463 break;
2464 }
2465 case NormalizationAlgorithmMethod_LocalContrast:
2466 {
2467 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
2468 break;
2469 }
2470 default:
2471 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002472 ARMNN_ASSERT_MSG(false, "Unsupported normalization method type");
Nina Drozd57728782019-02-27 10:53:27 +00002473 }
2474 }
2475
2476 switch (normalizationDescriptor->dataLayout())
2477 {
2478 case DataLayout_NCHW:
2479 {
2480 desc.m_DataLayout = armnn::DataLayout::NCHW;
2481 break;
2482 }
2483 case DataLayout_NHWC:
2484 {
2485 desc.m_DataLayout = armnn::DataLayout::NHWC;
2486 break;
2487 }
2488 default:
2489 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002490 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Nina Drozd57728782019-02-27 10:53:27 +00002491 }
2492 }
2493
2494 desc.m_Alpha = normalizationDescriptor->alpha();
2495 desc.m_Beta = normalizationDescriptor->beta();
2496 desc.m_K = normalizationDescriptor->k();
2497 desc.m_NormSize = normalizationDescriptor->normSize();
2498
2499 return desc;
2500}
2501
Finn Williams85d36712021-01-26 22:30:06 +00002502void IDeserializer::DeserializerImpl::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
Nina Drozd57728782019-02-27 10:53:27 +00002503{
2504 CHECK_LAYERS(graph, 0, layerIndex);
2505
2506 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2507
Finn Williams85d36712021-01-26 22:30:06 +00002508 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002509 CHECK_VALID_SIZE(inputs.size(), 1);
2510
Finn Williams85d36712021-01-26 22:30:06 +00002511 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002512 CHECK_VALID_SIZE(outputs.size(), 1);
2513
2514 auto outputInfo = ToTensorInfo(outputs[0]);
2515
2516 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2517 auto layerName = GetLayerName(graph, layerIndex);
2518
2519 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2520 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2521
2522 RegisterInputSlots(graph, layerIndex, layer);
2523 RegisterOutputSlots(graph, layerIndex, layer);
2524}
2525
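// Deserializes an Rsqrt layer, realised as an ElementwiseUnary layer with
// UnaryOperation::Rsqrt.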
Finn Williams85d36712021-01-26 22:30:06 +00002526void IDeserializer::DeserializerImpl::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan8b42a382019-03-01 14:24:49 +00002527{
2528 CHECK_LAYERS(graph, 0, layerIndex);
2529 auto inputs = GetInputs(graph, layerIndex);
2530 CHECK_LOCATION();
2531 CHECK_VALID_SIZE(inputs.size(), 1);
2532
2533 auto outputs = GetOutputs(graph, layerIndex);
2534 CHECK_VALID_SIZE(outputs.size(), 1);
2535
2536 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagan8b42a382019-03-01 14:24:49 +00002537
josh minor4a3c6102020-01-06 16:40:46 -06002538 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
2539 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
Sadik Armagan8b42a382019-03-01 14:24:49 +00002540 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2541 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2542
2543 RegisterInputSlots(graph, layerIndex, layer);
2544 RegisterOutputSlots(graph, layerIndex, layer);
2545}
2546
Finn Williams85d36712021-01-26 22:30:06 +00002547void IDeserializer::DeserializerImpl::ParseSlice(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002548{
2549 CHECK_LAYERS(graph, 0, layerIndex);
2550
2551 auto inputs = GetInputs(graph, layerIndex);
2552 CHECK_VALID_SIZE(inputs.size(), 1);
2553
2554 auto outputs = GetOutputs(graph, layerIndex);
2555 CHECK_VALID_SIZE(outputs.size(), 1);
2556
2557 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2558
2559 auto fbBegin = fbDescriptor->begin();
2560 auto fbSize = fbDescriptor->size();
2561
2562 if (fbBegin->Length() != fbSize->Length())
2563 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002564 throw ParseException(fmt::format("Begin and size descriptors must have the same length {}",
2565 CHECK_LOCATION().AsString()));
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002566 }
2567
2568 armnn::SliceDescriptor descriptor;
2569 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
2570 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
2571
2572 auto layerName = GetLayerName(graph, layerIndex);
2573 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2574
2575 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2576 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2577
2578 RegisterInputSlots(graph, layerIndex, layer);
2579 RegisterOutputSlots(graph, layerIndex, layer);
2580}
2581
Finn Williams85d36712021-01-26 22:30:06 +00002582void IDeserializer::DeserializerImpl::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002583{
2584 CHECK_LAYERS(graph, 0, layerIndex);
2585
Finn Williams85d36712021-01-26 22:30:06 +00002586 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002587 CHECK_VALID_SIZE(inputs.size(), 1);
2588
Finn Williams85d36712021-01-26 22:30:06 +00002589 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002590 CHECK_VALID_SIZE(outputs.size(), 1);
2591
2592 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2593
2594 auto flatBufferBegin = flatBufferDescriptor->begin();
2595 auto flatBufferEnd = flatBufferDescriptor->end();
2596 auto flatBufferStride = flatBufferDescriptor->stride();
2597
2598 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
2599 flatBufferBegin->Length() == flatBufferStride->Length()))
2600 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002601 throw ParseException(fmt::format("The sizes of the begin, end, and stride lists must be equal {}",
2602 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002603 }
2604
2605 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2606 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2607 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
2608
2609 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
2610 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2611 descriptor.m_EndMask = flatBufferDescriptor->endMask();
2612 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
2613 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
2614 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
2615 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2616
2617 auto layerName = GetLayerName(graph, layerIndex);
2618 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
2619
2620 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2621 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2622
2623 RegisterInputSlots(graph, layerIndex, layer);
2624 RegisterOutputSlots(graph, layerIndex, layer);
2625}
2626
Finn Williams85d36712021-01-26 22:30:06 +00002627void IDeserializer::DeserializerImpl::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
Conor Kennedyda1f9752019-03-01 14:37:12 +00002628{
2629 CHECK_LAYERS(graph, 0, layerIndex);
2630 auto inputs = GetInputs(graph, layerIndex);
2631 CHECK_LOCATION();
2632 CHECK_VALID_SIZE(inputs.size(), 2);
2633
2634 auto outputs = GetOutputs(graph, layerIndex);
2635 CHECK_VALID_SIZE(outputs.size(), 1);
2636
2637 auto layerName = GetLayerName(graph, layerIndex);
2638 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
2639
2640 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2641 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2642
2643 RegisterInputSlots(graph, layerIndex, layer);
2644 RegisterOutputSlots(graph, layerIndex, layer);
2645}
2646
Finn Williams85d36712021-01-26 22:30:06 +00002647void IDeserializer::DeserializerImpl::ParseGather(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002648{
2649 CHECK_LAYERS(graph, 0, layerIndex);
2650
Finn Williams85d36712021-01-26 22:30:06 +00002651 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002652 CHECK_VALID_SIZE(inputs.size(), 2);
2653
Finn Williams85d36712021-01-26 22:30:06 +00002654 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002655 CHECK_VALID_SIZE(outputs.size(), 1);
2656
Teresa Charlin52664732020-06-29 16:27:03 +01002657 armnn::GatherDescriptor descriptor;
2658 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_GatherLayer()->descriptor()->axis();
2659
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002660 auto layerName = GetLayerName(graph, layerIndex);
Teresa Charlin52664732020-06-29 16:27:03 +01002661 IConnectableLayer* layer = m_Network->AddGatherLayer(descriptor, layerName.c_str());
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002662
2663 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002664 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2665
2666 RegisterInputSlots(graph, layerIndex, layer);
2667 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002668}
2669
Finn Williams85d36712021-01-26 22:30:06 +00002670void IDeserializer::DeserializerImpl::ParseMean(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002671{
2672 CHECK_LAYERS(graph, 0, layerIndex);
2673
Finn Williams85d36712021-01-26 22:30:06 +00002674 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002675 CHECK_VALID_SIZE(inputs.size(), 1);
2676
Finn Williams85d36712021-01-26 22:30:06 +00002677 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002678 CHECK_VALID_SIZE(outputs.size(), 1);
2679
2680 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
2681 auto flatBufferAxis = flatBufferDescriptor->axis();
2682 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
2683
2684 armnn::MeanDescriptor descriptor;
2685 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2686 descriptor.m_KeepDims = flatBufferKeepDims;
2687
2688 auto layerName = GetLayerName(graph, layerIndex);
2689 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
2690
2691 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2692 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2693
2694 RegisterInputSlots(graph, layerIndex, layer);
2695 RegisterOutputSlots(graph, layerIndex, layer);
2696}
2697
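// Deserializes a Splitter layer. The serialized ViewsDescriptor (view sizes
// and view origin coordinates, numViews x numDimensions of each) is rebuilt,
// and each of the numViews output slots receives its own tensor info.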
Finn Williams85d36712021-01-26 22:30:06 +00002698void IDeserializer::DeserializerImpl::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
Jim Flynn18ce3382019-03-08 11:08:30 +00002699{
2700 CHECK_LAYERS(graph, 0, layerIndex);
2701
Finn Williams85d36712021-01-26 22:30:06 +00002702 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00002703 CHECK_VALID_SIZE(inputs.size(), 1);
2704
Finn Williams85d36712021-01-26 22:30:06 +00002705 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00002706
2707 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
2708 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
2709 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
2710 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
2711 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
2712 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
2713
2714 // Check that numViews and numDimensions correspond to the values already serialized ...
2715 // numViews == flatBufferViewSizes.size();
2716 // foreach: numDimensions == flatBufferViewSizes[x].size();
2717
2718 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
2719 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2720 {
2721 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
2722 {
2723 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
2724 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
2725 }
2726 }
2727
2728 auto layerName = GetLayerName(graph, layerIndex);
2729 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
2730
2731 // There can be as many outputs as there are views ...
2732 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2733 {
2734 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
2735 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
2736 }
2737
2738 RegisterInputSlots(graph, layerIndex, layer);
2739 RegisterOutputSlots(graph, layerIndex, layer);
2740}
2741
Finn Williams85d36712021-01-26 22:30:06 +00002742armnn::LstmDescriptor IDeserializer::DeserializerImpl::GetLstmDescriptor(LstmDescriptorPtr lstmDescriptor)
Jim Flynn11af3752019-03-19 17:22:29 +00002743{
2744 armnn::LstmDescriptor desc;
2745
2746 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
2747 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
2748 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
2749 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
2750 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
2751 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01002752 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00002753
2754 return desc;
2755}
2756
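// Deserializes an Lstm layer. The mandatory gate weights and biases are
// always restored; the input-gate tensors are read only when CIFG is
// disabled, and the projection, peephole and layer-normalisation tensors
// only when the corresponding descriptor flag is enabled.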
Finn Williams85d36712021-01-26 22:30:06 +00002757void IDeserializer::DeserializerImpl::ParseLstm(GraphPtr graph, unsigned int layerIndex)
Jim Flynn11af3752019-03-19 17:22:29 +00002758{
2759 CHECK_LAYERS(graph, 0, layerIndex);
2760
2761 auto inputs = GetInputs(graph, layerIndex);
2762 CHECK_VALID_SIZE(inputs.size(), 3);
2763
2764 auto outputs = GetOutputs(graph, layerIndex);
2765 CHECK_VALID_SIZE(outputs.size(), 4);
2766
2767 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
2768 auto layerName = GetLayerName(graph, layerIndex);
2769 auto flatBufferDescriptor = flatBufferLayer->descriptor();
2770 auto flatBufferInputParams = flatBufferLayer->inputParams();
2771
2772 auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);
2773
2774 armnn::LstmInputParams lstmInputParams;
2775
2776 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
2777 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
2778 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
2779 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
2780 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
2781 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
2782 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
2783 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
2784 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
2785
2786 lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
2787 lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
2788 lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
2789 lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
2790 lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
2791 lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
2792 lstmInputParams.m_ForgetGateBias = &forgetGateBias;
2793 lstmInputParams.m_CellBias = &cellBias;
2794 lstmInputParams.m_OutputGateBias = &outputGateBias;
2795
2796 armnn::ConstTensor inputToInputWeights;
2797 armnn::ConstTensor recurrentToInputWeights;
2798 armnn::ConstTensor cellToInputWeights;
2799 armnn::ConstTensor inputGateBias;
2800 if (!lstmDescriptor.m_CifgEnabled)
2801 {
2802 inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
2803 recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
2804 cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
2805 inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
2806
2807 lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
2808 lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
2809 lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
2810 lstmInputParams.m_InputGateBias = &inputGateBias;
2811 }
2812
2813 armnn::ConstTensor projectionWeights;
2814 armnn::ConstTensor projectionBias;
2815 if (lstmDescriptor.m_ProjectionEnabled)
2816 {
2817 projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
2818 projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());
2819
2820 lstmInputParams.m_ProjectionWeights = &projectionWeights;
2821 lstmInputParams.m_ProjectionBias = &projectionBias;
2822 }
2823
2824 armnn::ConstTensor cellToForgetWeights;
2825 armnn::ConstTensor cellToOutputWeights;
2826 if (lstmDescriptor.m_PeepholeEnabled)
2827 {
2828 cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
2829 cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());
2830
2831 lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
2832 lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
2833 }
2834
Jan Eilersf8c62972019-07-17 11:07:49 +01002835 armnn::ConstTensor inputLayerNormWeights;
2836 armnn::ConstTensor forgetLayerNormWeights;
2837 armnn::ConstTensor cellLayerNormWeights;
2838 armnn::ConstTensor outputLayerNormWeights;
2839 if (lstmDescriptor.m_LayerNormEnabled)
2840 {
2841 if (!lstmDescriptor.m_CifgEnabled)
2842 {
2843 inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
2844 lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
2845 }
2846 forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
2847 cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
2848 outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
2849
2850 lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
2851 lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
2852 lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
2853 }
2854
Jim Flynn11af3752019-03-19 17:22:29 +00002855 IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());
2856
2857 armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
2858 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);
2859
2860 armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
2861 layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);
2862
2863 armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
2864 layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);
2865
2866 armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
2867 layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);
2868
2869 RegisterInputSlots(graph, layerIndex, layer);
2870 RegisterOutputSlots(graph, layerIndex, layer);
2871}
2872
Finn Williams85d36712021-01-26 22:30:06 +00002873armnn::QLstmDescriptor IDeserializer::DeserializerImpl::GetQLstmDescriptor(QLstmDescriptorPtr qLstmDescriptor)
James Conroy8d333182020-05-13 10:27:58 +01002874{
2875 armnn::QLstmDescriptor desc;
2876
2877 desc.m_CifgEnabled = qLstmDescriptor->cifgEnabled();
2878 desc.m_PeepholeEnabled = qLstmDescriptor->peepholeEnabled();
2879 desc.m_ProjectionEnabled = qLstmDescriptor->projectionEnabled();
2880 desc.m_LayerNormEnabled = qLstmDescriptor->layerNormEnabled();
2881
2882 desc.m_CellClip = qLstmDescriptor->cellClip();
2883 desc.m_ProjectionClip = qLstmDescriptor->projectionClip();
2884
2885 desc.m_InputIntermediateScale = qLstmDescriptor->inputIntermediateScale();
2886 desc.m_ForgetIntermediateScale = qLstmDescriptor->forgetIntermediateScale();
2887 desc.m_CellIntermediateScale = qLstmDescriptor->cellIntermediateScale();
2888 desc.m_OutputIntermediateScale = qLstmDescriptor->outputIntermediateScale();
2889
2890 desc.m_HiddenStateScale = qLstmDescriptor->hiddenStateScale();
2891 desc.m_HiddenStateZeroPoint = qLstmDescriptor->hiddenStateZeroPoint();
2892
2893 return desc;
2894}
2895
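// Deserializes a QLstm layer. The optional CIFG, projection, peephole and
// layer-normalisation tensors follow the same descriptor-flag rules as Lstm;
// the layer exposes three outputs: output state, cell state and output.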
Finn Williams85d36712021-01-26 22:30:06 +00002896void IDeserializer::DeserializerImpl::ParseQLstm(GraphPtr graph, unsigned int layerIndex)
James Conroy8d333182020-05-13 10:27:58 +01002897{
2898 CHECK_LAYERS(graph, 0, layerIndex);
2899
2900 auto inputs = GetInputs(graph, layerIndex);
2901 CHECK_VALID_SIZE(inputs.size(), 3);
2902
2903 auto outputs = GetOutputs(graph, layerIndex);
2904 CHECK_VALID_SIZE(outputs.size(), 3);
2905
2906 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
2907 auto layerName = GetLayerName(graph, layerIndex);
2908 auto flatBufferDescriptor = flatBufferLayer->descriptor();
2909 auto flatBufferInputParams = flatBufferLayer->inputParams();
2910
2911 auto qLstmDescriptor = GetQLstmDescriptor(flatBufferDescriptor);
2912 armnn::LstmInputParams qLstmInputParams;
2913
2914 // Mandatory params
2915 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
2916 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
2917 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
2918 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
2919 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
2920 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
2921 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
2922 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
2923 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
2924
2925 qLstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
2926 qLstmInputParams.m_InputToCellWeights = &inputToCellWeights;
2927 qLstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
2928 qLstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
2929 qLstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
2930 qLstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
2931 qLstmInputParams.m_ForgetGateBias = &forgetGateBias;
2932 qLstmInputParams.m_CellBias = &cellBias;
2933 qLstmInputParams.m_OutputGateBias = &outputGateBias;
2934
2935 // Optional CIFG params
2936 armnn::ConstTensor inputToInputWeights;
2937 armnn::ConstTensor recurrentToInputWeights;
2938 armnn::ConstTensor inputGateBias;
2939
2940 if (!qLstmDescriptor.m_CifgEnabled)
2941 {
2942 inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
2943 recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
2944 inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
2945
2946 qLstmInputParams.m_InputToInputWeights = &inputToInputWeights;
2947 qLstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
2948 qLstmInputParams.m_InputGateBias = &inputGateBias;
2949 }
2950
2951 // Optional projection params
2952 armnn::ConstTensor projectionWeights;
2953 armnn::ConstTensor projectionBias;
2954
2955 if (qLstmDescriptor.m_ProjectionEnabled)
2956 {
2957 projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
2958 projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());
2959
2960 qLstmInputParams.m_ProjectionWeights = &projectionWeights;
2961 qLstmInputParams.m_ProjectionBias = &projectionBias;
2962 }
2963
2964 // Optional peephole params
2965 armnn::ConstTensor cellToInputWeights;
2966 armnn::ConstTensor cellToForgetWeights;
2967 armnn::ConstTensor cellToOutputWeights;
2968
2969 if (qLstmDescriptor.m_PeepholeEnabled)
2970 {
2971 if (!qLstmDescriptor.m_CifgEnabled)
2972 {
2973 cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
2974 qLstmInputParams.m_CellToInputWeights = &cellToInputWeights;
2975 }
2976
2977 cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
2978 cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());
2979
2980 qLstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
2981 qLstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
2982 }
2983
2984 // Optional layer norm params
2985 armnn::ConstTensor inputLayerNormWeights;
2986 armnn::ConstTensor forgetLayerNormWeights;
2987 armnn::ConstTensor cellLayerNormWeights;
2988 armnn::ConstTensor outputLayerNormWeights;
2989
2990 if (qLstmDescriptor.m_LayerNormEnabled)
2991 {
2992 if (!qLstmDescriptor.m_CifgEnabled)
2993 {
2994 inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
2995 qLstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
2996 }
2997
2998 forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
2999 cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
3000 outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
3001
3002 qLstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
3003 qLstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
3004 qLstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
3005 }
3006
3007 IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());
3008
3009 armnn::TensorInfo outputStateOutInfo = ToTensorInfo(outputs[0]);
3010 layer->GetOutputSlot(0).SetTensorInfo(outputStateOutInfo);
3011
3012 armnn::TensorInfo cellStateOutInfo = ToTensorInfo(outputs[1]);
3013 layer->GetOutputSlot(1).SetTensorInfo(cellStateOutInfo);
3014
3015 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[2]);
3016 layer->GetOutputSlot(2).SetTensorInfo(outputInfo);
3017
3018 RegisterInputSlots(graph, layerIndex, layer);
3019 RegisterOutputSlots(graph, layerIndex, layer);
3020}
3021
Finn Williams85d36712021-01-26 22:30:06 +00003022void IDeserializer::DeserializerImpl::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
Jan Eilers5b01a892019-07-23 09:47:43 +01003023{
3024 CHECK_LAYERS(graph, 0, layerIndex);
3025
3026 auto inputs = GetInputs(graph, layerIndex);
3027 CHECK_VALID_SIZE(inputs.size(), 3);
3028
3029 auto outputs = GetOutputs(graph, layerIndex);
3030 CHECK_VALID_SIZE(outputs.size(), 2);
3031
3032 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
3033 auto layerName = GetLayerName(graph, layerIndex);
3034 auto flatBufferInputParams = flatBufferLayer->inputParams();
3035
3036 armnn::QuantizedLstmInputParams lstmInputParams;
3037
3038 armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
3039 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
3040 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
3041 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
3042 armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
3043 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
3044 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
3045 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
3046 armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
3047 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
3048 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
3049 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
3050
3051 lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
3052 lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
3053 lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
3054 lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
3055 lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
3056 lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
3057 lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
3058 lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
3059 lstmInputParams.m_InputGateBias = &inputGateBias;
3060 lstmInputParams.m_ForgetGateBias = &forgetGateBias;
3061 lstmInputParams.m_CellBias = &cellBias;
3062 lstmInputParams.m_OutputGateBias = &outputGateBias;
3063
3064 IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());
3065
3066 armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
3067 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);
3068
3069 armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
3070 layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);
3071
3072 RegisterInputSlots(graph, layerIndex, layer);
3073 RegisterOutputSlots(graph, layerIndex, layer);
3074}
3075
Finn Williams85d36712021-01-26 22:30:06 +00003076void IDeserializer::DeserializerImpl::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003077{
3078 CHECK_LAYERS(graph, 0, layerIndex);
3079
Finn Williams85d36712021-01-26 22:30:06 +00003080 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003081 CHECK_VALID_SIZE(inputs.size(), 1);
3082
Finn Williams85d36712021-01-26 22:30:06 +00003083 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003084 CHECK_VALID_SIZE(outputs.size(), 1);
3085
3086 const std::string layerName = GetLayerName(graph, layerIndex);
3087 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
3088
3089 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3090 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3091
3092 RegisterInputSlots(graph, layerIndex, layer);
3093 RegisterOutputSlots(graph, layerIndex, layer);
3094}
3095
Finn Williams85d36712021-01-26 22:30:06 +00003096void IDeserializer::DeserializerImpl::ParseMerge(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003097{
3098 CHECK_LAYERS(graph, 0, layerIndex);
3099
Finn Williams85d36712021-01-26 22:30:06 +00003100 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003101 CHECK_VALID_SIZE(inputs.size(), 2);
3102
Finn Williams85d36712021-01-26 22:30:06 +00003103 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003104 CHECK_VALID_SIZE(outputs.size(), 1);
3105
3106 const std::string layerName = GetLayerName(graph, layerIndex);
3107 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
3108
3109 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3110 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3111
3112 RegisterInputSlots(graph, layerIndex, layer);
3113 RegisterOutputSlots(graph, layerIndex, layer);
3114}
3115
Finn Williams85d36712021-01-26 22:30:06 +00003116void IDeserializer::DeserializerImpl::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganeff363d2019-04-05 15:25:46 +01003117{
3118 CHECK_LAYERS(graph, 0, layerIndex);
3119 auto inputs = GetInputs(graph, layerIndex);
3120 CHECK_LOCATION();
3121 CHECK_VALID_SIZE(inputs.size(), 2);
3122
3123 auto outputs = GetOutputs(graph, layerIndex);
3124 CHECK_VALID_SIZE(outputs.size(), 2);
3125
3126 auto layerName = GetLayerName(graph, layerIndex);
3127 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
3128
3129 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
3130 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
3131
3132 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
3133 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
3134
3135 RegisterInputSlots(graph, layerIndex, layer);
3136 RegisterOutputSlots(graph, layerIndex, layer);
3137}
3138
Finn Williams85d36712021-01-26 22:30:06 +00003139void IDeserializer::DeserializerImpl::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01003140{
3141 CHECK_LAYERS(graph, 0, layerIndex);
3142 auto inputs = GetInputs(graph, layerIndex);
3143 CHECK_LOCATION();
3144 CHECK_VALID_SIZE(inputs.size(), 2);
3145
3146 auto outputs = GetOutputs(graph, layerIndex);
3147 CHECK_VALID_SIZE(outputs.size(), 1);
3148
3149 auto layerName = GetLayerName(graph, layerIndex);
3150 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
3151
3152 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3153 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3154
3155 RegisterInputSlots(graph, layerIndex, layer);
3156 RegisterOutputSlots(graph, layerIndex, layer);
3157}
3158
Finn Williams85d36712021-01-26 22:30:06 +00003159void IDeserializer::DeserializerImpl::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
Mike Kellyc9ea45a2020-02-28 18:11:58 +00003160{
3161 CHECK_LAYERS(graph, 0, layerIndex);
3162
3163 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
3164
3165 auto inputs = GetInputs(graph, layerIndex);
3166 CHECK_VALID_SIZE(inputs.size(), 1);
3167
3168 auto outputs = GetOutputs(graph, layerIndex);
3169 CHECK_VALID_SIZE(outputs.size(), 1);
3170 auto outputInfo = ToTensorInfo(outputs[0]);
3171
3172 auto layerName = GetLayerName(graph, layerIndex);
3173 const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
3174
3175 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
3176 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
3177
3178 RegisterInputSlots(graph, layerIndex, layer);
3179 RegisterOutputSlots(graph, layerIndex, layer);
3180}
3181
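// Deserializes a TransposeConvolution2d layer: padding, strides, data layout
// and the bias flag come from the descriptor, while the weights (and biases
// when m_BiasEnabled is set) are restored as ConstTensors from the
// serialized layer.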
Finn Williams85d36712021-01-26 22:30:06 +00003182void IDeserializer::DeserializerImpl::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01003183{
3184 CHECK_LAYERS(graph, 0, layerIndex);
3185
3186 auto inputs = GetInputs(graph, layerIndex);
3187 CHECK_VALID_SIZE(inputs.size(), 1);
3188
3189 auto outputs = GetOutputs(graph, layerIndex);
3190 CHECK_VALID_SIZE(outputs.size(), 1);
3191
3192 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
3193 auto layerName = GetLayerName(graph, layerIndex);
3194 auto serializerDescriptor = serializerLayer->descriptor();
3195
3196 armnn::TransposeConvolution2dDescriptor descriptor;
3197 descriptor.m_PadLeft = serializerDescriptor->padLeft();
3198 descriptor.m_PadRight = serializerDescriptor->padRight();
3199 descriptor.m_PadTop = serializerDescriptor->padTop();
3200 descriptor.m_PadBottom = serializerDescriptor->padBottom();
3201 descriptor.m_StrideX = serializerDescriptor->strideX();
3202 descriptor.m_StrideY = serializerDescriptor->strideY();
3203 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
3204 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
3205
3206 // weights & biases
3207 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
3208 armnn::Optional<armnn::ConstTensor> optionalBiases;
3209 if (descriptor.m_BiasEnabled)
3210 {
3211 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
3212 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
3213 }
3214
3215 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
3216 weights,
3217 optionalBiases,
3218 layerName.c_str());
3219
3220 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3221 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3222
3223 RegisterInputSlots(graph, layerIndex, layer);
3224 RegisterOutputSlots(graph, layerIndex, layer);
3225}
3226
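// Deserializes a Stack layer. Every input's shape must match the input shape
// recorded in the StackDescriptor; any mismatch throws a ParseException.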
Finn Williams85d36712021-01-26 22:30:06 +00003227void IDeserializer::DeserializerImpl::ParseStack(GraphPtr graph, unsigned int layerIndex)
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003228{
3229 CHECK_LAYERS(graph, 0, layerIndex);
3230 auto inputs = GetInputs(graph, layerIndex);
3231
3232 auto outputs = GetOutputs(graph, layerIndex);
3233 CHECK_VALID_SIZE(outputs.size(), 1);
3234
3235 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
3236 unsigned int axis = flatBufferDescriptor->axis();
3237 unsigned int numInputs = flatBufferDescriptor->numInputs();
3238 CHECK_VALID_SIZE(inputs.size(), numInputs);
3239
3240 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
3241 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
3242 flatBufferInputShape->begin() + flatBufferInputShape->size());
3243
3244 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
3245 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
3246
3247 for (unsigned int i=0; i<inputs.size(); ++i)
3248 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01003249 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003250 if (descriptor.m_InputShape != inputShape)
3251 {
3252 std::stringstream ss;
3253 ss << "Shape of input "
3254 << i
3255 << " "
3256 << inputShape
3257 << " does not equal defined input shape "
3258 << descriptor.m_InputShape
3259 << ": "
3260 << CHECK_LOCATION().AsString();
3261 throw ParseException(ss.str());
3262 }
3263 }
3264
3265 auto layerName = GetLayerName(graph, layerIndex);
3266 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
3267
3268 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3269 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3270
3271 RegisterInputSlots(graph, layerIndex, layer);
3272 RegisterOutputSlots(graph, layerIndex, layer);
3273}
3274
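// Deserializes a StandIn layer, which acts as a placeholder with a
// configurable number of inputs and outputs; only the output tensor infos
// are restored here.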
Finn Williams85d36712021-01-26 22:30:06 +00003275void IDeserializer::DeserializerImpl::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar85121a22019-10-23 10:41:35 +01003276{
3277 CHECK_LAYERS(graph, 0, layerIndex);
3278
3279 auto inputs = GetInputs(graph, layerIndex);
3280 auto outputs = GetOutputs(graph, layerIndex);
3281
3282 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
3283 auto fbDescriptor = fbLayer->descriptor();
3284
3285 armnn::StandInDescriptor descriptor;
3286 descriptor.m_NumInputs = fbDescriptor->numInputs();
3287 descriptor.m_NumOutputs = fbDescriptor->numOutputs();
3288
3289 CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
3290 CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);
3291
3292 const std::string layerName = GetLayerName(graph, layerIndex);
3293 armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());
3294
3295 for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
3296 {
3297 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
3298 layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
3299 }
3300
3301 RegisterInputSlots(graph, layerIndex, layer);
3302 RegisterOutputSlots(graph, layerIndex, layer);
3303}
3304
Derek Lamberti0028d1b2019-02-20 13:57:42 +00003305} // namespace armnnDeserializer