blob: 976986eec39e38bdfa55e3008d47117f32deaab6 [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
Finn Williams2605b232020-06-10 15:53:46 +01002// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
Kevin May43a799c2019-02-08 16:31:42 +00003// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
Matthew Benthamff130e22020-01-17 11:47:42 +00008#include <armnn/Descriptors.hpp>
Kevin May43a799c2019-02-08 16:31:42 +00009#include <armnn/Exceptions.hpp>
Matthew Benthamff130e22020-01-17 11:47:42 +000010#include <armnn/TypesUtils.hpp>
11#include <armnn/LstmParams.hpp>
12#include <armnn/QuantizedLstmParams.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000013
Matteo Martincighe011d202019-11-28 11:35:47 +000014#include <armnnUtils/Permute.hpp>
Mike Kellyc9ea45a2020-02-28 18:11:58 +000015#include <armnnUtils/Transpose.hpp>
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +010016#include <armnn/utility/Assert.hpp>
Jan Eilers8eb25602020-03-09 12:13:48 +000017#include <armnn/utility/IgnoreUnused.hpp>
Matthew Sloyan0663d662020-09-14 11:47:26 +010018#include <armnn/utility/NumericCast.hpp>
Matteo Martincighe011d202019-11-28 11:35:47 +000019
Kevin May43a799c2019-02-08 16:31:42 +000020#include <ParserHelper.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000021#include <VerificationHelpers.hpp>
22
Colm Donelan5b5c2222020-09-09 12:48:16 +010023#include <fmt/format.h>
Kevin May43a799c2019-02-08 16:31:42 +000024
Kevin May43a799c2019-02-08 16:31:42 +000025#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000026#include <algorithm>
27#include <limits>
28#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000029
30using armnn::ParseException;
31using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000032using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000033
Derek Lamberti0028d1b2019-02-20 13:57:42 +000034namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000035{
Kevin May43a799c2019-02-08 16:31:42 +000036
// IDeserializer is a thin pimpl facade: each public entry point forwards to
// DeserializerImpl so the flatbuffers-based implementation stays out of the
// public header.
IDeserializer::IDeserializer() : pDeserializerImpl(new DeserializerImpl()){}

IDeserializer::~IDeserializer() = default;

// Raw-pointer factory; the caller owns the result and must release it with
// Destroy() (or via the smart pointer returned by Create()).
IDeserializer *IDeserializer::CreateRaw()
{
    return new IDeserializer();
}

// Smart-pointer factory: pairs CreateRaw() with Destroy() as the deleter so
// ownership is released across the library boundary it was allocated in.
IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

void IDeserializer::Destroy(IDeserializer *parser)
{
    delete parser;
}

// Deserialize a network from an in-memory flatbuffer blob.
armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(const std::vector<uint8_t> &binaryContent)
{
    return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
}

// Deserialize a network read from a stream (e.g. an open file).
armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(std::istream &binaryContent)
{
    return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
}

// Look up the binding information (id + tensor info) for a named input layer.
BindingPointInfo IDeserializer::GetNetworkInputBindingInfo(unsigned int layerId, const std::string &name) const
{
    return pDeserializerImpl->GetNetworkInputBindingInfo(layerId, name);
}

// Look up the binding information (id + tensor info) for a named output layer.
BindingPointInfo IDeserializer::GetNetworkOutputBindingInfo(unsigned int layerId, const std::string &name) const
{
    return pDeserializerImpl->GetNetworkOutputBindingInfo(layerId, name);
}
75
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000076namespace
77{
Kevin May43a799c2019-02-08 16:31:42 +000078
79const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
80
Finn Williams85d36712021-01-26 22:30:06 +000081 void CheckGraph(const GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000082 unsigned int layersIndex,
83 const CheckLocation& location)
84{
85 if (graph->layers() == nullptr)
86 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010087 throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
88 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
89 "layers:{1} at {2}",
90 location.m_Function,
91 layersIndex,
92 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +000093 }
94 else if (layersIndex >= graph->layers()->size())
95 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010096 throw ParseException(fmt::format("{0} was called with an invalid layers index. layers:{1} at {2}",
97 location.m_Function,
98 layersIndex,
99 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000100 }
101}
102
// Validate that the graph is unpacked, that layersIndex is in range, and that
// layerIndex refers to an existing layer (VIRTUAL_LAYER_ID is exempt because
// it marks synthetic input/output bindings). Throws ParseException on failure.
void CheckLayers(const GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
                                         "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(fmt::format("{0} was called with an invalid layers index. "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    // NOTE(review): this indexes the flatbuffers Vector *pointer* returned by
    // graph->layers() instead of dereferencing it (graph->layers()->size()).
    // That is pointer arithmetic on a single object and looks only valid for
    // layersIndex == 0 — TODO confirm the intent against the serializer schema
    // before changing it.
    else if (layerIndex >= graph->layers()[layersIndex].size()
            && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(fmt::format("{0} was called with an invalid layer index. "
                                         "layers:{1} layer:{2} at {3}",
                                         location.m_Function,
                                         layersIndex,
                                         layerIndex,
                                         location.FileLine()));
    }
}
136
Finn Williams85d36712021-01-26 22:30:06 +0000137void CheckTensorPtr(TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000138 const CheckLocation& location)
139{
140 if (rawPtr == nullptr)
141 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100142 throw ParseException(fmt::format("{0} was called with a null tensor pointer. at {1}",
143 location.m_Function,
144 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000145 }
146}
147
Finn Williams85d36712021-01-26 22:30:06 +0000148void CheckConstTensorPtr(ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000149 const CheckLocation& location)
150{
151 if (rawPtr == nullptr)
152 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100153 throw ParseException(fmt::format("{0} was called with a null const tensor pointer. at {1}",
154 location.m_Function,
155 location.FileLine()));
Mike Kellya0766c32019-02-19 17:22:07 +0000156 }
157}
158
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000159void CheckConstTensorSize(const unsigned int constTensorSize,
160 const unsigned int tensorSize,
161 const CheckLocation& location)
162{
163 if (constTensorSize != tensorSize)
164 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100165 throw ParseException(fmt::format("{0} wrong number of components supplied to tensor. at:{1}",
166 location.m_Function,
167 location.FileLine()));
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000168 }
169}
170
Kevin May43a799c2019-02-08 16:31:42 +0000171#define CHECK_TENSOR_PTR(TENSOR_PTR) \
172 CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())
173
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000174#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
175 CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())
176
Mike Kellya0766c32019-02-19 17:22:07 +0000177#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
178 CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())
179
Kevin May43a799c2019-02-08 16:31:42 +0000180#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
181 CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())
182
183#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
184 CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
185}
186
Saoirse Stewart263829c2019-02-19 15:54:14 +0000187bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
188{
189 const unsigned int actualSize = actual.GetNumDimensions();
190 if (actualSize != expected.size())
191 {
192 return false;
193 }
194
195 for (unsigned int i = 0u; i < actualSize; i++)
196 {
197 if (actual[i] != static_cast<unsigned int>(expected[i]))
198 {
199 return false;
200 }
201 }
202
203 return true;
204}
205
// Build the layer-type -> parse-function dispatch table. The table is sized
// Layer_MAX+1 and pre-filled with ParseUnsupportedLayer so any layer type not
// explicitly registered below fails with a clear error instead of crashing.
IDeserializer::DeserializerImpl::DeserializerImpl()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &IDeserializer::DeserializerImpl::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer]                    = &DeserializerImpl::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer]             = &DeserializerImpl::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer]               = &DeserializerImpl::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer]              = &DeserializerImpl::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer]         = &DeserializerImpl::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer]     = &DeserializerImpl::ParseBatchNormalization;
    m_ParserFunctions[Layer_ComparisonLayer]             = &DeserializerImpl::ParseComparison;
    m_ParserFunctions[Layer_CastLayer]                   = &DeserializerImpl::ParseCast;
    m_ParserFunctions[Layer_ConcatLayer]                 = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer]               = &DeserializerImpl::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer]          = &DeserializerImpl::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthToSpaceLayer]           = &DeserializerImpl::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &DeserializerImpl::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer]             = &DeserializerImpl::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer]   = &DeserializerImpl::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer]               = &DeserializerImpl::ParseDivision;
    m_ParserFunctions[Layer_ElementwiseUnaryLayer]       = &DeserializerImpl::ParseElementwiseUnary;
    m_ParserFunctions[Layer_EqualLayer]                  = &DeserializerImpl::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer]         = &DeserializerImpl::ParseFullyConnected;
    m_ParserFunctions[Layer_FillLayer]                   = &DeserializerImpl::ParseFill;
    m_ParserFunctions[Layer_FloorLayer]                  = &DeserializerImpl::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer]                 = &DeserializerImpl::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer]                = &DeserializerImpl::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer]  = &DeserializerImpl::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer]        = &DeserializerImpl::ParseL2Normalization;
    m_ParserFunctions[Layer_LogicalBinaryLayer]          = &DeserializerImpl::ParseLogicalBinary;
    m_ParserFunctions[Layer_LogSoftmaxLayer]             = &DeserializerImpl::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer]                   = &DeserializerImpl::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer]                = &DeserializerImpl::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer]                   = &DeserializerImpl::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer]                = &DeserializerImpl::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer]                  = &DeserializerImpl::ParseMerge;
    // MergerLayer is the deprecated name for Concat; both map to ParseConcat.
    m_ParserFunctions[Layer_MergerLayer]                 = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer]         = &DeserializerImpl::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer]          = &DeserializerImpl::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer]                    = &DeserializerImpl::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer]                = &DeserializerImpl::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer]              = &DeserializerImpl::ParsePooling2d;
    m_ParserFunctions[Layer_PreluLayer]                  = &DeserializerImpl::ParsePrelu;
    m_ParserFunctions[Layer_QLstmLayer]                  = &DeserializerImpl::ParseQLstm;
    m_ParserFunctions[Layer_QuantizeLayer]               = &DeserializerImpl::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer]          = &DeserializerImpl::ParseQuantizedLstm;
    m_ParserFunctions[Layer_RankLayer]                   = &DeserializerImpl::ParseRank;
    m_ParserFunctions[Layer_ReduceLayer]                 = &DeserializerImpl::ParseReduce;
    m_ParserFunctions[Layer_ReshapeLayer]                = &DeserializerImpl::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer]         = &DeserializerImpl::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer]                 = &DeserializerImpl::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer]                  = &DeserializerImpl::ParseRsqrt;
    m_ParserFunctions[Layer_SliceLayer]                  = &DeserializerImpl::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer]                = &DeserializerImpl::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer]         = &DeserializerImpl::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer]           = &DeserializerImpl::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer]               = &DeserializerImpl::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer]                  = &DeserializerImpl::ParseStack;
    m_ParserFunctions[Layer_StandInLayer]                = &DeserializerImpl::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer]           = &DeserializerImpl::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer]            = &DeserializerImpl::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer]                 = &DeserializerImpl::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &DeserializerImpl::ParseTransposeConvolution2d;
    m_ParserFunctions[Layer_TransposeLayer]              = &DeserializerImpl::ParseTranspose;
}
273
// Return the LayerBase sub-table of the layer at layerIndex, dispatching on
// the flatbuffers union discriminator. Every serialized layer type must be
// listed here; an unknown discriminator throws ParseException.
LayerBaseRawPtr IDeserializer::DeserializerImpl::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_CastLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_CastLayer()->base();
        case Layer::Layer_ComparisonLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_ElementwiseUnaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FillLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FillLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        // Input/Output wrap a BindableLayerBase, hence the extra ->base() hop.
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_InstanceNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LogicalBinaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer()->base();
        case Layer::Layer_LogSoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        // MergerLayer is the deprecated spelling of Concat; kept for old files.
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_RankLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
        case Layer::Layer_ReduceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReduceLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StandInLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_TransposeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(fmt::format("Layer type {} not recognized", layerType));
    }
}
409
Finn Williams85d36712021-01-26 22:30:06 +0000410std::string IDeserializer::DeserializerImpl::GetLayerName(const GraphPtr& graph, unsigned int index)
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000411{
412 auto layer = GetBaseLayer(graph, index);
413 assert(layer);
414 return layer->layerName()->str();
415}
416
Finn Williams85d36712021-01-26 22:30:06 +0000417int32_t IDeserializer::DeserializerImpl::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000418{
419 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
420
421 if (layerType == Layer::Layer_InputLayer)
422 {
423 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
424 }
425 else if ( layerType == Layer::Layer_OutputLayer )
426 {
427 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
428 }
429 return 0;
430}
431
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000432armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000433{
434 switch (dataLayout)
435 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000436 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000437 return armnn::DataLayout::NHWC;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000438 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000439 default:
440 return armnn::DataLayout::NCHW;
441 }
442}
443
// Map the serializer's ActivationFunction enum onto armnn's.
// NOTE: unknown values silently fall back to Sigmoid (the switch default);
// this mirrors the other enum converters in this file.
armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        case armnnSerializer::ActivationFunction_Elu:
            return armnn::ActivationFunction::Elu;
        case armnnSerializer::ActivationFunction_HardSwish:
            return armnn::ActivationFunction::HardSwish;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}
474
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100475armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
476{
477 switch (function)
478 {
479 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
480 return armnn::ArgMinMaxFunction::Max;
481 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
482 default:
483 return armnn::ArgMinMaxFunction::Min;
484 }
485}
486
// Map the serializer's ComparisonOperation onto armnn's.
// NOTE: unknown values silently fall back to NotEqual (the switch default).
armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
            return armnn::ComparisonOperation::Equal;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
            return armnn::ComparisonOperation::Greater;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
            return armnn::ComparisonOperation::GreaterOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
            return armnn::ComparisonOperation::Less;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
            return armnn::ComparisonOperation::LessOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
        default:
            return armnn::ComparisonOperation::NotEqual;
    }
}
506
// Map the serializer's ReduceOperation onto armnn's.
// NOTE: unknown values silently fall back to Sum (the switch default).
armnn::ReduceOperation ToReduceOperation(armnnSerializer::ReduceOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::ReduceOperation::ReduceOperation_Sum:
            return armnn::ReduceOperation::Sum;
        case armnnSerializer::ReduceOperation::ReduceOperation_Max:
            return armnn::ReduceOperation::Max;
        case armnnSerializer::ReduceOperation::ReduceOperation_Mean:
            return armnn::ReduceOperation::Mean;
        case armnnSerializer::ReduceOperation::ReduceOperation_Min:
            return armnn::ReduceOperation::Min;
        default:
            return armnn::ReduceOperation::Sum;
    }
}
523
James Conroyaba90cd2020-11-06 16:28:18 +0000524armnn::LogicalBinaryOperation ToLogicalBinaryOperation(armnnSerializer::LogicalBinaryOperation operation)
525{
526 switch (operation)
527 {
528 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalAnd:
529 return armnn::LogicalBinaryOperation::LogicalAnd;
530 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalOr:
531 return armnn::LogicalBinaryOperation::LogicalOr;
532 default:
533 throw armnn::InvalidArgumentException("Logical Binary operation unknown");
534 }
535}
536
// Map the serializer's UnaryOperation onto armnn's. No silent fallback here:
// an unknown value throws InvalidArgumentException.
armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
            return armnn::UnaryOperation::Abs;
        case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
            return armnn::UnaryOperation::Rsqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
            return armnn::UnaryOperation::Sqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
            return armnn::UnaryOperation::Exp;
        case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
            return armnn::UnaryOperation::Neg;
        case armnnSerializer::UnaryOperation::UnaryOperation_LogicalNot:
            return armnn::UnaryOperation::LogicalNot;
        default:
            throw armnn::InvalidArgumentException("Unary operation unknown");
    }
}
557
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100558armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
559{
560 switch (method)
561 {
562 case armnnSerializer::ResizeMethod_NearestNeighbor:
563 return armnn::ResizeMethod::NearestNeighbor;
564 case armnnSerializer::ResizeMethod_Bilinear:
Aron Virginas-Tar3c9b2702019-10-31 13:45:16 +0000565 return armnn::ResizeMethod::Bilinear;
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100566 default:
567 return armnn::ResizeMethod::NearestNeighbor;
568 }
569}
570
Finn Williams85d36712021-01-26 22:30:06 +0000571armnn::TensorInfo ToTensorInfo(TensorRawPtr tensorPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000572{
573 armnn::DataType type;
574 CHECK_TENSOR_PTR(tensorPtr);
575
576 switch (tensorPtr->dataType())
577 {
Francis Murtaghddb1d062020-03-10 13:51:45 +0000578 case DataType_QAsymmS8:
579 type = armnn::DataType::QAsymmS8;
580 break;
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000581 case DataType_QSymmS8:
582 type = armnn::DataType::QSymmS8;
583 break;
Kevin May43a799c2019-02-08 16:31:42 +0000584 case DataType_QuantisedAsymm8:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000585 case DataType_QAsymmU8:
586 type = armnn::DataType::QAsymmU8;
Kevin May43a799c2019-02-08 16:31:42 +0000587 break;
Derek Lambertif90c56d2020-01-10 17:14:08 +0000588 case DataType_QSymmS16:
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000589 case DataType_QuantisedSymm16:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000590 type = armnn::DataType::QSymmS16;
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000591 break;
Mike Kellya0766c32019-02-19 17:22:07 +0000592 case DataType_Signed32:
593 type = armnn::DataType::Signed32;
594 break;
Mike Kelly1f140f72021-04-06 12:25:55 +0100595 case DataType_Signed64:
596 type = armnn::DataType::Signed64;
597 break;
Kevin May43a799c2019-02-08 16:31:42 +0000598 case DataType_Float32:
599 type = armnn::DataType::Float32;
600 break;
601 case DataType_Float16:
602 type = armnn::DataType::Float16;
603 break;
604 case DataType_Boolean:
605 type = armnn::DataType::Boolean;
606 break;
607 default:
608 {
609 CheckLocation location = CHECK_LOCATION();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100610 throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
611 tensorPtr->dataType(),
612 EnumNameDataType(tensorPtr->dataType()),
613 location.AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000614 }
615 }
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000616
Colm Donelan800b2812021-02-12 12:43:35 +0000617 float quantizationScale = tensorPtr->quantizationScale();
618 int32_t quantizationOffset = tensorPtr->quantizationOffset();
619
Finn Williams2605b232020-06-10 15:53:46 +0100620 if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::Scalar))
621 {
Colm Donelan800b2812021-02-12 12:43:35 +0000622 return armnn::TensorInfo(TensorShape{armnn::Dimensionality::Scalar},
Finn Williams2605b232020-06-10 15:53:46 +0100623 type,
624 quantizationScale,
625 quantizationOffset);
626 }
Colm Donelan800b2812021-02-12 12:43:35 +0000627 else if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::NotSpecified))
628 {
629 armnn::TensorInfo result(TensorShape{Dimensionality::NotSpecified},
630 type,
631 quantizationScale,
632 quantizationOffset);
633 return result;
634 }
Kevin May43a799c2019-02-08 16:31:42 +0000635
636 auto dimensions = tensorPtr->dimensions();
637 unsigned int size = dimensions->size();
638 std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
Colm Donelan800b2812021-02-12 12:43:35 +0000639 bool dimensionsSpecificity[armnn::MaxNumOfTensorDimensions];
640 std::fill_n(dimensionsSpecificity, armnn::MaxNumOfTensorDimensions, true);
641 // For backwards compatibility check if the dimensionSpecificity vector is present first.
642 // The default is to have dimensionSpecificity set to all true's anyway.
643 if (tensorPtr->dimensionSpecificity() != nullptr)
644 {
645 auto dimensionSpecificity = tensorPtr->dimensionSpecificity();
646 size = dimensionSpecificity->size();
647 for (unsigned int i = 0; i < size; ++i)
648 {
649 dimensionsSpecificity[i] = dimensionSpecificity->Get(i);
650 }
651 }
652 // Construct a TensorShape
653 TensorShape shape(size, outputDims.data(), dimensionsSpecificity);
Kevin May43a799c2019-02-08 16:31:42 +0000654
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000655 auto quantizationScales = tensorPtr->quantizationScales();
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000656 if (quantizationScales)
657 {
658 unsigned int quantizationScalesSize = quantizationScales->size();
659 std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
660 unsigned int quantizationDim = tensorPtr->quantizationDim();
Colm Donelan800b2812021-02-12 12:43:35 +0000661 armnn::TensorInfo result(shape,
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000662 type,
663 scales,
664 quantizationDim);
665 return result;
666 }
667
Kevin May43a799c2019-02-08 16:31:42 +0000668 // two statements (on purpose) for easier debugging:
Colm Donelan800b2812021-02-12 12:43:35 +0000669 armnn::TensorInfo result(shape,
Kevin May43a799c2019-02-08 16:31:42 +0000670 type,
671 quantizationScale,
672 quantizationOffset);
Colm Donelan800b2812021-02-12 12:43:35 +0000673
Kevin May43a799c2019-02-08 16:31:42 +0000674 return result;
675}
676
Finn Williams85d36712021-01-26 22:30:06 +0000677armnn::ConstTensor ToConstTensor(ConstTensorRawPtr constTensorPtr)
Mike Kellya0766c32019-02-19 17:22:07 +0000678{
679 CHECK_CONST_TENSOR_PTR(constTensorPtr);
680 armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());
681
682 switch (constTensorPtr->data_type())
683 {
684 case ConstTensorData_ByteData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000685 {
686 auto byteData = constTensorPtr->data_as_ByteData()->data();
687 CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
688 return armnn::ConstTensor(tensorInfo, byteData->data());
689 }
Mike Kellya0766c32019-02-19 17:22:07 +0000690 case ConstTensorData_ShortData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000691 {
692 auto shortData = constTensorPtr->data_as_ShortData()->data();
693 CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
694 return armnn::ConstTensor(tensorInfo, shortData->data());
695 }
Mike Kellya0766c32019-02-19 17:22:07 +0000696 case ConstTensorData_IntData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000697 {
698 auto intData = constTensorPtr->data_as_IntData()->data();
699 CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
700 return armnn::ConstTensor(tensorInfo, intData->data());
701 }
Mike Kellya0766c32019-02-19 17:22:07 +0000702 case ConstTensorData_LongData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000703 {
704 auto longData = constTensorPtr->data_as_LongData()->data();
705 CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
706 return armnn::ConstTensor(tensorInfo, longData->data());
707 }
Mike Kellya0766c32019-02-19 17:22:07 +0000708 default:
709 {
710 CheckLocation location = CHECK_LOCATION();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100711 throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
712 constTensorPtr->data_type(),
713 EnumNameConstTensorData(constTensorPtr->data_type()),
714 location.AsString()));
Mike Kellya0766c32019-02-19 17:22:07 +0000715 }
716 }
717}
718
Finn Williams85d36712021-01-26 22:30:06 +0000719TensorRawPtrVector IDeserializer::DeserializerImpl::GetInputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000720{
721 CHECK_LAYERS(graphPtr, 0, layerIndex);
722 auto layer = GetBaseLayer(graphPtr, layerIndex);
723 const auto& numInputs = layer->inputSlots()->size();
724
725 TensorRawPtrVector result(numInputs);
726
727 for (unsigned int i=0; i<numInputs; ++i)
728 {
729 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
730 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
731 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
732 }
733 return result;
734}
735
Finn Williams85d36712021-01-26 22:30:06 +0000736TensorRawPtrVector IDeserializer::DeserializerImpl::GetOutputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000737{
738 CHECK_LAYERS(graphPtr, 0, layerIndex);
739 auto layer = GetBaseLayer(graphPtr, layerIndex);
740 const auto& numOutputs = layer->outputSlots()->size();
741
742 TensorRawPtrVector result(numOutputs);
743
744 for (unsigned int i=0; i<numOutputs; ++i)
745 {
746 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
747 }
748 return result;
749}
750
Finn Williams85d36712021-01-26 22:30:06 +0000751void IDeserializer::DeserializerImpl::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000752{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000753 CHECK_LAYERS(graph, 0, layerIndex);
754 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100755 throw ParseException(fmt::format("Layer not supported. layerIndex: {0} "
756 "layerName: {1} / {2}",
757 layerIndex,
758 layerName,
759 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000760}
761
Finn Williams85d36712021-01-26 22:30:06 +0000762void IDeserializer::DeserializerImpl::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000763{
764 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000765 m_InputBindings.clear();
766 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000767}
768
Kevin May43a799c2019-02-08 16:31:42 +0000769
Finn Williams85d36712021-01-26 22:30:06 +0000770INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000771{
772 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000773 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
774 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000775}
776
Finn Williams85d36712021-01-26 22:30:06 +0000777armnn::INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000778{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000779 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000780 std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
781 GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
782 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000783}
784
Finn Williams85d36712021-01-26 22:30:06 +0000785GraphPtr IDeserializer::DeserializerImpl::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000786{
787 if (binaryContent == nullptr)
788 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100789 throw InvalidArgumentException(fmt::format("Invalid (null) binary content {}",
790 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000791 }
792 flatbuffers::Verifier verifier(binaryContent, len);
793 if (verifier.VerifyBuffer<SerializedGraph>() == false)
794 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100795 throw ParseException(fmt::format("Buffer doesn't conform to the expected Armnn "
796 "flatbuffers format. size:{0} {1}",
797 len,
798 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000799 }
800 return GetSerializedGraph(binaryContent);
801}
802
INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromGraph(GraphPtr graph)
{
    // Builds an armnn INetwork from a deserialized graph in three phases:
    //   1. parse every non-input/output layer via the parser-function table,
    //   2. create the bound input and output layers,
    //   3. wire up the connections recorded in m_GraphConnections during parsing.
    m_Network = INetwork::Create();
    ARMNN_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        // Input and output layers are handled separately by
        // SetupInputLayers/SetupOutputLayers below.
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            // Only connect output slots that have at least one registered consumer.
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    // m_Network is a member, so an explicit move is needed to transfer ownership out.
    return std::move(m_Network);
}
843
Finn Williams85d36712021-01-26 22:30:06 +0000844BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000845 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000846{
Jan Eilers8eb25602020-03-09 12:13:48 +0000847 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000848 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000849 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000850 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000851 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000852 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000853 }
854 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100855 throw ParseException(fmt::format("No input binding found for layer:{0} / {1}",
856 name,
857 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000858}
859
Finn Williams85d36712021-01-26 22:30:06 +0000860BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000861 const std::string& name) const
862{
Jan Eilers8eb25602020-03-09 12:13:48 +0000863 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000864 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000865 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000866 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000867 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000868 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000869 }
870 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100871 throw ParseException(fmt::format("No output binding found for layer:{0} / {1}",
872 name,
873 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000874}
875
Finn Williams85d36712021-01-26 22:30:06 +0000876unsigned int IDeserializer::DeserializerImpl::GetInputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000877{
878 for (unsigned int i = 0; i < graph->layers()->size(); i++)
879 {
880 auto layer = graph->layers()->Get(i);
881 if (layer->layer_type() == Layer::Layer_InputLayer)
882 {
883 auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
884 if (layerBindingId == targetId)
885 {
886 return i;
887 }
888 }
889 }
890 throw ParseException("Input layer with given layerBindingId not found");
891}
892
Finn Williams85d36712021-01-26 22:30:06 +0000893unsigned int IDeserializer::DeserializerImpl::GetOutputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000894{
895 for (unsigned int i = 0; i < graph->layers()->size(); i++)
896 {
897 auto layer = graph->layers()->Get(i);
898 if (layer->layer_type() == Layer::Layer_OutputLayer)
899 {
900 auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
901 if (layerBindingId == targetId)
902 {
903 return i;
904 }
905 }
906 }
907 throw ParseException("Output layer with given layerBindingId not found");
908}
909
Finn Williams85d36712021-01-26 22:30:06 +0000910unsigned int IDeserializer::DeserializerImpl::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100911{
912 for (unsigned int i = 0; i < graph->layers()->size(); i++)
913 {
914 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
915 if (layer->index() == targetIndex)
916 {
917 return i;
918 }
919 }
920 throw ParseException("Layer with given index not found");
921}
922
Finn Williams85d36712021-01-26 22:30:06 +0000923IDeserializer::DeserializerImpl::FeatureVersions IDeserializer::DeserializerImpl::GetFeatureVersions(GraphPtr graph)
Tee Jungaa920c52019-11-05 10:48:25 +0000924{
Finn Williams85d36712021-01-26 22:30:06 +0000925 IDeserializer::DeserializerImpl::FeatureVersions versions;
Tee Jungaa920c52019-11-05 10:48:25 +0000926
927 if (graph->featureVersions())
928 {
929 versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
930 }
931
932 return versions;
933}
934
void IDeserializer::DeserializerImpl::SetupInputLayers(GraphPtr graph)
{
    // Creates an armnn input layer (plus its name->BindingPointInfo entry in
    // m_InputBindings) for every input id recorded in the graph.
    CHECK_GRAPH(graph, 0);
    const unsigned int numInputs = graph->inputIds()->size();
    m_InputBindings.clear();
    m_InputBindings.reserve(numInputs);

    for (unsigned int i = 0; i < numInputs; i++)
    {
        unsigned int inputLayerIndex = 0xFFFFFFFF;
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            // Legacy scheme (0): the stored id is the layer's index property.
            const unsigned int inputId = armnn::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
            inputLayerIndex = GetLayerIndexInVector(graph, inputId);
        }
        else
        {
            // Newer scheme: the stored id is the input layer's binding id.
            const int inputId = graph->inputIds()->Get(i);
            inputLayerIndex = GetInputLayerInVector(graph, inputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");

        IConnectableLayer* inputLayer =
            m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());

        // An input layer has exactly one output slot; propagate its tensor info
        // and register the slot so later layers can connect to it.
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
        inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
        RegisterOutputSlots(graph, inputLayerIndex, inputLayer);

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
973
void IDeserializer::DeserializerImpl::SetupOutputLayers(GraphPtr graph)
{
    // Creates an armnn output layer (plus its name->BindingPointInfo entry in
    // m_OutputBindings) for every output id recorded in the graph.
    CHECK_GRAPH(graph, 0);
    const unsigned int numOutputs = graph->outputIds()->size();
    m_OutputBindings.clear();
    m_OutputBindings.reserve(numOutputs);

    for (unsigned int i = 0; i < numOutputs; i++)
    {
        unsigned int outputLayerIndex = 0xFFFFFFFF;
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            // Legacy scheme (0): the stored id is the layer's index property.
            const unsigned int outputId = armnn::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
            outputLayerIndex = GetLayerIndexInVector(graph, outputId);
        }
        else
        {
            // Newer scheme: the stored id is the output layer's binding id.
            const int outputId = graph->outputIds()->Get(i);
            outputLayerIndex = GetOutputLayerInVector(graph, outputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Output has no name.");

        IConnectableLayer* outputLayer =
            m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());

        RegisterInputSlots(graph, outputLayerIndex, outputLayer);
        // The binding's tensor info is taken from the slot that feeds this output layer.
        unsigned int sourceLayerIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
        // NOTE(review): the connection's outputSlotIndex is passed through
        // GetLayerIndexInVector, which searches layers by their index property.
        // That looks like a layer-index lookup applied to a slot index and only
        // works when both happen to be 0 — verify this is intended.
        unsigned int outputSlotIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->outputSlotIndex());
        LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(
            sourceBaseLayer->outputSlots()->Get(outputSlotIndex)->tensorInfo());
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
1016
Finn Williams85d36712021-01-26 22:30:06 +00001017void IDeserializer::DeserializerImpl::RegisterOutputSlots(GraphPtr graph,
Derek Lamberti8ddae332019-02-21 16:29:43 +00001018 uint32_t layerIndex,
1019 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +00001020{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001021 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001022 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001023 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
1024 if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
Kevin May43a799c2019-02-08 16:31:42 +00001025 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001026 throw ParseException(fmt::format("The number of outputslots ({0}) does not match the number expected ({1})"
1027 " for layer index: {2} {3}",
1028 baseLayer->outputSlots()->size(),
1029 layer->GetNumOutputSlots(),
1030 layerIndex,
1031 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +00001032 }
1033
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001034 for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +00001035 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001036 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
1037 armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
1038 // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
1039 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
Kevin May43a799c2019-02-08 16:31:42 +00001040 }
1041}
1042
Finn Williams85d36712021-01-26 22:30:06 +00001043void IDeserializer::DeserializerImpl::RegisterInputSlots(GraphPtr graph,
Derek Lamberti8ddae332019-02-21 16:29:43 +00001044 uint32_t layerIndex,
1045 armnn::IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +00001046{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001047 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001048 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001049 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
1050 if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
Kevin May43a799c2019-02-08 16:31:42 +00001051 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001052 throw ParseException(fmt::format("The number of inputslots ({0}) does not match the number expected ({1})"
1053 " for layer index:{2} {3}",
1054 baseLayer->inputSlots()->size(),
1055 layer->GetNumInputSlots(),
1056 layerIndex,
1057 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +00001058 }
1059
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001060 for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +00001061 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001062 auto fbInputSlot = baseLayer->inputSlots()->Get(i);
1063 auto fbConnection = fbInputSlot->connection();
1064 armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
1065 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
Kevin May43a799c2019-02-08 16:31:42 +00001066 }
1067}
1068
Finn Williams85d36712021-01-26 22:30:06 +00001069void IDeserializer::DeserializerImpl::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001070 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001071 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +00001072{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001073 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +00001074 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001075 m_GraphConnections[sourceLayerIndex] = Connections();
1076 }
1077
1078 Connections& connections = m_GraphConnections[sourceLayerIndex];
1079 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
1080 {
1081 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +00001082 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001083 else
1084 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001085 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001086 }
1087}
Kevin May43a799c2019-02-08 16:31:42 +00001088
Finn Williams85d36712021-01-26 22:30:06 +00001089void IDeserializer::DeserializerImpl::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001090 uint32_t outputSlotIndex,
1091 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001092{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001093 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1094 {
1095 m_GraphConnections[sourceLayerIndex] = Connections();
1096 }
1097
1098 Connections& connections = m_GraphConnections[sourceLayerIndex];
1099 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1100 {
1101 throw ParseException("Same output slot index processed twice");
1102 }
1103
1104 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +00001105}
1106
Finn Williams85d36712021-01-26 22:30:06 +00001107void IDeserializer::DeserializerImpl::ParseAbs(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001108{
1109 CHECK_LAYERS(graph, 0, layerIndex);
1110 auto inputs = GetInputs(graph, layerIndex);
1111 CHECK_LOCATION();
1112 CHECK_VALID_SIZE(inputs.size(), 1);
1113
1114 auto outputs = GetOutputs(graph, layerIndex);
1115 CHECK_VALID_SIZE(outputs.size(), 1);
1116
1117 auto layerName = GetLayerName(graph, layerIndex);
1118
josh minor4a3c6102020-01-06 16:40:46 -06001119 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
1120 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001121 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1122 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1123
1124 RegisterInputSlots(graph, layerIndex, layer);
1125 RegisterOutputSlots(graph, layerIndex, layer);
1126}
1127
Finn Williams85d36712021-01-26 22:30:06 +00001128void IDeserializer::DeserializerImpl::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +00001129{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001130 CHECK_LAYERS(graph, 0, layerIndex);
1131 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001132 CHECK_LOCATION();
1133 CHECK_VALID_SIZE(inputs.size(), 1);
1134
Derek Lamberti8ddae332019-02-21 16:29:43 +00001135 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001136 CHECK_VALID_SIZE(outputs.size(), 1);
1137
Derek Lamberti8ddae332019-02-21 16:29:43 +00001138 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001139 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001140 auto serializerDescriptor = serializerLayer->descriptor();
1141
1142 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001143 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +00001144 descriptor.m_A = serializerDescriptor->a();
1145 descriptor.m_B = serializerDescriptor->b();
1146
1147 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
1148 layerName.c_str());
1149 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1150 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1151
Derek Lamberti8ddae332019-02-21 16:29:43 +00001152 RegisterInputSlots(graph, layerIndex, layer);
1153 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +00001154}
1155
Finn Williams85d36712021-01-26 22:30:06 +00001156void IDeserializer::DeserializerImpl::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001157{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001158 CHECK_LAYERS(graph, 0, layerIndex);
1159 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001160 CHECK_LOCATION();
1161 CHECK_VALID_SIZE(inputs.size(), 2);
1162
Derek Lamberti8ddae332019-02-21 16:29:43 +00001163 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001164 CHECK_VALID_SIZE(outputs.size(), 1);
1165
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001166 auto layerName = GetLayerName(graph, layerIndex);
1167 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001168
1169 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1170 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1171
Derek Lamberti8ddae332019-02-21 16:29:43 +00001172 RegisterInputSlots(graph, layerIndex, layer);
1173 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001174}
1175
Finn Williams85d36712021-01-26 22:30:06 +00001176void IDeserializer::DeserializerImpl::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001177{
1178 CHECK_LAYERS(graph, 0, layerIndex);
1179 auto inputs = GetInputs(graph, layerIndex);
1180 CHECK_LOCATION();
1181 CHECK_VALID_SIZE(inputs.size(), 1);
1182
1183 auto outputs = GetOutputs(graph, layerIndex);
1184 CHECK_VALID_SIZE(outputs.size(), 1);
1185
1186 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1187 auto serializerDescriptor = serializerLayer->descriptor();
1188
1189 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001190 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001191 descriptor.m_Axis = serializerDescriptor->axis();
1192 auto layerName = GetLayerName(graph, layerIndex);
1193 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1194
1195 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1196 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1197
1198 RegisterInputSlots(graph, layerIndex, layer);
1199 RegisterOutputSlots(graph, layerIndex, layer);
1200}
1201
Finn Williams85d36712021-01-26 22:30:06 +00001202void IDeserializer::DeserializerImpl::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001203{
1204 CHECK_LAYERS(graph, 0, layerIndex);
1205
Finn Williams85d36712021-01-26 22:30:06 +00001206 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001207 CHECK_VALID_SIZE(inputs.size(), 1);
1208
Finn Williams85d36712021-01-26 22:30:06 +00001209 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001210 CHECK_VALID_SIZE(outputs.size(), 1);
1211
1212 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1213 auto flatBufferCrops = flatBufferDescriptor->crops();
1214 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1215
1216 if (flatBufferCrops->Length() % 2 != 0)
1217 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001218 throw ParseException(fmt::format("The size of crops must be divisible by 2 {}", CHECK_LOCATION().AsString()));
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001219 }
1220
1221 std::vector<std::pair<unsigned int, unsigned int>> crops;
1222 crops.reserve(flatBufferCrops->Length() / 2);
1223 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
1224 {
1225 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1226 }
1227
1228 armnn::BatchToSpaceNdDescriptor descriptor;
1229 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1230 descriptor.m_BlockShape =
1231 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1232 descriptor.m_Crops = crops;
1233
1234 auto layerName = GetLayerName(graph, layerIndex);
1235 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1236
1237 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1238 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1239
1240 RegisterInputSlots(graph, layerIndex, layer);
1241 RegisterOutputSlots(graph, layerIndex, layer);
1242}
1243
Finn Williams85d36712021-01-26 22:30:06 +00001244void IDeserializer::DeserializerImpl::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
ruoyan018e7fa232019-02-28 15:09:07 +00001245{
1246 CHECK_LAYERS(graph, 0, layerIndex);
1247
1248 auto inputs = GetInputs(graph, layerIndex);
1249 CHECK_VALID_SIZE(inputs.size(), 1);
1250
1251 auto outputs = GetOutputs(graph, layerIndex);
1252 CHECK_VALID_SIZE(outputs.size(), 1);
1253 auto outputInfo = ToTensorInfo(outputs[0]);
1254
ruoyan015c7ab052019-03-04 14:48:02 +00001255 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001256
1257 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1258 auto serializerDescriptor = serializerLayer->descriptor();
1259
1260 armnn::BatchNormalizationDescriptor descriptor;
1261 descriptor.m_Eps = serializerDescriptor->eps();
1262 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1263
1264 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1265 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1266 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1267 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1268
1269 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1270 mean,
1271 variance,
1272 beta,
1273 gamma,
1274 layerName.c_str());
1275 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1276
1277 RegisterInputSlots(graph, layerIndex, layer);
1278 RegisterOutputSlots(graph, layerIndex, layer);
1279}
1280
mathad01b392e982021-04-07 12:07:30 +01001281void IDeserializer::DeserializerImpl::ParseCast(GraphPtr graph, unsigned int layerIndex)
1282{
1283 CHECK_LAYERS(graph, 0, layerIndex);
1284 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1285 CHECK_LOCATION();
1286 CHECK_VALID_SIZE(inputs.size(), 1);
1287
1288 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1289 CHECK_VALID_SIZE(outputs.size(), 1);
1290
1291 auto layerName = GetLayerName(graph, layerIndex);
1292
1293 IConnectableLayer* layer = m_Network->AddCastLayer(layerName.c_str());
1294
1295 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1296 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1297
1298 RegisterInputSlots(graph, layerIndex, layer);
1299 RegisterOutputSlots(graph, layerIndex, layer);
1300}
1301
Finn Williams85d36712021-01-26 22:30:06 +00001302void IDeserializer::DeserializerImpl::ParseConstant(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy76277882019-02-26 08:29:54 +00001303{
1304 CHECK_LAYERS(graph, 0, layerIndex);
1305 CHECK_LOCATION();
1306
1307 auto outputs = GetOutputs(graph, layerIndex);
1308 CHECK_VALID_SIZE(outputs.size(), 1);
1309
1310 auto layerName = GetLayerName(graph, layerIndex);
1311
1312 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
1313 auto serializerInput = serializerLayer->input();
1314
1315 armnn::ConstTensor input = ToConstTensor(serializerInput);
1316
1317 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
1318
1319 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1320 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1321
1322 RegisterOutputSlots(graph, layerIndex, layer);
1323}
1324
Finn Williams85d36712021-01-26 22:30:06 +00001325void IDeserializer::DeserializerImpl::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001326{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001327 CHECK_LAYERS(graph, 0, layerIndex);
1328 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001329 CHECK_LOCATION();
1330 CHECK_VALID_SIZE(inputs.size(), 1);
1331
Derek Lamberti8ddae332019-02-21 16:29:43 +00001332 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001333 CHECK_VALID_SIZE(outputs.size(), 1);
1334
Derek Lamberti8ddae332019-02-21 16:29:43 +00001335 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001336 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001337 auto serializerDescriptor = serializerLayer->descriptor();
1338
1339 armnn::Convolution2dDescriptor descriptor;
1340 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1341 descriptor.m_PadRight = serializerDescriptor->padRight();
1342 descriptor.m_PadTop = serializerDescriptor->padTop();
1343 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1344 descriptor.m_StrideX = serializerDescriptor->strideX();
1345 descriptor.m_StrideY = serializerDescriptor->strideY();;
Matthew Benthamacad04e2019-05-13 10:02:45 +01001346 descriptor.m_DilationX = serializerDescriptor->dilationX();
1347 descriptor.m_DilationY = serializerDescriptor->dilationY();;
Mike Kellya0766c32019-02-19 17:22:07 +00001348 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1349 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1350
1351 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1352 armnn::ConstTensor biases;
1353
Matteo Martincighfc598e12019-05-14 10:36:13 +01001354 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001355 if (descriptor.m_BiasEnabled)
1356 {
1357 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001358 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001359 }
1360 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1361 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001362 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001363 layerName.c_str());
1364 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1365 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1366
Derek Lamberti8ddae332019-02-21 16:29:43 +00001367 RegisterInputSlots(graph, layerIndex, layer);
1368 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001369}
1370
Finn Williams85d36712021-01-26 22:30:06 +00001371void IDeserializer::DeserializerImpl::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001372{
1373 CHECK_LAYERS(graph, 0, layerIndex);
1374
1375 auto inputs = GetInputs(graph, layerIndex);
1376 CHECK_VALID_SIZE(inputs.size(), 1);
1377
1378 auto outputs = GetOutputs(graph, layerIndex);
1379 CHECK_VALID_SIZE(outputs.size(), 1);
1380
1381 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1382
1383 armnn::DepthToSpaceDescriptor descriptor;
1384 descriptor.m_BlockSize = fbDescriptor->blockSize();
1385 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1386
1387 auto layerName = GetLayerName(graph, layerIndex);
1388 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1389
1390 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1391 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1392
1393 RegisterInputSlots(graph, layerIndex, layer);
1394 RegisterOutputSlots(graph, layerIndex, layer);
1395}
1396
Finn Williams85d36712021-01-26 22:30:06 +00001397void IDeserializer::DeserializerImpl::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001398{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001399 CHECK_LAYERS(graph, 0, layerIndex);
1400 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001401 CHECK_LOCATION();
1402 CHECK_VALID_SIZE(inputs.size(), 1);
1403
Derek Lamberti8ddae332019-02-21 16:29:43 +00001404 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001405 CHECK_VALID_SIZE(outputs.size(), 1);
1406
Derek Lamberti8ddae332019-02-21 16:29:43 +00001407 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001408 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001409 auto serializerDescriptor = serializerLayer->descriptor();
1410
1411 armnn::DepthwiseConvolution2dDescriptor descriptor;
1412 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1413 descriptor.m_PadRight = serializerDescriptor->padRight();
1414 descriptor.m_PadTop = serializerDescriptor->padTop();
1415 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1416 descriptor.m_StrideX = serializerDescriptor->strideX();
Aron Virginas-Tar5e1b0cf2019-06-21 14:20:11 +01001417 descriptor.m_StrideY = serializerDescriptor->strideY();
1418 descriptor.m_DilationX = serializerDescriptor->dilationX();
1419 descriptor.m_DilationY = serializerDescriptor->dilationY();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001420 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1421 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1422
1423 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1424 armnn::ConstTensor biases;
1425
Matteo Martincighfc598e12019-05-14 10:36:13 +01001426 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001427 if (descriptor.m_BiasEnabled)
1428 {
1429 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001430 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001431 }
1432 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1433 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001434 optionalBiases,
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001435 layerName.c_str());
1436
1437 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1438 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1439
Derek Lamberti8ddae332019-02-21 16:29:43 +00001440 RegisterInputSlots(graph, layerIndex, layer);
1441 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001442}
1443
/// Deserializes a DetectionPostProcess layer together with its constant
/// anchors tensor, and adds it to the network.
/// The layer has a fixed shape: 2 inputs and 4 outputs (see the size checks
/// below); all four output slots receive their deserialized tensor infos.
void IDeserializer::DeserializerImpl::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    // Copy every serialized descriptor field across verbatim: detection limits,
    // NMS thresholds, class count, NMS mode flag, and the box scale factors.
    armnn::DetectionPostProcessDescriptor descriptor;
    descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
    descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
    descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
    descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
    descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
    descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
    descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
    descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
    descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
    descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
    descriptor.m_ScaleH = flatBufferDescriptor->scaleH();

    // The anchor boxes are serialized as a constant tensor on the layer.
    armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());

    IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
                                                                       anchors,
                                                                       layerName.c_str());

    // Set the tensor info on all four output slots.
    for (unsigned int i = 0; i < 4; i++)
    {
        layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1485
Finn Williams85d36712021-01-26 22:30:06 +00001486void IDeserializer::DeserializerImpl::ParseDivision(GraphPtr graph, unsigned int layerIndex)
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001487{
1488 CHECK_LAYERS(graph, 0, layerIndex);
1489 auto inputs = GetInputs(graph, layerIndex);
1490 CHECK_LOCATION();
1491 CHECK_VALID_SIZE(inputs.size(), 2);
1492
1493 auto outputs = GetOutputs(graph, layerIndex);
1494 CHECK_VALID_SIZE(outputs.size(), 1);
1495
1496 auto layerName = GetLayerName(graph, layerIndex);
1497 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1498
1499 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1500 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1501
1502 RegisterInputSlots(graph, layerIndex, layer);
1503 RegisterOutputSlots(graph, layerIndex, layer);
1504}
1505
Finn Williams85d36712021-01-26 22:30:06 +00001506void IDeserializer::DeserializerImpl::ParseEqual(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001507{
1508 CHECK_LAYERS(graph, 0, layerIndex);
1509 auto inputs = GetInputs(graph, layerIndex);
1510 CHECK_LOCATION();
1511 CHECK_VALID_SIZE(inputs.size(), 2);
1512
1513 auto outputs = GetOutputs(graph, layerIndex);
1514 CHECK_VALID_SIZE(outputs.size(), 1);
1515
1516 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001517 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1518 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001519
1520 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1521 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1522
1523 RegisterInputSlots(graph, layerIndex, layer);
1524 RegisterOutputSlots(graph, layerIndex, layer);
1525}
1526
/// Deserializes a Fill layer and adds it to the network.
/// NOTE(review): the fill value is hard-coded to 1.0f here instead of being
/// read from the serialized layer's descriptor — confirm against the
/// flatbuffer schema whether a serialized fill value is silently dropped.
void IDeserializer::DeserializerImpl::ParseFill(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    // Fixed fill value of 1.0f (see NOTE above).
    armnn::FillDescriptor descriptor(1.0f);
    IConnectableLayer* layer = m_Network->AddFillLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1547
Finn Williams85d36712021-01-26 22:30:06 +00001548void IDeserializer::DeserializerImpl::ParseGreater(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001549{
1550 CHECK_LAYERS(graph, 0, layerIndex);
1551 auto inputs = GetInputs(graph, layerIndex);
1552 CHECK_LOCATION();
1553 CHECK_VALID_SIZE(inputs.size(), 2);
1554
1555 auto outputs = GetOutputs(graph, layerIndex);
1556 CHECK_VALID_SIZE(outputs.size(), 1);
1557
1558 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001559 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1560 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001561
1562 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1563 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1564
1565 RegisterInputSlots(graph, layerIndex, layer);
1566 RegisterOutputSlots(graph, layerIndex, layer);
1567}
1568
Finn Williams85d36712021-01-26 22:30:06 +00001569void IDeserializer::DeserializerImpl::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001570{
1571 CHECK_LAYERS(graph, 0, layerIndex);
1572
1573 auto inputs = GetInputs(graph, layerIndex);
1574 CHECK_VALID_SIZE(inputs.size(), 1);
1575
1576 auto outputs = GetOutputs(graph, layerIndex);
1577 CHECK_VALID_SIZE(outputs.size(), 1);
1578
1579 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1580 auto fbDescriptor = fbLayer->descriptor();
1581
1582 armnn::InstanceNormalizationDescriptor descriptor;
1583 descriptor.m_Gamma = fbDescriptor->gamma();
1584 descriptor.m_Beta = fbDescriptor->beta();
1585 descriptor.m_Eps = fbDescriptor->eps();
1586 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1587
1588 const std::string layerName = GetLayerName(graph, layerIndex);
1589 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1590
1591 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1592 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1593
1594 RegisterInputSlots(graph, layerIndex, layer);
1595 RegisterOutputSlots(graph, layerIndex, layer);
1596}
1597
Finn Williams85d36712021-01-26 22:30:06 +00001598void IDeserializer::DeserializerImpl::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001599{
1600 CHECK_LAYERS(graph, 0, layerIndex);
1601
1602 auto inputs = GetInputs(graph, layerIndex);
1603 CHECK_VALID_SIZE(inputs.size(), 1);
1604
1605 auto outputs = GetOutputs(graph, layerIndex);
1606 CHECK_VALID_SIZE(outputs.size(), 1);
1607 auto outputInfo = ToTensorInfo(outputs[0]);
1608
1609 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1610 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1611
1612 auto layerName = GetLayerName(graph, layerIndex);
1613 armnn::L2NormalizationDescriptor descriptor;
1614 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001615 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001616
1617 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1618 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1619
1620 RegisterInputSlots(graph, layerIndex, layer);
1621 RegisterOutputSlots(graph, layerIndex, layer);
1622}
1623
Finn Williams85d36712021-01-26 22:30:06 +00001624void IDeserializer::DeserializerImpl::ParseLogicalBinary(GraphPtr graph, unsigned int layerIndex)
James Conroyaba90cd2020-11-06 16:28:18 +00001625{
1626 CHECK_LAYERS(graph, 0, layerIndex);
1627 CHECK_LOCATION();
1628
1629 auto inputs = GetInputs(graph, layerIndex);
1630 CHECK_VALID_SIZE(inputs.size(), 2);
1631
1632 auto outputs = GetOutputs(graph, layerIndex);
1633 CHECK_VALID_SIZE(outputs.size(), 1);
1634
1635 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer();
1636 auto fbDescriptor = fbLayer->descriptor();
1637
1638 armnn::LogicalBinaryDescriptor descriptor;
1639 descriptor.m_Operation = ToLogicalBinaryOperation(fbDescriptor->operation());
1640
1641 const std::string& layerName = GetLayerName(graph, layerIndex);
1642 IConnectableLayer* layer = m_Network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1643
1644 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1645 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1646
1647 RegisterInputSlots(graph, layerIndex, layer);
1648 RegisterOutputSlots(graph, layerIndex, layer);
1649}
1650
Finn Williams85d36712021-01-26 22:30:06 +00001651void IDeserializer::DeserializerImpl::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan26257852019-10-14 13:00:47 +01001652{
1653 CHECK_LAYERS(graph, 0, layerIndex);
1654
Finn Williams85d36712021-01-26 22:30:06 +00001655 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001656 CHECK_VALID_SIZE(inputs.size(), 1);
1657
Finn Williams85d36712021-01-26 22:30:06 +00001658 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001659 CHECK_VALID_SIZE(outputs.size(), 1);
1660
1661 armnn::LogSoftmaxDescriptor descriptor;
1662 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1663 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1664 auto layerName = GetLayerName(graph, layerIndex);
1665
1666 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1667
1668 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1669 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1670
1671 RegisterInputSlots(graph, layerIndex, layer);
1672 RegisterOutputSlots(graph, layerIndex, layer);
1673}
1674
Finn Williams85d36712021-01-26 22:30:06 +00001675void IDeserializer::DeserializerImpl::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001676{
1677 CHECK_LAYERS(graph, 0, layerIndex);
1678 auto inputs = GetInputs(graph, layerIndex);
1679 CHECK_LOCATION();
1680 CHECK_VALID_SIZE(inputs.size(), 2);
1681
1682 auto outputs = GetOutputs(graph, layerIndex);
1683 CHECK_VALID_SIZE(outputs.size(), 1);
1684
1685 auto layerName = GetLayerName(graph, layerIndex);
1686 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1687
1688 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1689 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1690
1691 RegisterInputSlots(graph, layerIndex, layer);
1692 RegisterOutputSlots(graph, layerIndex, layer);
1693}
1694
Finn Williams85d36712021-01-26 22:30:06 +00001695void IDeserializer::DeserializerImpl::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001696{
1697 CHECK_LAYERS(graph, 0, layerIndex);
1698 auto inputs = GetInputs(graph, layerIndex);
1699 CHECK_LOCATION();
1700 CHECK_VALID_SIZE(inputs.size(), 2);
1701
1702 auto outputs = GetOutputs(graph, layerIndex);
1703 CHECK_VALID_SIZE(outputs.size(), 1);
1704
1705 auto layerName = GetLayerName(graph, layerIndex);
1706 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1707
1708 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1709 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1710
1711 RegisterInputSlots(graph, layerIndex, layer);
1712 RegisterOutputSlots(graph, layerIndex, layer);
1713}
1714
Jim Flynne242f2d2019-05-22 14:24:13 +01001715const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1716 unsigned int layerIndex)
1717{
1718 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1719
1720 switch (layerType)
1721 {
1722 case Layer::Layer_ConcatLayer:
1723 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1724 case Layer::Layer_MergerLayer:
1725 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1726 default:
1727 throw armnn::Exception("unknown layer type, should be concat or merger");
1728 }
1729}
1730
Finn Williams85d36712021-01-26 22:30:06 +00001731void IDeserializer::DeserializerImpl::ParseComparison(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01001732{
1733 CHECK_LAYERS(graph, 0, layerIndex);
1734 CHECK_LOCATION();
1735
1736 auto inputs = GetInputs(graph, layerIndex);
1737 CHECK_VALID_SIZE(inputs.size(), 2);
1738
1739 auto outputs = GetOutputs(graph, layerIndex);
1740 CHECK_VALID_SIZE(outputs.size(), 1);
1741
1742 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
1743 auto fbDescriptor = fbLayer->descriptor();
1744
1745 armnn::ComparisonDescriptor descriptor;
1746 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
1747
1748 const std::string& layerName = GetLayerName(graph, layerIndex);
1749 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1750
1751 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1752 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1753
1754 RegisterInputSlots(graph, layerIndex, layer);
1755 RegisterOutputSlots(graph, layerIndex, layer);
1756}
1757
Finn Williams85d36712021-01-26 22:30:06 +00001758void IDeserializer::DeserializerImpl::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
josh minor4a3c6102020-01-06 16:40:46 -06001759{
1760 CHECK_LAYERS(graph, 0, layerIndex);
1761 CHECK_LOCATION();
1762
1763 auto inputs = GetInputs(graph, layerIndex);
1764 CHECK_VALID_SIZE(inputs.size(), 1);
1765
1766 auto outputs = GetOutputs(graph, layerIndex);
1767 CHECK_VALID_SIZE(outputs.size(), 1);
1768
1769 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
1770 auto fbDescriptor = fbLayer->descriptor();
1771
1772 armnn::ElementwiseUnaryDescriptor descriptor;
1773 descriptor.m_Operation = ToUnaryOperation(fbDescriptor->operation());
1774
1775 const std::string& layerName = GetLayerName(graph, layerIndex);
1776 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
1777
1778 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1779 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1780
1781 RegisterInputSlots(graph, layerIndex, layer);
1782 RegisterOutputSlots(graph, layerIndex, layer);
1783}
1784
/// Deserializes a Concat layer (or the legacy Merger layer — see
/// GetOriginsDescriptor) and adds it to the network. Rebuilds the
/// OriginsDescriptor view-by-view from the serialized view origins.
void IDeserializer::DeserializerImpl::ParseConcat(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    // Works for both Concat and Merger layers; throws on any other type.
    auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
    unsigned int numViews = originsDescriptor->numViews();
    unsigned int numDimensions = originsDescriptor->numDimensions();

    // can now check the number of inputs == number of views
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), numViews);

    armnn::OriginsDescriptor descriptor(numViews, numDimensions);
    auto originsPtr = originsDescriptor->viewOrigins();
    // Copy every origin coordinate (one per view per dimension) into the
    // armnn descriptor.
    for (unsigned int v = 0; v < numViews; ++v)
    {
        auto originPtr = originsPtr->Get(v);
        for (unsigned int d = 0; d < numDimensions; ++d)
        {
            uint32_t value = originPtr->data()->Get(d);
            descriptor.SetViewOriginCoord(v, d, value);
        }
    }
    descriptor.SetConcatAxis(originsDescriptor->concatAxis());

    IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1822
Finn Williams85d36712021-01-26 22:30:06 +00001823void IDeserializer::DeserializerImpl::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001824{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001825 CHECK_LAYERS(graph, 0, layerIndex);
1826 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001827 CHECK_LOCATION();
1828 CHECK_VALID_SIZE(inputs.size(), 2);
1829
Derek Lamberti8ddae332019-02-21 16:29:43 +00001830 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001831 CHECK_VALID_SIZE(outputs.size(), 1);
1832
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001833 auto layerName = GetLayerName(graph, layerIndex);
1834 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001835
1836 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1837 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1838
Derek Lamberti8ddae332019-02-21 16:29:43 +00001839 RegisterInputSlots(graph, layerIndex, layer);
1840 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001841}
1842
Finn Williams85d36712021-01-26 22:30:06 +00001843void IDeserializer::DeserializerImpl::ParseFloor(GraphPtr graph, unsigned int layerIndex)
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001844{
1845 CHECK_LAYERS(graph, 0, layerIndex);
1846 CHECK_LOCATION();
1847
1848 auto inputs = GetInputs(graph, layerIndex);
1849 CHECK_VALID_SIZE(inputs.size(), 1);
1850
1851 auto outputs = GetOutputs(graph, layerIndex);
1852 CHECK_VALID_SIZE(outputs.size(), 1);
1853
1854 auto layerName = GetLayerName(graph, layerIndex);
1855
1856 armnn::IConnectableLayer* layer;
1857
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001858 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001859
1860 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1861 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1862
1863 RegisterInputSlots(graph, layerIndex, layer);
1864 RegisterOutputSlots(graph, layerIndex, layer);
1865}
1866
Finn Williams85d36712021-01-26 22:30:06 +00001867void IDeserializer::DeserializerImpl::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001868{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001869 CHECK_LAYERS(graph, 0, layerIndex);
1870 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001871 CHECK_LOCATION();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001872
Derek Lamberti8ddae332019-02-21 16:29:43 +00001873 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001874 CHECK_VALID_SIZE(outputs.size(), 1);
1875
Derek Lamberti8ddae332019-02-21 16:29:43 +00001876 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001877 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001878 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1879
1880 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1881 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1882 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001883 fullyConnectedDescriptor.m_ConstantWeights = flatBufferDescriptor->constantWeights();
1884 uint32_t numInputs = 1;
1885 if (!fullyConnectedDescriptor.m_ConstantWeights)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001886 {
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001887 numInputs = 2;
1888 if (fullyConnectedDescriptor.m_BiasEnabled)
1889 {
1890 numInputs = 3;
1891 }
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001892 }
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00001893 CHECK_VALID_SIZE(inputs.size(), numInputs);
1894
1895 armnn::Optional <armnn::ConstTensor> optionalWeights = armnn::EmptyOptional();
1896 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
1897 if (fullyConnectedDescriptor.m_ConstantWeights)
1898 {
1899 armnn::ConstTensor weightsTensorData = ToConstTensor(flatBufferLayer->weights());
1900 optionalWeights = armnn::Optional<armnn::ConstTensor>(weightsTensorData);
1901
1902 if (flatBufferDescriptor->biasEnabled())
1903 {
1904 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
1905 optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
1906 }
1907 }
1908
1909 armnn::IConnectableLayer* layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1910 optionalWeights,
1911 optionalBiases,
1912 layerName.c_str());
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001913
1914 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1915 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1916
Derek Lamberti8ddae332019-02-21 16:29:43 +00001917 RegisterInputSlots(graph, layerIndex, layer);
1918 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001919}
1920
Finn Williams85d36712021-01-26 22:30:06 +00001921void IDeserializer::DeserializerImpl::ParsePad(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001922{
1923 CHECK_LAYERS(graph, 0, layerIndex);
1924
Finn Williams85d36712021-01-26 22:30:06 +00001925 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001926 CHECK_VALID_SIZE(inputs.size(), 1);
1927
Finn Williams85d36712021-01-26 22:30:06 +00001928 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001929 CHECK_VALID_SIZE(outputs.size(), 1);
1930
1931 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1932 auto flatBufferPadList = flatBufferDescriptor->padList();
David Monahan34757812019-06-19 11:47:21 +01001933 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001934
1935 if (flatBufferPadList->Length() % 2 != 0)
1936 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001937 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
1938 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001939 }
1940
1941 std::vector<std::pair<unsigned int, unsigned int>> padList;
1942 padList.reserve(flatBufferPadList->Length() / 2);
1943 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1944 {
1945 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1946 }
1947
David Monahan34757812019-06-19 11:47:21 +01001948 armnn::PadDescriptor descriptor(padList, padValue);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001949
1950 auto layerName = GetLayerName(graph, layerIndex);
1951 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1952
1953 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1954 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1955
1956 RegisterInputSlots(graph, layerIndex, layer);
1957 RegisterOutputSlots(graph, layerIndex, layer);
1958}
1959
Finn Williams85d36712021-01-26 22:30:06 +00001960void IDeserializer::DeserializerImpl::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001961{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001962 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001963
1964 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001965 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001966
Derek Lamberti8ddae332019-02-21 16:29:43 +00001967 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001968 CHECK_VALID_SIZE(inputs.size(), 1);
1969
Derek Lamberti8ddae332019-02-21 16:29:43 +00001970 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001971 CHECK_VALID_SIZE(outputs.size(), 1);
1972 auto outputInfo = ToTensorInfo(outputs[0]);
1973
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001974 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001975 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1976
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001977 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001978 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1979
Derek Lamberti8ddae332019-02-21 16:29:43 +00001980 RegisterInputSlots(graph, layerIndex, layer);
1981 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001982}
1983
Finn Williams85d36712021-01-26 22:30:06 +00001984armnn::Pooling2dDescriptor IDeserializer::DeserializerImpl::GetPoolingDescriptor(PoolingDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001985 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001986{
Jan Eilers8eb25602020-03-09 12:13:48 +00001987 IgnoreUnused(layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001988 armnn::Pooling2dDescriptor desc;
1989
1990 switch (pooling2dDesc->poolType())
1991 {
1992 case PoolingAlgorithm_Average:
1993 {
1994 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001995 break;
1996 }
1997 case PoolingAlgorithm_Max:
1998 {
1999 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002000 break;
2001 }
Teresa Charlin4c600de2021-03-11 21:59:43 +00002002 case PoolingAlgorithm_L2:
2003 {
2004 desc.m_PoolType = armnn::PoolingAlgorithm::L2;
2005 break;
2006 }
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002007 default:
2008 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002009 ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002010 }
2011 }
2012
2013 switch (pooling2dDesc->outputShapeRounding())
2014 {
2015 case OutputShapeRounding_Floor:
2016 {
2017 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2018 break;
2019 }
2020 case OutputShapeRounding_Ceiling:
2021 {
2022 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
2023 break;
2024 }
2025 default:
2026 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002027 ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002028 }
2029 }
2030
2031 switch (pooling2dDesc->paddingMethod())
2032 {
2033 case PaddingMethod_Exclude:
2034 {
2035 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2036 break;
2037 }
2038 case PaddingMethod_IgnoreValue:
2039 {
2040 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
2041 break;
2042 }
2043 default:
2044 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002045 ARMNN_ASSERT_MSG(false, "Unsupported padding method");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002046 }
2047 }
2048
2049 switch (pooling2dDesc->dataLayout())
2050 {
2051 case DataLayout_NCHW:
2052 {
2053 desc.m_DataLayout = armnn::DataLayout::NCHW;
2054 break;
2055 }
2056 case DataLayout_NHWC:
2057 {
2058 desc.m_DataLayout = armnn::DataLayout::NHWC;
2059 break;
2060 }
2061 default:
2062 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002063 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002064 }
2065 }
2066
2067 desc.m_PadRight = pooling2dDesc->padRight();
2068 desc.m_PadLeft = pooling2dDesc->padLeft();
2069 desc.m_PadBottom = pooling2dDesc->padBottom();
2070 desc.m_PadTop = pooling2dDesc->padTop();
2071 desc.m_StrideX = pooling2dDesc->strideX();
2072 desc.m_StrideY = pooling2dDesc->strideY();
2073 desc.m_PoolWidth = pooling2dDesc->poolWidth();
2074 desc.m_PoolHeight = pooling2dDesc->poolHeight();
2075
2076 return desc;
2077}
2078
Finn Williams85d36712021-01-26 22:30:06 +00002079
2080
2081void IDeserializer::DeserializerImpl::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002082{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002083 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002084
Derek Lamberti8ddae332019-02-21 16:29:43 +00002085 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00002086 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002087 CHECK_VALID_SIZE(inputs.size(), 1);
2088
Derek Lamberti8ddae332019-02-21 16:29:43 +00002089 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002090 CHECK_VALID_SIZE(outputs.size(), 1);
2091 auto outputInfo = ToTensorInfo(outputs[0]);
2092
2093 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002094 auto layerName = GetLayerName(graph, layerIndex);
2095 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002096 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2097
Derek Lamberti8ddae332019-02-21 16:29:43 +00002098 RegisterInputSlots(graph, layerIndex, layer);
2099 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002100}
2101
Finn Williams85d36712021-01-26 22:30:06 +00002102void IDeserializer::DeserializerImpl::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
Derek Lamberti87acb272019-03-27 16:51:31 +00002103{
2104 CHECK_LAYERS(graph, 0, layerIndex);
2105
2106 auto inputs = GetInputs(graph, layerIndex);
2107 CHECK_VALID_SIZE(inputs.size(), 1);
2108
2109 auto outputs = GetOutputs(graph, layerIndex);
2110 CHECK_VALID_SIZE(outputs.size(), 1);
2111 auto outputInfo = ToTensorInfo(outputs[0]);
2112
2113 auto layerName = GetLayerName(graph, layerIndex);
2114 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
2115 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2116
2117 RegisterInputSlots(graph, layerIndex, layer);
2118 RegisterOutputSlots(graph, layerIndex, layer);
2119}
2120
Finn Williams85d36712021-01-26 22:30:06 +00002121armnn::TensorInfo IDeserializer::DeserializerImpl::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00002122 const std::vector<uint32_t>& targetDimsIn)
2123{
2124 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
2125 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
2126
2127 if (stretchDim != targetDimsIn.end())
2128 {
2129 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
2130 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002131 throw ParseException(fmt::format("At most one component of shape can be -1 {}",
2132 CHECK_LOCATION().AsString()));
Saoirse Stewart263829c2019-02-19 15:54:14 +00002133 }
2134
2135 auto targetNumElements =
Matthew Sloyan0663d662020-09-14 11:47:26 +01002136 armnn::numeric_cast<unsigned int>(
Saoirse Stewart263829c2019-02-19 15:54:14 +00002137 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
2138
2139 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
2140 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
2141 }
2142
2143 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
2144
2145 armnn::TensorInfo reshapeInfo = inputTensorInfo;
2146 reshapeInfo.SetShape(outputShape);
2147
2148 return reshapeInfo;
2149}
2150
Finn Williams85d36712021-01-26 22:30:06 +00002151void IDeserializer::DeserializerImpl::ParseRank(GraphPtr graph, unsigned int layerIndex)
Finn Williams2605b232020-06-10 15:53:46 +01002152{
2153 CHECK_LAYERS(graph, 0, layerIndex);
2154
Finn Williams85d36712021-01-26 22:30:06 +00002155 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002156 CHECK_VALID_SIZE(inputs.size(), 1);
2157
Finn Williams85d36712021-01-26 22:30:06 +00002158 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002159 CHECK_VALID_SIZE(outputs.size(), 1);
2160
2161 auto layerName = GetLayerName(graph, layerIndex);
2162 IConnectableLayer* layer = m_Network->AddRankLayer( layerName.c_str());
2163
2164 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2165 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2166
2167 RegisterInputSlots(graph, layerIndex, layer);
2168 RegisterOutputSlots(graph, layerIndex, layer);
2169}
2170
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002171void IDeserializer::DeserializerImpl::ParseReduce(GraphPtr graph, unsigned int layerIndex)
2172{
2173 CHECK_LAYERS(graph, 0, layerIndex);
2174 CHECK_LOCATION();
2175
2176 auto inputs = GetInputs(graph, layerIndex);
2177 CHECK_VALID_SIZE(inputs.size(), 1);
2178
2179 auto outputs = GetOutputs(graph, layerIndex);
2180 CHECK_VALID_SIZE(outputs.size(), 1);
2181
2182 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ReduceLayer();
2183 auto fbDescriptor = fbLayer->descriptor();
2184 auto flatBufferAxis = fbDescriptor->axis();
2185
2186 armnn::ReduceDescriptor descriptor;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002187 descriptor.m_KeepDims = fbDescriptor->keepDims();
2188 descriptor.m_vAxis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2189 descriptor.m_ReduceOperation = ToReduceOperation(fbDescriptor->reduceOperation());
2190
2191 const std::string& layerName = GetLayerName(graph, layerIndex);
2192 IConnectableLayer* layer = m_Network->AddReduceLayer(descriptor, layerName.c_str());
2193
2194 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2195 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2196
2197 RegisterInputSlots(graph, layerIndex, layer);
2198 RegisterOutputSlots(graph, layerIndex, layer);
2199}
2200
Finn Williams85d36712021-01-26 22:30:06 +00002201void IDeserializer::DeserializerImpl::ParseReshape(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart263829c2019-02-19 15:54:14 +00002202{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002203 CHECK_LAYERS(graph, 0, layerIndex);
2204 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002205
Derek Lamberti8ddae332019-02-21 16:29:43 +00002206 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002207 CHECK_VALID_SIZE(outputs.size(), 1);
2208
2209 armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
2210 armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);
2211
Derek Lamberti8ddae332019-02-21 16:29:43 +00002212 const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
Saoirse Stewart263829c2019-02-19 15:54:14 +00002213 std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());
2214
Finn Williams85d36712021-01-26 22:30:06 +00002215 armnn::TensorInfo reshapeOutputTensorInfo = DeserializerImpl::OutputShapeOfReshape(inputTensorInfo, outputDims);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002216 const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
2217
2218 const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
2219 outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());
2220
2221 if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
2222 {
2223 std::stringstream ss;
2224 ss << "New shape defined in reshape parameters "
2225 << reshapeOutputTensorShape
2226 << " does not equal output shape "
2227 << actualOutputTensorInfo.GetShape()
2228 << ": "
2229 << CHECK_LOCATION().AsString();
2230 throw ParseException(ss.str());
2231 }
2232
2233 armnn::ReshapeDescriptor reshapeDesc;
2234 reshapeDesc.m_TargetShape = reshapeOutputTensorShape;
2235
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002236 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002237 IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
2238 layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);
2239
Derek Lamberti8ddae332019-02-21 16:29:43 +00002240 RegisterInputSlots(graph, layerIndex, layer);
2241 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002242}
2243
Finn Williams85d36712021-01-26 22:30:06 +00002244void IDeserializer::DeserializerImpl::ParseResize(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002245{
2246 CHECK_LAYERS(graph, 0, layerIndex);
2247
Finn Williams85d36712021-01-26 22:30:06 +00002248 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002249 CHECK_VALID_SIZE(inputs.size(), 1);
2250
Finn Williams85d36712021-01-26 22:30:06 +00002251 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002252 CHECK_VALID_SIZE(outputs.size(), 1);
2253
2254 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2255
2256 armnn::ResizeDescriptor descriptor;
2257 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
2258 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
2259 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
2260 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002261 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2262 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002263
2264 auto layerName = GetLayerName(graph, layerIndex);
2265 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2266
2267 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2268 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2269
2270 RegisterInputSlots(graph, layerIndex, layer);
2271 RegisterOutputSlots(graph, layerIndex, layer);
2272}
2273
Finn Williams85d36712021-01-26 22:30:06 +00002274void IDeserializer::DeserializerImpl::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002275{
2276 CHECK_LAYERS(graph, 0, layerIndex);
2277
Finn Williams85d36712021-01-26 22:30:06 +00002278 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002279 CHECK_VALID_SIZE(inputs.size(), 1);
2280
Finn Williams85d36712021-01-26 22:30:06 +00002281 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002282 CHECK_VALID_SIZE(outputs.size(), 1);
2283
2284 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2285
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002286 armnn::ResizeDescriptor descriptor;
2287 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002288 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002289 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
2290 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002291 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2292 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002293
2294 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002295 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002296
2297 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2298 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2299
2300 RegisterInputSlots(graph, layerIndex, layer);
2301 RegisterOutputSlots(graph, layerIndex, layer);
2302}
2303
Finn Williams85d36712021-01-26 22:30:06 +00002304void IDeserializer::DeserializerImpl::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002305{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002306 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002307
Finn Williams85d36712021-01-26 22:30:06 +00002308 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002309 CHECK_VALID_SIZE(inputs.size(), 1);
2310
Finn Williams85d36712021-01-26 22:30:06 +00002311 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002312 CHECK_VALID_SIZE(outputs.size(), 1);
2313
2314 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00002315 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002316 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002317
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002318 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2319
2320 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2321 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2322
Derek Lamberti8ddae332019-02-21 16:29:43 +00002323 RegisterInputSlots(graph, layerIndex, layer);
2324 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00002325}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002326
Finn Williams85d36712021-01-26 22:30:06 +00002327void IDeserializer::DeserializerImpl::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002328{
2329 CHECK_LAYERS(graph, 0, layerIndex);
2330
Finn Williams85d36712021-01-26 22:30:06 +00002331 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002332 CHECK_VALID_SIZE(inputs.size(), 1);
2333
Finn Williams85d36712021-01-26 22:30:06 +00002334 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002335 CHECK_VALID_SIZE(outputs.size(), 1);
2336
2337 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2338 auto flatBufferPadList = flatBufferDescriptor->padList();
2339 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
2340
2341 if (flatBufferPadList->Length() % 2 != 0)
2342 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002343 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2344 CHECK_LOCATION().AsString()));
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002345 }
2346
2347 std::vector<std::pair<unsigned int, unsigned int>> padList;
2348 padList.reserve(flatBufferPadList->Length() / 2);
2349 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
2350 {
2351 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2352 }
2353
2354 armnn::SpaceToBatchNdDescriptor descriptor;
2355 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2356 descriptor.m_BlockShape =
2357 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2358 descriptor.m_PadList = padList;
2359
2360 auto layerName = GetLayerName(graph, layerIndex);
2361 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2362
2363 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2364 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2365
2366 RegisterInputSlots(graph, layerIndex, layer);
2367 RegisterOutputSlots(graph, layerIndex, layer);
2368}
2369
Finn Williams85d36712021-01-26 22:30:06 +00002370void IDeserializer::DeserializerImpl::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002371{
2372 CHECK_LAYERS(graph, 0, layerIndex);
2373
Finn Williams85d36712021-01-26 22:30:06 +00002374 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002375 CHECK_VALID_SIZE(inputs.size(), 1);
2376
Finn Williams85d36712021-01-26 22:30:06 +00002377 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002378 CHECK_VALID_SIZE(outputs.size(), 1);
2379
2380 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2381
2382 armnn::SpaceToDepthDescriptor descriptor;
2383 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2384 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2385
2386 auto layerName = GetLayerName(graph, layerIndex);
2387 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2388
2389 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2390 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2391
2392 RegisterInputSlots(graph, layerIndex, layer);
2393 RegisterOutputSlots(graph, layerIndex, layer);
2394}
2395
Finn Williams85d36712021-01-26 22:30:06 +00002396armnn::NormalizationDescriptor IDeserializer::DeserializerImpl::GetNormalizationDescriptor(
2397 NormalizationDescriptorPtr normalizationDescriptor,
Nina Drozd57728782019-02-27 10:53:27 +00002398 unsigned int layerIndex)
2399{
Jan Eilers8eb25602020-03-09 12:13:48 +00002400 IgnoreUnused(layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002401 armnn::NormalizationDescriptor desc;
2402
2403 switch (normalizationDescriptor->normChannelType())
2404 {
2405 case NormalizationAlgorithmChannel_Across:
2406 {
2407 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
2408 break;
2409 }
2410 case NormalizationAlgorithmChannel_Within:
2411 {
2412 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
2413 break;
2414 }
2415 default:
2416 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002417 ARMNN_ASSERT_MSG(false, "Unsupported normalization channel type");
Nina Drozd57728782019-02-27 10:53:27 +00002418 }
2419 }
2420
2421 switch (normalizationDescriptor->normMethodType())
2422 {
2423 case NormalizationAlgorithmMethod_LocalBrightness:
2424 {
2425 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
2426 break;
2427 }
2428 case NormalizationAlgorithmMethod_LocalContrast:
2429 {
2430 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
2431 break;
2432 }
2433 default:
2434 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002435 ARMNN_ASSERT_MSG(false, "Unsupported normalization method type");
Nina Drozd57728782019-02-27 10:53:27 +00002436 }
2437 }
2438
2439 switch (normalizationDescriptor->dataLayout())
2440 {
2441 case DataLayout_NCHW:
2442 {
2443 desc.m_DataLayout = armnn::DataLayout::NCHW;
2444 break;
2445 }
2446 case DataLayout_NHWC:
2447 {
2448 desc.m_DataLayout = armnn::DataLayout::NHWC;
2449 break;
2450 }
2451 default:
2452 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002453 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Nina Drozd57728782019-02-27 10:53:27 +00002454 }
2455 }
2456
2457 desc.m_Alpha = normalizationDescriptor->alpha();
2458 desc.m_Beta = normalizationDescriptor->beta();
2459 desc.m_K = normalizationDescriptor->k();
2460 desc.m_NormSize = normalizationDescriptor->normSize();
2461
2462 return desc;
2463}
2464
Finn Williams85d36712021-01-26 22:30:06 +00002465void IDeserializer::DeserializerImpl::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
Nina Drozd57728782019-02-27 10:53:27 +00002466{
2467 CHECK_LAYERS(graph, 0, layerIndex);
2468
2469 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2470
Finn Williams85d36712021-01-26 22:30:06 +00002471 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002472 CHECK_VALID_SIZE(inputs.size(), 1);
2473
Finn Williams85d36712021-01-26 22:30:06 +00002474 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002475 CHECK_VALID_SIZE(outputs.size(), 1);
2476
2477 auto outputInfo = ToTensorInfo(outputs[0]);
2478
2479 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2480 auto layerName = GetLayerName(graph, layerIndex);
2481
2482 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2483 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2484
2485 RegisterInputSlots(graph, layerIndex, layer);
2486 RegisterOutputSlots(graph, layerIndex, layer);
2487}
2488
Finn Williams85d36712021-01-26 22:30:06 +00002489void IDeserializer::DeserializerImpl::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan8b42a382019-03-01 14:24:49 +00002490{
2491 CHECK_LAYERS(graph, 0, layerIndex);
2492 auto inputs = GetInputs(graph, layerIndex);
2493 CHECK_LOCATION();
2494 CHECK_VALID_SIZE(inputs.size(), 1);
2495
2496 auto outputs = GetOutputs(graph, layerIndex);
2497 CHECK_VALID_SIZE(outputs.size(), 1);
2498
2499 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagan8b42a382019-03-01 14:24:49 +00002500
josh minor4a3c6102020-01-06 16:40:46 -06002501 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
2502 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
Sadik Armagan8b42a382019-03-01 14:24:49 +00002503 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2504 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2505
2506 RegisterInputSlots(graph, layerIndex, layer);
2507 RegisterOutputSlots(graph, layerIndex, layer);
2508}
2509
Finn Williams85d36712021-01-26 22:30:06 +00002510void IDeserializer::DeserializerImpl::ParseSlice(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002511{
2512 CHECK_LAYERS(graph, 0, layerIndex);
2513
2514 auto inputs = GetInputs(graph, layerIndex);
2515 CHECK_VALID_SIZE(inputs.size(), 1);
2516
2517 auto outputs = GetOutputs(graph, layerIndex);
2518 CHECK_VALID_SIZE(outputs.size(), 1);
2519
2520 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2521
2522 auto fbBegin = fbDescriptor->begin();
2523 auto fbSize = fbDescriptor->size();
2524
2525 if (fbBegin->Length() != fbSize->Length())
2526 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002527 throw ParseException(fmt::format("Begin and size descriptors must have the same length {}",
2528 CHECK_LOCATION().AsString()));
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002529 }
2530
2531 armnn::SliceDescriptor descriptor;
2532 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
2533 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
2534
2535 auto layerName = GetLayerName(graph, layerIndex);
2536 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2537
2538 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2539 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2540
2541 RegisterInputSlots(graph, layerIndex, layer);
2542 RegisterOutputSlots(graph, layerIndex, layer);
2543}
2544
Finn Williams85d36712021-01-26 22:30:06 +00002545void IDeserializer::DeserializerImpl::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002546{
2547 CHECK_LAYERS(graph, 0, layerIndex);
2548
Finn Williams85d36712021-01-26 22:30:06 +00002549 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002550 CHECK_VALID_SIZE(inputs.size(), 1);
2551
Finn Williams85d36712021-01-26 22:30:06 +00002552 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002553 CHECK_VALID_SIZE(outputs.size(), 1);
2554
2555 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2556
2557 auto flatBufferBegin = flatBufferDescriptor->begin();
2558 auto flatBufferEnd = flatBufferDescriptor->end();
2559 auto flatBufferStride = flatBufferDescriptor->stride();
2560
2561 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
2562 flatBufferBegin->Length() == flatBufferStride->Length()))
2563 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002564 throw ParseException(fmt::format("The size of the begin, end, and stride must be equal {}",
2565 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002566 }
2567
2568 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2569 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2570 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
2571
2572 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
2573 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2574 descriptor.m_EndMask = flatBufferDescriptor->endMask();
2575 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
2576 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
2577 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
2578 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2579
2580 auto layerName = GetLayerName(graph, layerIndex);
2581 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
2582
2583 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2584 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2585
2586 RegisterInputSlots(graph, layerIndex, layer);
2587 RegisterOutputSlots(graph, layerIndex, layer);
2588}
2589
Finn Williams85d36712021-01-26 22:30:06 +00002590void IDeserializer::DeserializerImpl::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
Conor Kennedyda1f9752019-03-01 14:37:12 +00002591{
2592 CHECK_LAYERS(graph, 0, layerIndex);
2593 auto inputs = GetInputs(graph, layerIndex);
2594 CHECK_LOCATION();
2595 CHECK_VALID_SIZE(inputs.size(), 2);
2596
2597 auto outputs = GetOutputs(graph, layerIndex);
2598 CHECK_VALID_SIZE(outputs.size(), 1);
2599
2600 auto layerName = GetLayerName(graph, layerIndex);
2601 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
2602
2603 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2604 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2605
2606 RegisterInputSlots(graph, layerIndex, layer);
2607 RegisterOutputSlots(graph, layerIndex, layer);
2608}
2609
Finn Williams85d36712021-01-26 22:30:06 +00002610void IDeserializer::DeserializerImpl::ParseGather(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002611{
2612 CHECK_LAYERS(graph, 0, layerIndex);
2613
Finn Williams85d36712021-01-26 22:30:06 +00002614 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002615 CHECK_VALID_SIZE(inputs.size(), 2);
2616
Finn Williams85d36712021-01-26 22:30:06 +00002617 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002618 CHECK_VALID_SIZE(outputs.size(), 1);
2619
Teresa Charlin52664732020-06-29 16:27:03 +01002620 armnn::GatherDescriptor descriptor;
2621 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_GatherLayer()->descriptor()->axis();
2622
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002623 auto layerName = GetLayerName(graph, layerIndex);
Teresa Charlin52664732020-06-29 16:27:03 +01002624 IConnectableLayer* layer = m_Network->AddGatherLayer(descriptor, layerName.c_str());
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002625
2626 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002627 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2628
2629 RegisterInputSlots(graph, layerIndex, layer);
2630 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002631}
2632
Finn Williams85d36712021-01-26 22:30:06 +00002633void IDeserializer::DeserializerImpl::ParseMean(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002634{
2635 CHECK_LAYERS(graph, 0, layerIndex);
2636
Finn Williams85d36712021-01-26 22:30:06 +00002637 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002638 CHECK_VALID_SIZE(inputs.size(), 1);
2639
Finn Williams85d36712021-01-26 22:30:06 +00002640 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002641 CHECK_VALID_SIZE(outputs.size(), 1);
2642
2643 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
2644 auto flatBufferAxis = flatBufferDescriptor->axis();
2645 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
2646
2647 armnn::MeanDescriptor descriptor;
2648 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2649 descriptor.m_KeepDims = flatBufferKeepDims;
2650
2651 auto layerName = GetLayerName(graph, layerIndex);
2652 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
2653
2654 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2655 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2656
2657 RegisterInputSlots(graph, layerIndex, layer);
2658 RegisterOutputSlots(graph, layerIndex, layer);
2659}
2660
Finn Williams85d36712021-01-26 22:30:06 +00002661void IDeserializer::DeserializerImpl::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
Jim Flynn18ce3382019-03-08 11:08:30 +00002662{
2663 CHECK_LAYERS(graph, 0, layerIndex);
2664
Finn Williams85d36712021-01-26 22:30:06 +00002665 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00002666 CHECK_VALID_SIZE(inputs.size(), 1);
2667
Finn Williams85d36712021-01-26 22:30:06 +00002668 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00002669
2670 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
2671 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
2672 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
2673 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
2674 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
2675 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
2676
2677 // Check numViews and numDimensions corresponds to the ones already serialized ...
2678 // numViews == flatBufferViewSizes.size();
2679 // foreach: numDimensions == flatBufferViewSizes[x].size();
2680
2681 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
2682 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2683 {
2684 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
2685 {
2686 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
2687 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
2688 }
2689 }
2690
2691 auto layerName = GetLayerName(graph, layerIndex);
2692 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
2693
2694 // I could have as many outputs as views ...
2695 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2696 {
2697 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
2698 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
2699 }
2700
2701 RegisterInputSlots(graph, layerIndex, layer);
2702 RegisterOutputSlots(graph, layerIndex, layer);
2703}
2704
Finn Williams85d36712021-01-26 22:30:06 +00002705armnn::LstmDescriptor IDeserializer::DeserializerImpl::GetLstmDescriptor(LstmDescriptorPtr lstmDescriptor)
Jim Flynn11af3752019-03-19 17:22:29 +00002706{
2707 armnn::LstmDescriptor desc;
2708
2709 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
2710 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
2711 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
2712 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
2713 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
2714 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01002715 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00002716
2717 return desc;
2718}
2719
Finn Williams85d36712021-01-26 22:30:06 +00002720void IDeserializer::DeserializerImpl::ParseLstm(GraphPtr graph, unsigned int layerIndex)
Jim Flynn11af3752019-03-19 17:22:29 +00002721{
2722 CHECK_LAYERS(graph, 0, layerIndex);
2723
2724 auto inputs = GetInputs(graph, layerIndex);
2725 CHECK_VALID_SIZE(inputs.size(), 3);
2726
2727 auto outputs = GetOutputs(graph, layerIndex);
2728 CHECK_VALID_SIZE(outputs.size(), 4);
2729
2730 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
2731 auto layerName = GetLayerName(graph, layerIndex);
2732 auto flatBufferDescriptor = flatBufferLayer->descriptor();
2733 auto flatBufferInputParams = flatBufferLayer->inputParams();
2734
2735 auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);
2736
2737 armnn::LstmInputParams lstmInputParams;
2738
2739 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
2740 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
2741 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
2742 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
2743 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
2744 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
2745 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
2746 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
2747 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
2748
2749 lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
2750 lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
2751 lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
2752 lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
2753 lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
2754 lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
2755 lstmInputParams.m_ForgetGateBias = &forgetGateBias;
2756 lstmInputParams.m_CellBias = &cellBias;
2757 lstmInputParams.m_OutputGateBias = &outputGateBias;
2758
2759 armnn::ConstTensor inputToInputWeights;
2760 armnn::ConstTensor recurrentToInputWeights;
2761 armnn::ConstTensor cellToInputWeights;
2762 armnn::ConstTensor inputGateBias;
2763 if (!lstmDescriptor.m_CifgEnabled)
2764 {
2765 inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
2766 recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
2767 cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
2768 inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
2769
2770 lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
2771 lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
2772 lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
2773 lstmInputParams.m_InputGateBias = &inputGateBias;
2774 }
2775
2776 armnn::ConstTensor projectionWeights;
2777 armnn::ConstTensor projectionBias;
2778 if (lstmDescriptor.m_ProjectionEnabled)
2779 {
2780 projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
2781 projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());
2782
2783 lstmInputParams.m_ProjectionWeights = &projectionWeights;
2784 lstmInputParams.m_ProjectionBias = &projectionBias;
2785 }
2786
2787 armnn::ConstTensor cellToForgetWeights;
2788 armnn::ConstTensor cellToOutputWeights;
2789 if (lstmDescriptor.m_PeepholeEnabled)
2790 {
2791 cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
2792 cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());
2793
2794 lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
2795 lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
2796 }
2797
Jan Eilersf8c62972019-07-17 11:07:49 +01002798 armnn::ConstTensor inputLayerNormWeights;
2799 armnn::ConstTensor forgetLayerNormWeights;
2800 armnn::ConstTensor cellLayerNormWeights;
2801 armnn::ConstTensor outputLayerNormWeights;
2802 if (lstmDescriptor.m_LayerNormEnabled)
2803 {
2804 if (!lstmDescriptor.m_CifgEnabled)
2805 {
2806 inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
2807 lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
2808 }
2809 forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
2810 cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
2811 outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
2812
2813 lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
2814 lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
2815 lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
2816 }
2817
Jim Flynn11af3752019-03-19 17:22:29 +00002818 IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());
2819
2820 armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
2821 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);
2822
2823 armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
2824 layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);
2825
2826 armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
2827 layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);
2828
2829 armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
2830 layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);
2831
2832 RegisterInputSlots(graph, layerIndex, layer);
2833 RegisterOutputSlots(graph, layerIndex, layer);
2834}
2835
Finn Williams85d36712021-01-26 22:30:06 +00002836armnn::QLstmDescriptor IDeserializer::DeserializerImpl::GetQLstmDescriptor(QLstmDescriptorPtr qLstmDescriptor)
James Conroy8d333182020-05-13 10:27:58 +01002837{
2838 armnn::QLstmDescriptor desc;
2839
2840 desc.m_CifgEnabled = qLstmDescriptor->cifgEnabled();
2841 desc.m_PeepholeEnabled = qLstmDescriptor->peepholeEnabled();
2842 desc.m_ProjectionEnabled = qLstmDescriptor->projectionEnabled();
2843 desc.m_LayerNormEnabled = qLstmDescriptor->layerNormEnabled();
2844
2845 desc.m_CellClip = qLstmDescriptor->cellClip();
2846 desc.m_ProjectionClip = qLstmDescriptor->projectionClip();
2847
2848 desc.m_InputIntermediateScale = qLstmDescriptor->inputIntermediateScale();
2849 desc.m_ForgetIntermediateScale = qLstmDescriptor->forgetIntermediateScale();
2850 desc.m_CellIntermediateScale = qLstmDescriptor->cellIntermediateScale();
2851 desc.m_OutputIntermediateScale = qLstmDescriptor->outputIntermediateScale();
2852
2853 desc.m_HiddenStateScale = qLstmDescriptor->hiddenStateScale();
2854 desc.m_HiddenStateZeroPoint = qLstmDescriptor->hiddenStateZeroPoint();
2855
2856 return desc;
2857}
2858
Finn Williams85d36712021-01-26 22:30:06 +00002859void IDeserializer::DeserializerImpl::ParseQLstm(GraphPtr graph, unsigned int layerIndex)
James Conroy8d333182020-05-13 10:27:58 +01002860{
2861 CHECK_LAYERS(graph, 0, layerIndex);
2862
2863 auto inputs = GetInputs(graph, layerIndex);
2864 CHECK_VALID_SIZE(inputs.size(), 3);
2865
2866 auto outputs = GetOutputs(graph, layerIndex);
2867 CHECK_VALID_SIZE(outputs.size(), 3);
2868
2869 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
2870 auto layerName = GetLayerName(graph, layerIndex);
2871 auto flatBufferDescriptor = flatBufferLayer->descriptor();
2872 auto flatBufferInputParams = flatBufferLayer->inputParams();
2873
2874 auto qLstmDescriptor = GetQLstmDescriptor(flatBufferDescriptor);
2875 armnn::LstmInputParams qLstmInputParams;
2876
2877 // Mandatory params
2878 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
2879 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
2880 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
2881 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
2882 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
2883 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
2884 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
2885 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
2886 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
2887
2888 qLstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
2889 qLstmInputParams.m_InputToCellWeights = &inputToCellWeights;
2890 qLstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
2891 qLstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
2892 qLstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
2893 qLstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
2894 qLstmInputParams.m_ForgetGateBias = &forgetGateBias;
2895 qLstmInputParams.m_CellBias = &cellBias;
2896 qLstmInputParams.m_OutputGateBias = &outputGateBias;
2897
2898 // Optional CIFG params
2899 armnn::ConstTensor inputToInputWeights;
2900 armnn::ConstTensor recurrentToInputWeights;
2901 armnn::ConstTensor inputGateBias;
2902
2903 if (!qLstmDescriptor.m_CifgEnabled)
2904 {
2905 inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
2906 recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
2907 inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
2908
2909 qLstmInputParams.m_InputToInputWeights = &inputToInputWeights;
2910 qLstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
2911 qLstmInputParams.m_InputGateBias = &inputGateBias;
2912 }
2913
2914 // Optional projection params
2915 armnn::ConstTensor projectionWeights;
2916 armnn::ConstTensor projectionBias;
2917
2918 if (qLstmDescriptor.m_ProjectionEnabled)
2919 {
2920 projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
2921 projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());
2922
2923 qLstmInputParams.m_ProjectionWeights = &projectionWeights;
2924 qLstmInputParams.m_ProjectionBias = &projectionBias;
2925 }
2926
2927 // Optional peephole params
2928 armnn::ConstTensor cellToInputWeights;
2929 armnn::ConstTensor cellToForgetWeights;
2930 armnn::ConstTensor cellToOutputWeights;
2931
2932 if (qLstmDescriptor.m_PeepholeEnabled)
2933 {
2934 if (!qLstmDescriptor.m_CifgEnabled)
2935 {
2936 cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
2937 qLstmInputParams.m_CellToInputWeights = &cellToInputWeights;
2938 }
2939
2940 cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
2941 cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());
2942
2943 qLstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
2944 qLstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
2945 }
2946
2947 // Optional layer norm params
2948 armnn::ConstTensor inputLayerNormWeights;
2949 armnn::ConstTensor forgetLayerNormWeights;
2950 armnn::ConstTensor cellLayerNormWeights;
2951 armnn::ConstTensor outputLayerNormWeights;
2952
2953 if (qLstmDescriptor.m_LayerNormEnabled)
2954 {
2955 if (!qLstmDescriptor.m_CifgEnabled)
2956 {
2957 inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
2958 qLstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
2959 }
2960
2961 forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
2962 cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
2963 outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
2964
2965 qLstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
2966 qLstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
2967 qLstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
2968 }
2969
2970 IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());
2971
2972 armnn::TensorInfo outputStateOutInfo = ToTensorInfo(outputs[0]);
2973 layer->GetOutputSlot(0).SetTensorInfo(outputStateOutInfo);
2974
2975 armnn::TensorInfo cellStateOutInfo = ToTensorInfo(outputs[1]);
2976 layer->GetOutputSlot(1).SetTensorInfo(cellStateOutInfo);
2977
2978 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[2]);
2979 layer->GetOutputSlot(2).SetTensorInfo(outputInfo);
2980
2981 RegisterInputSlots(graph, layerIndex, layer);
2982 RegisterOutputSlots(graph, layerIndex, layer);
2983}
2984
Finn Williams85d36712021-01-26 22:30:06 +00002985void IDeserializer::DeserializerImpl::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
Jan Eilers5b01a892019-07-23 09:47:43 +01002986{
2987 CHECK_LAYERS(graph, 0, layerIndex);
2988
2989 auto inputs = GetInputs(graph, layerIndex);
2990 CHECK_VALID_SIZE(inputs.size(), 3);
2991
2992 auto outputs = GetOutputs(graph, layerIndex);
2993 CHECK_VALID_SIZE(outputs.size(), 2);
2994
2995 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
2996 auto layerName = GetLayerName(graph, layerIndex);
2997 auto flatBufferInputParams = flatBufferLayer->inputParams();
2998
2999 armnn::QuantizedLstmInputParams lstmInputParams;
3000
3001 armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
3002 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
3003 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
3004 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
3005 armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
3006 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
3007 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
3008 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
3009 armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
3010 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
3011 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
3012 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
3013
3014 lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
3015 lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
3016 lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
3017 lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
3018 lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
3019 lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
3020 lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
3021 lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
3022 lstmInputParams.m_InputGateBias = &inputGateBias;
3023 lstmInputParams.m_ForgetGateBias = &forgetGateBias;
3024 lstmInputParams.m_CellBias = &cellBias;
3025 lstmInputParams.m_OutputGateBias = &outputGateBias;
3026
3027 IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());
3028
3029 armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
3030 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);
3031
3032 armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
3033 layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);
3034
3035 RegisterInputSlots(graph, layerIndex, layer);
3036 RegisterOutputSlots(graph, layerIndex, layer);
3037}
3038
Finn Williams85d36712021-01-26 22:30:06 +00003039void IDeserializer::DeserializerImpl::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003040{
3041 CHECK_LAYERS(graph, 0, layerIndex);
3042
Finn Williams85d36712021-01-26 22:30:06 +00003043 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003044 CHECK_VALID_SIZE(inputs.size(), 1);
3045
Finn Williams85d36712021-01-26 22:30:06 +00003046 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003047 CHECK_VALID_SIZE(outputs.size(), 1);
3048
3049 const std::string layerName = GetLayerName(graph, layerIndex);
3050 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
3051
3052 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3053 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3054
3055 RegisterInputSlots(graph, layerIndex, layer);
3056 RegisterOutputSlots(graph, layerIndex, layer);
3057}
3058
Finn Williams85d36712021-01-26 22:30:06 +00003059void IDeserializer::DeserializerImpl::ParseMerge(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003060{
3061 CHECK_LAYERS(graph, 0, layerIndex);
3062
Finn Williams85d36712021-01-26 22:30:06 +00003063 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003064 CHECK_VALID_SIZE(inputs.size(), 2);
3065
Finn Williams85d36712021-01-26 22:30:06 +00003066 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003067 CHECK_VALID_SIZE(outputs.size(), 1);
3068
3069 const std::string layerName = GetLayerName(graph, layerIndex);
3070 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
3071
3072 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3073 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3074
3075 RegisterInputSlots(graph, layerIndex, layer);
3076 RegisterOutputSlots(graph, layerIndex, layer);
3077}
3078
Finn Williams85d36712021-01-26 22:30:06 +00003079void IDeserializer::DeserializerImpl::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganeff363d2019-04-05 15:25:46 +01003080{
3081 CHECK_LAYERS(graph, 0, layerIndex);
3082 auto inputs = GetInputs(graph, layerIndex);
3083 CHECK_LOCATION();
3084 CHECK_VALID_SIZE(inputs.size(), 2);
3085
3086 auto outputs = GetOutputs(graph, layerIndex);
3087 CHECK_VALID_SIZE(outputs.size(), 2);
3088
3089 auto layerName = GetLayerName(graph, layerIndex);
3090 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
3091
3092 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
3093 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
3094
3095 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
3096 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
3097
3098 RegisterInputSlots(graph, layerIndex, layer);
3099 RegisterOutputSlots(graph, layerIndex, layer);
3100}
3101
Finn Williams85d36712021-01-26 22:30:06 +00003102void IDeserializer::DeserializerImpl::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01003103{
3104 CHECK_LAYERS(graph, 0, layerIndex);
3105 auto inputs = GetInputs(graph, layerIndex);
3106 CHECK_LOCATION();
3107 CHECK_VALID_SIZE(inputs.size(), 2);
3108
3109 auto outputs = GetOutputs(graph, layerIndex);
3110 CHECK_VALID_SIZE(outputs.size(), 1);
3111
3112 auto layerName = GetLayerName(graph, layerIndex);
3113 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
3114
3115 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3116 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3117
3118 RegisterInputSlots(graph, layerIndex, layer);
3119 RegisterOutputSlots(graph, layerIndex, layer);
3120}
3121
Finn Williams85d36712021-01-26 22:30:06 +00003122void IDeserializer::DeserializerImpl::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
Mike Kellyc9ea45a2020-02-28 18:11:58 +00003123{
3124 CHECK_LAYERS(graph, 0, layerIndex);
3125
3126 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
3127
3128 auto inputs = GetInputs(graph, layerIndex);
3129 CHECK_VALID_SIZE(inputs.size(), 1);
3130
3131 auto outputs = GetOutputs(graph, layerIndex);
3132 CHECK_VALID_SIZE(outputs.size(), 1);
3133 auto outputInfo = ToTensorInfo(outputs[0]);
3134
3135 auto layerName = GetLayerName(graph, layerIndex);
3136 const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
3137
3138 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
3139 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
3140
3141 RegisterInputSlots(graph, layerIndex, layer);
3142 RegisterOutputSlots(graph, layerIndex, layer);
3143}
3144
Finn Williams85d36712021-01-26 22:30:06 +00003145void IDeserializer::DeserializerImpl::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01003146{
3147 CHECK_LAYERS(graph, 0, layerIndex);
3148
3149 auto inputs = GetInputs(graph, layerIndex);
3150 CHECK_VALID_SIZE(inputs.size(), 1);
3151
3152 auto outputs = GetOutputs(graph, layerIndex);
3153 CHECK_VALID_SIZE(outputs.size(), 1);
3154
3155 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
3156 auto layerName = GetLayerName(graph, layerIndex);
3157 auto serializerDescriptor = serializerLayer->descriptor();
3158
3159 armnn::TransposeConvolution2dDescriptor descriptor;
3160 descriptor.m_PadLeft = serializerDescriptor->padLeft();
3161 descriptor.m_PadRight = serializerDescriptor->padRight();
3162 descriptor.m_PadTop = serializerDescriptor->padTop();
3163 descriptor.m_PadBottom = serializerDescriptor->padBottom();
3164 descriptor.m_StrideX = serializerDescriptor->strideX();
3165 descriptor.m_StrideY = serializerDescriptor->strideY();;
3166 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
3167 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
3168
3169 // weights & biases
3170 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
3171 armnn::Optional<armnn::ConstTensor> optionalBiases;
3172 if (descriptor.m_BiasEnabled)
3173 {
3174 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
3175 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
3176 }
3177
3178 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
3179 weights,
3180 optionalBiases,
3181 layerName.c_str());
3182
3183 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3184 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3185
3186 RegisterInputSlots(graph, layerIndex, layer);
3187 RegisterOutputSlots(graph, layerIndex, layer);
3188}
3189
Finn Williams85d36712021-01-26 22:30:06 +00003190void IDeserializer::DeserializerImpl::ParseStack(GraphPtr graph, unsigned int layerIndex)
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003191{
3192 CHECK_LAYERS(graph, 0, layerIndex);
3193 auto inputs = GetInputs(graph, layerIndex);
3194
3195 auto outputs = GetOutputs(graph, layerIndex);
3196 CHECK_VALID_SIZE(outputs.size(), 1);
3197
3198 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
3199 unsigned int axis = flatBufferDescriptor->axis();
3200 unsigned int numInputs = flatBufferDescriptor->numInputs();
3201 CHECK_VALID_SIZE(inputs.size(), numInputs);
3202
3203 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
3204 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
3205 flatBufferInputShape->begin() + flatBufferInputShape->size());
3206
3207 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
3208 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
3209
3210 for (unsigned int i=0; i<inputs.size(); ++i)
3211 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01003212 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003213 if (descriptor.m_InputShape != inputShape)
3214 {
3215 std::stringstream ss;
3216 ss << "Shape of input "
3217 << i
3218 << " "
3219 << inputShape
3220 << " does not equal defined input shape "
3221 << descriptor.m_InputShape
3222 << ": "
3223 << CHECK_LOCATION().AsString();
3224 throw ParseException(ss.str());
3225 }
3226 }
3227
3228 auto layerName = GetLayerName(graph, layerIndex);
3229 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
3230
3231 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3232 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3233
3234 RegisterInputSlots(graph, layerIndex, layer);
3235 RegisterOutputSlots(graph, layerIndex, layer);
3236}
3237
Finn Williams85d36712021-01-26 22:30:06 +00003238void IDeserializer::DeserializerImpl::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar85121a22019-10-23 10:41:35 +01003239{
3240 CHECK_LAYERS(graph, 0, layerIndex);
3241
3242 auto inputs = GetInputs(graph, layerIndex);
3243 auto outputs = GetOutputs(graph, layerIndex);
3244
3245 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
3246 auto fbDescriptor = fbLayer->descriptor();
3247
3248 armnn::StandInDescriptor descriptor;
3249 descriptor.m_NumInputs = fbDescriptor->numInputs();
3250 descriptor.m_NumOutputs = fbDescriptor->numOutputs();
3251
3252 CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
3253 CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);
3254
3255 const std::string layerName = GetLayerName(graph, layerIndex);
3256 armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());
3257
3258 for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
3259 {
3260 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
3261 layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
3262 }
3263
3264 RegisterInputSlots(graph, layerIndex, layer);
3265 RegisterOutputSlots(graph, layerIndex, layer);
3266}
3267
Derek Lamberti0028d1b2019-02-20 13:57:42 +00003268} // namespace armnnDeserializer