//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "Deserializer.hpp"

#include <armnn/ArmNN.hpp>
#include <armnn/Exceptions.hpp>

#include <ParserHelper.hpp>
#include <Permute.hpp>
#include <VerificationHelpers.hpp>

#include <boost/filesystem.hpp>
#include <boost/format.hpp>
#include <boost/core/ignore_unused.hpp>
#include <boost/assert.hpp>
#include <boost/log/trivial.hpp>

// The generated code based on the Serializer schema:
#include <ArmnnSchema_generated.h>

#include <fstream>
#include <algorithm>
#include <limits>
#include <numeric>

using armnn::ParseException;
using namespace armnn;
using namespace armnnSerializer;

namespace armnnDeserializer
{

namespace
{

const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();

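// Validation helpers: each throws a ParseException, tagged with the calling location,
// when the deserialized graph, a layer index or a tensor pointer is invalid.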
void CheckGraph(const Deserializer::GraphPtr& graph,
                unsigned int layersIndex,
                const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
}

void CheckLayers(const Deserializer::GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layerIndex >= graph->layers()[layersIndex].size()
             && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layer index. "
                              "layers:%2% layer:%3% at %4%") %
                location.m_Function %
                layersIndex %
                layerIndex %
                location.FileLine()));
    }
}

void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
                    const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with a null tensor pointer. "
                              "at %2%") %
                location.m_Function %
                location.FileLine()));
    }
}

void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
                         const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

void CheckConstTensorSize(const unsigned int constTensorSize,
                          const unsigned int tensorSize,
                          const CheckLocation& location)
{
    if (constTensorSize != tensorSize)
    {
        throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
}

bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
{
    const unsigned int actualSize = actual.GetNumDimensions();
    if (actualSize != expected.size())
    {
        return false;
    }

    for (unsigned int i = 0u; i < actualSize; i++)
    {
        if (actual[i] != static_cast<unsigned int>(expected[i]))
        {
            return false;
        }
    }

    return true;
}

Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
// May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_ActivationLayer]             = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer]               = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer]         = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer]     = &Deserializer::ParseBatchNormalization;
    m_ParserFunctions[Layer_ConstantLayer]               = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer]          = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DivisionLayer]               = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_EqualLayer]                  = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer]         = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_FloorLayer]                  = &Deserializer::ParseFloor;
    m_ParserFunctions[Layer_GreaterLayer]                = &Deserializer::ParseGreater;
    m_ParserFunctions[Layer_MinimumLayer]                = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MaximumLayer]                = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MultiplicationLayer]         = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer]          = &Deserializer::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer]                    = &Deserializer::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer]                = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer]              = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_ReshapeLayer]                = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_RsqrtLayer]                  = &Deserializer::ParseRsqrt;
    m_ParserFunctions[Layer_SoftmaxLayer]                = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer]         = &Deserializer::ParseSpaceToBatchNd;
}

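// Resolves the flatbuffer layer union at the given index to its common LayerBase.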
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                boost::format("Layer must have a type %1%") %
                Layer::Layer_NONE));
    }
}

std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
{
    auto layer = GetBaseLayer(graph, index);
    assert(layer);
    return layer->layerName()->str();
}

int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    if (layerType == Layer::Layer_InputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
    }
    else if (layerType == Layer::Layer_OutputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
    }
    return 0;
}

armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
{
    switch (dataLayout)
    {
        case armnnSerializer::DataLayout::DataLayout_NHWC:
            return armnn::DataLayout::NHWC;
        case armnnSerializer::DataLayout::DataLayout_NCHW:
        default:
            return armnn::DataLayout::NCHW;
    }
}

armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}

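// Converts the flatbuffer tensor description (data type, shape and quantization
// parameters) into an armnn::TensorInfo.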
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
            type = armnn::DataType::QuantisedAsymm8;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}

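// Converts flatbuffer constant tensor data into an armnn::ConstTensor, checking that
// the serialized element count matches the tensor info.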
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}

Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphInputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numInputs = graphPtr->inputIds()->size();

    LayerBaseRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        uint32_t inputId = graphPtr->inputIds()->Get(i);
        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(inputId));
    }
    return result;
}

Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphOutputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numOutputs = graphPtr->outputIds()->size();
    LayerBaseRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        uint32_t outputId = graphPtr->outputIds()->Get(i);
        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(outputId));
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
                                                         unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
            (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
                                                          unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numOutputs = layer->outputSlots()->size();

    TensorRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        result[i] = layer->outputSlots()->Get(i)->tensorInfo();
    }
    return result;
}

void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(
        boost::str(
            boost::format("Layer not supported. "
                          "layerIndex: %1% "
                          "layerName: %2% / %3%") %
            layerIndex %
            layerName %
            CHECK_LOCATION().AsString()));
}

void Deserializer::ResetParser()
{
    m_Network = armnn::INetworkPtr(nullptr, nullptr);
    m_InputBindings.clear();
    m_OutputBindings.clear();
}

IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}

IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}

INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph(graph);
}

armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
    return CreateNetworkFromGraph(graph);
}

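// Verifies the flatbuffer content and returns the root SerializedGraph.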
Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
                                                  CHECK_LOCATION().AsString()));
    }
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(
            boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
                                     "flatbuffers format. size:%1% %2%") %
                       len %
                       CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}

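// Walks the serialized graph, invoking the registered parser for each layer, then sets up
// the input/output layers and wires the recorded slot connections together.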
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    m_GraphConnections.emplace_back(graph->layers()->size());
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (size_t connectionIndex = 0; connectionIndex < m_GraphConnections[0].size(); ++connectionIndex)
    {
        if (m_GraphConnections[0][connectionIndex].outputSlot != nullptr)
        {
            for (size_t inputSlotIdx = 0;
                 inputSlotIdx < m_GraphConnections[0][connectionIndex].inputSlots.size();
                 ++inputSlotIdx)
            {
                m_GraphConnections[0][connectionIndex].outputSlot->Connect(
                    *(m_GraphConnections[0][connectionIndex].inputSlots[inputSlotIdx]));
            }
        }
    }

    return std::move(m_Network);
}

BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
                                                          const std::string& name) const
{
    for (auto inputBinding : m_InputBindings)
    {
        if (inputBinding.first == name)
        {
            return inputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No input binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
                                                           const std::string& name) const
{
    for (auto outputBinding : m_OutputBindings)
    {
        if (outputBinding.first == name)
        {
            return outputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No output binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

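// Adds an InputLayer for each graph input and records its binding id and tensor info.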
void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto inputs = GetGraphInputs(graph);
    m_InputBindings.clear();
    m_InputBindings.reserve(inputs.size());
    for (auto const& input : inputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, input->index());
        IConnectableLayer* layer =
            m_Network->AddInputLayer(bindingId, input->layerName()->c_str());

        auto tensorInfo = ToTensorInfo(input->outputSlots()->Get(0)->tensorInfo());
        layer->GetOutputSlot(0).SetTensorInfo(tensorInfo);

        RegisterOutputSlots(graph, input->index(), layer);

        BOOST_ASSERT_MSG(input->layerName()->c_str(), "Input has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(input->layerName()->c_str(), bindingInfo));
    }
}

void Deserializer::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto outputs = GetGraphOutputs(graph);
    m_OutputBindings.clear();
    m_OutputBindings.reserve(outputs.size());
    for (auto const& output : outputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, output->index());
        IConnectableLayer* layer =
            m_Network->AddOutputLayer(bindingId, output->layerName()->c_str());

        RegisterInputSlots(graph, output->index(), layer);

        auto baseLayer = GetBaseLayer(graph, output->index());
        auto sourceLayerIndex = baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex();
        auto sourceLayer = GetBaseLayer(graph, sourceLayerIndex);
        auto tensorInfo = ToTensorInfo(sourceLayer->outputSlots()->Get(0)->tensorInfo());

        BOOST_ASSERT_MSG(output->layerName()->c_str(), "Output has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(output->layerName()->c_str(), bindingInfo));
    }
}

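// Records the layer's slots against its layer index so the connections can be established
// once all layers have been created.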
void Deserializer::RegisterOutputSlots(GraphPtr graph,
                                       uint32_t layerIndex,
                                       IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index: %3% %4%") %
                       parsedLayer->outputSlots()->size() %
                       layer->GetNumOutputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumOutputSlots(); ++slotIndex)
    {
        armnn::IOutputSlot* slot = &(layer->GetOutputSlot(slotIndex));
        RegisterOutputSlotOfConnection(layerIndex, slot);
    }
}

void Deserializer::RegisterInputSlots(GraphPtr graph,
                                      uint32_t layerIndex,
                                      armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index:%3% %4%") %
                       parsedLayer->inputSlots()->size() %
                       layer->GetNumInputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumInputSlots(); ++slotIndex)
    {
        armnn::IInputSlot* slot = &(layer->GetInputSlot(slotIndex));
        uint32_t sourceLayerIndex = parsedLayer->inputSlots()->Get(slotIndex)->connection()->sourceLayerIndex();
        RegisterInputSlotOfConnection(sourceLayerIndex, slot);
    }
}

void Deserializer::RegisterInputSlotOfConnection(uint32_t connectionIndex,
                                                 armnn::IInputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);

    Slots& slots = m_GraphConnections[0][connectionIndex];
    slots.inputSlots.push_back(slot);
}

void Deserializer::RegisterOutputSlotOfConnection(uint32_t connectionIndex,
                                                  armnn::IOutputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);

    Slots& slots = m_GraphConnections[0][connectionIndex];

    // assuming there is only one producer for that tensor
    if (slots.outputSlot != nullptr)
    {
        throw ParseException(boost::str(
            boost::format("Another layer has already registered itself as the producer of "
                          "connection:%1% / %2%") %
            connectionIndex %
            CHECK_LOCATION().AsString()));
    }

    slots.outputSlot = slot;
}

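// Each ParseXxx handler below reads the layer's descriptor and tensors from the flatbuffer
// graph, adds the corresponding armnn layer to m_Network and registers its connections.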
void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::ActivationDescriptor descriptor;
    descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
    descriptor.m_A = serializerDescriptor->a();
    descriptor.m_B = serializerDescriptor->b();

    IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
                                                             layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
    auto flatBufferCrops = flatBufferDescriptor->crops();
    auto flatBufferBlockShape = flatBufferDescriptor->blockShape();

    if (flatBufferCrops->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> crops;
    crops.reserve(flatBufferCrops->Length() / 2);
    for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
    {
        crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
    }

    armnn::BatchToSpaceNdDescriptor descriptor;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
    descriptor.m_BlockShape =
        std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
    descriptor.m_Crops = crops;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = boost::str(boost::format("BatchNormalization:%1%") % layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::BatchNormalizationDescriptor descriptor;
    descriptor.m_Eps = serializerDescriptor->eps();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor mean     = ToConstTensor(serializerLayer->mean());
    armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
    armnn::ConstTensor beta     = ToConstTensor(serializerLayer->beta());
    armnn::ConstTensor gamma    = ToConstTensor(serializerLayer->gamma());

    IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
                                                                     mean,
                                                                     variance,
                                                                     beta,
                                                                     gamma,
                                                                     layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
    auto serializerInput = serializerLayer->input();

    armnn::ConstTensor input = ToConstTensor(serializerInput);

    IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::Convolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
    }
    IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
                                                                weights,
                                                                biases,
                                                                layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::DepthwiseConvolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
    }
    IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                                         weights,
                                                                         biases,
                                                                         layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddEqualLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddGreaterLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    armnn::IConnectableLayer* layer = m_Network->AddFloorLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
    fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
    fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();

    armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());

    armnn::IConnectableLayer* layer;
    if (flatBufferDescriptor->biasEnabled())
    {
        armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  weightsTensor,
                                                  biasTensorData,
                                                  layerName.c_str());
    }
    else
    {
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  weightsTensor,
                                                  layerName.c_str());
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
    auto flatBufferPadList = flatBufferDescriptor->padList();

    if (flatBufferPadList->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> padList;
    padList.reserve(flatBufferPadList->Length() / 2);
    for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
    {
        padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
    }

    armnn::PadDescriptor descriptor(padList);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto dimsMapping =
        graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);
    const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));

    IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

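// Translates the flatbuffer Pooling2dDescriptor into an armnn::Pooling2dDescriptor,
// mapping the serialized enums (pool type, output shape rounding, padding method,
// data layout) onto their armnn equivalents and copying the scalar
// padding/stride/pool-size fields.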
armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
                                                              unsigned int layerIndex)
{
    armnn::Pooling2dDescriptor desc;

    switch (pooling2dDesc->poolType())
    {
        case PoolingAlgorithm_Average:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Average;
            break;
        }
        case PoolingAlgorithm_Max:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Max;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
        }
    }

    switch (pooling2dDesc->outputShapeRounding())
    {
        case OutputShapeRounding_Floor:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
            break;
        }
        case OutputShapeRounding_Ceiling:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
        }
    }

    switch (pooling2dDesc->paddingMethod())
    {
        case PaddingMethod_Exclude:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
            break;
        }
        case PaddingMethod_IgnoreValue:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported padding method");
        }
    }

    switch (pooling2dDesc->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    desc.m_PadRight   = pooling2dDesc->padRight();
    desc.m_PadLeft    = pooling2dDesc->padLeft();
    desc.m_PadBottom  = pooling2dDesc->padBottom();
    desc.m_PadTop     = pooling2dDesc->padTop();
    desc.m_StrideX    = pooling2dDesc->strideX();
    desc.m_StrideY    = pooling2dDesc->strideY();
    desc.m_PoolWidth  = pooling2dDesc->poolWidth();
    desc.m_PoolHeight = pooling2dDesc->poolHeight();

    return desc;
}

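// Deserializes a Pooling2dLayer using the descriptor conversion above.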
void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

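// Computes the concrete output shape of a reshape. A single -1 entry in the target
// shape acts as a wildcard inferred from the remaining dimensions, e.g. an input
// with 24 elements and a target shape of {-1, 12} resolves to {2, 12}; more than
// one -1 is rejected.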
armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                     const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(boost::str(
                boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
        }

        auto targetNumElements =
            boost::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}

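// Deserializes a ReshapeLayer: resolves the serialized target shape (including a
// possible -1 wildcard) with OutputShapeOfReshape and, when a second input is
// present, checks the result against the shape recorded for the output tensor
// before adding the layer.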
void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

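// Deserializes a SoftmaxLayer; beta is the only descriptor field read here.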
void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::SoftmaxDescriptor descriptor;
    descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
    auto layerName = GetLayerName(graph, layerIndex);

    IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

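// Deserializes a SpaceToBatchNdLayer: rebuilds the (before, after) pad pairs from the
// flattened padList and copies the block shape into the descriptor.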
void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
    auto flatBufferPadList = flatBufferDescriptor->padList();
    auto flatBufferBlockShape = flatBufferDescriptor->blockShape();

    if (flatBufferPadList->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> padList;
    padList.reserve(flatBufferPadList->Length() / 2);
    // As in ParsePad, use i + 1 < Length() to avoid unsigned underflow when the pad list is empty.
    for (unsigned int i = 0; i + 1 < flatBufferPadList->Length(); i += 2)
    {
        padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
    }

    armnn::SpaceToBatchNdDescriptor descriptor;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
    descriptor.m_BlockShape =
        std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
    descriptor.m_PadList = padList;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

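// Translates the flatbuffer NormalizationDescriptor into an armnn::NormalizationDescriptor,
// mapping the channel type, method and data layout enums and copying alpha, beta, k and
// the normalization window size.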
armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
    Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
    unsigned int layerIndex)
{
    armnn::NormalizationDescriptor desc;

    switch (normalizationDescriptor->normChannelType())
    {
        case NormalizationAlgorithmChannel_Across:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
            break;
        }
        case NormalizationAlgorithmChannel_Within:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported normalization channel type");
        }
    }

    switch (normalizationDescriptor->normMethodType())
    {
        case NormalizationAlgorithmMethod_LocalBrightness:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
            break;
        }
        case NormalizationAlgorithmMethod_LocalContrast:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported normalization method type");
        }
    }

    switch (normalizationDescriptor->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    desc.m_Alpha    = normalizationDescriptor->alpha();
    desc.m_Beta     = normalizationDescriptor->beta();
    desc.m_K        = normalizationDescriptor->k();
    desc.m_NormSize = normalizationDescriptor->normSize();

    return desc;
}

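// Deserializes a NormalizationLayer using the descriptor conversion above.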
void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto outputInfo = ToTensorInfo(outputs[0]);

    auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
    auto layerName = GetLayerName(graph, layerIndex);

    IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

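// Deserializes an RsqrtLayer; the layer carries no descriptor, so only the name is needed.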
void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddRsqrtLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

} // namespace armnnDeserializer