//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "Deserializer.hpp"

#include <armnn/ArmNN.hpp>
#include <armnn/Exceptions.hpp>

#include <ParserHelper.hpp>
#include <Permute.hpp>
#include <VerificationHelpers.hpp>

#include <boost/filesystem.hpp>
#include <boost/format.hpp>
#include <boost/core/ignore_unused.hpp>
#include <boost/assert.hpp>
#include <boost/log/trivial.hpp>

// The generated code based on the Serialize schema:
#include <ArmnnSchema_generated.h>

#include <fstream>
#include <algorithm>
#include <limits>
#include <numeric>

using armnn::ParseException;
using namespace armnn;
using namespace armnnSerializer;

namespace armnnDeserializer
{

namespace
{

const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();

void CheckGraph(const Deserializer::GraphPtr& graph,
                unsigned int layersIndex,
                const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
}

void CheckLayers(const Deserializer::GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layerIndex >= graph->layers()[layersIndex].size()
             && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layer index. "
                              "layers:%2% layer:%3% at %4%") %
                location.m_Function %
                layersIndex %
                layerIndex %
                location.FileLine()));
    }
}

void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
                    const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with a null tensor pointer. "
                              "at %2%") %
                location.m_Function %
                location.FileLine()));
    }
}

void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
                         const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

void CheckConstTensorSize(const unsigned int constTensorSize,
                          const unsigned int tensorSize,
                          const CheckLocation& location)
{
    if (constTensorSize != tensorSize)
    {
        throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
}

bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
{
    const unsigned int actualSize = actual.GetNumDimensions();
    if (actualSize != expected.size())
    {
        return false;
    }

    for (unsigned int i = 0u; i < actualSize; i++)
    {
        if (actual[i] != static_cast<unsigned int>(expected[i]))
        {
            return false;
        }
    }

    return true;
}

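// Illustrative note (not from the original source): CheckShape only returns true when
// the rank and every dimension match, e.g. a TensorShape of {1, 2, 2} against the
// expected vector {1, 2, 2}; {1, 2, 2} against {1, 4} or {1, 2, 2, 1} returns false.
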
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
// May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_ConstantLayer] = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DivisionLayer] = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_EqualLayer] = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_MinimumLayer] = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MaximumLayer] = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MultiplicationLayer] = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &Deserializer::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &Deserializer::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_SoftmaxLayer] = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &Deserializer::ParseSpaceToBatchNd;
}

Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                boost::format("Layer must have a type %1%") %
                Layer::Layer_NONE));
    }
}

std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
{
    auto layer = GetBaseLayer(graph, index);
    assert(layer);
    return layer->layerName()->str();
}

int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    if (layerType == Layer::Layer_InputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
    }
    else if (layerType == Layer::Layer_OutputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
    }
    return 0;
}

armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
{
    switch (dataLayout)
    {
        case armnnSerializer::DataLayout::DataLayout_NHWC:
            return armnn::DataLayout::NHWC;
        case armnnSerializer::DataLayout::DataLayout_NCHW:
        default:
            return armnn::DataLayout::NCHW;
    }
}

armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}

armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
            type = armnn::DataType::QuantisedAsymm8;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}

armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}

Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphInputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numInputs = graphPtr->inputIds()->size();

    LayerBaseRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        uint32_t inputId = graphPtr->inputIds()->Get(i);
        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(inputId));
    }
    return result;
}

Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphOutputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numOutputs = graphPtr->outputIds()->size();
    LayerBaseRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        uint32_t outputId = graphPtr->outputIds()->Get(i);
        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(outputId));
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
                                                         unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
                       (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
                                                          unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numOutputs = layer->outputSlots()->size();

    TensorRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        result[i] = layer->outputSlots()->Get(i)->tensorInfo();
    }
    return result;
}

void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(
        boost::str(
            boost::format("Layer not supported. "
                          "layerIndex: %1% "
                          "layerName: %2% / %3%") %
            layerIndex %
            layerName %
            CHECK_LOCATION().AsString()));
}

void Deserializer::ResetParser()
{
    m_Network = armnn::INetworkPtr(nullptr, nullptr);
    m_InputBindings.clear();
    m_OutputBindings.clear();
}

IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}

IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}

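// Illustrative usage sketch (not part of the original source; assumes the stream
// overload of CreateNetworkFromBinary is exposed on the IDeserializer interface as
// implemented below):
//
//     std::ifstream file("model.armnn", std::ios::binary);   // hypothetical file name
//     armnnDeserializer::IDeserializerPtr parser = armnnDeserializer::IDeserializer::Create();
//     armnn::INetworkPtr network = parser->CreateNetworkFromBinary(file);
//
// The std::istream overload below reads the whole stream into a byte vector before
// verifying and unpacking it, so the stream only needs to be opened in binary mode.
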
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000519INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000520{
521 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000522 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
523 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000524}
525
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000526armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000527{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000528 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000529 std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
530 GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
531 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000532}
533
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000534Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000535{
536 if (binaryContent == nullptr)
537 {
538 throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
539 CHECK_LOCATION().AsString()));
540 }
541 flatbuffers::Verifier verifier(binaryContent, len);
542 if (verifier.VerifyBuffer<SerializedGraph>() == false)
543 {
544 throw ParseException(
545 boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
546 "flatbuffers format. size:%1% %2%") %
547 len %
548 CHECK_LOCATION().AsString()));
549 }
550 return GetSerializedGraph(binaryContent);
551}
552
Derek Lamberti8ddae332019-02-21 16:29:43 +0000553INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000554{
555 m_Network = INetwork::Create();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000556 BOOST_ASSERT(graph != nullptr);
Kevin May43a799c2019-02-08 16:31:42 +0000557 unsigned int layerIndex = 0;
Derek Lamberti8ddae332019-02-21 16:29:43 +0000558 m_GraphConnections.emplace_back(graph->layers()->size());
559 for (AnyLayer const* layer : *graph->layers())
Kevin May43a799c2019-02-08 16:31:42 +0000560 {
561 if (layer->layer_type() != Layer_InputLayer &&
562 layer->layer_type() != Layer_OutputLayer)
563 {
564 // lookup and call the parser function
565 auto& parserFunction = m_ParserFunctions[layer->layer_type()];
Derek Lamberti8ddae332019-02-21 16:29:43 +0000566 (this->*parserFunction)(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000567 }
568 ++layerIndex;
569 }
570
Derek Lamberti8ddae332019-02-21 16:29:43 +0000571 SetupInputLayers(graph);
572 SetupOutputLayers(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000573
574 // establish the connections from the layer outputs to the inputs of the subsequent layers
575 for (size_t connectionIndex = 0; connectionIndex < m_GraphConnections[0].size(); ++connectionIndex)
576 {
577 if (m_GraphConnections[0][connectionIndex].outputSlot != nullptr)
578 {
579 for (size_t inputSlotIdx = 0;
580 inputSlotIdx < m_GraphConnections[0][connectionIndex].inputSlots.size();
581 ++inputSlotIdx)
582 {
583 m_GraphConnections[0][connectionIndex].outputSlot->Connect(
584 *(m_GraphConnections[0][connectionIndex].inputSlots[inputSlotIdx]));
585 }
586 }
587 }
588
589 return std::move(m_Network);
590}
591
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000592BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000593 const std::string& name) const
594{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000595 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000596 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000597 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000598 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000599 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000600 }
601 }
602 throw ParseException(
603 boost::str(
604 boost::format("No input binding found for layer:%1% / %2%") %
605 name %
606 CHECK_LOCATION().AsString()));
607}
608
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000609BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000610 const std::string& name) const
611{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000612 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000613 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000614 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000615 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000616 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000617 }
618 }
619 throw ParseException(
620 boost::str(
621 boost::format("No output binding found for layer:%1% / %2%") %
622 name %
623 CHECK_LOCATION().AsString()));
624}
625
Derek Lamberti8ddae332019-02-21 16:29:43 +0000626void Deserializer::SetupInputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000627{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000628 CHECK_GRAPH(graph, 0);
629 auto inputs = GetGraphInputs(graph);
630 m_InputBindings.clear();
631 m_InputBindings.reserve(inputs.size());
Kevin May43a799c2019-02-08 16:31:42 +0000632 for (auto const& input : inputs)
633 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000634 LayerBindingId bindingId = GetBindingLayerInfo(graph, input->index());
Kevin May43a799c2019-02-08 16:31:42 +0000635 IConnectableLayer* layer =
Derek Lamberti8ddae332019-02-21 16:29:43 +0000636 m_Network->AddInputLayer(bindingId, input->layerName()->c_str());
Kevin May43a799c2019-02-08 16:31:42 +0000637
638 auto tensorInfo = ToTensorInfo(input->outputSlots()->Get(0)->tensorInfo());
639 layer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
640
Derek Lamberti8ddae332019-02-21 16:29:43 +0000641 RegisterOutputSlots(graph, input->index(), layer);
642
643 BOOST_ASSERT_MSG(input->layerName()->c_str(), "Input has no name.");
644 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
645 m_InputBindings.push_back(std::make_pair(input->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000646 }
647}
648
Derek Lamberti8ddae332019-02-21 16:29:43 +0000649void Deserializer::SetupOutputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000650{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000651 CHECK_GRAPH(graph, 0);
652 auto outputs = GetGraphOutputs(graph);
653 m_OutputBindings.clear();
654 m_OutputBindings.reserve(outputs.size());
Kevin May43a799c2019-02-08 16:31:42 +0000655 for (auto const& output : outputs)
656 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000657 LayerBindingId bindingId = GetBindingLayerInfo(graph, output->index());
Kevin May43a799c2019-02-08 16:31:42 +0000658 IConnectableLayer* layer =
Derek Lamberti8ddae332019-02-21 16:29:43 +0000659 m_Network->AddOutputLayer(bindingId, output->layerName()->c_str());
Kevin May43a799c2019-02-08 16:31:42 +0000660
Derek Lamberti8ddae332019-02-21 16:29:43 +0000661 RegisterInputSlots(graph, output->index(), layer);
662
663 auto baseLayer = GetBaseLayer(graph, output->index());
664 auto sourceLayerIndex = baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex();
665 auto sourceLayer = GetBaseLayer(graph, sourceLayerIndex);
666 auto tensorInfo = ToTensorInfo(sourceLayer->outputSlots()->Get(0)->tensorInfo());
667
668 BOOST_ASSERT_MSG(output->layerName()->c_str(), "Output has no name.");
669 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
670 m_OutputBindings.push_back(std::make_pair(output->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000671 }
672}
673
Derek Lamberti8ddae332019-02-21 16:29:43 +0000674void Deserializer::RegisterOutputSlots(GraphPtr graph,
675 uint32_t layerIndex,
676 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000677{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000678 CHECK_LAYERS(graph, 0, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000679 BOOST_ASSERT(layer != nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000680 auto parsedLayer = GetBaseLayer(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000681 if (parsedLayer->outputSlots()->size() != layer->GetNumOutputSlots())
682 {
683 throw ParseException(
684 boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
685 " for layer index: %3% %4%") %
686 parsedLayer->outputSlots()->size() %
687 layer->GetNumOutputSlots() %
688 layerIndex %
689 CHECK_LOCATION().AsString()));
690 }
691
692 for (unsigned int slotIndex = 0; slotIndex < layer->GetNumOutputSlots(); ++slotIndex)
693 {
694 armnn::IOutputSlot* slot = &(layer->GetOutputSlot(slotIndex));
695 RegisterOutputSlotOfConnection(layerIndex, slot);
696 }
697}
698
Derek Lamberti8ddae332019-02-21 16:29:43 +0000699void Deserializer::RegisterInputSlots(GraphPtr graph,
700 uint32_t layerIndex,
701 armnn::IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000702{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000703 CHECK_LAYERS(graph, 0, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000704 BOOST_ASSERT(layer != nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000705 auto parsedLayer = GetBaseLayer(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000706 if (parsedLayer->inputSlots()->size() != layer->GetNumInputSlots())
707 {
708 throw ParseException(
709 boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
710 " for layer index:%3% %4%") %
711 parsedLayer->inputSlots()->size() %
712 layer->GetNumInputSlots() %
713 layerIndex %
714 CHECK_LOCATION().AsString()));
715 }
716
717 for (unsigned int slotIndex = 0; slotIndex < layer->GetNumInputSlots(); ++slotIndex)
718 {
719 armnn::IInputSlot* slot = &(layer->GetInputSlot(slotIndex));
720 uint32_t sourceLayerIndex = parsedLayer->inputSlots()->Get(slotIndex)->connection()->sourceLayerIndex();
721 RegisterInputSlotOfConnection(sourceLayerIndex, slot);
722 }
723}
724
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000725void Deserializer::RegisterInputSlotOfConnection(uint32_t connectionIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000726 armnn::IInputSlot* slot)
727{
728 BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);
729
730 Slots& slots = m_GraphConnections[0][connectionIndex];
731 slots.inputSlots.push_back(slot);
732}
733
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000734void Deserializer::RegisterOutputSlotOfConnection(uint32_t connectionIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000735 armnn::IOutputSlot* slot)
736{
737 BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);
738
739 Slots& slots = m_GraphConnections[0][connectionIndex];
740
741 // assuming there is only one producer for that tensor
742 if (slots.outputSlot != nullptr)
743 {
744 throw ParseException(boost::str(
745 boost::format("Another layer has already registered itself as the producer of "
746 "connection:%1% / %2%") %
747 connectionIndex %
748 CHECK_LOCATION().AsString()));
749 }
750
751 slots.outputSlot = slot;
752}
753
Derek Lamberti8ddae332019-02-21 16:29:43 +0000754void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +0000755{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000756 CHECK_LAYERS(graph, 0, layerIndex);
757 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000758 CHECK_LOCATION();
759 CHECK_VALID_SIZE(inputs.size(), 1);
760
Derek Lamberti8ddae332019-02-21 16:29:43 +0000761 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000762 CHECK_VALID_SIZE(outputs.size(), 1);
763
Derek Lamberti8ddae332019-02-21 16:29:43 +0000764 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000765 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000766 auto serializerDescriptor = serializerLayer->descriptor();
767
768 armnn::ActivationDescriptor descriptor;
769 descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
770 descriptor.m_A = serializerDescriptor->a();
771 descriptor.m_B = serializerDescriptor->b();
772
773 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
774 layerName.c_str());
775 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
776 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
777
Derek Lamberti8ddae332019-02-21 16:29:43 +0000778 RegisterInputSlots(graph, layerIndex, layer);
779 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +0000780}
781
Derek Lamberti8ddae332019-02-21 16:29:43 +0000782void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000783{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000784 CHECK_LAYERS(graph, 0, layerIndex);
785 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000786 CHECK_LOCATION();
787 CHECK_VALID_SIZE(inputs.size(), 2);
788
Derek Lamberti8ddae332019-02-21 16:29:43 +0000789 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000790 CHECK_VALID_SIZE(outputs.size(), 1);
791
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000792 auto layerName = GetLayerName(graph, layerIndex);
793 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +0000794
795 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
796 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
797
Derek Lamberti8ddae332019-02-21 16:29:43 +0000798 RegisterInputSlots(graph, layerIndex, layer);
799 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +0000800}
801
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000802void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
803{
804 CHECK_LAYERS(graph, 0, layerIndex);
805
806 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
807 CHECK_VALID_SIZE(inputs.size(), 1);
808
809 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
810 CHECK_VALID_SIZE(outputs.size(), 1);
811
812 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
813 auto flatBufferCrops = flatBufferDescriptor->crops();
814 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
815
816 if (flatBufferCrops->Length() % 2 != 0)
817 {
818 throw ParseException(boost::str(
819 boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
820 }
821
822 std::vector<std::pair<unsigned int, unsigned int>> crops;
823 crops.reserve(flatBufferCrops->Length() / 2);
824 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
825 {
826 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
827 }
828
829 armnn::BatchToSpaceNdDescriptor descriptor;
830 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
831 descriptor.m_BlockShape =
832 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
833 descriptor.m_Crops = crops;
834
835 auto layerName = GetLayerName(graph, layerIndex);
836 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
837
838 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
839 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
840
841 RegisterInputSlots(graph, layerIndex, layer);
842 RegisterOutputSlots(graph, layerIndex, layer);
843}
844
Conor Kennedy76277882019-02-26 08:29:54 +0000845void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
846{
847 CHECK_LAYERS(graph, 0, layerIndex);
848 CHECK_LOCATION();
849
850 auto outputs = GetOutputs(graph, layerIndex);
851 CHECK_VALID_SIZE(outputs.size(), 1);
852
853 auto layerName = GetLayerName(graph, layerIndex);
854
855 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
856 auto serializerInput = serializerLayer->input();
857
858 armnn::ConstTensor input = ToConstTensor(serializerInput);
859
860 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
861
862 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
863 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
864
865 RegisterOutputSlots(graph, layerIndex, layer);
866}
867
Derek Lamberti8ddae332019-02-21 16:29:43 +0000868void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +0000869{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000870 CHECK_LAYERS(graph, 0, layerIndex);
871 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +0000872 CHECK_LOCATION();
873 CHECK_VALID_SIZE(inputs.size(), 1);
874
Derek Lamberti8ddae332019-02-21 16:29:43 +0000875 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +0000876 CHECK_VALID_SIZE(outputs.size(), 1);
877
Derek Lamberti8ddae332019-02-21 16:29:43 +0000878 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000879 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +0000880 auto serializerDescriptor = serializerLayer->descriptor();
881
882 armnn::Convolution2dDescriptor descriptor;
883 descriptor.m_PadLeft = serializerDescriptor->padLeft();
884 descriptor.m_PadRight = serializerDescriptor->padRight();
885 descriptor.m_PadTop = serializerDescriptor->padTop();
886 descriptor.m_PadBottom = serializerDescriptor->padBottom();
887 descriptor.m_StrideX = serializerDescriptor->strideX();
888 descriptor.m_StrideY = serializerDescriptor->strideY();;
889 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
890 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
891
892 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
893 armnn::ConstTensor biases;
894
895 if (descriptor.m_BiasEnabled)
896 {
897 biases = ToConstTensor(serializerLayer->biases());
898 }
899 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
900 weights,
901 biases,
902 layerName.c_str());
903 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
904 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
905
Derek Lamberti8ddae332019-02-21 16:29:43 +0000906 RegisterInputSlots(graph, layerIndex, layer);
907 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +0000908}
909
Derek Lamberti8ddae332019-02-21 16:29:43 +0000910void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000911{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000912 CHECK_LAYERS(graph, 0, layerIndex);
913 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000914 CHECK_LOCATION();
915 CHECK_VALID_SIZE(inputs.size(), 1);
916
Derek Lamberti8ddae332019-02-21 16:29:43 +0000917 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000918 CHECK_VALID_SIZE(outputs.size(), 1);
919
Derek Lamberti8ddae332019-02-21 16:29:43 +0000920 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000921 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000922 auto serializerDescriptor = serializerLayer->descriptor();
923
924 armnn::DepthwiseConvolution2dDescriptor descriptor;
925 descriptor.m_PadLeft = serializerDescriptor->padLeft();
926 descriptor.m_PadRight = serializerDescriptor->padRight();
927 descriptor.m_PadTop = serializerDescriptor->padTop();
928 descriptor.m_PadBottom = serializerDescriptor->padBottom();
929 descriptor.m_StrideX = serializerDescriptor->strideX();
930 descriptor.m_StrideY = serializerDescriptor->strideY();;
931 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
932 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
933
934 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
935 armnn::ConstTensor biases;
936
937 if (descriptor.m_BiasEnabled)
938 {
939 biases = ToConstTensor(serializerLayer->biases());
940 }
941 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
942 weights,
943 biases,
944 layerName.c_str());
945
946 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
947 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
948
Derek Lamberti8ddae332019-02-21 16:29:43 +0000949 RegisterInputSlots(graph, layerIndex, layer);
950 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000951}
952
Éanna Ó Catháin58885892019-02-27 16:16:39 +0000953void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
954{
955 CHECK_LAYERS(graph, 0, layerIndex);
956 auto inputs = GetInputs(graph, layerIndex);
957 CHECK_LOCATION();
958 CHECK_VALID_SIZE(inputs.size(), 2);
959
960 auto outputs = GetOutputs(graph, layerIndex);
961 CHECK_VALID_SIZE(outputs.size(), 1);
962
963 auto layerName = GetLayerName(graph, layerIndex);
964 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
965
966 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
967 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
968
969 RegisterInputSlots(graph, layerIndex, layer);
970 RegisterOutputSlots(graph, layerIndex, layer);
971}
972
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +0000973void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
974{
975 CHECK_LAYERS(graph, 0, layerIndex);
976 auto inputs = GetInputs(graph, layerIndex);
977 CHECK_LOCATION();
978 CHECK_VALID_SIZE(inputs.size(), 2);
979
980 auto outputs = GetOutputs(graph, layerIndex);
981 CHECK_VALID_SIZE(outputs.size(), 1);
982
983 auto layerName = GetLayerName(graph, layerIndex);
984 IConnectableLayer* layer = m_Network->AddEqualLayer(layerName.c_str());
985
986 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
987 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
988
989 RegisterInputSlots(graph, layerIndex, layer);
990 RegisterOutputSlots(graph, layerIndex, layer);
991}
992
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +0000993void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
994{
995 CHECK_LAYERS(graph, 0, layerIndex);
996 auto inputs = GetInputs(graph, layerIndex);
997 CHECK_LOCATION();
998 CHECK_VALID_SIZE(inputs.size(), 2);
999
1000 auto outputs = GetOutputs(graph, layerIndex);
1001 CHECK_VALID_SIZE(outputs.size(), 1);
1002
1003 auto layerName = GetLayerName(graph, layerIndex);
1004 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1005
1006 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1007 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1008
1009 RegisterInputSlots(graph, layerIndex, layer);
1010 RegisterOutputSlots(graph, layerIndex, layer);
1011}
1012
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001013void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
1014{
1015 CHECK_LAYERS(graph, 0, layerIndex);
1016 auto inputs = GetInputs(graph, layerIndex);
1017 CHECK_LOCATION();
1018 CHECK_VALID_SIZE(inputs.size(), 2);
1019
1020 auto outputs = GetOutputs(graph, layerIndex);
1021 CHECK_VALID_SIZE(outputs.size(), 1);
1022
1023 auto layerName = GetLayerName(graph, layerIndex);
1024 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1025
1026 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1027 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1028
1029 RegisterInputSlots(graph, layerIndex, layer);
1030 RegisterOutputSlots(graph, layerIndex, layer);
1031}
1032
Derek Lamberti8ddae332019-02-21 16:29:43 +00001033void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001034{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001035 CHECK_LAYERS(graph, 0, layerIndex);
1036 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001037 CHECK_LOCATION();
1038 CHECK_VALID_SIZE(inputs.size(), 2);
1039
Derek Lamberti8ddae332019-02-21 16:29:43 +00001040 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001041 CHECK_VALID_SIZE(outputs.size(), 1);
1042
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001043 auto layerName = GetLayerName(graph, layerIndex);
1044 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001045
1046 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1047 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1048
Derek Lamberti8ddae332019-02-21 16:29:43 +00001049 RegisterInputSlots(graph, layerIndex, layer);
1050 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001051}
1052
Derek Lamberti8ddae332019-02-21 16:29:43 +00001053void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001054{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001055 CHECK_LAYERS(graph, 0, layerIndex);
1056 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001057 CHECK_LOCATION();
1058 CHECK_VALID_SIZE(inputs.size(), 1);
1059
Derek Lamberti8ddae332019-02-21 16:29:43 +00001060 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001061 CHECK_VALID_SIZE(outputs.size(), 1);
1062
Derek Lamberti8ddae332019-02-21 16:29:43 +00001063 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001064 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001065 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1066
1067 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1068 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1069 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1070
1071 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1072
1073 armnn::IConnectableLayer* layer;
1074 if (flatBufferDescriptor->biasEnabled())
1075 {
1076 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
1077 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1078 weightsTensor,
1079 biasTensorData,
1080 layerName.c_str());
1081 }
1082 else
1083 {
1084 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1085 weightsTensor,
1086 layerName.c_str());
1087 }
1088
1089 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1090 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1091
Derek Lamberti8ddae332019-02-21 16:29:43 +00001092 RegisterInputSlots(graph, layerIndex, layer);
1093 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001094}
1095
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001096void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
1097{
1098 CHECK_LAYERS(graph, 0, layerIndex);
1099
1100 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1101 CHECK_VALID_SIZE(inputs.size(), 1);
1102
1103 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1104 CHECK_VALID_SIZE(outputs.size(), 1);
1105
1106 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1107 auto flatBufferPadList = flatBufferDescriptor->padList();
1108
1109 if (flatBufferPadList->Length() % 2 != 0)
1110 {
1111 throw ParseException(boost::str(
1112 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1113 }
1114
1115 std::vector<std::pair<unsigned int, unsigned int>> padList;
1116 padList.reserve(flatBufferPadList->Length() / 2);
1117 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1118 {
1119 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1120 }
1121
1122 armnn::PadDescriptor descriptor(padList);
1123
1124 auto layerName = GetLayerName(graph, layerIndex);
1125 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1126
1127 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1128 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1129
1130 RegisterInputSlots(graph, layerIndex, layer);
1131 RegisterOutputSlots(graph, layerIndex, layer);
1132}
1133
Derek Lamberti8ddae332019-02-21 16:29:43 +00001134void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001135{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001136 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001137
1138 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001139 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001140
Derek Lamberti8ddae332019-02-21 16:29:43 +00001141 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001142 CHECK_VALID_SIZE(inputs.size(), 1);
1143
Derek Lamberti8ddae332019-02-21 16:29:43 +00001144 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001145 CHECK_VALID_SIZE(outputs.size(), 1);
1146 auto outputInfo = ToTensorInfo(outputs[0]);
1147
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001148 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001149 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1150
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001151 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001152 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1153
Derek Lamberti8ddae332019-02-21 16:29:43 +00001154 RegisterInputSlots(graph, layerIndex, layer);
1155 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001156}
1157
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001158armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001159 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001160{
1161 armnn::Pooling2dDescriptor desc;
1162
1163 switch (pooling2dDesc->poolType())
1164 {
1165 case PoolingAlgorithm_Average:
1166 {
1167 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001168 break;
1169 }
1170 case PoolingAlgorithm_Max:
1171 {
1172 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001173 break;
1174 }
1175 default:
1176 {
1177 BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
1178 }
1179 }
1180
1181 switch (pooling2dDesc->outputShapeRounding())
1182 {
1183 case OutputShapeRounding_Floor:
1184 {
1185 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1186 break;
1187 }
1188 case OutputShapeRounding_Ceiling:
1189 {
1190 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
1191 break;
1192 }
1193 default:
1194 {
1195 BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
1196 }
1197 }
1198
1199 switch (pooling2dDesc->paddingMethod())
1200 {
1201 case PaddingMethod_Exclude:
1202 {
1203 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1204 break;
1205 }
1206 case PaddingMethod_IgnoreValue:
1207 {
1208 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
1209 break;
1210 }
1211 default:
1212 {
1213 BOOST_ASSERT_MSG(false, "Unsupported padding method");
1214 }
1215 }
1216
1217 switch (pooling2dDesc->dataLayout())
1218 {
1219 case DataLayout_NCHW:
1220 {
1221 desc.m_DataLayout = armnn::DataLayout::NCHW;
1222 break;
1223 }
1224 case DataLayout_NHWC:
1225 {
1226 desc.m_DataLayout = armnn::DataLayout::NHWC;
1227 break;
1228 }
1229 default:
1230 {
1231 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1232 }
1233 }
1234
1235 desc.m_PadRight = pooling2dDesc->padRight();
1236 desc.m_PadLeft = pooling2dDesc->padLeft();
1237 desc.m_PadBottom = pooling2dDesc->padBottom();
1238 desc.m_PadTop = pooling2dDesc->padTop();
1239 desc.m_StrideX = pooling2dDesc->strideX();
1240 desc.m_StrideY = pooling2dDesc->strideY();
1241 desc.m_PoolWidth = pooling2dDesc->poolWidth();
1242 desc.m_PoolHeight = pooling2dDesc->poolHeight();
1243
1244 return desc;
1245}
1246
Derek Lamberti8ddae332019-02-21 16:29:43 +00001247void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001248{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001249 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001250
Derek Lamberti8ddae332019-02-21 16:29:43 +00001251 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001252 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001253 CHECK_VALID_SIZE(inputs.size(), 1);
1254
Derek Lamberti8ddae332019-02-21 16:29:43 +00001255 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001256 CHECK_VALID_SIZE(outputs.size(), 1);
1257 auto outputInfo = ToTensorInfo(outputs[0]);
1258
1259    auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDesc, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001260 auto layerName = GetLayerName(graph, layerIndex);
1261 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001262 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1263
Derek Lamberti8ddae332019-02-21 16:29:43 +00001264 RegisterInputSlots(graph, layerIndex, layer);
1265 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001266}
1267
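// Computes the concrete output TensorInfo for a reshape. A single target dimension may be -1
// (stored in the unsigned vector as 0xFFFFFFFF), meaning "stretch this dimension so the element
// count matches the input"; more than one -1 is rejected with a ParseException. For example, an
// input of shape [1, 2, 3, 4] (24 elements) with targetDimsIn = { -1, 12 } resolves the stretch
// dimension to 24 / 12 = 2, giving an output shape of [2, 12].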
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001268armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00001269 const std::vector<uint32_t>& targetDimsIn)
1270{
1271 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
1272 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
1273
1274 if (stretchDim != targetDimsIn.end())
1275 {
1276 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
1277 {
1278 throw ParseException(boost::str(
1279 boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
1280 }
1281
1282 auto targetNumElements =
1283 boost::numeric_cast<unsigned int>(
1284 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
1285
1286 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
1287 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
1288 }
1289
1290 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
1291
1292 armnn::TensorInfo reshapeInfo = inputTensorInfo;
1293 reshapeInfo.SetShape(outputShape);
1294
1295 return reshapeInfo;
1296}
1297
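// Deserializes a ReshapeLayer: reads the serialized target shape, resolves it against the input
// tensor with OutputShapeOfReshape (handling a possible -1 stretch dimension) and adds a
// ReshapeLayer whose m_TargetShape is the resolved shape. If more than one input is recorded, a
// mismatch between the resolved shape and the recorded output shape raises a ParseException.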
Derek Lamberti8ddae332019-02-21 16:29:43 +00001298void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart263829c2019-02-19 15:54:14 +00001299{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001300 CHECK_LAYERS(graph, 0, layerIndex);
1301 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001302
Derek Lamberti8ddae332019-02-21 16:29:43 +00001303 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001304 CHECK_VALID_SIZE(outputs.size(), 1);
1305
1306 armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
1307 armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);
1308
Derek Lamberti8ddae332019-02-21 16:29:43 +00001309 const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
Saoirse Stewart263829c2019-02-19 15:54:14 +00001310    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->end());
1311
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001312 armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001313 const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
1314
1315    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
1316                                             outputs[0]->dimensions()->end());
1317
1318 if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
1319 {
1320 std::stringstream ss;
1321 ss << "New shape defined in reshape parameters "
1322 << reshapeOutputTensorShape
1323 << " does not equal output shape "
1324 << actualOutputTensorInfo.GetShape()
1325 << ": "
1326 << CHECK_LOCATION().AsString();
1327 throw ParseException(ss.str());
1328 }
1329
1330 armnn::ReshapeDescriptor reshapeDesc;
1331 reshapeDesc.m_TargetShape = reshapeOutputTensorShape;
1332
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001333 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001334 IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
1335 layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);
1336
Derek Lamberti8ddae332019-02-21 16:29:43 +00001337 RegisterInputSlots(graph, layerIndex, layer);
1338 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001339}
1340
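// Deserializes a SoftmaxLayer: the only serialized parameter is beta, which is copied into the
// armnn::SoftmaxDescriptor before the layer is added and its slots are registered.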
Derek Lamberti8ddae332019-02-21 16:29:43 +00001341void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001342{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001343 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001344
Derek Lamberti8ddae332019-02-21 16:29:43 +00001345 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001346 CHECK_VALID_SIZE(inputs.size(), 1);
1347
Derek Lamberti8ddae332019-02-21 16:29:43 +00001348 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001349 CHECK_VALID_SIZE(outputs.size(), 1);
1350
1351 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00001352 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001353 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001354
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001355 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
1356
1357 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1358 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1359
Derek Lamberti8ddae332019-02-21 16:29:43 +00001360 RegisterInputSlots(graph, layerIndex, layer);
1361 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001362}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001363
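// Deserializes a SpaceToBatchNdLayer. The pad list is serialized as a flat vector of
// [before, after] values, so its length must be even; it is rebuilt here as a vector of pairs.
// The block shape is copied and the data layout converted into the armnn descriptor.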
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00001364void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
1365{
1366 CHECK_LAYERS(graph, 0, layerIndex);
1367
1368 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1369 CHECK_VALID_SIZE(inputs.size(), 1);
1370
1371 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1372 CHECK_VALID_SIZE(outputs.size(), 1);
1373
1374 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
1375 auto flatBufferPadList = flatBufferDescriptor->padList();
1376 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1377
1378 if (flatBufferPadList->Length() % 2 != 0)
1379 {
1380 throw ParseException(boost::str(
1381 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1382 }
1383
1384 std::vector<std::pair<unsigned int, unsigned int>> padList;
1385 padList.reserve(flatBufferPadList->Length() / 2);
1386 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1387 {
1388 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1389 }
1390
1391 armnn::SpaceToBatchNdDescriptor descriptor;
1392 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1393 descriptor.m_BlockShape =
1394 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1395 descriptor.m_PadList = padList;
1396
1397 auto layerName = GetLayerName(graph, layerIndex);
1398 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
1399
1400 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1401 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1402
1403 RegisterInputSlots(graph, layerIndex, layer);
1404 RegisterOutputSlots(graph, layerIndex, layer);
1405}
1406
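// Translates the serialized NormalizationDescriptor into an armnn::NormalizationDescriptor,
// mapping the channel type (Across/Within), the normalization method (LocalBrightness/
// LocalContrast) and the data layout, then copying alpha, beta, k and normSize verbatim.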
Nina Drozd57728782019-02-27 10:53:27 +00001407armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
1408 Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
1409 unsigned int layerIndex)
1410{
1411 armnn::NormalizationDescriptor desc;
1412
1413 switch (normalizationDescriptor->normChannelType())
1414 {
1415 case NormalizationAlgorithmChannel_Across:
1416 {
1417 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
1418 break;
1419 }
1420 case NormalizationAlgorithmChannel_Within:
1421 {
1422 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
1423 break;
1424 }
1425 default:
1426 {
1427 BOOST_ASSERT_MSG(false, "Unsupported normalization channel type");
1428 }
1429 }
1430
1431 switch (normalizationDescriptor->normMethodType())
1432 {
1433 case NormalizationAlgorithmMethod_LocalBrightness:
1434 {
1435 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
1436 break;
1437 }
1438 case NormalizationAlgorithmMethod_LocalContrast:
1439 {
1440 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
1441 break;
1442 }
1443 default:
1444 {
1445 BOOST_ASSERT_MSG(false, "Unsupported normalization method type");
1446 }
1447 }
1448
1449 switch (normalizationDescriptor->dataLayout())
1450 {
1451 case DataLayout_NCHW:
1452 {
1453 desc.m_DataLayout = armnn::DataLayout::NCHW;
1454 break;
1455 }
1456 case DataLayout_NHWC:
1457 {
1458 desc.m_DataLayout = armnn::DataLayout::NHWC;
1459 break;
1460 }
1461 default:
1462 {
1463 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1464 }
1465 }
1466
1467 desc.m_Alpha = normalizationDescriptor->alpha();
1468 desc.m_Beta = normalizationDescriptor->beta();
1469 desc.m_K = normalizationDescriptor->k();
1470 desc.m_NormSize = normalizationDescriptor->normSize();
1471
1472 return desc;
1473}
1474
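// Deserializes a NormalizationLayer: validates one input and one output, converts the serialized
// descriptor with GetNormalizationDescriptor, adds the layer and registers its slots.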
1475void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
1476{
1477 CHECK_LAYERS(graph, 0, layerIndex);
1478
1479 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
1480
1481 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1482 CHECK_VALID_SIZE(inputs.size(), 1);
1483
1484 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1485 CHECK_VALID_SIZE(outputs.size(), 1);
1486
1487 auto outputInfo = ToTensorInfo(outputs[0]);
1488
1489 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
1490 auto layerName = GetLayerName(graph, layerIndex);
1491
1492 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
1493 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1494
1495 RegisterInputSlots(graph, layerIndex, layer);
1496 RegisterOutputSlots(graph, layerIndex, layer);
1497}
1498
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001499} // namespace armnnDeserializer