//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "Deserializer.hpp"

#include <armnn/ArmNN.hpp>
#include <armnn/Exceptions.hpp>

#include <ParserHelper.hpp>
#include <Permute.hpp>
#include <VerificationHelpers.hpp>

#include <boost/filesystem.hpp>
#include <boost/format.hpp>
#include <boost/core/ignore_unused.hpp>
#include <boost/assert.hpp>
#include <boost/log/trivial.hpp>

// The generated code based on the Serialize schema:
#include <ArmnnSchema_generated.h>

#include <fstream>
#include <algorithm>
#include <limits>
#include <numeric>

using armnn::ParseException;
using namespace armnn;
using namespace armnnSerializer;

namespace armnnDeserializer
{

namespace
{

const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();

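// Validation helpers used through the CHECK_* macros below. Each one throws a
// ParseException carrying the calling function and file/line when the flatbuffer
// graph, layer index or tensor pointer it is given cannot be used.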
void CheckGraph(const Deserializer::GraphPtr& graph,
                unsigned int layersIndex,
                const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
}

void CheckLayers(const Deserializer::GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layerIndex >= graph->layers()[layersIndex].size()
             && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layer index. "
                              "layers:%2% layer:%3% at %4%") %
                location.m_Function %
                layersIndex %
                layerIndex %
                location.FileLine()));
    }
}

void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
                    const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with a null tensor pointer. "
                              "at %2%") %
                location.m_Function %
                location.FileLine()));
    }
}

void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
                         const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

void CheckConstTensorSize(const unsigned int constTensorSize,
                          const unsigned int tensorSize,
                          const CheckLocation& location)
{
    if (constTensorSize != tensorSize)
    {
        throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
}

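// Returns true only when the runtime TensorShape matches the dimensions recorded
// in the serialized tensor, element by element.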
bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
{
    const unsigned int actualSize = actual.GetNumDimensions();
    if (actualSize != expected.size())
    {
        return false;
    }

    for (unsigned int i = 0u; i < actualSize; i++)
    {
        if (actual[i] != static_cast<unsigned int>(expected[i]))
        {
            return false;
        }
    }

    return true;
}

Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
// May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_ActivationLayer]             = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer]               = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer]         = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_ConstantLayer]                = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer]          = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DivisionLayer]               = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_EqualLayer]                  = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer]         = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_MinimumLayer]                = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MaximumLayer]                = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MultiplicationLayer]         = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_PermuteLayer]                = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer]              = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_ReshapeLayer]                = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_SoftmaxLayer]                = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer]         = &Deserializer::ParseSpaceToBatchNd;
}

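// Fetches the LayerBase table stored inside whichever concrete layer type sits at
// layerIndex. Input and Output layers are wrapped in a bindable table, hence the
// extra base()->base() hop for those two cases.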
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                boost::format("Layer must have a type %1%") %
                Layer::Layer_NONE));
    }
}

std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
{
    auto layer = GetBaseLayer(graph, index);
    assert(layer);
    return layer->layerName()->str();
}

int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    if (layerType == Layer::Layer_InputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
    }
    else if (layerType == Layer::Layer_OutputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
    }
    return 0;
}

armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
{
    switch (dataLayout)
    {
        case armnnSerializer::DataLayout::DataLayout_NHWC:
            return armnn::DataLayout::NHWC;
        case armnnSerializer::DataLayout::DataLayout_NCHW:
        default:
            return armnn::DataLayout::NCHW;
    }
}

armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}

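// Builds an armnn::TensorInfo from the serialized tensor: data type, quantization
// scale/offset and dimensions. Unsupported data types raise a ParseException.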
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
            type = armnn::DataType::QuantisedAsymm8;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}

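// Wraps the serialized constant data in an armnn::ConstTensor after checking that
// the element count matches the TensorInfo. The ConstTensor refers to the
// flatbuffer storage rather than taking a copy of it.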
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}

Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphInputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numInputs = graphPtr->inputIds()->size();

    LayerBaseRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        uint32_t inputId = graphPtr->inputIds()->Get(i);
        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(inputId));
    }
    return result;
}

Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphOutputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numOutputs = graphPtr->outputIds()->size();
    LayerBaseRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        uint32_t outputId = graphPtr->outputIds()->Get(i);
        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(outputId));
    }
    return result;
}

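// GetInputs resolves, for each input slot of the layer at layerIndex, the tensor
// info published by the producing layer's first output slot; GetOutputs simply
// returns the tensor infos attached to the layer's own output slots.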
Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
                                                         unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
            (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
                                                          unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numOutputs = layer->outputSlots()->size();

    TensorRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        result[i] = layer->outputSlots()->Get(i)->tensorInfo();
    }
    return result;
}

void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(
        boost::str(
            boost::format("Layer not supported. "
                          "layerIndex: %1% "
                          "layerName: %2% / %3%") %
            layerIndex %
            layerName %
            CHECK_LOCATION().AsString()));
}

void Deserializer::ResetParser()
{
    m_Network = armnn::INetworkPtr(nullptr, nullptr);
    m_InputBindings.clear();
    m_OutputBindings.clear();
}

IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}

IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}

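// Typical usage, as a rough sketch only. The binding names are placeholders and
// the optimization/runtime steps that would normally follow live in the public
// ArmNN headers, not in this file:
//
//     armnnDeserializer::IDeserializerPtr parser = armnnDeserializer::IDeserializer::Create();
//     std::ifstream file("model.armnn", std::ios::binary);
//     armnn::INetworkPtr network = parser->CreateNetworkFromBinary(file);
//     BindingPointInfo inputInfo  = parser->GetNetworkInputBindingInfo(0, "input");
//     BindingPointInfo outputInfo = parser->GetNetworkOutputBindingInfo(0, "output");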
INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph(graph);
}

armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
    return CreateNetworkFromGraph(graph);
}

Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
                                                  CHECK_LOCATION().AsString()));
    }
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(
            boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
                                     "flatbuffers format. size:%1% %2%") %
                       len %
                       CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}

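// Two-pass network construction: every non Input/Output layer is first handed to
// its registered Parse* function (which also records slot information), then the
// Input and Output layers are created, and finally the recorded output slots are
// connected to their input slots.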
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    m_GraphConnections.emplace_back(graph->layers()->size());
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (size_t connectionIndex = 0; connectionIndex < m_GraphConnections[0].size(); ++connectionIndex)
    {
        if (m_GraphConnections[0][connectionIndex].outputSlot != nullptr)
        {
            for (size_t inputSlotIdx = 0;
                 inputSlotIdx < m_GraphConnections[0][connectionIndex].inputSlots.size();
                 ++inputSlotIdx)
            {
                m_GraphConnections[0][connectionIndex].outputSlot->Connect(
                    *(m_GraphConnections[0][connectionIndex].inputSlots[inputSlotIdx]));
            }
        }
    }

    return std::move(m_Network);
}

BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
                                                          const std::string& name) const
{
    for (auto inputBinding : m_InputBindings)
    {
        if (inputBinding.first == name)
        {
            return inputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No input binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
                                                           const std::string& name) const
{
    for (auto outputBinding : m_OutputBindings)
    {
        if (outputBinding.first == name)
        {
            return outputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No output binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

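// Creates an armnn Input layer for every entry in the graph's inputIds and stores
// a (layer name, {binding id, tensor info}) pair so callers can retrieve it later
// through GetNetworkInputBindingInfo.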
void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto inputs = GetGraphInputs(graph);
    m_InputBindings.clear();
    m_InputBindings.reserve(inputs.size());
    for (auto const& input : inputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, input->index());
        IConnectableLayer* layer =
            m_Network->AddInputLayer(bindingId, input->layerName()->c_str());

        auto tensorInfo = ToTensorInfo(input->outputSlots()->Get(0)->tensorInfo());
        layer->GetOutputSlot(0).SetTensorInfo(tensorInfo);

        RegisterOutputSlots(graph, input->index(), layer);

        BOOST_ASSERT_MSG(input->layerName()->c_str(), "Input has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(input->layerName()->c_str(), bindingInfo));
    }
}

void Deserializer::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto outputs = GetGraphOutputs(graph);
    m_OutputBindings.clear();
    m_OutputBindings.reserve(outputs.size());
    for (auto const& output : outputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, output->index());
        IConnectableLayer* layer =
            m_Network->AddOutputLayer(bindingId, output->layerName()->c_str());

        RegisterInputSlots(graph, output->index(), layer);

        auto baseLayer = GetBaseLayer(graph, output->index());
        auto sourceLayerIndex = baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex();
        auto sourceLayer = GetBaseLayer(graph, sourceLayerIndex);
        auto tensorInfo = ToTensorInfo(sourceLayer->outputSlots()->Get(0)->tensorInfo());

        BOOST_ASSERT_MSG(output->layerName()->c_str(), "Output has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(output->layerName()->c_str(), bindingInfo));
    }
}

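// The Register*Slots helpers only record which armnn slots belong to each
// serialized connection index; the actual Connect() calls are made at the end of
// CreateNetworkFromGraph once every layer exists.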
void Deserializer::RegisterOutputSlots(GraphPtr graph,
                                       uint32_t layerIndex,
                                       IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index: %3% %4%") %
                       parsedLayer->outputSlots()->size() %
                       layer->GetNumOutputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumOutputSlots(); ++slotIndex)
    {
        armnn::IOutputSlot* slot = &(layer->GetOutputSlot(slotIndex));
        RegisterOutputSlotOfConnection(layerIndex, slot);
    }
}

void Deserializer::RegisterInputSlots(GraphPtr graph,
                                      uint32_t layerIndex,
                                      armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index:%3% %4%") %
                       parsedLayer->inputSlots()->size() %
                       layer->GetNumInputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumInputSlots(); ++slotIndex)
    {
        armnn::IInputSlot* slot = &(layer->GetInputSlot(slotIndex));
        uint32_t sourceLayerIndex = parsedLayer->inputSlots()->Get(slotIndex)->connection()->sourceLayerIndex();
        RegisterInputSlotOfConnection(sourceLayerIndex, slot);
    }
}

void Deserializer::RegisterInputSlotOfConnection(uint32_t connectionIndex,
                                                 armnn::IInputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);

    Slots& slots = m_GraphConnections[0][connectionIndex];
    slots.inputSlots.push_back(slot);
}

void Deserializer::RegisterOutputSlotOfConnection(uint32_t connectionIndex,
                                                  armnn::IOutputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);

    Slots& slots = m_GraphConnections[0][connectionIndex];

    // assuming there is only one producer for that tensor
    if (slots.outputSlot != nullptr)
    {
        throw ParseException(boost::str(
            boost::format("Another layer has already registered itself as the producer of "
                          "connection:%1% / %2%") %
            connectionIndex %
            CHECK_LOCATION().AsString()));
    }

    slots.outputSlot = slot;
}

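// The Parse* handlers below all follow the same pattern: validate the slot counts,
// pull the layer-specific descriptor and any constant tensors out of the
// flatbuffer, add the matching armnn layer, set the output tensor info and
// register the slots for the connection pass.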
void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::ActivationDescriptor descriptor;
    descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
    descriptor.m_A = serializerDescriptor->a();
    descriptor.m_B = serializerDescriptor->b();

    IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
                                                             layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
    auto flatBufferCrops = flatBufferDescriptor->crops();
    auto flatBufferBlockShape = flatBufferDescriptor->blockShape();

    if (flatBufferCrops->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> crops;
    crops.reserve(flatBufferCrops->Length() / 2);
    for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
    {
        crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
    }

    armnn::BatchToSpaceNdDescriptor descriptor;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
    descriptor.m_BlockShape =
        std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
    descriptor.m_Crops = crops;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
    auto serializerInput = serializerLayer->input();

    armnn::ConstTensor input = ToConstTensor(serializerInput);

    IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::Convolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
    }
    IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
                                                                weights,
                                                                biases,
                                                                layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::DepthwiseConvolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
    }
    IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                                         weights,
                                                                         biases,
                                                                         layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddEqualLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
    fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
    fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();

    armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());

    armnn::IConnectableLayer* layer;
    if (flatBufferDescriptor->biasEnabled())
    {
        armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  weightsTensor,
                                                  biasTensorData,
                                                  layerName.c_str());
    }
    else
    {
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  weightsTensor,
                                                  layerName.c_str());
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto dimsMapping =
        graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);
    const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));

    IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

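// Translates each field of the serialized Pooling2dDescriptor into its armnn
// counterpart; unrecognised enum values only trigger an assert and otherwise
// leave the descriptor's default value in place.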
armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
                                                              unsigned int layerIndex)
{
    armnn::Pooling2dDescriptor desc;

    switch (pooling2dDesc->poolType())
    {
        case PoolingAlgorithm_Average:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Average;
            break;
        }
        case PoolingAlgorithm_Max:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Max;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
        }
    }

    switch (pooling2dDesc->outputShapeRounding())
    {
        case OutputShapeRounding_Floor:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
            break;
        }
        case OutputShapeRounding_Ceiling:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
        }
    }

    switch (pooling2dDesc->paddingMethod())
    {
        case PaddingMethod_Exclude:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
            break;
        }
        case PaddingMethod_IgnoreValue:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported padding method");
        }
    }

    switch (pooling2dDesc->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    desc.m_PadRight   = pooling2dDesc->padRight();
    desc.m_PadLeft    = pooling2dDesc->padLeft();
    desc.m_PadBottom  = pooling2dDesc->padBottom();
    desc.m_PadTop     = pooling2dDesc->padTop();
    desc.m_StrideX    = pooling2dDesc->strideX();
    desc.m_StrideY    = pooling2dDesc->strideY();
    desc.m_PoolWidth  = pooling2dDesc->poolWidth();
    desc.m_PoolHeight = pooling2dDesc->poolHeight();

    return desc;
}

void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

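// Resolves a reshape target that may contain a single -1 "stretch" dimension.
// For example, an input with 24 elements reshaped to {2, -1} resolves to {2, 12};
// more than one -1 is rejected.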
armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                     const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(boost::str(
                boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
        }

        auto targetNumElements =
            boost::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}

void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::SoftmaxDescriptor descriptor;
    descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
    auto layerName = GetLayerName(graph, layerIndex);

    IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
    auto flatBufferPadList = flatBufferDescriptor->padList();
    auto flatBufferBlockShape = flatBufferDescriptor->blockShape();

    if (flatBufferPadList->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> padList;
    padList.reserve(flatBufferPadList->Length() / 2);
    for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
    {
        padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
    }

    armnn::SpaceToBatchNdDescriptor descriptor;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
    descriptor.m_BlockShape =
        std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
    descriptor.m_PadList = padList;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

} // namespace armnnDeserializer