//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "Deserializer.hpp"

#include <armnn/ArmNN.hpp>
#include <armnn/Exceptions.hpp>

#include <ParserHelper.hpp>
#include <Permute.hpp>
#include <VerificationHelpers.hpp>

#include <boost/filesystem.hpp>
#include <boost/format.hpp>
#include <boost/core/ignore_unused.hpp>
#include <boost/assert.hpp>
#include <boost/log/trivial.hpp>

// The generated code based on the Serializer schema:
#include <ArmnnSchema_generated.h>

#include <fstream>
#include <algorithm>
#include <limits>
#include <numeric>

using armnn::ParseException;
using namespace armnn;
using namespace armnnSerializer;

namespace armnnDeserializer
{

namespace
{

const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();

void CheckGraph(const Deserializer::GraphPtr& graph,
                unsigned int layersIndex,
                const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
}

void CheckLayers(const Deserializer::GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layerIndex >= graph->layers()[layersIndex].size()
             && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layer index. "
                              "layers:%2% layer:%3% at %4%") %
                location.m_Function %
                layersIndex %
                layerIndex %
                location.FileLine()));
    }
}

void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
                    const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with a null tensor pointer. "
                              "at %2%") %
                location.m_Function %
                location.FileLine()));
    }
}

void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
                         const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
                             location.m_Function %
                             location.FileLine()));
    }
}

void CheckConstTensorSize(const unsigned int constTensorSize,
                          const unsigned int tensorSize,
                          const CheckLocation& location)
{
    if (constTensorSize != tensorSize)
    {
        throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
                             location.m_Function %
                             location.FileLine()));
    }
}

#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
}

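// Returns true if the runtime TensorShape matches the dimensions recorded in the
// serialised graph, comparing the rank first and then each dimension in turn.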
bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
{
    const unsigned int actualSize = actual.GetNumDimensions();
    if (actualSize != expected.size())
    {
        return false;
    }

    for (unsigned int i = 0u; i < actualSize; i++)
    {
        if (actual[i] != static_cast<unsigned int>(expected[i]))
        {
            return false;
        }
    }

    return true;
}

Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
// May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_ActivationLayer]             = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer]               = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer]         = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer]     = &Deserializer::ParseBatchNormalization;
    m_ParserFunctions[Layer_ConstantLayer]               = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer]          = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DivisionLayer]               = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_EqualLayer]                  = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer]         = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_FloorLayer]                  = &Deserializer::ParseFloor;
    m_ParserFunctions[Layer_GreaterLayer]                = &Deserializer::ParseGreater;
    m_ParserFunctions[Layer_MinimumLayer]                = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MaximumLayer]                = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MultiplicationLayer]         = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer]          = &Deserializer::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer]                    = &Deserializer::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer]                = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer]              = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_ReshapeLayer]                = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer]         = &Deserializer::ParseResizeBilinear;
    m_ParserFunctions[Layer_RsqrtLayer]                  = &Deserializer::ParseRsqrt;
    m_ParserFunctions[Layer_SoftmaxLayer]                = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer]         = &Deserializer::ParseSpaceToBatchNd;
}

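// Resolves the LayerBase of the layer stored at layerIndex, whichever concrete type
// the AnyLayer union holds. Input and output layers are wrapped in a bindable base,
// hence the extra ->base() call for those two cases.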
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                boost::format("Layer must have a type %1%") %
                Layer::Layer_NONE));
    }
}

std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
{
    auto layer = GetBaseLayer(graph, index);
    assert(layer);
    return layer->layerName()->str();
}

int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    if (layerType == Layer::Layer_InputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
    }
    else if (layerType == Layer::Layer_OutputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
    }
    return 0;
}

armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
{
    switch (dataLayout)
    {
        case armnnSerializer::DataLayout::DataLayout_NHWC:
            return armnn::DataLayout::NHWC;
        case armnnSerializer::DataLayout::DataLayout_NCHW:
        default:
            return armnn::DataLayout::NCHW;
    }
}

armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}

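// Converts a serialised TensorInfo into an armnn::TensorInfo, carrying over the data
// type, dimensions and quantisation parameters; throws for data types the
// deserializer does not handle.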
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
            type = armnn::DataType::QuantisedAsymm8;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}

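// Wraps serialised constant data (weights, biases, etc.) in an armnn::ConstTensor,
// checking that the stored element count matches the accompanying tensor info.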
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}

Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphInputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numInputs = graphPtr->inputIds()->size();

    LayerBaseRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        uint32_t inputId = graphPtr->inputIds()->Get(i);
        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(inputId));
    }
    return result;
}

Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphOutputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numOutputs = graphPtr->outputIds()->size();
    LayerBaseRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        uint32_t outputId = graphPtr->outputIds()->Get(i);
        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(outputId));
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
                                                         unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
                           (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
                                                          unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numOutputs = layer->outputSlots()->size();

    TensorRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        result[i] = layer->outputSlots()->Get(i)->tensorInfo();
    }
    return result;
}

void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(
        boost::str(
            boost::format("Layer not supported. "
                          "layerIndex: %1% "
                          "layerName: %2% / %3%") %
            layerIndex %
            layerName %
            CHECK_LOCATION().AsString()));
}

void Deserializer::ResetParser()
{
    m_Network = armnn::INetworkPtr(nullptr, nullptr);
    m_InputBindings.clear();
    m_OutputBindings.clear();
}

IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}

IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}

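// Typical use of the factory functions above, shown only as an illustrative sketch
// (the file name and binding names below are placeholders, not part of this file):
//
//     armnnDeserializer::IDeserializerPtr parser = armnnDeserializer::IDeserializer::Create();
//     std::ifstream stream("model.armnn", std::ios::binary);
//     armnn::INetworkPtr network = parser->CreateNetworkFromBinary(stream);
//     auto inputBinding  = parser->GetNetworkInputBindingInfo(0, "input");
//     auto outputBinding = parser->GetNetworkOutputBindingInfo(0, "output");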
INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph(graph);
}

armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
    return CreateNetworkFromGraph(graph);
}

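// Verifies that the raw buffer holds a SerializedGraph flatbuffer before returning its
// root: a null buffer raises InvalidArgumentException, a failed verification raises
// ParseException.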
Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
                                       CHECK_LOCATION().AsString()));
    }
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(
            boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
                                     "flatbuffers format. size:%1% %2%") %
                       len %
                       CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}

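// Builds the armnn::INetwork in two passes: every non input/output layer is first
// dispatched to its registered Parse* function, then the input/output layers are set
// up and the recorded output slots are connected to the input slots that consume them.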
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    m_GraphConnections.emplace_back(graph->layers()->size());
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (size_t connectionIndex = 0; connectionIndex < m_GraphConnections[0].size(); ++connectionIndex)
    {
        if (m_GraphConnections[0][connectionIndex].outputSlot != nullptr)
        {
            for (size_t inputSlotIdx = 0;
                 inputSlotIdx < m_GraphConnections[0][connectionIndex].inputSlots.size();
                 ++inputSlotIdx)
            {
                m_GraphConnections[0][connectionIndex].outputSlot->Connect(
                    *(m_GraphConnections[0][connectionIndex].inputSlots[inputSlotIdx]));
            }
        }
    }

    return std::move(m_Network);
}

BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
                                                          const std::string& name) const
{
    for (auto inputBinding : m_InputBindings)
    {
        if (inputBinding.first == name)
        {
            return inputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No input binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
                                                           const std::string& name) const
{
    for (auto outputBinding : m_OutputBindings)
    {
        if (outputBinding.first == name)
        {
            return outputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No output binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

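// SetupInputLayers/SetupOutputLayers create the armnn input and output layers listed
// in the graph's inputIds/outputIds and record a (layer name, BindingPointInfo) pair
// for each, which GetNetworkInputBindingInfo/GetNetworkOutputBindingInfo look up later.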
void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto inputs = GetGraphInputs(graph);
    m_InputBindings.clear();
    m_InputBindings.reserve(inputs.size());
    for (auto const& input : inputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, input->index());
        IConnectableLayer* layer =
            m_Network->AddInputLayer(bindingId, input->layerName()->c_str());

        auto tensorInfo = ToTensorInfo(input->outputSlots()->Get(0)->tensorInfo());
        layer->GetOutputSlot(0).SetTensorInfo(tensorInfo);

        RegisterOutputSlots(graph, input->index(), layer);

        BOOST_ASSERT_MSG(input->layerName()->c_str(), "Input has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(input->layerName()->c_str(), bindingInfo));
    }
}

void Deserializer::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto outputs = GetGraphOutputs(graph);
    m_OutputBindings.clear();
    m_OutputBindings.reserve(outputs.size());
    for (auto const& output : outputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, output->index());
        IConnectableLayer* layer =
            m_Network->AddOutputLayer(bindingId, output->layerName()->c_str());

        RegisterInputSlots(graph, output->index(), layer);

        auto baseLayer = GetBaseLayer(graph, output->index());
        auto sourceLayerIndex = baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex();
        auto sourceLayer = GetBaseLayer(graph, sourceLayerIndex);
        auto tensorInfo = ToTensorInfo(sourceLayer->outputSlots()->Get(0)->tensorInfo());

        BOOST_ASSERT_MSG(output->layerName()->c_str(), "Output has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(output->layerName()->c_str(), bindingInfo));
    }
}

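// RegisterOutputSlots/RegisterInputSlots check the slot counts of the serialised layer
// against the armnn layer that was just created, then store the slot pointers in
// m_GraphConnections so CreateNetworkFromGraph can wire the connections afterwards.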
void Deserializer::RegisterOutputSlots(GraphPtr graph,
                                       uint32_t layerIndex,
                                       IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index: %3% %4%") %
                       parsedLayer->outputSlots()->size() %
                       layer->GetNumOutputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumOutputSlots(); ++slotIndex)
    {
        armnn::IOutputSlot* slot = &(layer->GetOutputSlot(slotIndex));
        RegisterOutputSlotOfConnection(layerIndex, slot);
    }
}

void Deserializer::RegisterInputSlots(GraphPtr graph,
                                      uint32_t layerIndex,
                                      armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index:%3% %4%") %
                       parsedLayer->inputSlots()->size() %
                       layer->GetNumInputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumInputSlots(); ++slotIndex)
    {
        armnn::IInputSlot* slot = &(layer->GetInputSlot(slotIndex));
        uint32_t sourceLayerIndex = parsedLayer->inputSlots()->Get(slotIndex)->connection()->sourceLayerIndex();
        RegisterInputSlotOfConnection(sourceLayerIndex, slot);
    }
}

void Deserializer::RegisterInputSlotOfConnection(uint32_t connectionIndex,
                                                 armnn::IInputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);

    Slots& slots = m_GraphConnections[0][connectionIndex];
    slots.inputSlots.push_back(slot);
}

void Deserializer::RegisterOutputSlotOfConnection(uint32_t connectionIndex,
                                                  armnn::IOutputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);

    Slots& slots = m_GraphConnections[0][connectionIndex];

    // assuming there is only one producer for that tensor
    if (slots.outputSlot != nullptr)
    {
        throw ParseException(boost::str(
            boost::format("Another layer has already registered itself as the producer of "
                          "connection:%1% / %2%") %
            connectionIndex %
            CHECK_LOCATION().AsString()));
    }

    slots.outputSlot = slot;
}

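// The Parse* functions below all follow the same pattern: validate the layer index,
// fetch the serialised inputs/outputs, rebuild the descriptor and any constant tensors,
// add the matching layer to m_Network, set the output tensor info, and register the
// layer's slots for the connection pass.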
void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::ActivationDescriptor descriptor;
    descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
    descriptor.m_A = serializerDescriptor->a();
    descriptor.m_B = serializerDescriptor->b();

    IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
                                                             layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
    auto flatBufferCrops = flatBufferDescriptor->crops();
    auto flatBufferBlockShape = flatBufferDescriptor->blockShape();

    if (flatBufferCrops->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> crops;
    crops.reserve(flatBufferCrops->Length() / 2);
    for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
    {
        crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
    }

    armnn::BatchToSpaceNdDescriptor descriptor;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
    descriptor.m_BlockShape =
        std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
    descriptor.m_Crops = crops;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = boost::str(boost::format("BatchNormalization:%1%") % layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::BatchNormalizationDescriptor descriptor;
    descriptor.m_Eps = serializerDescriptor->eps();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor mean     = ToConstTensor(serializerLayer->mean());
    armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
    armnn::ConstTensor beta     = ToConstTensor(serializerLayer->beta());
    armnn::ConstTensor gamma    = ToConstTensor(serializerLayer->gamma());

    IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
                                                                     mean,
                                                                     variance,
                                                                     beta,
                                                                     gamma,
                                                                     layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
    auto serializerInput = serializerLayer->input();

    armnn::ConstTensor input = ToConstTensor(serializerInput);

    IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::Convolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
    }
    IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
                                                                weights,
                                                                biases,
                                                                layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::DepthwiseConvolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
    }
    IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                                         weights,
                                                                         biases,
                                                                         layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddEqualLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddGreaterLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    armnn::IConnectableLayer* layer;

    // pass the deserialized name through, so the layer keeps its original name
    layer = m_Network->AddFloorLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
    fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
    fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();

    armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());

    armnn::IConnectableLayer* layer;
    if (flatBufferDescriptor->biasEnabled())
    {
        armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  weightsTensor,
                                                  biasTensorData,
                                                  layerName.c_str());
    }
    else
    {
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  weightsTensor,
                                                  layerName.c_str());
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
    auto flatBufferPadList = flatBufferDescriptor->padList();

    if (flatBufferPadList->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> padList;
    padList.reserve(flatBufferPadList->Length() / 2);
    for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
    {
        padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
    }

    armnn::PadDescriptor descriptor(padList);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

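// Deserializes a Permute layer. The serialized dimMappings vector is passed
// straight into an armnn::PermutationVector; in ArmNN's convention an entry
// dimMappings[i] gives the position that source dimension i takes in the output,
// so e.g. {0, 3, 1, 2} sends dimension 1 to position 3 and so on.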
void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto dimsMapping =
        graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);
    const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));

    IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

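// Translates the serialized Pooling2dDescriptor enums (pool type, output shape
// rounding, padding method, data layout) into their armnn equivalents and copies
// the scalar fields across. Note that unknown enum values only trigger an assert,
// so a malformed file is not rejected in release builds.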
armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
                                                              unsigned int layerIndex)
{
    armnn::Pooling2dDescriptor desc;

    switch (pooling2dDesc->poolType())
    {
        case PoolingAlgorithm_Average:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Average;
            break;
        }
        case PoolingAlgorithm_Max:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Max;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
        }
    }

    switch (pooling2dDesc->outputShapeRounding())
    {
        case OutputShapeRounding_Floor:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
            break;
        }
        case OutputShapeRounding_Ceiling:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
        }
    }

    switch (pooling2dDesc->paddingMethod())
    {
        case PaddingMethod_Exclude:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
            break;
        }
        case PaddingMethod_IgnoreValue:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported padding method");
        }
    }

    switch (pooling2dDesc->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    desc.m_PadRight   = pooling2dDesc->padRight();
    desc.m_PadLeft    = pooling2dDesc->padLeft();
    desc.m_PadBottom  = pooling2dDesc->padBottom();
    desc.m_PadTop     = pooling2dDesc->padTop();
    desc.m_StrideX    = pooling2dDesc->strideX();
    desc.m_StrideY    = pooling2dDesc->strideY();
    desc.m_PoolWidth  = pooling2dDesc->poolWidth();
    desc.m_PoolHeight = pooling2dDesc->poolHeight();

    return desc;
}

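// Deserializes a Pooling2d layer using the descriptor translation above.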
void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

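// Computes the output TensorInfo for a Reshape. A single -1 in the target shape
// acts as a wildcard inferred from the element count; for example, an input of
// shape [2, 3, 4] (24 elements) with targetDimsIn = {4, -1} yields an output
// shape of [4, 6]. More than one -1 raises a ParseException.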
armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                     const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(boost::str(
                boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
        }

        auto targetNumElements =
            boost::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}

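// Deserializes a Reshape layer. The target shape from the flatbuffer is resolved
// through OutputShapeOfReshape and compared against the serialized output
// dimensions; note that the consistency check is only applied when the layer has
// more than one recorded input.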
void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

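// Deserializes a ResizeBilinear layer: target width/height and data layout are
// read directly from the serialized descriptor.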
void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();

    armnn::ResizeBilinearDescriptor descriptor;
    descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
    descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddResizeBilinearLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

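// Deserializes a Softmax layer; beta is the only serialized parameter and scales
// the logits before exponentiation (beta == 1.0f gives the standard softmax).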
void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::SoftmaxDescriptor descriptor;
    descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
    auto layerName = GetLayerName(graph, layerIndex);

    IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

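// Deserializes a SpaceToBatchNd layer. As with Pad, the pad list is stored as a
// flat vector of (before, after) pairs, and the block shape is copied verbatim;
// e.g. a block shape of {2, 2} with zero padding moves each 2x2 spatial block
// into the batch dimension.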
void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
    auto flatBufferPadList = flatBufferDescriptor->padList();
    auto flatBufferBlockShape = flatBufferDescriptor->blockShape();

    if (flatBufferPadList->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> padList;
    padList.reserve(flatBufferPadList->Length() / 2);
    for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
    {
        padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
    }

    armnn::SpaceToBatchNdDescriptor descriptor;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
    descriptor.m_BlockShape =
        std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
    descriptor.m_PadList = padList;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

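// Translates the serialized NormalizationDescriptor (channel type, method and
// data layout plus alpha/beta/k/normSize) into the armnn descriptor, mirroring
// the structure of GetPoolingDescriptor above.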
armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
    Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
    unsigned int layerIndex)
{
    armnn::NormalizationDescriptor desc;

    switch (normalizationDescriptor->normChannelType())
    {
        case NormalizationAlgorithmChannel_Across:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
            break;
        }
        case NormalizationAlgorithmChannel_Within:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported normalization channel type");
        }
    }

    switch (normalizationDescriptor->normMethodType())
    {
        case NormalizationAlgorithmMethod_LocalBrightness:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
            break;
        }
        case NormalizationAlgorithmMethod_LocalContrast:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported normalization method type");
        }
    }

    switch (normalizationDescriptor->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    desc.m_Alpha    = normalizationDescriptor->alpha();
    desc.m_Beta     = normalizationDescriptor->beta();
    desc.m_K        = normalizationDescriptor->k();
    desc.m_NormSize = normalizationDescriptor->normSize();

    return desc;
}

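// Deserializes a Normalization layer using the descriptor translation above.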
void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto outputInfo = ToTensorInfo(outputs[0]);

    auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
    auto layerName = GetLayerName(graph, layerIndex);

    IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

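// Deserializes an Rsqrt layer (element-wise 1/sqrt(x)); it has no descriptor, so
// only the layer name is needed.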
void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddRsqrtLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

} // namespace armnnDeserializer
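
// A minimal sketch of how a client might drive this deserializer through the
// public IDeserializer interface. The exact names and signatures
// (CreateNetworkFromBinary, GetNetworkInputBindingInfo, the ".armnn" extension)
// are assumptions and may differ between releases:
//
//     std::ifstream file("model.armnn", std::ios::binary);
//     std::vector<std::uint8_t> binaryContent((std::istreambuf_iterator<char>(file)),
//                                             std::istreambuf_iterator<char>());
//     auto parser = armnnDeserializer::IDeserializer::Create();
//     armnn::INetworkPtr network = parser->CreateNetworkFromBinary(binaryContent);
//     // Input/output bindings are then looked up by layer name, e.g.
//     // parser->GetNetworkInputBindingInfo(0, "input"), before the network is
//     // optimised and loaded into an armnn::IRuntime.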