//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "Deserializer.hpp"

#include <armnn/ArmNN.hpp>
#include <armnn/Exceptions.hpp>

#include <ParserHelper.hpp>
#include <Permute.hpp>
#include <VerificationHelpers.hpp>

#include <boost/filesystem.hpp>
#include <boost/format.hpp>
#include <boost/core/ignore_unused.hpp>
#include <boost/assert.hpp>
#include <boost/log/trivial.hpp>

// The generated code based on the Serializer schema:
#include <ArmnnSchema_generated.h>

#include <fstream>
#include <algorithm>
#include <limits>
#include <numeric>

using armnn::ParseException;
using namespace armnn;
using namespace armnnSerializer;

namespace armnnDeserializer
{

namespace
{

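// Sentinel layer index accepted by CheckLayers where no concrete layer index
// applies (presumably reserved for the virtual input/output binding layers).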
const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();

void CheckGraph(const Deserializer::GraphPtr& graph,
                unsigned int layersIndex,
                const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
}

void CheckLayers(const Deserializer::GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layerIndex >= graph->layers()[layersIndex].size()
             && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layer index. "
                              "layers:%2% layer:%3% at %4%") %
                location.m_Function %
                layersIndex %
                layerIndex %
                location.FileLine()));
    }
}

void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
                    const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with a null tensor pointer. "
                              "at %2%") %
                location.m_Function %
                location.FileLine()));
    }
}

void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
                         const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

void CheckConstTensorSize(const unsigned int constTensorSize,
                          const unsigned int tensorSize,
                          const CheckLocation& location)
{
    if (constTensorSize != tensorSize)
    {
        throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
}
162
Saoirse Stewart263829c2019-02-19 15:54:14 +0000163bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
164{
165 const unsigned int actualSize = actual.GetNumDimensions();
166 if (actualSize != expected.size())
167 {
168 return false;
169 }
170
171 for (unsigned int i = 0u; i < actualSize; i++)
172 {
173 if (actual[i] != static_cast<unsigned int>(expected[i]))
174 {
175 return false;
176 }
177 }
178
179 return true;
180}
181
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000182Deserializer::Deserializer()
Kevin May43a799c2019-02-08 16:31:42 +0000183: m_Network(nullptr, nullptr),
184//May require LayerType_Max to be included
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000185m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
Kevin May43a799c2019-02-08 16:31:42 +0000186{
187 // register supported layers
Mike Kellyaf484012019-02-20 16:53:11 +0000188 m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000189 m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000190 m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &Deserializer::ParseBatchToSpaceNd;
ruoyan018e7fa232019-02-28 15:09:07 +0000191 m_ParserFunctions[Layer_BatchNormalizationLayer] = &Deserializer::ParseBatchNormalization;
Conor Kennedy76277882019-02-26 08:29:54 +0000192 m_ParserFunctions[Layer_ConstantLayer] = &Deserializer::ParseConstant;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000193 m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
194 m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
Éanna Ó Catháin58885892019-02-27 16:16:39 +0000195 m_ParserFunctions[Layer_DivisionLayer] = &Deserializer::ParseDivision;
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +0000196 m_ParserFunctions[Layer_EqualLayer] = &Deserializer::ParseEqual;
Sadik Armagandbb0c0c2019-02-21 09:01:41 +0000197 m_ParserFunctions[Layer_FullyConnectedLayer] = &Deserializer::ParseFullyConnected;
Finn Williamsdd2ba7e2019-03-01 11:51:52 +0000198 m_ParserFunctions[Layer_FloorLayer] = &Deserializer::ParseFloor;
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000199 m_ParserFunctions[Layer_GatherLayer] = &Deserializer::ParseGather;
Conor Kennedy79ffdf52019-03-01 14:24:54 +0000200 m_ParserFunctions[Layer_GreaterLayer] = &Deserializer::ParseGreater;
Narumol Prangnawarat495701f2019-03-07 17:31:34 +0000201 m_ParserFunctions[Layer_L2NormalizationLayer] = &Deserializer::ParseL2Normalization;
Aron Virginas-Tar377351e2019-02-27 14:42:31 +0000202 m_ParserFunctions[Layer_MaximumLayer] = &Deserializer::ParseMaximum;
Sadik Armaganac97c8c2019-03-04 17:44:21 +0000203 m_ParserFunctions[Layer_MeanLayer] = &Deserializer::ParseMean;
204 m_ParserFunctions[Layer_MinimumLayer] = &Deserializer::ParseMinimum;
Jim Flynnac25a1b2019-02-28 10:40:49 +0000205 m_ParserFunctions[Layer_MergerLayer] = &Deserializer::ParseMerger;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000206 m_ParserFunctions[Layer_MultiplicationLayer] = &Deserializer::ParseMultiplication;
Nina Drozd57728782019-02-27 10:53:27 +0000207 m_ParserFunctions[Layer_NormalizationLayer] = &Deserializer::ParseNormalization;
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +0000208 m_ParserFunctions[Layer_PadLayer] = &Deserializer::ParsePad;
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +0000209 m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000210 m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
211 m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +0000212 m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
Sadik Armagan8b42a382019-03-01 14:24:49 +0000213 m_ParserFunctions[Layer_RsqrtLayer] = &Deserializer::ParseRsqrt;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000214 m_ParserFunctions[Layer_SoftmaxLayer] = &Deserializer::ParseSoftmax;
Nattapat Chaimanowong45286992019-02-26 15:53:02 +0000215 m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &Deserializer::ParseSpaceToBatchNd;
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +0000216 m_ParserFunctions[Layer_StridedSliceLayer] = &Deserializer::ParseStridedSlice;
Conor Kennedyda1f9752019-03-01 14:37:12 +0000217 m_ParserFunctions[Layer_SubtractionLayer] = &Deserializer::ParseSubtraction;
Kevin May43a799c2019-02-08 16:31:42 +0000218}
219
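// Resolves the flatbuffer layer union to the common LayerBase metadata.
// Input and Output layers wrap their LayerBase in a bindable base (which also
// carries the layerBindingId), hence the extra ->base() indirection below.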
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                boost::format("Layer must have a type %1%") %
                Layer::Layer_NONE));
    }
}

std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
{
    auto layer = GetBaseLayer(graph, index);
    assert(layer);
    return layer->layerName()->str();
}

int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    if (layerType == Layer::Layer_InputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
    }
    else if (layerType == Layer::Layer_OutputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
    }
    return 0;
}

armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
{
    switch (dataLayout)
    {
        case armnnSerializer::DataLayout::DataLayout_NHWC:
            return armnn::DataLayout::NHWC;
        case armnnSerializer::DataLayout::DataLayout_NCHW:
        default:
            return armnn::DataLayout::NCHW;
    }
}

armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}

armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
            type = armnn::DataType::QuantisedAsymm8;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}

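// Converts the flatbuffer ConstTensor (byte/short/int/long data variants) into an
// armnn::ConstTensor, checking that the payload size matches the TensorInfo's
// element count.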
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}

Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphInputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numInputs = graphPtr->inputIds()->size();

    LayerBaseRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        uint32_t inputId = graphPtr->inputIds()->Get(i);
        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(inputId));
    }
    return result;
}

Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphOutputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numOutputs = graphPtr->outputIds()->size();
    LayerBaseRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        uint32_t outputId = graphPtr->outputIds()->Get(i);
        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(outputId));
    }
    return result;
}

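// Returns the serialized TensorInfo of each tensor feeding the given layer, found
// by following every input slot's connection back to the source layer's output slot.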
Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
                                                         unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
            (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
                                                          unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numOutputs = layer->outputSlots()->size();

    TensorRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        result[i] = layer->outputSlots()->Get(i)->tensorInfo();
    }
    return result;
}

void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(
        boost::str(
            boost::format("Layer not supported. "
                          "layerIndex: %1% "
                          "layerName: %2% / %3%") %
            layerIndex %
            layerName %
            CHECK_LOCATION().AsString()));
}

void Deserializer::ResetParser()
{
    m_Network = armnn::INetworkPtr(nullptr, nullptr);
    m_InputBindings.clear();
    m_OutputBindings.clear();
}

IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}

IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}

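// Typical client usage (illustrative sketch; the buffer and the "input"/"output"
// binding names below are placeholders, not defined in this translation unit):
//     armnnDeserializer::IDeserializerPtr parser = armnnDeserializer::IDeserializer::Create();
//     armnn::INetworkPtr network = parser->CreateNetworkFromBinary(serializedBytes);
//     auto inputBinding  = parser->GetNetworkInputBindingInfo(0, "input");
//     auto outputBinding = parser->GetNetworkOutputBindingInfo(0, "output");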
INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph(graph);
}

armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
    return CreateNetworkFromGraph(graph);
}

Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
                                                  CHECK_LOCATION().AsString()));
    }
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(
            boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
                                     "flatbuffers format. size:%1% %2%") %
                       len %
                       CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}

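// Builds the INetwork from the deserialized graph in two passes:
//  1) add a layer for every non Input/Output entry via the registered parser
//     functions, then add the Input/Output layers and their bindings;
//  2) connect each recorded output slot to the input slots that reference it.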
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    m_GraphConnections.emplace_back(graph->layers()->size());
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (size_t connectionsIndex = 0; connectionsIndex < m_GraphConnections[0].size(); ++connectionsIndex)
    {
        SlotsMap& slotsMap = m_GraphConnections[0][connectionsIndex];
        for (unsigned int outputSlotIndex = 0; outputSlotIndex < slotsMap.outputSlots.size(); outputSlotIndex++)
        {
            if (slotsMap.inputSlots.find(outputSlotIndex) != slotsMap.inputSlots.end())
            {
                for (armnn::IInputSlot* inputSlot : slotsMap.inputSlots[outputSlotIndex])
                {
                    slotsMap.outputSlots[outputSlotIndex]->Connect(*inputSlot);
                }
            }
        }
    }

    return std::move(m_Network);
}

BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
                                                          const std::string& name) const
{
    for (auto inputBinding : m_InputBindings)
    {
        if (inputBinding.first == name)
        {
            return inputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No input binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
                                                           const std::string& name) const
{
    for (auto outputBinding : m_OutputBindings)
    {
        if (outputBinding.first == name)
        {
            return outputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No output binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

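// Creates an InputLayer for every graph inputId and records its
// name -> (LayerBindingId, TensorInfo) pair so GetNetworkInputBindingInfo can resolve it.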
void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto inputs = GetGraphInputs(graph);
    m_InputBindings.clear();
    m_InputBindings.reserve(inputs.size());
    for (auto const& input : inputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, input->index());
        IConnectableLayer* layer =
            m_Network->AddInputLayer(bindingId, input->layerName()->c_str());

        auto tensorInfo = ToTensorInfo(input->outputSlots()->Get(0)->tensorInfo());
        layer->GetOutputSlot(0).SetTensorInfo(tensorInfo);

        RegisterOutputSlots(graph, input->index(), layer);

        BOOST_ASSERT_MSG(input->layerName()->c_str(), "Input has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(input->layerName()->c_str(), bindingInfo));
    }
}

void Deserializer::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto outputs = GetGraphOutputs(graph);
    m_OutputBindings.clear();
    m_OutputBindings.reserve(outputs.size());
    for (auto const& output : outputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, output->index());
        IConnectableLayer* layer =
            m_Network->AddOutputLayer(bindingId, output->layerName()->c_str());

        RegisterInputSlots(graph, output->index(), layer);

        auto baseLayer = GetBaseLayer(graph, output->index());
        auto sourceLayerIndex = baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex();
        auto sourceLayer = GetBaseLayer(graph, sourceLayerIndex);
        auto tensorInfo = ToTensorInfo(sourceLayer->outputSlots()->Get(0)->tensorInfo());

        BOOST_ASSERT_MSG(output->layerName()->c_str(), "Output has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(output->layerName()->c_str(), bindingInfo));
    }
}

void Deserializer::RegisterOutputSlots(GraphPtr graph,
                                       uint32_t layerIndex,
                                       IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index: %3% %4%") %
                       parsedLayer->outputSlots()->size() %
                       layer->GetNumOutputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumOutputSlots(); ++slotIndex)
    {
        armnn::IOutputSlot* slot = &(layer->GetOutputSlot(slotIndex));
        RegisterOutputSlotOfConnection(layerIndex, slot);
    }
}

void Deserializer::RegisterInputSlots(GraphPtr graph,
                                      uint32_t layerIndex,
                                      armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index:%3% %4%") %
                       parsedLayer->inputSlots()->size() %
                       layer->GetNumInputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumInputSlots(); ++slotIndex)
    {
        auto fbConnection = parsedLayer->inputSlots()->Get(slotIndex)->connection();
        armnn::IInputSlot* slot = &(layer->GetInputSlot(slotIndex));

        RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), slot);
    }
}

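// Connection bookkeeping: while layers are parsed, input and output slots are
// recorded per source layer in m_GraphConnections; the actual Connect() calls
// happen later in CreateNetworkFromGraph once every layer exists.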
void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
                                                 uint32_t outputSlotIndex,
                                                 armnn::IInputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > sourceLayerIndex);

    SlotsMap& slotsMap = m_GraphConnections[0][sourceLayerIndex];
    if (slotsMap.inputSlots.find(outputSlotIndex) == slotsMap.inputSlots.end())
    {
        slotsMap.inputSlots[outputSlotIndex] = {slot};
    }
    else
    {
        slotsMap.inputSlots[outputSlotIndex].push_back(slot);
    }
}

void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
                                                  armnn::IOutputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > sourceLayerIndex);
    m_GraphConnections[0][sourceLayerIndex].outputSlots.push_back(slot);
}

void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::ActivationDescriptor descriptor;
    descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
    descriptor.m_A = serializerDescriptor->a();
    descriptor.m_B = serializerDescriptor->b();

    IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
                                                             layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

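// BatchToSpaceNd: the serialized crops arrive as a flat list that is re-paired
// below into [begin, end] crop pairs, one pair per block-shape dimension.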
void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
    auto flatBufferCrops = flatBufferDescriptor->crops();
    auto flatBufferBlockShape = flatBufferDescriptor->blockShape();

    if (flatBufferCrops->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> crops;
    crops.reserve(flatBufferCrops->Length() / 2);
    for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
    {
        crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
    }

    armnn::BatchToSpaceNdDescriptor descriptor;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
    descriptor.m_BlockShape =
        std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
    descriptor.m_Crops = crops;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::BatchNormalizationDescriptor descriptor;
    descriptor.m_Eps = serializerDescriptor->eps();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor mean     = ToConstTensor(serializerLayer->mean());
    armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
    armnn::ConstTensor beta     = ToConstTensor(serializerLayer->beta());
    armnn::ConstTensor gamma    = ToConstTensor(serializerLayer->gamma());

    IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
                                                                     mean,
                                                                     variance,
                                                                     beta,
                                                                     gamma,
                                                                     layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
    auto serializerInput = serializerLayer->input();

    armnn::ConstTensor input = ToConstTensor(serializerInput);

    IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::Convolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
    }
    IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
                                                                weights,
                                                                biases,
                                                                layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::DepthwiseConvolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
    }
    IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                                         weights,
                                                                         biases,
                                                                         layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddEqualLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddGreaterLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    auto layerName = GetLayerName(graph, layerIndex);
    armnn::L2NormalizationDescriptor descriptor;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());

    IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

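// Merger (concatenation): rebuilds the OriginsDescriptor from the serialized view
// origins, one origin per input view, then sets the concatenation axis.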
Jim Flynnac25a1b2019-02-28 10:40:49 +00001145void Deserializer::ParseMerger(GraphPtr graph, unsigned int layerIndex)
1146{
1147 CHECK_LAYERS(graph, 0, layerIndex);
1148 CHECK_LOCATION();
1149
1150 auto outputs = GetOutputs(graph, layerIndex);
1151 CHECK_VALID_SIZE(outputs.size(), 1);
1152
1153 auto mergerLayer = graph->layers()->Get(layerIndex)->layer_as_MergerLayer();
1154 auto layerName = GetLayerName(graph, layerIndex);
1155 auto mergerDescriptor = mergerLayer->descriptor();
1156 unsigned int numViews = mergerDescriptor->numViews();
1157 unsigned int numDimensions = mergerDescriptor->numDimensions();
1158
1159 // can now check the number of inputs == number of views
1160 auto inputs = GetInputs(graph, layerIndex);
1161 CHECK_VALID_SIZE(inputs.size(), numViews);
1162
1163 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
1164 auto originsPtr = mergerDescriptor->viewOrigins();
1165 for (unsigned int v = 0; v < numViews; ++v)
1166 {
1167 auto originPtr = originsPtr->Get(v);
1168 for (unsigned int d = 0; d < numDimensions; ++d)
1169 {
1170 uint32_t value = originPtr->data()->Get(d);
1171 descriptor.SetViewOriginCoord(v, d, value);
1172 }
1173 }
1174 descriptor.SetConcatAxis(mergerDescriptor->concatAxis());
1175
1176 IConnectableLayer* layer = m_Network->AddMergerLayer(descriptor, layerName.c_str());
1177 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1178 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1179
1180 RegisterInputSlots(graph, layerIndex, layer);
1181 RegisterOutputSlots(graph, layerIndex, layer);
1182}
1183
Derek Lamberti8ddae332019-02-21 16:29:43 +00001184void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001185{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001186 CHECK_LAYERS(graph, 0, layerIndex);
1187 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001188 CHECK_LOCATION();
1189 CHECK_VALID_SIZE(inputs.size(), 2);
1190
Derek Lamberti8ddae332019-02-21 16:29:43 +00001191 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001192 CHECK_VALID_SIZE(outputs.size(), 1);
1193
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001194 auto layerName = GetLayerName(graph, layerIndex);
1195 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001196
1197 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1198 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1199
Derek Lamberti8ddae332019-02-21 16:29:43 +00001200 RegisterInputSlots(graph, layerIndex, layer);
1201 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001202}
1203
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001204void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
1205{
1206 CHECK_LAYERS(graph, 0, layerIndex);
1207 CHECK_LOCATION();
1208
1209 auto inputs = GetInputs(graph, layerIndex);
1210 CHECK_VALID_SIZE(inputs.size(), 1);
1211
1212 auto outputs = GetOutputs(graph, layerIndex);
1213 CHECK_VALID_SIZE(outputs.size(), 1);
1214
1215 auto layerName = GetLayerName(graph, layerIndex);
1216
1217     armnn::IConnectableLayer* layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001220
1221 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1222 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1223
1224 RegisterInputSlots(graph, layerIndex, layer);
1225 RegisterOutputSlots(graph, layerIndex, layer);
1226}
1227
Derek Lamberti8ddae332019-02-21 16:29:43 +00001228void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001229{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001230 CHECK_LAYERS(graph, 0, layerIndex);
1231 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001232 CHECK_LOCATION();
1233 CHECK_VALID_SIZE(inputs.size(), 1);
1234
Derek Lamberti8ddae332019-02-21 16:29:43 +00001235 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001236 CHECK_VALID_SIZE(outputs.size(), 1);
1237
Derek Lamberti8ddae332019-02-21 16:29:43 +00001238 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001239 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001240 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1241
1242 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1243 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1244 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1245
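    // The weights (and the optional biases) are stored in the serialized model as constant tensors.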
1246 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1247
1248 armnn::IConnectableLayer* layer;
1249 if (flatBufferDescriptor->biasEnabled())
1250 {
1251 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
1252 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1253 weightsTensor,
1254 biasTensorData,
1255 layerName.c_str());
1256 }
1257 else
1258 {
1259 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1260 weightsTensor,
1261 layerName.c_str());
1262 }
1263
1264 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1265 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1266
Derek Lamberti8ddae332019-02-21 16:29:43 +00001267 RegisterInputSlots(graph, layerIndex, layer);
1268 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001269}
1270
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001271void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
1272{
1273 CHECK_LAYERS(graph, 0, layerIndex);
1274
1275 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1276 CHECK_VALID_SIZE(inputs.size(), 1);
1277
1278 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1279 CHECK_VALID_SIZE(outputs.size(), 1);
1280
1281 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1282 auto flatBufferPadList = flatBufferDescriptor->padList();
1283
1284 if (flatBufferPadList->Length() % 2 != 0)
1285 {
1286 throw ParseException(boost::str(
1287 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1288 }
1289
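    // The flattened pad list is interpreted as (before, after) padding pairs, one pair per tensor dimension.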
1290 std::vector<std::pair<unsigned int, unsigned int>> padList;
1291 padList.reserve(flatBufferPadList->Length() / 2);
1292 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1293 {
1294 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1295 }
1296
1297 armnn::PadDescriptor descriptor(padList);
1298
1299 auto layerName = GetLayerName(graph, layerIndex);
1300 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1301
1302 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1303 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1304
1305 RegisterInputSlots(graph, layerIndex, layer);
1306 RegisterOutputSlots(graph, layerIndex, layer);
1307}
1308
Derek Lamberti8ddae332019-02-21 16:29:43 +00001309void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001310{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001311 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001312
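    // dimMappings gives, for each input dimension, the output dimension it is moved to
    // (armnn's PermutationVector convention).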
1313 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001314 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001315
Derek Lamberti8ddae332019-02-21 16:29:43 +00001316 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001317 CHECK_VALID_SIZE(inputs.size(), 1);
1318
Derek Lamberti8ddae332019-02-21 16:29:43 +00001319 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001320 CHECK_VALID_SIZE(outputs.size(), 1);
1321 auto outputInfo = ToTensorInfo(outputs[0]);
1322
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001323 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001324 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1325
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001326 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001327 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1328
Derek Lamberti8ddae332019-02-21 16:29:43 +00001329 RegisterInputSlots(graph, layerIndex, layer);
1330 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001331}
1332
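// Translates the serialized Pooling2d options into an armnn::Pooling2dDescriptor;
// unrecognised enum values trigger the asserts below.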
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001333armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001334 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001335{
1336 armnn::Pooling2dDescriptor desc;
1337
1338 switch (pooling2dDesc->poolType())
1339 {
1340 case PoolingAlgorithm_Average:
1341 {
1342 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001343 break;
1344 }
1345 case PoolingAlgorithm_Max:
1346 {
1347 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001348 break;
1349 }
1350 default:
1351 {
1352 BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
1353 }
1354 }
1355
1356 switch (pooling2dDesc->outputShapeRounding())
1357 {
1358 case OutputShapeRounding_Floor:
1359 {
1360 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1361 break;
1362 }
1363 case OutputShapeRounding_Ceiling:
1364 {
1365 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
1366 break;
1367 }
1368 default:
1369 {
1370 BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
1371 }
1372 }
1373
1374 switch (pooling2dDesc->paddingMethod())
1375 {
1376 case PaddingMethod_Exclude:
1377 {
1378 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1379 break;
1380 }
1381 case PaddingMethod_IgnoreValue:
1382 {
1383 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
1384 break;
1385 }
1386 default:
1387 {
1388 BOOST_ASSERT_MSG(false, "Unsupported padding method");
1389 }
1390 }
1391
1392 switch (pooling2dDesc->dataLayout())
1393 {
1394 case DataLayout_NCHW:
1395 {
1396 desc.m_DataLayout = armnn::DataLayout::NCHW;
1397 break;
1398 }
1399 case DataLayout_NHWC:
1400 {
1401 desc.m_DataLayout = armnn::DataLayout::NHWC;
1402 break;
1403 }
1404 default:
1405 {
1406 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1407 }
1408 }
1409
1410 desc.m_PadRight = pooling2dDesc->padRight();
1411 desc.m_PadLeft = pooling2dDesc->padLeft();
1412 desc.m_PadBottom = pooling2dDesc->padBottom();
1413 desc.m_PadTop = pooling2dDesc->padTop();
1414 desc.m_StrideX = pooling2dDesc->strideX();
1415 desc.m_StrideY = pooling2dDesc->strideY();
1416 desc.m_PoolWidth = pooling2dDesc->poolWidth();
1417 desc.m_PoolHeight = pooling2dDesc->poolHeight();
1418
1419 return desc;
1420}
1421
Derek Lamberti8ddae332019-02-21 16:29:43 +00001422void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001423{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001424 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001425
Derek Lamberti8ddae332019-02-21 16:29:43 +00001426 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001427 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001428 CHECK_VALID_SIZE(inputs.size(), 1);
1429
Derek Lamberti8ddae332019-02-21 16:29:43 +00001430 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001431 CHECK_VALID_SIZE(outputs.size(), 1);
1432 auto outputInfo = ToTensorInfo(outputs[0]);
1433
1434 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001435 auto layerName = GetLayerName(graph, layerIndex);
1436 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001437 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1438
Derek Lamberti8ddae332019-02-21 16:29:43 +00001439 RegisterInputSlots(graph, layerIndex, layer);
1440 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001441}
1442
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001443armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00001444 const std::vector<uint32_t>& targetDimsIn)
1445{
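    // A target dimension of -1 acts as a wildcard: its size is inferred so that the total
    // number of elements matches the input tensor.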
1446 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
1447 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
1448
1449 if (stretchDim != targetDimsIn.end())
1450 {
1451 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
1452 {
1453 throw ParseException(boost::str(
1454 boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
1455 }
1456
1457 auto targetNumElements =
1458 boost::numeric_cast<unsigned int>(
1459 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
1460
1461 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
1462 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
1463 }
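    // For example, an input holding 24 elements reshaped with target dimensions {6, -1} yields the shape {6, 4}.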
1464
1465 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
1466
1467 armnn::TensorInfo reshapeInfo = inputTensorInfo;
1468 reshapeInfo.SetShape(outputShape);
1469
1470 return reshapeInfo;
1471}
1472
Derek Lamberti8ddae332019-02-21 16:29:43 +00001473void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart263829c2019-02-19 15:54:14 +00001474{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001475 CHECK_LAYERS(graph, 0, layerIndex);
1476 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001477
Derek Lamberti8ddae332019-02-21 16:29:43 +00001478 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001479 CHECK_VALID_SIZE(outputs.size(), 1);
1480
1481 armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
1482 armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);
1483
Derek Lamberti8ddae332019-02-21 16:29:43 +00001484 const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
Saoirse Stewart263829c2019-02-19 15:54:14 +00001485 std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());
1486
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001487 armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001488 const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
1489
1490 const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
1491 outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());
1492
1493 if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
1494 {
1495 std::stringstream ss;
1496 ss << "New shape defined in reshape parameters "
1497 << reshapeOutputTensorShape
1498 << " does not equal output shape "
1499 << actualOutputTensorInfo.GetShape()
1500 << ": "
1501 << CHECK_LOCATION().AsString();
1502 throw ParseException(ss.str());
1503 }
1504
1505 armnn::ReshapeDescriptor reshapeDesc;
1506 reshapeDesc.m_TargetShape = reshapeOutputTensorShape;
1507
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001508 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001509 IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
1510 layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);
1511
Derek Lamberti8ddae332019-02-21 16:29:43 +00001512 RegisterInputSlots(graph, layerIndex, layer);
1513 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001514}
1515
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001516void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
1517{
1518 CHECK_LAYERS(graph, 0, layerIndex);
1519
1520 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1521 CHECK_VALID_SIZE(inputs.size(), 1);
1522
1523 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1524 CHECK_VALID_SIZE(outputs.size(), 1);
1525
1526 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
1527
1528 armnn::ResizeBilinearDescriptor descriptor;
1529 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
1530 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
1531 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1532
1533 auto layerName = GetLayerName(graph, layerIndex);
1534 IConnectableLayer* layer = m_Network->AddResizeBilinearLayer(descriptor, layerName.c_str());
1535
1536 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1537 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1538
1539 RegisterInputSlots(graph, layerIndex, layer);
1540 RegisterOutputSlots(graph, layerIndex, layer);
1541}
1542
Derek Lamberti8ddae332019-02-21 16:29:43 +00001543void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001544{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001545 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001546
Derek Lamberti8ddae332019-02-21 16:29:43 +00001547 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001548 CHECK_VALID_SIZE(inputs.size(), 1);
1549
Derek Lamberti8ddae332019-02-21 16:29:43 +00001550 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001551 CHECK_VALID_SIZE(outputs.size(), 1);
1552
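    // Beta scales the inputs before exponentiation in the softmax.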
1553 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00001554 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001555 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001556
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001557 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
1558
1559 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1560 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1561
Derek Lamberti8ddae332019-02-21 16:29:43 +00001562 RegisterInputSlots(graph, layerIndex, layer);
1563 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001564}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001565
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00001566void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
1567{
1568 CHECK_LAYERS(graph, 0, layerIndex);
1569
1570 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1571 CHECK_VALID_SIZE(inputs.size(), 1);
1572
1573 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1574 CHECK_VALID_SIZE(outputs.size(), 1);
1575
1576 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
1577 auto flatBufferPadList = flatBufferDescriptor->padList();
1578 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1579
1580 if (flatBufferPadList->Length() % 2 != 0)
1581 {
1582 throw ParseException(boost::str(
1583 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1584 }
1585
1586 std::vector<std::pair<unsigned int, unsigned int>> padList;
1587 padList.reserve(flatBufferPadList->Length() / 2);
1588 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1589 {
1590 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1591 }
1592
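    // blockShape holds the block size for each spatial dimension; padList holds the
    // (before, after) padding applied before the block rearrangement.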
1593 armnn::SpaceToBatchNdDescriptor descriptor;
1594 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1595 descriptor.m_BlockShape =
1596 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1597 descriptor.m_PadList = padList;
1598
1599 auto layerName = GetLayerName(graph, layerIndex);
1600 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
1601
1602 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1603 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1604
1605 RegisterInputSlots(graph, layerIndex, layer);
1606 RegisterOutputSlots(graph, layerIndex, layer);
1607}
1608
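// Translates the serialized normalization settings (channel type, method, data layout,
// alpha, beta, k and norm size) into an armnn::NormalizationDescriptor.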
Nina Drozd57728782019-02-27 10:53:27 +00001609armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
1610 Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
1611 unsigned int layerIndex)
1612{
1613 armnn::NormalizationDescriptor desc;
1614
1615 switch (normalizationDescriptor->normChannelType())
1616 {
1617 case NormalizationAlgorithmChannel_Across:
1618 {
1619 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
1620 break;
1621 }
1622 case NormalizationAlgorithmChannel_Within:
1623 {
1624 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
1625 break;
1626 }
1627 default:
1628 {
1629 BOOST_ASSERT_MSG(false, "Unsupported normalization channel type");
1630 }
1631 }
1632
1633 switch (normalizationDescriptor->normMethodType())
1634 {
1635 case NormalizationAlgorithmMethod_LocalBrightness:
1636 {
1637 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
1638 break;
1639 }
1640 case NormalizationAlgorithmMethod_LocalContrast:
1641 {
1642 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
1643 break;
1644 }
1645 default:
1646 {
1647 BOOST_ASSERT_MSG(false, "Unsupported normalization method type");
1648 }
1649 }
1650
1651 switch (normalizationDescriptor->dataLayout())
1652 {
1653 case DataLayout_NCHW:
1654 {
1655 desc.m_DataLayout = armnn::DataLayout::NCHW;
1656 break;
1657 }
1658 case DataLayout_NHWC:
1659 {
1660 desc.m_DataLayout = armnn::DataLayout::NHWC;
1661 break;
1662 }
1663 default:
1664 {
1665 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1666 }
1667 }
1668
1669 desc.m_Alpha = normalizationDescriptor->alpha();
1670 desc.m_Beta = normalizationDescriptor->beta();
1671 desc.m_K = normalizationDescriptor->k();
1672 desc.m_NormSize = normalizationDescriptor->normSize();
1673
1674 return desc;
1675}
1676
1677void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
1678{
1679 CHECK_LAYERS(graph, 0, layerIndex);
1680
1681 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
1682
1683 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1684 CHECK_VALID_SIZE(inputs.size(), 1);
1685
1686 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1687 CHECK_VALID_SIZE(outputs.size(), 1);
1688
1689 auto outputInfo = ToTensorInfo(outputs[0]);
1690
1691 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
1692 auto layerName = GetLayerName(graph, layerIndex);
1693
1694 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
1695 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1696
1697 RegisterInputSlots(graph, layerIndex, layer);
1698 RegisterOutputSlots(graph, layerIndex, layer);
1699}
1700
Sadik Armagan8b42a382019-03-01 14:24:49 +00001701void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
1702{
1703 CHECK_LAYERS(graph, 0, layerIndex);
1704 auto inputs = GetInputs(graph, layerIndex);
1705 CHECK_LOCATION();
1706 CHECK_VALID_SIZE(inputs.size(), 1);
1707
1708 auto outputs = GetOutputs(graph, layerIndex);
1709 CHECK_VALID_SIZE(outputs.size(), 1);
1710
1711 auto layerName = GetLayerName(graph, layerIndex);
1712 IConnectableLayer* layer = m_Network->AddRsqrtLayer(layerName.c_str());
1713
1714 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1715 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1716
1717 RegisterInputSlots(graph, layerIndex, layer);
1718 RegisterOutputSlots(graph, layerIndex, layer);
1719}
1720
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00001721void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
1722{
1723 CHECK_LAYERS(graph, 0, layerIndex);
1724
1725 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1726 CHECK_VALID_SIZE(inputs.size(), 1);
1727
1728 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1729 CHECK_VALID_SIZE(outputs.size(), 1);
1730
1731 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
1732
1733 auto flatBufferBegin = flatBufferDescriptor->begin();
1734 auto flatBufferEnd = flatBufferDescriptor->end();
1735 auto flatBufferStride = flatBufferDescriptor->stride();
1736
1737 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
1738 flatBufferBegin->Length() == flatBufferStride->Length()))
1739 {
1740 throw ParseException(boost::str(
1741             boost::format("The begin, end, and stride lists must all have the same length %1%") % CHECK_LOCATION().AsString()));
1742 }
1743
1744 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
1745 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
1746 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
1747
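    // The bitmasks below refine the per-dimension slice, e.g. m_ShrinkAxisMask marks dimensions
    // to be removed from the output.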
1748 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
1749 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
1750 descriptor.m_EndMask = flatBufferDescriptor->endMask();
1751 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
1752 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
1753 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
1754 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1755
1756 auto layerName = GetLayerName(graph, layerIndex);
1757 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
1758
1759 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1760 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1761
1762 RegisterInputSlots(graph, layerIndex, layer);
1763 RegisterOutputSlots(graph, layerIndex, layer);
1764}
1765
Conor Kennedyda1f9752019-03-01 14:37:12 +00001766void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
1767{
1768 CHECK_LAYERS(graph, 0, layerIndex);
1769 auto inputs = GetInputs(graph, layerIndex);
1770 CHECK_LOCATION();
1771 CHECK_VALID_SIZE(inputs.size(), 2);
1772
1773 auto outputs = GetOutputs(graph, layerIndex);
1774 CHECK_VALID_SIZE(outputs.size(), 1);
1775
1776 auto layerName = GetLayerName(graph, layerIndex);
1777 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
1778
1779 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1780 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1781
1782 RegisterInputSlots(graph, layerIndex, layer);
1783 RegisterOutputSlots(graph, layerIndex, layer);
1784}
1785
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001786void Deserializer::ParseGather(GraphPtr graph, unsigned int layerIndex)
1787{
1788 CHECK_LAYERS(graph, 0, layerIndex);
1789
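    // Gather expects two inputs: the tensor to gather from and the indices tensor.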
1790 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1791 CHECK_VALID_SIZE(inputs.size(), 2);
1792
1793 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1794 CHECK_VALID_SIZE(outputs.size(), 1);
1795
1796 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001797 IConnectableLayer* layer = m_Network->AddGatherLayer(layerName.c_str());
1798
1799 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001800 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1801
1802 RegisterInputSlots(graph, layerIndex, layer);
1803 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001804}
1805
Sadik Armaganac97c8c2019-03-04 17:44:21 +00001806void Deserializer::ParseMean(GraphPtr graph, unsigned int layerIndex)
1807{
1808 CHECK_LAYERS(graph, 0, layerIndex);
1809
1810 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1811 CHECK_VALID_SIZE(inputs.size(), 1);
1812
1813 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1814 CHECK_VALID_SIZE(outputs.size(), 1);
1815
1816 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
1817 auto flatBufferAxis = flatBufferDescriptor->axis();
1818 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
1819
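    // m_Axis lists the dimensions to reduce; when m_KeepDims is true the reduced dimensions are retained with size 1.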
1820 armnn::MeanDescriptor descriptor;
1821 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
1822 descriptor.m_KeepDims = flatBufferKeepDims;
1823
1824 auto layerName = GetLayerName(graph, layerIndex);
1825 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
1826
1827 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1828 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1829
1830 RegisterInputSlots(graph, layerIndex, layer);
1831 RegisterOutputSlots(graph, layerIndex, layer);
1832}
1833
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001834} // namespace armnnDeserializer