//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "Deserializer.hpp"

#include <armnn/ArmNN.hpp>
#include <armnn/Exceptions.hpp>

#include <ParserHelper.hpp>
#include <Permute.hpp>
#include <VerificationHelpers.hpp>

#include <boost/filesystem.hpp>
#include <boost/format.hpp>
#include <boost/core/ignore_unused.hpp>
#include <boost/assert.hpp>
#include <boost/log/trivial.hpp>

// The generated code based on the Serializer schema:
#include <ArmnnSchema_generated.h>

#include <fstream>
#include <algorithm>
#include <limits>
#include <numeric>

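// The Deserializer rebuilds an armnn::INetwork from a flatbuffer SerializedGraph that was
// produced by armnnSerializer.
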
using armnn::ParseException;
using namespace armnn;
using namespace armnnSerializer;

namespace armnnDeserializer
{

namespace
{

const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();

void CheckGraph(const Deserializer::GraphPtr& graph,
                unsigned int layersIndex,
                const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
}

void CheckLayers(const Deserializer::GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layerIndex >= graph->layers()[layersIndex].size()
             && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layer index. "
                              "layers:%2% layer:%3% at %4%") %
                location.m_Function %
                layersIndex %
                layerIndex %
                location.FileLine()));
    }
}

void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
                    const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with a null tensor pointer. "
                              "at %2%") %
                location.m_Function %
                location.FileLine()));
    }
}

void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
                         const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

void CheckConstTensorSize(const unsigned int constTensorSize,
                          const unsigned int tensorSize,
                          const CheckLocation& location)
{
    if (constTensorSize != tensorSize)
    {
        throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
}

bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
{
    const unsigned int actualSize = actual.GetNumDimensions();
    if (actualSize != expected.size())
    {
        return false;
    }

    for (unsigned int i = 0u; i < actualSize; i++)
    {
        if (actual[i] != static_cast<unsigned int>(expected[i]))
        {
            return false;
        }
    }

    return true;
}

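// The constructor below populates m_ParserFunctions, a lookup table indexed by the flatbuffer
// Layer enum; any layer type that has no dedicated handler falls back to ParseUnsupportedLayer.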
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
// May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_ActivationLayer]             = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer]               = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer]         = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer]     = &Deserializer::ParseBatchNormalization;
    m_ParserFunctions[Layer_ConstantLayer]               = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer]          = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DivisionLayer]               = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_EqualLayer]                  = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer]         = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_FloorLayer]                  = &Deserializer::ParseFloor;
    m_ParserFunctions[Layer_GreaterLayer]                = &Deserializer::ParseGreater;
    m_ParserFunctions[Layer_MinimumLayer]                = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MaximumLayer]                = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MultiplicationLayer]         = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer]          = &Deserializer::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer]                    = &Deserializer::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer]                = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer]              = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_ReshapeLayer]                = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer]         = &Deserializer::ParseResizeBilinear;
    m_ParserFunctions[Layer_RsqrtLayer]                  = &Deserializer::ParseRsqrt;
    m_ParserFunctions[Layer_SoftmaxLayer]                = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer]         = &Deserializer::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SubtractionLayer]            = &Deserializer::ParseSubtraction;
}

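// GetBaseLayer extracts the common LayerBase table from whichever concrete layer type is stored
// in the flatbuffer union, so callers can query names, slots and binding ids uniformly.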
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                boost::format("Layer must have a type %1%") %
                Layer::Layer_NONE));
    }
}

std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
{
    auto layer = GetBaseLayer(graph, index);
    assert(layer);
    return layer->layerName()->str();
}

int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    if (layerType == Layer::Layer_InputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
    }
    else if (layerType == Layer::Layer_OutputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
    }
    return 0;
}

armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
{
    switch (dataLayout)
    {
        case armnnSerializer::DataLayout::DataLayout_NHWC:
            return armnn::DataLayout::NHWC;
        case armnnSerializer::DataLayout::DataLayout_NCHW:
        default:
            return armnn::DataLayout::NCHW;
    }
}

armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}

armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
            type = armnn::DataType::QuantisedAsymm8;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}

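// ToConstTensor checks that the stored element count matches the TensorInfo and then wraps the
// raw flatbuffer data in an armnn::ConstTensor. ConstTensor references the buffer rather than
// copying it, so the deserialized graph must stay alive while the returned tensors are in use.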
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}

Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphInputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numInputs = graphPtr->inputIds()->size();

    LayerBaseRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        uint32_t inputId = graphPtr->inputIds()->Get(i);
        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(inputId));
    }
    return result;
}

Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphOutputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numOutputs = graphPtr->outputIds()->size();
    LayerBaseRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        uint32_t outputId = graphPtr->outputIds()->Get(i);
        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(outputId));
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
                                                         unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
                       (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
                                                          unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numOutputs = layer->outputSlots()->size();

    TensorRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        result[i] = layer->outputSlots()->Get(i)->tensorInfo();
    }
    return result;
}

void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(
        boost::str(
            boost::format("Layer not supported. "
                          "layerIndex: %1% "
                          "layerName: %2% / %3%") %
            layerIndex %
            layerName %
            CHECK_LOCATION().AsString()));
}

void Deserializer::ResetParser()
{
    m_Network = armnn::INetworkPtr(nullptr, nullptr);
    m_InputBindings.clear();
    m_OutputBindings.clear();
}

IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}

IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}

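// A minimal usage sketch of the public API defined in this file (illustrative only; it assumes
// the caller already holds the serialized graph bytes and that the layer names "input" and
// "output" exist in that graph):
//
//     armnnDeserializer::IDeserializerPtr parser = armnnDeserializer::IDeserializer::Create();
//     std::vector<uint8_t> binaryContent = ...;  // e.g. the contents of a serialized .armnn file
//     armnn::INetworkPtr network = parser->CreateNetworkFromBinary(binaryContent);
//     auto inputBinding  = parser->GetNetworkInputBindingInfo(0, "input");
//     auto outputBinding = parser->GetNetworkOutputBindingInfo(0, "output");
//
// The binding lookups match on the layer names stored in the serialized graph.
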
INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph(graph);
}

armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
    return CreateNetworkFromGraph(graph);
}

Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
                                                  CHECK_LOCATION().AsString()));
    }
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(
            boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
                                     "flatbuffers format. size:%1% %2%") %
                       len %
                       CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}

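// CreateNetworkFromGraph builds the INetwork in two passes: every non Input/Output layer is
// first dispatched to its ParseXxx handler (which creates the layer and records its slots in
// m_GraphConnections), then the recorded output slots are connected to the input slots of the
// layers that consume them.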
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    m_GraphConnections.emplace_back(graph->layers()->size());
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (size_t connectionIndex = 0; connectionIndex < m_GraphConnections[0].size(); ++connectionIndex)
    {
        if (m_GraphConnections[0][connectionIndex].outputSlot != nullptr)
        {
            for (size_t inputSlotIdx = 0;
                 inputSlotIdx < m_GraphConnections[0][connectionIndex].inputSlots.size();
                 ++inputSlotIdx)
            {
                m_GraphConnections[0][connectionIndex].outputSlot->Connect(
                    *(m_GraphConnections[0][connectionIndex].inputSlots[inputSlotIdx]));
            }
        }
    }

    return std::move(m_Network);
}

BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
                                                          const std::string& name) const
{
    for (auto inputBinding : m_InputBindings)
    {
        if (inputBinding.first == name)
        {
            return inputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No input binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
                                                           const std::string& name) const
{
    for (auto outputBinding : m_OutputBindings)
    {
        if (outputBinding.first == name)
        {
            return outputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No output binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto inputs = GetGraphInputs(graph);
    m_InputBindings.clear();
    m_InputBindings.reserve(inputs.size());
    for (auto const& input : inputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, input->index());
        IConnectableLayer* layer =
            m_Network->AddInputLayer(bindingId, input->layerName()->c_str());

        auto tensorInfo = ToTensorInfo(input->outputSlots()->Get(0)->tensorInfo());
        layer->GetOutputSlot(0).SetTensorInfo(tensorInfo);

        RegisterOutputSlots(graph, input->index(), layer);

        BOOST_ASSERT_MSG(input->layerName()->c_str(), "Input has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(input->layerName()->c_str(), bindingInfo));
    }
}

void Deserializer::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto outputs = GetGraphOutputs(graph);
    m_OutputBindings.clear();
    m_OutputBindings.reserve(outputs.size());
    for (auto const& output : outputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, output->index());
        IConnectableLayer* layer =
            m_Network->AddOutputLayer(bindingId, output->layerName()->c_str());

        RegisterInputSlots(graph, output->index(), layer);

        auto baseLayer = GetBaseLayer(graph, output->index());
        auto sourceLayerIndex = baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex();
        auto sourceLayer = GetBaseLayer(graph, sourceLayerIndex);
        auto tensorInfo = ToTensorInfo(sourceLayer->outputSlots()->Get(0)->tensorInfo());

        BOOST_ASSERT_MSG(output->layerName()->c_str(), "Output has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(output->layerName()->c_str(), bindingInfo));
    }
}

void Deserializer::RegisterOutputSlots(GraphPtr graph,
                                       uint32_t layerIndex,
                                       IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of output slots (%1%) does not match the number expected (%2%)"
                                     " for layer index: %3% %4%") %
                       parsedLayer->outputSlots()->size() %
                       layer->GetNumOutputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumOutputSlots(); ++slotIndex)
    {
        armnn::IOutputSlot* slot = &(layer->GetOutputSlot(slotIndex));
        RegisterOutputSlotOfConnection(layerIndex, slot);
    }
}

void Deserializer::RegisterInputSlots(GraphPtr graph,
                                      uint32_t layerIndex,
                                      armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of input slots (%1%) does not match the number expected (%2%)"
                                     " for layer index:%3% %4%") %
                       parsedLayer->inputSlots()->size() %
                       layer->GetNumInputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumInputSlots(); ++slotIndex)
    {
        armnn::IInputSlot* slot = &(layer->GetInputSlot(slotIndex));
        uint32_t sourceLayerIndex = parsedLayer->inputSlots()->Get(slotIndex)->connection()->sourceLayerIndex();
        RegisterInputSlotOfConnection(sourceLayerIndex, slot);
    }
}

void Deserializer::RegisterInputSlotOfConnection(uint32_t connectionIndex,
                                                 armnn::IInputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);

    Slots& slots = m_GraphConnections[0][connectionIndex];
    slots.inputSlots.push_back(slot);
}

void Deserializer::RegisterOutputSlotOfConnection(uint32_t connectionIndex,
                                                  armnn::IOutputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);

    Slots& slots = m_GraphConnections[0][connectionIndex];

    // assuming there is only one producer for that tensor
    if (slots.outputSlot != nullptr)
    {
        throw ParseException(boost::str(
            boost::format("Another layer has already registered itself as the producer of "
                          "connection:%1% / %2%") %
            connectionIndex %
            CHECK_LOCATION().AsString()));
    }

    slots.outputSlot = slot;
}

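// Each ParseXxx handler below follows the same pattern: validate the layer's input/output
// counts, rebuild the corresponding armnn descriptor (and constant tensors where present) from
// the flatbuffer data, add the layer to m_Network, set the output TensorInfo, and register the
// layer's slots so CreateNetworkFromGraph can connect them afterwards.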
void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::ActivationDescriptor descriptor;
    descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
    descriptor.m_A = serializerDescriptor->a();
    descriptor.m_B = serializerDescriptor->b();

    IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
                                                             layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
    auto flatBufferCrops = flatBufferDescriptor->crops();
    auto flatBufferBlockShape = flatBufferDescriptor->blockShape();

    if (flatBufferCrops->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> crops;
    crops.reserve(flatBufferCrops->Length() / 2);
    for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
    {
        crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
    }

    armnn::BatchToSpaceNdDescriptor descriptor;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
    descriptor.m_BlockShape =
        std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
    descriptor.m_Crops = crops;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = boost::str(boost::format("BatchNormalization:%1%") % layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::BatchNormalizationDescriptor descriptor;
    descriptor.m_Eps = serializerDescriptor->eps();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor mean     = ToConstTensor(serializerLayer->mean());
    armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
    armnn::ConstTensor beta     = ToConstTensor(serializerLayer->beta());
    armnn::ConstTensor gamma    = ToConstTensor(serializerLayer->gamma());

    IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
                                                                     mean,
                                                                     variance,
                                                                     beta,
                                                                     gamma,
                                                                     layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
    auto serializerInput = serializerLayer->input();

    armnn::ConstTensor input = ToConstTensor(serializerInput);

    IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::Convolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
    }
    IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
                                                                weights,
                                                                biases,
                                                                layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::DepthwiseConvolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
    }
    IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                                         weights,
                                                                         biases,
                                                                         layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddEqualLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddGreaterLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    armnn::IConnectableLayer* layer = m_Network->AddFloorLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
    fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
    fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();

    armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());

    armnn::IConnectableLayer* layer;
    if (flatBufferDescriptor->biasEnabled())
    {
        armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  weightsTensor,
                                                  biasTensorData,
                                                  layerName.c_str());
    }
    else
    {
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  weightsTensor,
                                                  layerName.c_str());
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
    auto flatBufferPadList = flatBufferDescriptor->padList();

    if (flatBufferPadList->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> padList;
    padList.reserve(flatBufferPadList->Length() / 2);
    for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
    {
        padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
    }

    armnn::PadDescriptor descriptor(padList);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

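// Deserializes a PermuteLayer from its serialized dimension mappings.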
void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto dimsMapping =
        graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);
    const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));

    IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

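// Translates a serialized Pooling2dDescriptor into the equivalent armnn::Pooling2dDescriptor.
// Unsupported enum values are caught by the asserts in the default branches.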
armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
                                                              unsigned int layerIndex)
{
    armnn::Pooling2dDescriptor desc;

    switch (pooling2dDesc->poolType())
    {
        case PoolingAlgorithm_Average:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Average;
            break;
        }
        case PoolingAlgorithm_Max:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Max;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
        }
    }

    switch (pooling2dDesc->outputShapeRounding())
    {
        case OutputShapeRounding_Floor:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
            break;
        }
        case OutputShapeRounding_Ceiling:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
        }
    }

    switch (pooling2dDesc->paddingMethod())
    {
        case PaddingMethod_Exclude:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
            break;
        }
        case PaddingMethod_IgnoreValue:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported padding method");
        }
    }

    switch (pooling2dDesc->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    desc.m_PadRight = pooling2dDesc->padRight();
    desc.m_PadLeft = pooling2dDesc->padLeft();
    desc.m_PadBottom = pooling2dDesc->padBottom();
    desc.m_PadTop = pooling2dDesc->padTop();
    desc.m_StrideX = pooling2dDesc->strideX();
    desc.m_StrideY = pooling2dDesc->strideY();
    desc.m_PoolWidth = pooling2dDesc->poolWidth();
    desc.m_PoolHeight = pooling2dDesc->poolHeight();

    return desc;
}

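// Deserializes a Pooling2dLayer using the descriptor translation above.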
void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

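// Resolves the output shape of a reshape, allowing at most one target dimension to be -1
// (the "stretch" dimension). For example, an input of 24 elements reshaped to {-1, 12}
// resolves to {2, 12}, since 24 / 12 = 2.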
armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                     const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(boost::str(
                boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
        }

        auto targetNumElements =
            boost::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}

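// Deserializes a ReshapeLayer, validating the computed target shape against the
// serialized output shape before adding the layer.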
void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

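// Deserializes a ResizeBilinearLayer: target width, target height and data layout
// all come from the serialized descriptor.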
void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();

    armnn::ResizeBilinearDescriptor descriptor;
    descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
    descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddResizeBilinearLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

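// Deserializes a SoftmaxLayer, reading beta from the serialized descriptor.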
void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::SoftmaxDescriptor descriptor;
    descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
    auto layerName = GetLayerName(graph, layerIndex);

    IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

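// Deserializes a SpaceToBatchNdLayer. As with Pad, the serialized pad list holds
// (before, after) pairs per dimension and must therefore have an even length.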
void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
    auto flatBufferPadList = flatBufferDescriptor->padList();
    auto flatBufferBlockShape = flatBufferDescriptor->blockShape();

    if (flatBufferPadList->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> padList;
    padList.reserve(flatBufferPadList->Length() / 2);
    for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
    {
        padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
    }

    armnn::SpaceToBatchNdDescriptor descriptor;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
    descriptor.m_BlockShape =
        std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
    descriptor.m_PadList = padList;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

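// Translates a serialized NormalizationDescriptor into the equivalent
// armnn::NormalizationDescriptor; unsupported enum values hit the asserts below.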
armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
    Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
    unsigned int layerIndex)
{
    armnn::NormalizationDescriptor desc;

    switch (normalizationDescriptor->normChannelType())
    {
        case NormalizationAlgorithmChannel_Across:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
            break;
        }
        case NormalizationAlgorithmChannel_Within:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported normalization channel type");
        }
    }

    switch (normalizationDescriptor->normMethodType())
    {
        case NormalizationAlgorithmMethod_LocalBrightness:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
            break;
        }
        case NormalizationAlgorithmMethod_LocalContrast:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported normalization method type");
        }
    }

    switch (normalizationDescriptor->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    desc.m_Alpha = normalizationDescriptor->alpha();
    desc.m_Beta = normalizationDescriptor->beta();
    desc.m_K = normalizationDescriptor->k();
    desc.m_NormSize = normalizationDescriptor->normSize();

    return desc;
}

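// Deserializes a NormalizationLayer using the descriptor translation above.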
void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto outputInfo = ToTensorInfo(outputs[0]);

    auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
    auto layerName = GetLayerName(graph, layerIndex);

    IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

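// Deserializes an RsqrtLayer (element-wise reciprocal square root); it has no descriptor.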
void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddRsqrtLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

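// Deserializes a SubtractionLayer, which takes two inputs and has no descriptor.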
void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

} // namespace armnnDeserializer