//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "Deserializer.hpp"

#include <armnn/ArmNN.hpp>
#include <armnn/Exceptions.hpp>

#include <ParserHelper.hpp>
#include <Permute.hpp>
#include <VerificationHelpers.hpp>

#include <boost/filesystem.hpp>
#include <boost/format.hpp>
#include <boost/core/ignore_unused.hpp>
#include <boost/assert.hpp>
#include <boost/log/trivial.hpp>

// The generated code based on the Serialize schema:
#include <ArmnnSchema_generated.h>

#include <fstream>
#include <algorithm>
#include <limits>
#include <numeric>

using armnn::ParseException;
using namespace armnn;
using namespace armnnSerializer;

namespace armnnDeserializer
{

namespace
{

const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();

void CheckGraph(const Deserializer::GraphPtr& graph,
                unsigned int layersIndex,
                const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
}

void CheckLayers(const Deserializer::GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layerIndex >= graph->layers()[layersIndex].size()
             && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layer index. "
                              "layers:%2% layer:%3% at %4%") %
                location.m_Function %
                layersIndex %
                layerIndex %
                location.FileLine()));
    }
}

void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
                    const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with a null tensor pointer. "
                              "at %2%") %
                location.m_Function %
                location.FileLine()));
    }
}

void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
                         const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

void CheckConstTensorSize(const unsigned int constTensorSize,
                          const unsigned int tensorSize,
                          const CheckLocation& location)
{
    if (constTensorSize != tensorSize)
    {
        throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
}

bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
{
    const unsigned int actualSize = actual.GetNumDimensions();
    if (actualSize != expected.size())
    {
        return false;
    }

    for (unsigned int i = 0u; i < actualSize; i++)
    {
        if (actual[i] != static_cast<unsigned int>(expected[i]))
        {
            return false;
        }
    }

    return true;
}
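// Example (illustrative values): CheckShape(armnn::TensorShape({1, 2, 3}), {1, 2, 3}) returns true,
// while any mismatch in rank or in an individual dimension returns false.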

Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
// May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_ConstantLayer] = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_MultiplicationLayer] = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_SoftmaxLayer] = &Deserializer::ParseSoftmax;
}

Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                boost::format("Layer must have a type %1%") %
                Layer::Layer_NONE));
    }
}

std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
{
    auto layer = GetBaseLayer(graph, index);
    assert(layer);
    return layer->layerName()->str();
}

int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    if (layerType == Layer::Layer_InputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
    }
    else if (layerType == Layer::Layer_OutputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
    }
    return 0;
}

armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
{
    switch (dataLayout)
    {
        case armnnSerializer::DataLayout::DataLayout_NHWC:
            return armnn::DataLayout::NHWC;
        case armnnSerializer::DataLayout::DataLayout_NCHW:
        default:
            return armnn::DataLayout::NCHW;
    }
}

armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}

armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
            type = armnn::DataType::QuantisedAsymm8;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}
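// Example (illustrative values): a serialized tensor with dimensions {1, 2, 2, 1}, dataType
// DataType_QuantisedAsymm8, quantizationScale 0.1 and quantizationOffset 128 deserializes to the
// equivalent of armnn::TensorInfo({1, 2, 2, 1}, armnn::DataType::QuantisedAsymm8, 0.1f, 128).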

armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}

Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphInputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numInputs = graphPtr->inputIds()->size();

    LayerBaseRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        uint32_t inputId = graphPtr->inputIds()->Get(i);
        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(inputId));
    }
    return result;
}

Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphOutputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numOutputs = graphPtr->outputIds()->size();
    LayerBaseRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        uint32_t outputId = graphPtr->outputIds()->Get(i);

        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(outputId));
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
                                                         unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
            (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
                                                          unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numOutputs = layer->outputSlots()->size();

    TensorRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        result[i] = layer->outputSlots()->Get(i)->tensorInfo();
    }
    return result;
}

void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(
        boost::str(
            boost::format("Layer not supported. "
                          "layerIndex: %1% "
                          "layerName: %2% / %3%") %
            layerIndex %
            layerName %
            CHECK_LOCATION().AsString()));
}

void Deserializer::ResetParser()
{
    m_Network = armnn::INetworkPtr(nullptr, nullptr);
    m_InputBindings.clear();
    m_OutputBindings.clear();
}

IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}

IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}

INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph(graph);
}

armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
    return CreateNetworkFromGraph(graph);
}
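// Minimal usage sketch for the deserialization entry points defined in this file. The file name,
// stream handling and binding names below are illustrative assumptions, not part of this file:
//
//     armnnDeserializer::IDeserializerPtr parser = armnnDeserializer::IDeserializer::Create();
//     std::ifstream stream("model.armnn", std::ios::binary);                 // hypothetical model file
//     armnn::INetworkPtr network = parser->CreateNetworkFromBinary(stream);
//     auto inputInfo  = parser->GetNetworkInputBindingInfo(0, "input");      // names as serialized
//     auto outputInfo = parser->GetNetworkOutputBindingInfo(0, "output");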

Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
                                                  CHECK_LOCATION().AsString()));
    }
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(
            boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
                                     "flatbuffers format. size:%1% %2%") %
                       len %
                       CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}

INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    m_GraphConnections.emplace_back(graph->layers()->size());
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

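    // Each entry of m_GraphConnections[0] is indexed by the producing layer and was filled in by the
    // Register*Slots calls above: at most one producing IOutputSlot plus the IInputSlots of every consumer.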
    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (size_t connectionIndex = 0; connectionIndex < m_GraphConnections[0].size(); ++connectionIndex)
    {
        if (m_GraphConnections[0][connectionIndex].outputSlot != nullptr)
        {
            for (size_t inputSlotIdx = 0;
                 inputSlotIdx < m_GraphConnections[0][connectionIndex].inputSlots.size();
                 ++inputSlotIdx)
            {
                m_GraphConnections[0][connectionIndex].outputSlot->Connect(
                    *(m_GraphConnections[0][connectionIndex].inputSlots[inputSlotIdx]));
            }
        }
    }

    return std::move(m_Network);
}

BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
                                                          const std::string& name) const
{
    for (auto inputBinding : m_InputBindings)
    {
        if (inputBinding.first == name)
        {
            return inputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No input binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
                                                           const std::string& name) const
{
    for (auto outputBinding : m_OutputBindings)
    {
        if (outputBinding.first == name)
        {
            return outputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No output binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto inputs = GetGraphInputs(graph);
    m_InputBindings.clear();
    m_InputBindings.reserve(inputs.size());
    for (auto const& input : inputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, input->index());
        IConnectableLayer* layer =
            m_Network->AddInputLayer(bindingId, input->layerName()->c_str());

        auto tensorInfo = ToTensorInfo(input->outputSlots()->Get(0)->tensorInfo());
        layer->GetOutputSlot(0).SetTensorInfo(tensorInfo);

        RegisterOutputSlots(graph, input->index(), layer);

        BOOST_ASSERT_MSG(input->layerName()->c_str(), "Input has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(input->layerName()->c_str(), bindingInfo));
    }
}

void Deserializer::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto outputs = GetGraphOutputs(graph);
    m_OutputBindings.clear();
    m_OutputBindings.reserve(outputs.size());
    for (auto const& output : outputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, output->index());
        IConnectableLayer* layer =
            m_Network->AddOutputLayer(bindingId, output->layerName()->c_str());

        RegisterInputSlots(graph, output->index(), layer);

        auto baseLayer = GetBaseLayer(graph, output->index());
        auto sourceLayerIndex = baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex();
        auto sourceLayer = GetBaseLayer(graph, sourceLayerIndex);
        auto tensorInfo = ToTensorInfo(sourceLayer->outputSlots()->Get(0)->tensorInfo());

        BOOST_ASSERT_MSG(output->layerName()->c_str(), "Output has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(output->layerName()->c_str(), bindingInfo));
    }
}

void Deserializer::RegisterOutputSlots(GraphPtr graph,
                                       uint32_t layerIndex,
                                       IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index: %3% %4%") %
                       parsedLayer->outputSlots()->size() %
                       layer->GetNumOutputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumOutputSlots(); ++slotIndex)
    {
        armnn::IOutputSlot* slot = &(layer->GetOutputSlot(slotIndex));
        RegisterOutputSlotOfConnection(layerIndex, slot);
    }
}

void Deserializer::RegisterInputSlots(GraphPtr graph,
                                      uint32_t layerIndex,
                                      armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index:%3% %4%") %
                       parsedLayer->inputSlots()->size() %
                       layer->GetNumInputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumInputSlots(); ++slotIndex)
    {
        armnn::IInputSlot* slot = &(layer->GetInputSlot(slotIndex));
        uint32_t sourceLayerIndex = parsedLayer->inputSlots()->Get(slotIndex)->connection()->sourceLayerIndex();
        RegisterInputSlotOfConnection(sourceLayerIndex, slot);
    }
}

void Deserializer::RegisterInputSlotOfConnection(uint32_t connectionIndex,
                                                 armnn::IInputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);

    Slots& slots = m_GraphConnections[0][connectionIndex];
    slots.inputSlots.push_back(slot);
}

void Deserializer::RegisterOutputSlotOfConnection(uint32_t connectionIndex,
                                                  armnn::IOutputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);

    Slots& slots = m_GraphConnections[0][connectionIndex];

    // assuming there is only one producer for that tensor
    if (slots.outputSlot != nullptr)
    {
        throw ParseException(boost::str(
            boost::format("Another layer has already registered itself as the producer of "
                          "connection:%1% / %2%") %
            connectionIndex %
            CHECK_LOCATION().AsString()));
    }

    slots.outputSlot = slot;
}

void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::ActivationDescriptor descriptor;
    descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
    descriptor.m_A = serializerDescriptor->a();
    descriptor.m_B = serializerDescriptor->b();

    IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
                                                             layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
    auto serializerInput = serializerLayer->input();

    armnn::ConstTensor input = ToConstTensor(serializerInput);

    IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::Convolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
    }
    IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
                                                                weights,
                                                                biases,
                                                                layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::DepthwiseConvolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
    }
    IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                                         weights,
                                                                         biases,
                                                                         layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
    fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
    fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();

    armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());

    armnn::IConnectableLayer* layer;
    if (flatBufferDescriptor->biasEnabled())
    {
        armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  weightsTensor,
                                                  biasTensorData,
                                                  layerName.c_str());
    }
    else
    {
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  weightsTensor,
                                                  layerName.c_str());
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto dimsMapping =
        graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);
    const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));

    IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
                                                              unsigned int layerIndex)
{
    armnn::Pooling2dDescriptor desc;

    switch (pooling2dDesc->poolType())
    {
        case PoolingAlgorithm_Average:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Average;
            break;
        }
        case PoolingAlgorithm_Max:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Max;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
        }
    }

    switch (pooling2dDesc->outputShapeRounding())
    {
        case OutputShapeRounding_Floor:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
            break;
        }
        case OutputShapeRounding_Ceiling:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
        }
    }

    switch (pooling2dDesc->paddingMethod())
    {
        case PaddingMethod_Exclude:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
            break;
        }
        case PaddingMethod_IgnoreValue:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported padding method");
        }
    }

    switch (pooling2dDesc->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    desc.m_PadRight = pooling2dDesc->padRight();
    desc.m_PadLeft = pooling2dDesc->padLeft();
    desc.m_PadBottom = pooling2dDesc->padBottom();
    desc.m_PadTop = pooling2dDesc->padTop();
    desc.m_StrideX = pooling2dDesc->strideX();
    desc.m_StrideY = pooling2dDesc->strideY();
    desc.m_PoolWidth = pooling2dDesc->poolWidth();
    desc.m_PoolHeight = pooling2dDesc->poolHeight();

    return desc;
}

void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                     const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(boost::str(
                boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
        }

        auto targetNumElements =
            boost::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}
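// Worked example (illustrative values): for an input tensor of 24 elements and targetDimsIn = {2, -1, 4},
// the stretch dimension is index 1 and the product of the remaining target dimensions is 8, so the
// deduced shape is {2, 24 / 8, 4} = {2, 3, 4}.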

void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::SoftmaxDescriptor descriptor;
    descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
    auto layerName = GetLayerName(graph, layerIndex);

    IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

} // namespace armnnDeserializer