//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "Deserializer.hpp"

#include <armnn/ArmNN.hpp>
#include <armnn/Exceptions.hpp>

#include <ParserHelper.hpp>
#include <Permute.hpp>
#include <VerificationHelpers.hpp>

#include <boost/filesystem.hpp>
#include <boost/format.hpp>
#include <boost/core/ignore_unused.hpp>
#include <boost/assert.hpp>
#include <boost/log/trivial.hpp>

// The generated code based on the Serializer schema:
#include <ArmnnSchema_generated.h>

#include <fstream>
#include <algorithm>
#include <limits>
#include <numeric>

using armnn::ParseException;
using namespace armnn;
using namespace armnnSerializer;

namespace armnnDeserializer
{

namespace
{

const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();

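// Validation helpers: each of the functions below throws armnn::ParseException when the
// flatbuffer graph, a layer index or a tensor pointer it is given is invalid.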
void CheckGraph(const Deserializer::GraphPtr& graph,
                unsigned int layersIndex,
                const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
}

void CheckLayers(const Deserializer::GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layerIndex >= graph->layers()[layersIndex].size()
             && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layer index. "
                              "layers:%2% layer:%3% at %4%") %
                location.m_Function %
                layersIndex %
                layerIndex %
                location.FileLine()));
    }
}

void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
                    const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with a null tensor pointer. "
                              "at %2%") %
                location.m_Function %
                location.FileLine()));
    }
}

void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
                         const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

void CheckConstTensorSize(const unsigned int constTensorSize,
                          const unsigned int tensorSize,
                          const CheckLocation& location)
{
    if (constTensorSize != tensorSize)
    {
        throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())

} // anonymous namespace

bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
{
    const unsigned int actualSize = actual.GetNumDimensions();
    if (actualSize != expected.size())
    {
        return false;
    }

    for (unsigned int i = 0u; i < actualSize; i++)
    {
        if (actual[i] != static_cast<unsigned int>(expected[i]))
        {
            return false;
        }
    }

    return true;
}

Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
// May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_ActivationLayer]             = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer]               = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_ConstantLayer]               = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer]          = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_FullyConnectedLayer]         = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_MultiplicationLayer]         = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_PermuteLayer]                = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer]              = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_ReshapeLayer]                = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_SoftmaxLayer]                = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer]         = &Deserializer::ParseSpaceToBatchNd;
}

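// Returns the common LayerBase of the layer at layerIndex, independent of the concrete
// layer type stored in the flatbuffer AnyLayer union.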
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                boost::format("Layer must have a type %1%") %
                Layer::Layer_NONE));
    }
}

std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
{
    auto layer = GetBaseLayer(graph, index);
    assert(layer);
    return layer->layerName()->str();
}

int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    if (layerType == Layer::Layer_InputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
    }
    else if (layerType == Layer::Layer_OutputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
    }
    return 0;
}

armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
{
    switch (dataLayout)
    {
        case armnnSerializer::DataLayout::DataLayout_NHWC:
            return armnn::DataLayout::NHWC;
        case armnnSerializer::DataLayout::DataLayout_NCHW:
        default:
            return armnn::DataLayout::NCHW;
    }
}

armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}

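// Converts a serialized TensorInfo (data type, shape and quantization parameters)
// into an armnn::TensorInfo.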
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
            type = armnn::DataType::QuantisedAsymm8;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}

armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}

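// The two helpers below resolve the LayerBase pointers of the graph's input and output
// layers from the inputIds/outputIds lists stored in the serialized graph.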
Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphInputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numInputs = graphPtr->inputIds()->size();

    LayerBaseRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        uint32_t inputId = graphPtr->inputIds()->Get(i);
        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(inputId));
    }
    return result;
}

Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphOutputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numOutputs = graphPtr->outputIds()->size();
    LayerBaseRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        uint32_t outputId = graphPtr->outputIds()->Get(i);
        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(outputId));
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
                                                         unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
            (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
                                                          unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numOutputs = layer->outputSlots()->size();

    TensorRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        result[i] = layer->outputSlots()->Get(i)->tensorInfo();
    }
    return result;
}

void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(
        boost::str(
            boost::format("Layer not supported. "
                          "layerIndex: %1% "
                          "layerName: %2% / %3%") %
            layerIndex %
            layerName %
            CHECK_LOCATION().AsString()));
}

void Deserializer::ResetParser()
{
    m_Network = armnn::INetworkPtr(nullptr, nullptr);
    m_InputBindings.clear();
    m_OutputBindings.clear();
}

IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}

IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}

INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph(graph);
}

armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
    return CreateNetworkFromGraph(graph);
}

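// Minimal usage sketch for the public API above (illustrative only; the file name
// "network.armnn" and the binding name "input" are hypothetical):
//
//     std::ifstream file("network.armnn", std::ios::binary);
//     auto parser = armnnDeserializer::IDeserializer::Create();
//     armnn::INetworkPtr network = parser->CreateNetworkFromBinary(file);
//     auto inputBindingInfo = parser->GetNetworkInputBindingInfo(0, "input");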
Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
                                                  CHECK_LOCATION().AsString()));
    }
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(
            boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
                                     "flatbuffers format. size:%1% %2%") %
                       len %
                       CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}

INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    m_GraphConnections.emplace_back(graph->layers()->size());
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (size_t connectionIndex = 0; connectionIndex < m_GraphConnections[0].size(); ++connectionIndex)
    {
        if (m_GraphConnections[0][connectionIndex].outputSlot != nullptr)
        {
            for (size_t inputSlotIdx = 0;
                 inputSlotIdx < m_GraphConnections[0][connectionIndex].inputSlots.size();
                 ++inputSlotIdx)
            {
                m_GraphConnections[0][connectionIndex].outputSlot->Connect(
                    *(m_GraphConnections[0][connectionIndex].inputSlots[inputSlotIdx]));
            }
        }
    }

    return std::move(m_Network);
}

BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
                                                          const std::string& name) const
{
    for (auto inputBinding : m_InputBindings)
    {
        if (inputBinding.first == name)
        {
            return inputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No input binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
                                                           const std::string& name) const
{
    for (auto outputBinding : m_OutputBindings)
    {
        if (outputBinding.first == name)
        {
            return outputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No output binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto inputs = GetGraphInputs(graph);
    m_InputBindings.clear();
    m_InputBindings.reserve(inputs.size());
    for (auto const& input : inputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, input->index());
        IConnectableLayer* layer =
            m_Network->AddInputLayer(bindingId, input->layerName()->c_str());

        auto tensorInfo = ToTensorInfo(input->outputSlots()->Get(0)->tensorInfo());
        layer->GetOutputSlot(0).SetTensorInfo(tensorInfo);

        RegisterOutputSlots(graph, input->index(), layer);

        BOOST_ASSERT_MSG(input->layerName()->c_str(), "Input has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(input->layerName()->c_str(), bindingInfo));
    }
}

void Deserializer::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto outputs = GetGraphOutputs(graph);
    m_OutputBindings.clear();
    m_OutputBindings.reserve(outputs.size());
    for (auto const& output : outputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, output->index());
        IConnectableLayer* layer =
            m_Network->AddOutputLayer(bindingId, output->layerName()->c_str());

        RegisterInputSlots(graph, output->index(), layer);

        auto baseLayer = GetBaseLayer(graph, output->index());
        auto sourceLayerIndex = baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex();
        auto sourceLayer = GetBaseLayer(graph, sourceLayerIndex);
        auto tensorInfo = ToTensorInfo(sourceLayer->outputSlots()->Get(0)->tensorInfo());

        BOOST_ASSERT_MSG(output->layerName()->c_str(), "Output has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(output->layerName()->c_str(), bindingInfo));
    }
}

void Deserializer::RegisterOutputSlots(GraphPtr graph,
                                       uint32_t layerIndex,
                                       IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index: %3% %4%") %
                       parsedLayer->outputSlots()->size() %
                       layer->GetNumOutputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumOutputSlots(); ++slotIndex)
    {
        armnn::IOutputSlot* slot = &(layer->GetOutputSlot(slotIndex));
        RegisterOutputSlotOfConnection(layerIndex, slot);
    }
}

void Deserializer::RegisterInputSlots(GraphPtr graph,
                                      uint32_t layerIndex,
                                      armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index:%3% %4%") %
                       parsedLayer->inputSlots()->size() %
                       layer->GetNumInputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumInputSlots(); ++slotIndex)
    {
        armnn::IInputSlot* slot = &(layer->GetInputSlot(slotIndex));
        uint32_t sourceLayerIndex = parsedLayer->inputSlots()->Get(slotIndex)->connection()->sourceLayerIndex();
        RegisterInputSlotOfConnection(sourceLayerIndex, slot);
    }
}

void Deserializer::RegisterInputSlotOfConnection(uint32_t connectionIndex,
                                                 armnn::IInputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);

    Slots& slots = m_GraphConnections[0][connectionIndex];
    slots.inputSlots.push_back(slot);
}

void Deserializer::RegisterOutputSlotOfConnection(uint32_t connectionIndex,
                                                  armnn::IOutputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);

    Slots& slots = m_GraphConnections[0][connectionIndex];

    // assuming there is only one producer for that tensor
    if (slots.outputSlot != nullptr)
    {
        throw ParseException(boost::str(
            boost::format("Another layer has already registered itself as the producer of "
                          "connection:%1% / %2%") %
            connectionIndex %
            CHECK_LOCATION().AsString()));
    }

    slots.outputSlot = slot;
}

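// Each Parse<Layer> function below follows the same pattern: validate the layer's
// inputs/outputs, rebuild the armnn descriptor (and any constant tensors) from the
// flatbuffer data, add the layer to m_Network, set the output tensor info and
// register the layer's slots so they can be connected later.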
void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::ActivationDescriptor descriptor;
    descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
    descriptor.m_A = serializerDescriptor->a();
    descriptor.m_B = serializerDescriptor->b();

    IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
                                                             layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
    auto serializerInput = serializerLayer->input();

    armnn::ConstTensor input = ToConstTensor(serializerInput);

    IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::Convolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
    }
    IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
                                                                weights,
                                                                biases,
                                                                layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::DepthwiseConvolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
    }
    IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                                         weights,
                                                                         biases,
                                                                         layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
    fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
    fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();

    armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());

    armnn::IConnectableLayer* layer;
    if (flatBufferDescriptor->biasEnabled())
    {
        armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  weightsTensor,
                                                  biasTensorData,
                                                  layerName.c_str());
    }
    else
    {
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  weightsTensor,
                                                  layerName.c_str());
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto dimsMapping =
        graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);
    const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));

    IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
                                                              unsigned int layerIndex)
{
    armnn::Pooling2dDescriptor desc;

    switch (pooling2dDesc->poolType())
    {
        case PoolingAlgorithm_Average:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Average;
            break;
        }
        case PoolingAlgorithm_Max:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Max;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
        }
    }

    switch (pooling2dDesc->outputShapeRounding())
    {
        case OutputShapeRounding_Floor:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
            break;
        }
        case OutputShapeRounding_Ceiling:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
        }
    }

    switch (pooling2dDesc->paddingMethod())
    {
        case PaddingMethod_Exclude:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
            break;
        }
        case PaddingMethod_IgnoreValue:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported padding method");
        }
    }

    switch (pooling2dDesc->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    desc.m_PadRight = pooling2dDesc->padRight();
    desc.m_PadLeft = pooling2dDesc->padLeft();
    desc.m_PadBottom = pooling2dDesc->padBottom();
    desc.m_PadTop = pooling2dDesc->padTop();
    desc.m_StrideX = pooling2dDesc->strideX();
    desc.m_StrideY = pooling2dDesc->strideY();
    desc.m_PoolWidth = pooling2dDesc->poolWidth();
    desc.m_PoolHeight = pooling2dDesc->poolHeight();

    return desc;
}

void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

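// Worked example for OutputShapeOfReshape below (illustrative values only): for an input
// tensor of shape [1, 9, 2] (18 elements) and targetDimsIn = { 3, -1 }, the -1 "stretch"
// dimension is inferred as 18 / 3 = 6, giving an output shape of [3, 6].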
armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                     const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(boost::str(
                boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
        }

        auto targetNumElements =
            boost::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}

void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::SoftmaxDescriptor descriptor;
    descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
    auto layerName = GetLayerName(graph, layerIndex);

    IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

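// Note: the serialized pad list is a flat array of (before, after) padding pairs, which
// is why its length must be even and it is read two elements at a time below.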
void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
    auto flatBufferPadList = flatBufferDescriptor->padList();
    auto flatBufferBlockShape = flatBufferDescriptor->blockShape();

    if (flatBufferPadList->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> padList;
    padList.reserve(flatBufferPadList->Length() / 2);
    for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
    {
        padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
    }

    armnn::SpaceToBatchNdDescriptor descriptor;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
    descriptor.m_BlockShape =
        std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
    descriptor.m_PadList = padList;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

} // namespace armnnDeserializer