blob: 08fe7a5e90403b3155115e49e5ef6aa984fae169 [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
8#include <armnn/ArmNN.hpp>
9#include <armnn/Exceptions.hpp>
10
11#include <ParserHelper.hpp>
12#include <Permute.hpp>
13#include <VerificationHelpers.hpp>
14
15#include <boost/filesystem.hpp>
16#include <boost/format.hpp>
17#include <boost/core/ignore_unused.hpp>
18#include <boost/assert.hpp>
19#include <boost/format.hpp>
20#include <boost/log/trivial.hpp>
21
22// The generated code based on the Serialize schema:
Matthew Bentham268509a2019-02-25 13:58:24 +000023#include <ArmnnSchema_generated.h>
Kevin May43a799c2019-02-08 16:31:42 +000024
25#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000026#include <algorithm>
27#include <limits>
28#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000029
30using armnn::ParseException;
31using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000032using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000033
Derek Lamberti0028d1b2019-02-20 13:57:42 +000034namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000035{
Kevin May43a799c2019-02-08 16:31:42 +000036
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000037namespace
38{
Kevin May43a799c2019-02-08 16:31:42 +000039
40const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
41
Derek Lamberti0028d1b2019-02-20 13:57:42 +000042 void CheckGraph(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000043 unsigned int layersIndex,
44 const CheckLocation& location)
45{
46 if (graph->layers() == nullptr)
47 {
48 throw ParseException(
49 boost::str(
50 boost::format("%1% was called with invalid (null) graph. "
51 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
52 "layers:%2% at %3%") %
53 location.m_Function %
54 layersIndex %
55 location.FileLine()));
56 }
57 else if (layersIndex >= graph->layers()->size())
58 {
59 throw ParseException(
60 boost::str(
61 boost::format("%1% was called with an invalid layers index. "
62 "layers:%2% at %3%") %
63 location.m_Function %
64 layersIndex %
65 location.FileLine()));
66 }
67}
68
Derek Lamberti0028d1b2019-02-20 13:57:42 +000069void CheckLayers(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000070 unsigned int layersIndex,
71 unsigned int layerIndex,
72 const CheckLocation& location)
73{
74 if (graph->layers() == nullptr)
75 {
76 throw ParseException(
77 boost::str(
78 boost::format("%1% was called with invalid (null) graph. "
79 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000080 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000081 location.m_Function %
82 layersIndex %
83 location.FileLine()));
84 }
85 else if (layersIndex >= graph->layers()->size())
86 {
87 throw ParseException(
88 boost::str(
89 boost::format("%1% was called with an invalid layers index. "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000090 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000091 location.m_Function %
92 layersIndex %
93 location.FileLine()));
94 }
95 else if (layerIndex >= graph->layers()[layersIndex].size()
96 && layerIndex != VIRTUAL_LAYER_ID)
97 {
98 throw ParseException(
99 boost::str(
100 boost::format("%1% was called with an invalid layer index. "
101 "layers:%2% layer:%3% at %4%") %
102 location.m_Function %
103 layersIndex %
104 layerIndex %
105 location.FileLine()));
106 }
107}
108
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000109void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000110 const CheckLocation& location)
111{
112 if (rawPtr == nullptr)
113 {
114 throw ParseException(
115 boost::str(
116 boost::format("%1% was called with a null tensor pointer. "
117 "at %2%") %
118 location.m_Function %
119 location.FileLine()));
120
121 }
122}
123
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000124void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000125 const CheckLocation& location)
126{
127 if (rawPtr == nullptr)
128 {
129 throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
130 location.m_Function %
131 location.FileLine()));
132 }
133}
134
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000135void CheckConstTensorSize(const unsigned int constTensorSize,
136 const unsigned int tensorSize,
137 const CheckLocation& location)
138{
139 if (constTensorSize != tensorSize)
140 {
141 throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
142 location.m_Function %
143 location.FileLine()));
144 }
145}
146
Kevin May43a799c2019-02-08 16:31:42 +0000147#define CHECK_TENSOR_PTR(TENSOR_PTR) \
148 CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())
149
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000150#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
151 CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())
152
Mike Kellya0766c32019-02-19 17:22:07 +0000153#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
154 CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())
155
Kevin May43a799c2019-02-08 16:31:42 +0000156#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
157 CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())
158
159#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
160 CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
161}
162
Saoirse Stewart263829c2019-02-19 15:54:14 +0000163bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
164{
165 const unsigned int actualSize = actual.GetNumDimensions();
166 if (actualSize != expected.size())
167 {
168 return false;
169 }
170
171 for (unsigned int i = 0u; i < actualSize; i++)
172 {
173 if (actual[i] != static_cast<unsigned int>(expected[i]))
174 {
175 return false;
176 }
177 }
178
179 return true;
180}
181
// Constructs a Deserializer with an empty network and a parser dispatch table
// indexed by the flatbuffers Layer enum. Every entry defaults to
// ParseUnsupportedLayer; the supported layer types are then overwritten below,
// so any layer type missing from this list fails loudly at parse time.
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_ActivationLayer]             = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer]               = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_Convolution2dLayer]          = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_FullyConnectedLayer]         = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_MultiplicationLayer]         = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_PermuteLayer]                = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer]              = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_ReshapeLayer]                = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_SoftmaxLayer]                = &Deserializer::ParseSoftmax;
}
199
// Returns the LayerBase record shared by all serialized layer types for the layer at
// layerIndex. Input/Output layers wrap their base in a BindableLayerBase, hence the
// extra ->base() hop for those two cases.
// Throws ParseException if the layer has no recognised type (Layer_NONE).
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_InputLayer:
            // Bindable layer: base()->base() unwraps BindableLayerBase -> LayerBase.
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_OutputLayer:
            // Bindable layer: base()->base() unwraps BindableLayerBase -> LayerBase.
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                  boost::format("Layer must have a type %1%") %
                  Layer::Layer_NONE));
    }
}
237
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000238int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000239{
240 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
241
242 if (layerType == Layer::Layer_InputLayer)
243 {
244 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
245 }
246 else if ( layerType == Layer::Layer_OutputLayer )
247 {
248 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
249 }
250 return 0;
251}
252
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000253armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000254{
255 switch (dataLayout)
256 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000257 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000258 return armnn::DataLayout::NHWC;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000259 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000260 default:
261 return armnn::DataLayout::NCHW;
262 }
263}
264
// Converts a serialized ActivationFunction enum value to the armnn equivalent.
// NOTE(review): unrecognised values silently map to Sigmoid (the default case)
// rather than throwing — callers get no signal for an out-of-range enum.
armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}
291
// Builds an armnn::TensorInfo from a serialized TensorInfo record: maps the data
// type, copies the dimensions, and carries over the quantization parameters
// (scale/offset are read unconditionally; for non-quantized types they are the
// schema defaults).
// Throws ParseException if tensorPtr is null or its data type is unsupported.
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
            type = armnn::DataType::QuantisedAsymm8;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    // Copy the flatbuffers dimension vector into contiguous storage for TensorInfo.
    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}
340
// Builds an armnn::ConstTensor that points into the serialized buffer's payload
// (Byte/Short/Int/Long variants). No copy is made: the returned tensor aliases
// the flatbuffer data, so the buffer must outlive the tensor.
// Throws ParseException if the pointer is null, the element count does not match
// the tensor shape, or the payload type is unsupported.
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}
383
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000384Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphInputs(const GraphPtr& graphPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000385{
386
387 CHECK_GRAPH(graphPtr, 0);
388 const auto& numInputs = graphPtr->inputIds()->size();
389
390 LayerBaseRawPtrVector result(numInputs);
391
392 for (unsigned int i=0; i<numInputs; ++i)
393 {
Mike Kelly8c1701a2019-02-11 17:01:27 +0000394 uint32_t inputId = graphPtr->inputIds()->Get(i);
Kevin May43a799c2019-02-08 16:31:42 +0000395 result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(inputId));
396 }
397 return result;
398}
399
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000400Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphOutputs(const GraphPtr& graphPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000401{
402 CHECK_GRAPH(graphPtr, 0);
403 const auto& numOutputs = graphPtr->outputIds()->size();
Kevin May43a799c2019-02-08 16:31:42 +0000404 LayerBaseRawPtrVector result(numOutputs);
405
406 for (unsigned int i=0; i<numOutputs; ++i)
407 {
Mike Kelly8c1701a2019-02-11 17:01:27 +0000408 uint32_t outputId = graphPtr->outputIds()->Get(i);
Saoirse Stewart263829c2019-02-19 15:54:14 +0000409
Kevin May43a799c2019-02-08 16:31:42 +0000410 result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(outputId));
411 }
412 return result;
413}
414
// Returns the serialized tensor-info of each input of the layer at layerIndex,
// resolved by following each input slot's connection back to the producing
// layer's output slot.
// NOTE(review): only the producer's output slot 0 is consulted — assumes each
// connection originates from the first output slot; confirm against the schema.
Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
                                                         unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        // Cast to signed so CHECKED_NON_NEGATIVE can reject corrupt (huge) indices.
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
                (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}
432
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000433Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000434 unsigned int layerIndex)
435{
436 CHECK_LAYERS(graphPtr, 0, layerIndex);
437 auto layer = GetBaseLayer(graphPtr, layerIndex);
438 const auto& numOutputs = layer->outputSlots()->size();
439
440 TensorRawPtrVector result(numOutputs);
441
442 for (unsigned int i=0; i<numOutputs; ++i)
443 {
444 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
445 }
446 return result;
447}
448
// Default entry in the parser dispatch table: reached for any layer type without a
// registered parser. Always throws ParseException naming the offending layer.
void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(
        boost::str(
            boost::format("Layer not supported. "
                          "layerIndex: %1% "
                          "layerName: %2% / %3%") %
            layerIndex %
            layerName %
            CHECK_LOCATION().AsString()));
}
462
// Clears all per-parse state (network under construction and the input/output
// binding tables) so the parser instance can be reused for another buffer.
void Deserializer::ResetParser()
{
    m_Network = armnn::INetworkPtr(nullptr, nullptr);
    m_InputBindings.clear();
    m_OutputBindings.clear();
}
469
// Factory: returns a raw, caller-owned Deserializer (pair with IDeserializer::Destroy).
IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}
474
// Factory: returns a smart pointer that destroys the parser via IDeserializer::Destroy.
IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}
479
// Destroys a parser obtained from CreateRaw()/Create().
void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}
484
// Deserializes a flatbuffers-serialized network held in memory.
// Resets parser state first, so the instance can be reused across calls.
INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
     ResetParser();
     GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
     return CreateNetworkFromGraph(graph);
}
491
// Deserializes a flatbuffers-serialized network read from a stream.
// The whole stream is drained into a temporary buffer before parsing, since the
// flatbuffers verifier needs random access to the full content.
armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
    return CreateNetworkFromGraph(graph);
}
499
// Verifies that the buffer is a well-formed SerializedGraph flatbuffer and returns
// the root graph pointer. The returned GraphPtr aliases binaryContent — the buffer
// must stay alive while the graph is used.
// Throws InvalidArgumentException for a null buffer, ParseException if the
// flatbuffers verification fails.
Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
                                                  CHECK_LOCATION().AsString()));
    }
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(
            boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
                                     "flatbuffers format. size:%1% %2%") %
                       len %
                       CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}
518
// Builds an armnn INetwork from a verified serialized graph in three phases:
// 1) dispatch each non-Input/Output layer to its registered parser (which creates
//    the armnn layer and records its slots in m_GraphConnections),
// 2) create the Input/Output layers and their bindings,
// 3) wire every recorded producer output slot to its consumer input slots.
// Ownership of the built network is transferred to the caller.
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    // One Slots entry per serialized layer; index 0 of m_GraphConnections is the
    // connection table for this graph.
    m_GraphConnections.emplace_back(graph->layers()->size());
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (size_t connectionIndex = 0; connectionIndex < m_GraphConnections[0].size(); ++connectionIndex)
    {
        if (m_GraphConnections[0][connectionIndex].outputSlot != nullptr)
        {
            for (size_t inputSlotIdx = 0;
                 inputSlotIdx < m_GraphConnections[0][connectionIndex].inputSlots.size();
                 ++inputSlotIdx)
            {
                m_GraphConnections[0][connectionIndex].outputSlot->Connect(
                    *(m_GraphConnections[0][connectionIndex].inputSlots[inputSlotIdx]));
            }
        }
    }

    // Hand the finished network to the caller; m_Network is left null.
    return std::move(m_Network);
}
557
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000558BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000559 const std::string& name) const
560{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000561 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000562 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000563 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000564 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000565 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000566 }
567 }
568 throw ParseException(
569 boost::str(
570 boost::format("No input binding found for layer:%1% / %2%") %
571 name %
572 CHECK_LOCATION().AsString()));
573}
574
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000575BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000576 const std::string& name) const
577{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000578 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000579 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000580 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000581 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000582 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000583 }
584 }
585 throw ParseException(
586 boost::str(
587 boost::format("No output binding found for layer:%1% / %2%") %
588 name %
589 CHECK_LOCATION().AsString()));
590}
591
// Creates an armnn InputLayer for each graph input, propagates its tensor info,
// registers its output slots in the connection table, and records a
// (name -> BindingPointInfo) entry for later GetNetworkInputBindingInfo lookups.
void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto inputs = GetGraphInputs(graph);
    m_InputBindings.clear();
    m_InputBindings.reserve(inputs.size());
    for (auto const& input : inputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, input->index());
        IConnectableLayer* layer =
            m_Network->AddInputLayer(bindingId, input->layerName()->c_str());

        // Input layers carry their own tensor info on output slot 0.
        auto tensorInfo = ToTensorInfo(input->outputSlots()->Get(0)->tensorInfo());
        layer->GetOutputSlot(0).SetTensorInfo(tensorInfo);

        RegisterOutputSlots(graph, input->index(), layer);

        BOOST_ASSERT_MSG(input->layerName()->c_str(), "Input has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(input->layerName()->c_str(), bindingInfo));
    }
}
614
// Creates an armnn OutputLayer for each graph output, registers its input slot in
// the connection table, and records a (name -> BindingPointInfo) entry for later
// GetNetworkOutputBindingInfo lookups. The binding's tensor info is taken from
// the producing layer's output slot 0, since output layers have no tensor of
// their own.
void Deserializer::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto outputs = GetGraphOutputs(graph);
    m_OutputBindings.clear();
    m_OutputBindings.reserve(outputs.size());
    for (auto const& output : outputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, output->index());
        IConnectableLayer* layer =
            m_Network->AddOutputLayer(bindingId, output->layerName()->c_str());

        RegisterInputSlots(graph, output->index(), layer);

        // Follow the output layer's single input connection back to its producer
        // to discover the tensor info exposed through the binding.
        auto baseLayer = GetBaseLayer(graph, output->index());
        auto sourceLayerIndex = baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex();
        auto sourceLayer = GetBaseLayer(graph, sourceLayerIndex);
        auto tensorInfo = ToTensorInfo(sourceLayer->outputSlots()->Get(0)->tensorInfo());

        BOOST_ASSERT_MSG(output->layerName()->c_str(), "Output has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(output->layerName()->c_str(), bindingInfo));
    }
}
639
// Registers every output slot of the freshly created armnn layer as the producer
// for connection index layerIndex in the connection table.
// Throws ParseException if the serialized slot count disagrees with the armnn
// layer's slot count.
void Deserializer::RegisterOutputSlots(GraphPtr graph,
                                       uint32_t layerIndex,
                                       IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index: %3% %4%") %
                       parsedLayer->outputSlots()->size() %
                       layer->GetNumOutputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumOutputSlots(); ++slotIndex)
    {
        armnn::IOutputSlot* slot = &(layer->GetOutputSlot(slotIndex));
        // All output slots of a layer register under the layer's own index.
        RegisterOutputSlotOfConnection(layerIndex, slot);
    }
}
664
// Registers every input slot of the freshly created armnn layer as a consumer of
// the connection owned by its serialized source layer.
// Throws ParseException if the serialized slot count disagrees with the armnn
// layer's slot count.
void Deserializer::RegisterInputSlots(GraphPtr graph,
                                      uint32_t layerIndex,
                                      armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index:%3% %4%") %
                       parsedLayer->inputSlots()->size() %
                       layer->GetNumInputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumInputSlots(); ++slotIndex)
    {
        armnn::IInputSlot* slot = &(layer->GetInputSlot(slotIndex));
        // Each serialized input slot names the producer layer it connects from.
        uint32_t sourceLayerIndex = parsedLayer->inputSlots()->Get(slotIndex)->connection()->sourceLayerIndex();
        RegisterInputSlotOfConnection(sourceLayerIndex, slot);
    }
}
690
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000691void Deserializer::RegisterInputSlotOfConnection(uint32_t connectionIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000692 armnn::IInputSlot* slot)
693{
694 BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);
695
696 Slots& slots = m_GraphConnections[0][connectionIndex];
697 slots.inputSlots.push_back(slot);
698}
699
// Records an output slot as the (single) producer of the given connection index.
// Throws ParseException if a producer has already been registered for it.
void Deserializer::RegisterOutputSlotOfConnection(uint32_t connectionIndex,
                                                  armnn::IOutputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);

    Slots& slots = m_GraphConnections[0][connectionIndex];

    // assuming there is only one producer for that tensor
    if (slots.outputSlot != nullptr)
    {
        throw ParseException(boost::str(
            boost::format("Another layer has already registered itself as the producer of "
                          "connection:%1% / %2%") %
            connectionIndex %
            CHECK_LOCATION().AsString()));
    }

    slots.outputSlot = slot;
}
719
// Parses a serialized ActivationLayer: validates one input and one output, rebuilds
// the ActivationDescriptor (function, A, B), creates the armnn layer with a
// synthesized "Activation:<index>" name, sets the output tensor info, and registers
// its slots in the connection table.
void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = boost::str(boost::format("Activation:%1%") % layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::ActivationDescriptor descriptor;
    descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
    descriptor.m_A = serializerDescriptor->a();
    descriptor.m_B = serializerDescriptor->b();

    IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
                                                             layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
748
Derek Lamberti8ddae332019-02-21 16:29:43 +0000749void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000750{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000751 CHECK_LAYERS(graph, 0, layerIndex);
752 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000753 CHECK_LOCATION();
754 CHECK_VALID_SIZE(inputs.size(), 2);
755
Derek Lamberti8ddae332019-02-21 16:29:43 +0000756 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000757 CHECK_VALID_SIZE(outputs.size(), 1);
758
Saoirse Stewart3166c3e2019-02-18 15:24:53 +0000759 m_layerName = boost::str(boost::format("Addition:%1%") % layerIndex);
760 IConnectableLayer* layer = m_Network->AddAdditionLayer(m_layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +0000761
762 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
763 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
764
Derek Lamberti8ddae332019-02-21 16:29:43 +0000765 RegisterInputSlots(graph, layerIndex, layer);
766 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +0000767}
768
Derek Lamberti8ddae332019-02-21 16:29:43 +0000769void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +0000770{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000771 CHECK_LAYERS(graph, 0, layerIndex);
772 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +0000773 CHECK_LOCATION();
774 CHECK_VALID_SIZE(inputs.size(), 1);
775
Derek Lamberti8ddae332019-02-21 16:29:43 +0000776 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +0000777 CHECK_VALID_SIZE(outputs.size(), 1);
778
779 auto layerName = boost::str(boost::format("Convolution2d:%1%") % layerIndex);
780
Derek Lamberti8ddae332019-02-21 16:29:43 +0000781 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Mike Kellya0766c32019-02-19 17:22:07 +0000782 auto serializerDescriptor = serializerLayer->descriptor();
783
784 armnn::Convolution2dDescriptor descriptor;
785 descriptor.m_PadLeft = serializerDescriptor->padLeft();
786 descriptor.m_PadRight = serializerDescriptor->padRight();
787 descriptor.m_PadTop = serializerDescriptor->padTop();
788 descriptor.m_PadBottom = serializerDescriptor->padBottom();
789 descriptor.m_StrideX = serializerDescriptor->strideX();
790 descriptor.m_StrideY = serializerDescriptor->strideY();;
791 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
792 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
793
794 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
795 armnn::ConstTensor biases;
796
797 if (descriptor.m_BiasEnabled)
798 {
799 biases = ToConstTensor(serializerLayer->biases());
800 }
801 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
802 weights,
803 biases,
804 layerName.c_str());
805 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
806 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
807
Derek Lamberti8ddae332019-02-21 16:29:43 +0000808 RegisterInputSlots(graph, layerIndex, layer);
809 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +0000810}
811
Derek Lamberti8ddae332019-02-21 16:29:43 +0000812void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000813{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000814 CHECK_LAYERS(graph, 0, layerIndex);
815 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000816 CHECK_LOCATION();
817 CHECK_VALID_SIZE(inputs.size(), 1);
818
Derek Lamberti8ddae332019-02-21 16:29:43 +0000819 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000820 CHECK_VALID_SIZE(outputs.size(), 1);
821
822 auto layerName = boost::str(boost::format("DepthwiseConvolution2d:%1%") % layerIndex);
823
Derek Lamberti8ddae332019-02-21 16:29:43 +0000824 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000825 auto serializerDescriptor = serializerLayer->descriptor();
826
827 armnn::DepthwiseConvolution2dDescriptor descriptor;
828 descriptor.m_PadLeft = serializerDescriptor->padLeft();
829 descriptor.m_PadRight = serializerDescriptor->padRight();
830 descriptor.m_PadTop = serializerDescriptor->padTop();
831 descriptor.m_PadBottom = serializerDescriptor->padBottom();
832 descriptor.m_StrideX = serializerDescriptor->strideX();
833 descriptor.m_StrideY = serializerDescriptor->strideY();;
834 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
835 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
836
837 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
838 armnn::ConstTensor biases;
839
840 if (descriptor.m_BiasEnabled)
841 {
842 biases = ToConstTensor(serializerLayer->biases());
843 }
844 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
845 weights,
846 biases,
847 layerName.c_str());
848
849 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
850 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
851
Derek Lamberti8ddae332019-02-21 16:29:43 +0000852 RegisterInputSlots(graph, layerIndex, layer);
853 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000854}
855
Derek Lamberti8ddae332019-02-21 16:29:43 +0000856void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +0000857{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000858 CHECK_LAYERS(graph, 0, layerIndex);
859 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +0000860 CHECK_LOCATION();
861 CHECK_VALID_SIZE(inputs.size(), 2);
862
Derek Lamberti8ddae332019-02-21 16:29:43 +0000863 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +0000864 CHECK_VALID_SIZE(outputs.size(), 1);
865
Saoirse Stewart3166c3e2019-02-18 15:24:53 +0000866 m_layerName = boost::str(boost::format("Multiplication:%1%") % layerIndex);
867 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(m_layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +0000868
869 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
870 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
871
Derek Lamberti8ddae332019-02-21 16:29:43 +0000872 RegisterInputSlots(graph, layerIndex, layer);
873 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +0000874}
875
Derek Lamberti8ddae332019-02-21 16:29:43 +0000876void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +0000877{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000878 CHECK_LAYERS(graph, 0, layerIndex);
879 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +0000880 CHECK_LOCATION();
881 CHECK_VALID_SIZE(inputs.size(), 1);
882
Derek Lamberti8ddae332019-02-21 16:29:43 +0000883 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +0000884 CHECK_VALID_SIZE(outputs.size(), 1);
885
886 auto layerName = boost::str(boost::format("FullyConnected:%1%") % layerIndex);
887
Derek Lamberti8ddae332019-02-21 16:29:43 +0000888 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +0000889 auto flatBufferDescriptor = flatBufferLayer->descriptor();
890
891 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
892 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
893 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
894
895 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
896
897 armnn::IConnectableLayer* layer;
898 if (flatBufferDescriptor->biasEnabled())
899 {
900 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
901 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
902 weightsTensor,
903 biasTensorData,
904 layerName.c_str());
905 }
906 else
907 {
908 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
909 weightsTensor,
910 layerName.c_str());
911 }
912
913 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
914 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
915
Derek Lamberti8ddae332019-02-21 16:29:43 +0000916 RegisterInputSlots(graph, layerIndex, layer);
917 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +0000918}
919
Derek Lamberti8ddae332019-02-21 16:29:43 +0000920void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +0000921{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000922 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +0000923
924 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +0000925 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +0000926
Derek Lamberti8ddae332019-02-21 16:29:43 +0000927 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +0000928 CHECK_VALID_SIZE(inputs.size(), 1);
929
Derek Lamberti8ddae332019-02-21 16:29:43 +0000930 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +0000931 CHECK_VALID_SIZE(outputs.size(), 1);
932 auto outputInfo = ToTensorInfo(outputs[0]);
933
934 m_layerName = boost::str(boost::format("Permute:%1%") % layerIndex);
935 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
936
937 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, m_layerName.c_str());
938 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
939
Derek Lamberti8ddae332019-02-21 16:29:43 +0000940 RegisterInputSlots(graph, layerIndex, layer);
941 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +0000942}
943
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000944armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +0000945 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +0000946{
947 armnn::Pooling2dDescriptor desc;
948
949 switch (pooling2dDesc->poolType())
950 {
951 case PoolingAlgorithm_Average:
952 {
953 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
954 m_layerName = boost::str(boost::format("AveragePool2D:%1%") % layerIndex);
955 break;
956 }
957 case PoolingAlgorithm_Max:
958 {
959 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
960 m_layerName = boost::str(boost::format("MaxPool2D:%1%") % layerIndex);
961 break;
962 }
963 default:
964 {
965 BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
966 }
967 }
968
969 switch (pooling2dDesc->outputShapeRounding())
970 {
971 case OutputShapeRounding_Floor:
972 {
973 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
974 break;
975 }
976 case OutputShapeRounding_Ceiling:
977 {
978 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
979 break;
980 }
981 default:
982 {
983 BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
984 }
985 }
986
987 switch (pooling2dDesc->paddingMethod())
988 {
989 case PaddingMethod_Exclude:
990 {
991 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
992 break;
993 }
994 case PaddingMethod_IgnoreValue:
995 {
996 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
997 break;
998 }
999 default:
1000 {
1001 BOOST_ASSERT_MSG(false, "Unsupported padding method");
1002 }
1003 }
1004
1005 switch (pooling2dDesc->dataLayout())
1006 {
1007 case DataLayout_NCHW:
1008 {
1009 desc.m_DataLayout = armnn::DataLayout::NCHW;
1010 break;
1011 }
1012 case DataLayout_NHWC:
1013 {
1014 desc.m_DataLayout = armnn::DataLayout::NHWC;
1015 break;
1016 }
1017 default:
1018 {
1019 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1020 }
1021 }
1022
1023 desc.m_PadRight = pooling2dDesc->padRight();
1024 desc.m_PadLeft = pooling2dDesc->padLeft();
1025 desc.m_PadBottom = pooling2dDesc->padBottom();
1026 desc.m_PadTop = pooling2dDesc->padTop();
1027 desc.m_StrideX = pooling2dDesc->strideX();
1028 desc.m_StrideY = pooling2dDesc->strideY();
1029 desc.m_PoolWidth = pooling2dDesc->poolWidth();
1030 desc.m_PoolHeight = pooling2dDesc->poolHeight();
1031
1032 return desc;
1033}
1034
Derek Lamberti8ddae332019-02-21 16:29:43 +00001035void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001036{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001037 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001038
Derek Lamberti8ddae332019-02-21 16:29:43 +00001039 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001040
Derek Lamberti8ddae332019-02-21 16:29:43 +00001041 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001042 CHECK_VALID_SIZE(inputs.size(), 1);
1043
Derek Lamberti8ddae332019-02-21 16:29:43 +00001044 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001045 CHECK_VALID_SIZE(outputs.size(), 1);
1046 auto outputInfo = ToTensorInfo(outputs[0]);
1047
1048 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
1049
1050 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, m_layerName.c_str());
1051 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1052
Derek Lamberti8ddae332019-02-21 16:29:43 +00001053 RegisterInputSlots(graph, layerIndex, layer);
1054 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001055}
1056
// Computes the concrete output TensorInfo for a Reshape, resolving an optional
// single "stretch" dimension (encoded as -1, i.e. 0xFFFFFFFF in the uint32
// target dims) from the input's element count. Throws ParseException if more
// than one dimension is -1. Quantization parameters are inherited from
// inputTensorInfo via the copy below.
armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                     const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    // -1 converts to 0xFFFFFFFF for the uint32_t comparison, matching the
    // serialized encoding of the stretch dimension.
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        // A second -1 after the first is ambiguous - reject it.
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(boost::str(
                boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
        }

        // Seeding the int32 product with -1 cancels the single -1 stretch dim,
        // leaving the (positive) product of the fixed dimensions.
        auto targetNumElements =
            boost::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        // Replace the stretch dim with whatever factor makes the element
        // counts match. NOTE(review): assumes the fixed dims divide the input
        // element count evenly - a mismatch truncates silently; confirm callers
        // validate the resulting shape.
        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}
1086
// Deserializes a ReshapeLayer: resolves the target shape (including a possible
// -1 stretch dimension) via OutputShapeOfReshape, validates it against the
// serialized output tensor's dimensions, then adds and wires up the layer.
void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    // Resolve any -1 stretch dimension against the input's element count.
    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    // NOTE(review): the 'inputs.size() > 1' guard looks suspicious - a
    // deserialized Reshape appears to carry its target shape in the descriptor
    // rather than a second input, so this shape-consistency check may never
    // run. Confirm whether the guard should be on outputs/always-on instead.
    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = boost::str(boost::format("Reshape:%1%") % layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1129
Derek Lamberti8ddae332019-02-21 16:29:43 +00001130void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001131{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001132 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001133
Derek Lamberti8ddae332019-02-21 16:29:43 +00001134 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001135 CHECK_VALID_SIZE(inputs.size(), 1);
1136
Derek Lamberti8ddae332019-02-21 16:29:43 +00001137 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001138 CHECK_VALID_SIZE(outputs.size(), 1);
1139
1140 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00001141 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001142
1143 const std::string layerName = boost::str(boost::format("Softmax:%1%") % layerIndex);
1144 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
1145
1146 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1147 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1148
Derek Lamberti8ddae332019-02-21 16:29:43 +00001149 RegisterInputSlots(graph, layerIndex, layer);
1150 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001151}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001152
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001153} // namespace armnnDeserializer