//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "Deserializer.hpp"

#include <armnn/ArmNN.hpp>
#include <armnn/Exceptions.hpp>

#include <ParserHelper.hpp>
#include <Permute.hpp>
#include <VerificationHelpers.hpp>

#include <boost/filesystem.hpp>
#include <boost/format.hpp>
#include <boost/core/ignore_unused.hpp>
#include <boost/assert.hpp>
#include <boost/log/trivial.hpp>

// The generated code based on the Serialize schema:
#include <Schema_generated.h>

#include <fstream>
#include <algorithm>
#include <limits>
#include <numeric>

using armnn::ParseException;
using namespace armnn;
using namespace armnnSerializer;

namespace armnnDeserializer
{

namespace
{

const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();

void CheckGraph(const Deserializer::GraphPtr& graph,
                unsigned int layersIndex,
                const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
}

void CheckLayers(const Deserializer::GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layerIndex >= graph->layers()[layersIndex].size()
             && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layer index. "
                              "layers:%2% layer:%3% at %4%") %
                location.m_Function %
                layersIndex %
                layerIndex %
                location.FileLine()));
    }
}

void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
                    const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with a null tensor pointer. "
                              "at %2%") %
                location.m_Function %
                location.FileLine()));
    }
}

void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
                         const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
}

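// compares an actual tensor shape against the expected dimensions; true only if rank and every extent match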
bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
{
    const unsigned int actualSize = actual.GetNumDimensions();
    if (actualSize != expected.size())
    {
        return false;
    }

    for (unsigned int i = 0u; i < actualSize; i++)
    {
        if (actual[i] != static_cast<unsigned int>(expected[i]))
        {
            return false;
        }
    }

    return true;
}

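// every layer type defaults to ParseUnsupportedLayer; the constructor overwrites the entries for supported layers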
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
// May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_ActivationLayer]             = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer]               = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_Convolution2dLayer]          = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_FullyConnectedLayer]         = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_MultiplicationLayer]         = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_PermuteLayer]                = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer]              = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_ReshapeLayer]                = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_SoftmaxLayer]                = &Deserializer::ParseSoftmax;
}

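// resolves the union type of the layer at layerIndex and returns its common LayerBase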
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch (layerType)
    {
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                boost::format("Layer must have a type %1%") %
                Layer::Layer_NONE));
    }
}

int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    if (layerType == Layer::Layer_InputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
    }
    else if (layerType == Layer::Layer_OutputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
    }
    return 0;
}

armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
{
    switch (dataLayout)
    {
        case armnnSerializer::DataLayout::DataLayout_NHWC:
            return armnn::DataLayout::NHWC;
        case armnnSerializer::DataLayout::DataLayout_NCHW:
        default:
            return armnn::DataLayout::NCHW;
    }
}

armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}

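// converts the serialized tensor description (data type, dimensions, quantization parameters) into an armnn::TensorInfo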
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
            type = armnn::DataType::QuantisedAsymm8;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}

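// wraps the serialized constant data (e.g. weights or biases) in an armnn::ConstTensor; the flatbuffer memory is referenced, not copied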
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
            return armnn::ConstTensor(tensorInfo, constTensorPtr->data_as_ByteData()->data()->data());
        case ConstTensorData_ShortData:
            return armnn::ConstTensor(tensorInfo, constTensorPtr->data_as_ShortData()->data()->data());
        case ConstTensorData_IntData:
            return armnn::ConstTensor(tensorInfo, constTensorPtr->data_as_IntData()->data()->data());
        case ConstTensorData_LongData:
            return armnn::ConstTensor(tensorInfo, constTensorPtr->data_as_LongData()->data()->data());
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}

Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphInputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numInputs = graphPtr->inputIds()->size();

    LayerBaseRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        uint32_t inputId = graphPtr->inputIds()->Get(i);
        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(inputId));
    }
    return result;
}

Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphOutputs(const GraphPtr& graphPtr)
{
    CHECK_GRAPH(graphPtr, 0);
    const auto& numOutputs = graphPtr->outputIds()->size();
    LayerBaseRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        uint32_t outputId = graphPtr->outputIds()->Get(i);

        result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(outputId));
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
                                                         unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
            (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
                                                          unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numOutputs = layer->outputSlots()->size();

    TensorRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        result[i] = layer->outputSlots()->Get(i)->tensorInfo();
    }
    return result;
}

void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(
        boost::str(
            boost::format("Layer not supported. "
                          "layerIndex: %1% "
                          "layerName: %2% / %3%") %
            layerIndex %
            layerName %
            CHECK_LOCATION().AsString()));
}

void Deserializer::ResetParser()
{
    m_Network = armnn::INetworkPtr(nullptr, nullptr);
    m_InputBindings.clear();
    m_OutputBindings.clear();
}

IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}

IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}

INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph(graph);
}

armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
    return CreateNetworkFromGraph(graph);
}

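// verifies that the buffer really contains a SerializedGraph flatbuffer before returning its root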
Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
                                                  CHECK_LOCATION().AsString()));
    }
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(
            boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
                                     "flatbuffers format. size:%1% %2%") %
                       len %
                       CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}

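// builds the INetwork in stages: parse every non input/output layer through the registered parser functions,
// add the bound input/output layers, then wire up the recorded output and input slots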
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    m_GraphConnections.emplace_back(graph->layers()->size());
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (size_t connectionIndex = 0; connectionIndex < m_GraphConnections[0].size(); ++connectionIndex)
    {
        if (m_GraphConnections[0][connectionIndex].outputSlot != nullptr)
        {
            for (size_t inputSlotIdx = 0;
                 inputSlotIdx < m_GraphConnections[0][connectionIndex].inputSlots.size();
                 ++inputSlotIdx)
            {
                m_GraphConnections[0][connectionIndex].outputSlot->Connect(
                    *(m_GraphConnections[0][connectionIndex].inputSlots[inputSlotIdx]));
            }
        }
    }

    return std::move(m_Network);
}

BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
                                                          const std::string& name) const
{
    for (auto inputBinding : m_InputBindings)
    {
        if (inputBinding.first == name)
        {
            return inputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No input binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
                                                           const std::string& name) const
{
    for (auto outputBinding : m_OutputBindings)
    {
        if (outputBinding.first == name)
        {
            return outputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No output binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

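// adds an InputLayer for each graph input and records its name, binding id and tensor info for GetNetworkInputBindingInfo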
void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto inputs = GetGraphInputs(graph);
    m_InputBindings.clear();
    m_InputBindings.reserve(inputs.size());
    for (auto const& input : inputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, input->index());
        IConnectableLayer* layer =
            m_Network->AddInputLayer(bindingId, input->layerName()->c_str());

        auto tensorInfo = ToTensorInfo(input->outputSlots()->Get(0)->tensorInfo());
        layer->GetOutputSlot(0).SetTensorInfo(tensorInfo);

        RegisterOutputSlots(graph, input->index(), layer);

        BOOST_ASSERT_MSG(input->layerName()->c_str(), "Input has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(input->layerName()->c_str(), bindingInfo));
    }
}

void Deserializer::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto outputs = GetGraphOutputs(graph);
    m_OutputBindings.clear();
    m_OutputBindings.reserve(outputs.size());
    for (auto const& output : outputs)
    {
        LayerBindingId bindingId = GetBindingLayerInfo(graph, output->index());
        IConnectableLayer* layer =
            m_Network->AddOutputLayer(bindingId, output->layerName()->c_str());

        RegisterInputSlots(graph, output->index(), layer);

        auto baseLayer = GetBaseLayer(graph, output->index());
        auto sourceLayerIndex = baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex();
        auto sourceLayer = GetBaseLayer(graph, sourceLayerIndex);
        auto tensorInfo = ToTensorInfo(sourceLayer->outputSlots()->Get(0)->tensorInfo());

        BOOST_ASSERT_MSG(output->layerName()->c_str(), "Output has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(output->layerName()->c_str(), bindingInfo));
    }
}

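// the Register*Slots helpers remember which armnn slots belong to which serialized layer index
// so CreateNetworkFromGraph can connect them once all layers exist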
void Deserializer::RegisterOutputSlots(GraphPtr graph,
                                       uint32_t layerIndex,
                                       IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index: %3% %4%") %
                       parsedLayer->outputSlots()->size() %
                       layer->GetNumOutputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumOutputSlots(); ++slotIndex)
    {
        armnn::IOutputSlot* slot = &(layer->GetOutputSlot(slotIndex));
        RegisterOutputSlotOfConnection(layerIndex, slot);
    }
}

void Deserializer::RegisterInputSlots(GraphPtr graph,
                                      uint32_t layerIndex,
                                      armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(graph, layerIndex);
    if (parsedLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index:%3% %4%") %
                       parsedLayer->inputSlots()->size() %
                       layer->GetNumInputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumInputSlots(); ++slotIndex)
    {
        armnn::IInputSlot* slot = &(layer->GetInputSlot(slotIndex));
        uint32_t sourceLayerIndex = parsedLayer->inputSlots()->Get(slotIndex)->connection()->sourceLayerIndex();
        RegisterInputSlotOfConnection(sourceLayerIndex, slot);
    }
}

void Deserializer::RegisterInputSlotOfConnection(uint32_t connectionIndex,
                                                 armnn::IInputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);

    Slots& slots = m_GraphConnections[0][connectionIndex];
    slots.inputSlots.push_back(slot);
}

void Deserializer::RegisterOutputSlotOfConnection(uint32_t connectionIndex,
                                                  armnn::IOutputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);

    Slots& slots = m_GraphConnections[0][connectionIndex];

    // assuming there is only one producer for that tensor
    if (slots.outputSlot != nullptr)
    {
        throw ParseException(boost::str(
            boost::format("Another layer has already registered itself as the producer of "
                          "connection:%1% / %2%") %
            connectionIndex %
            CHECK_LOCATION().AsString()));
    }

    slots.outputSlot = slot;
}

void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = boost::str(boost::format("Activation:%1%") % layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::ActivationDescriptor descriptor;
    descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
    descriptor.m_A = serializerDescriptor->a();
    descriptor.m_B = serializerDescriptor->b();

    IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
                                                             layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    m_layerName = boost::str(boost::format("Addition:%1%") % layerIndex);
    IConnectableLayer* layer = m_Network->AddAdditionLayer(m_layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = boost::str(boost::format("Convolution2d:%1%") % layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::Convolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
    }
    IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
                                                                weights,
                                                                biases,
                                                                layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = boost::str(boost::format("DepthwiseConvolution2d:%1%") % layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::DepthwiseConvolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
    }
    IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                                         weights,
                                                                         biases,
                                                                         layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    m_layerName = boost::str(boost::format("Multiplication:%1%") % layerIndex);
    IConnectableLayer* layer = m_Network->AddMultiplicationLayer(m_layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = boost::str(boost::format("FullyConnected:%1%") % layerIndex);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
    fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
    fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();

    armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());

    armnn::IConnectableLayer* layer;
    if (flatBufferDescriptor->biasEnabled())
    {
        armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  weightsTensor,
                                                  biasTensorData,
                                                  layerName.c_str());
    }
    else
    {
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  weightsTensor,
                                                  layerName.c_str());
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto dimsMapping =
        graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    m_layerName = boost::str(boost::format("Permute:%1%") % layerIndex);
    const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));

    IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, m_layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

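// maps the serialized pooling descriptor (pool type, rounding, padding method, data layout and sizes)
// onto an armnn::Pooling2dDescriptor; also sets m_layerName for the layer being parsed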
armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
                                                              unsigned int layerIndex)
{
    armnn::Pooling2dDescriptor desc;

    switch (pooling2dDesc->poolType())
    {
        case PoolingAlgorithm_Average:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Average;
            m_layerName = boost::str(boost::format("AveragePool2D:%1%") % layerIndex);
            break;
        }
        case PoolingAlgorithm_Max:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Max;
            m_layerName = boost::str(boost::format("MaxPool2D:%1%") % layerIndex);
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
        }
    }

    switch (pooling2dDesc->outputShapeRounding())
    {
        case OutputShapeRounding_Floor:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
            break;
        }
        case OutputShapeRounding_Ceiling:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
        }
    }

    switch (pooling2dDesc->paddingMethod())
    {
        case PaddingMethod_Exclude:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
            break;
        }
        case PaddingMethod_IgnoreValue:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported padding method");
        }
    }

    switch (pooling2dDesc->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    desc.m_PadRight = pooling2dDesc->padRight();
    desc.m_PadLeft = pooling2dDesc->padLeft();
    desc.m_PadBottom = pooling2dDesc->padBottom();
    desc.m_PadTop = pooling2dDesc->padTop();
    desc.m_StrideX = pooling2dDesc->strideX();
    desc.m_StrideY = pooling2dDesc->strideY();
    desc.m_PoolWidth = pooling2dDesc->poolWidth();
    desc.m_PoolHeight = pooling2dDesc->poolHeight();

    return desc;
}

void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);

    IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, m_layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

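// computes the reshape output TensorInfo; at most one target dimension may be -1,
// in which case it is stretched so the total element count matches the input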
armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                     const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(boost::str(
                boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
        }

        auto targetNumElements =
            boost::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}

void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = boost::str(boost::format("Reshape:%1%") % layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::SoftmaxDescriptor descriptor;
    descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();

    const std::string layerName = boost::str(boost::format("Softmax:%1%") % layerIndex);
    IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

} // namespace armnnDeserializer