blob: 0259f89db4a49d5c353e88ccec4b8973d68bc7bb [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5
6#include "DeserializeParser.hpp"
7
8#include <armnn/ArmNN.hpp>
9#include <armnn/Exceptions.hpp>
10
11#include <ParserHelper.hpp>
12#include <Permute.hpp>
13#include <VerificationHelpers.hpp>
14
15#include <boost/filesystem.hpp>
16#include <boost/format.hpp>
17#include <boost/core/ignore_unused.hpp>
18#include <boost/assert.hpp>
19#include <boost/format.hpp>
20#include <boost/log/trivial.hpp>
21
22// The generated code based on the Serialize schema:
23#include <Schema_generated.h>
24
25#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000026#include <algorithm>
27#include <limits>
28#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000029
30using armnn::ParseException;
31using namespace armnn;
32using namespace armnn::armnnSerializer;
33
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000034namespace armnnDeserializeParser
35{
Kevin May43a799c2019-02-08 16:31:42 +000036
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000037namespace
38{
Kevin May43a799c2019-02-08 16:31:42 +000039
// Sentinel layer index: marks a binding that does not correspond to a real
// position in the serialized layer list (checked in CheckLayers).
const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
41
42 void CheckGraph(const DeserializeParser::GraphPtr& graph,
43 unsigned int layersIndex,
44 const CheckLocation& location)
45{
46 if (graph->layers() == nullptr)
47 {
48 throw ParseException(
49 boost::str(
50 boost::format("%1% was called with invalid (null) graph. "
51 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
52 "layers:%2% at %3%") %
53 location.m_Function %
54 layersIndex %
55 location.FileLine()));
56 }
57 else if (layersIndex >= graph->layers()->size())
58 {
59 throw ParseException(
60 boost::str(
61 boost::format("%1% was called with an invalid layers index. "
62 "layers:%2% at %3%") %
63 location.m_Function %
64 layersIndex %
65 location.FileLine()));
66 }
67}
68
69void CheckLayers(const DeserializeParser::GraphPtr& graph,
70 unsigned int layersIndex,
71 unsigned int layerIndex,
72 const CheckLocation& location)
73{
74 if (graph->layers() == nullptr)
75 {
76 throw ParseException(
77 boost::str(
78 boost::format("%1% was called with invalid (null) graph. "
79 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000080 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000081 location.m_Function %
82 layersIndex %
83 location.FileLine()));
84 }
85 else if (layersIndex >= graph->layers()->size())
86 {
87 throw ParseException(
88 boost::str(
89 boost::format("%1% was called with an invalid layers index. "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000090 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000091 location.m_Function %
92 layersIndex %
93 location.FileLine()));
94 }
95 else if (layerIndex >= graph->layers()[layersIndex].size()
96 && layerIndex != VIRTUAL_LAYER_ID)
97 {
98 throw ParseException(
99 boost::str(
100 boost::format("%1% was called with an invalid layer index. "
101 "layers:%2% layer:%3% at %4%") %
102 location.m_Function %
103 layersIndex %
104 layerIndex %
105 location.FileLine()));
106 }
107}
108
109void CheckTensorPtr(DeserializeParser::TensorRawPtr rawPtr,
110 const CheckLocation& location)
111{
112 if (rawPtr == nullptr)
113 {
114 throw ParseException(
115 boost::str(
116 boost::format("%1% was called with a null tensor pointer. "
117 "at %2%") %
118 location.m_Function %
119 location.FileLine()));
120
121 }
122}
123
Mike Kellya0766c32019-02-19 17:22:07 +0000124void CheckConstTensorPtr(DeserializeParser::ConstTensorRawPtr rawPtr,
125 const CheckLocation& location)
126{
127 if (rawPtr == nullptr)
128 {
129 throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
130 location.m_Function %
131 location.FileLine()));
132 }
133}
134
// Convenience wrappers over the Check* helpers above; CHECK_LOCATION() captures
// the call site (function name / file / line) for the exception message.
#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
} // anonymous namespace
147
Saoirse Stewart263829c2019-02-19 15:54:14 +0000148bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
149{
150 const unsigned int actualSize = actual.GetNumDimensions();
151 if (actualSize != expected.size())
152 {
153 return false;
154 }
155
156 for (unsigned int i = 0u; i < actualSize; i++)
157 {
158 if (actual[i] != static_cast<unsigned int>(expected[i]))
159 {
160 return false;
161 }
162 }
163
164 return true;
165}
166
// Constructor: builds the layer-type dispatch table. Every entry defaults to
// ParseUnsupportedLayer (which throws); supported layer types are then mapped
// to their dedicated parse functions.
DeserializeParser::DeserializeParser()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &DeserializeParser::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AdditionLayer]       = &DeserializeParser::ParseAdd;
    m_ParserFunctions[Layer_Convolution2dLayer]  = &DeserializeParser::ParseConvolution2d;
    m_ParserFunctions[Layer_MultiplicationLayer] = &DeserializeParser::ParseMultiplication;
    m_ParserFunctions[Layer_Pooling2dLayer]      = &DeserializeParser::ParsePooling2d;
    m_ParserFunctions[Layer_ReshapeLayer]        = &DeserializeParser::ParseReshape;
    m_ParserFunctions[Layer_SoftmaxLayer]        = &DeserializeParser::ParseSoftmax;
}
180
// Returns the common LayerBase record of the layer at layerIndex by unwrapping
// the flatbuffers union member matching the layer's type.
// Input/Output layers wrap a bindable base, hence the extra ->base() hop.
// Throws ParseException for Layer_NONE or any layer type not handled here.
DeserializeParser::LayerBaseRawPtr DeserializeParser::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                  boost::format("Layer must have a type %1%") %
                  Layer::Layer_NONE));
    }
}
210
211int32_t DeserializeParser::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
212{
213 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
214
215 if (layerType == Layer::Layer_InputLayer)
216 {
217 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
218 }
219 else if ( layerType == Layer::Layer_OutputLayer )
220 {
221 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
222 }
223 return 0;
224}
225
Mike Kellya0766c32019-02-19 17:22:07 +0000226armnn::DataLayout ToDataLayout(armnn::armnnSerializer::DataLayout dataLayout)
227{
228 switch (dataLayout)
229 {
230 case armnn::armnnSerializer::DataLayout::DataLayout_NHWC:
231 return armnn::DataLayout::NHWC;
232 case armnn::armnnSerializer::DataLayout::DataLayout_NCHW:
233 default:
234 return armnn::DataLayout::NCHW;
235 }
236}
237
// Converts a serialized TensorInfo into an armnn::TensorInfo: maps the schema
// data type to its armnn counterpart and copies dimensions and quantization
// parameters. Throws ParseException on a null pointer or unknown data type.
armnn::TensorInfo ToTensorInfo(DeserializeParser::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
            type = armnn::DataType::QuantisedAsymm8;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    // Quantization parameters are copied unconditionally; they are ignored by
    // armnn for non-quantized types.
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}
286
// Builds an armnn::ConstTensor that aliases the raw data stored in the
// flatbuffer (no copy is made — the returned tensor is only valid while the
// serialized buffer is alive). Throws ParseException on a null pointer or an
// unknown ConstTensorData union member.
armnn::ConstTensor ToConstTensor(DeserializeParser::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
            return armnn::ConstTensor(tensorInfo, constTensorPtr->data_as_ByteData()->data()->data());
        case ConstTensorData_ShortData:
            return armnn::ConstTensor(tensorInfo, constTensorPtr->data_as_ShortData()->data()->data());
        case ConstTensorData_IntData:
            return armnn::ConstTensor(tensorInfo, constTensorPtr->data_as_IntData()->data()->data());
        case ConstTensorData_LongData:
            return armnn::ConstTensor(tensorInfo, constTensorPtr->data_as_LongData()->data()->data());
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}
313
Kevin May43a799c2019-02-08 16:31:42 +0000314DeserializeParser::LayerBaseRawPtrVector DeserializeParser::GetGraphInputs(const GraphPtr& graphPtr)
315{
316
317 CHECK_GRAPH(graphPtr, 0);
318 const auto& numInputs = graphPtr->inputIds()->size();
319
320 LayerBaseRawPtrVector result(numInputs);
321
322 for (unsigned int i=0; i<numInputs; ++i)
323 {
Mike Kelly8c1701a2019-02-11 17:01:27 +0000324 uint32_t inputId = graphPtr->inputIds()->Get(i);
Kevin May43a799c2019-02-08 16:31:42 +0000325 result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(inputId));
326 }
327 return result;
328}
329
330DeserializeParser::LayerBaseRawPtrVector DeserializeParser::GetGraphOutputs(const GraphPtr& graphPtr)
331{
332 CHECK_GRAPH(graphPtr, 0);
333 const auto& numOutputs = graphPtr->outputIds()->size();
Kevin May43a799c2019-02-08 16:31:42 +0000334 LayerBaseRawPtrVector result(numOutputs);
335
336 for (unsigned int i=0; i<numOutputs; ++i)
337 {
Mike Kelly8c1701a2019-02-11 17:01:27 +0000338 uint32_t outputId = graphPtr->outputIds()->Get(i);
Saoirse Stewart263829c2019-02-19 15:54:14 +0000339
Kevin May43a799c2019-02-08 16:31:42 +0000340 result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(outputId));
341 }
342 return result;
343}
344
// For each input slot of the layer at layerIndex, returns the TensorInfo of
// the tensor feeding it, looked up on the producing (source) layer.
// NOTE(review): the info is always taken from the source layer's output slot 0
// (outputSlots()->Get(0)), so a multi-output producer would not be resolved
// correctly — confirm against the serializer's connection model.
DeserializeParser::TensorRawPtrVector DeserializeParser::GetInputs(const GraphPtr& graphPtr,
                                                                   unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        // sourceLayerIndex is unsigned in the schema; the cast + check rejects
        // values that would be negative as int32.
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
            (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}
362
363DeserializeParser::TensorRawPtrVector DeserializeParser::GetOutputs(const GraphPtr& graphPtr,
364 unsigned int layerIndex)
365{
366 CHECK_LAYERS(graphPtr, 0, layerIndex);
367 auto layer = GetBaseLayer(graphPtr, layerIndex);
368 const auto& numOutputs = layer->outputSlots()->size();
369
370 TensorRawPtrVector result(numOutputs);
371
372 for (unsigned int i=0; i<numOutputs; ++i)
373 {
374 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
375 }
376 return result;
377}
378
379void DeserializeParser::ParseUnsupportedLayer(unsigned int layerIndex)
380{
381 CHECK_LAYERS(m_Graph, 0, layerIndex);
382 const auto layerName = GetBaseLayer(m_Graph, layerIndex)->layerName()->c_str();
383 throw ParseException(
384 boost::str(
385 boost::format("Layer not supported. "
386 "layerIndex: %1% "
387 "layerName: %2% / %3%") %
388 layerIndex %
389 layerName %
390 CHECK_LOCATION().AsString()));
391}
392
393void DeserializeParser::ResetParser()
394{
395 m_Network = armnn::INetworkPtr(nullptr, nullptr);
396 m_Graph = nullptr;
397}
398
// Factory returning a raw owning pointer; callers must release it via
// IDeserializeParser::Destroy (or use Create() for managed ownership).
IDeserializeParser* IDeserializeParser::CreateRaw()
{
    return new DeserializeParser();
}
403
// Factory returning a smart pointer whose deleter routes through Destroy,
// keeping allocation and deallocation inside this library.
IDeserializeParserPtr IDeserializeParser::Create()
{
    return IDeserializeParserPtr(CreateRaw(), &IDeserializeParser::Destroy);
}
408
// Counterpart to CreateRaw(); deletes a parser created by this library.
void IDeserializeParser::Destroy(IDeserializeParser* parser)
{
    delete parser;
}
413
// Deserializes a network from an in-memory flatbuffer. The graph pointer
// aliases binaryContent, which the caller must keep alive while the parser's
// graph accessors are in use.
INetworkPtr DeserializeParser::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    m_Graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph();
}
420
// Stream variant of CreateNetworkFromBinary. See the NOTE on the stream
// overload of LoadGraphFromBinary regarding buffer lifetime.
armnn::INetworkPtr DeserializeParser::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    m_Graph = LoadGraphFromBinary(binaryContent);
    return CreateNetworkFromGraph();
}
427
428DeserializeParser::GraphPtr DeserializeParser::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
429{
430 if (binaryContent == nullptr)
431 {
432 throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
433 CHECK_LOCATION().AsString()));
434 }
435 flatbuffers::Verifier verifier(binaryContent, len);
436 if (verifier.VerifyBuffer<SerializedGraph>() == false)
437 {
438 throw ParseException(
439 boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
440 "flatbuffers format. size:%1% %2%") %
441 len %
442 CHECK_LOCATION().AsString()));
443 }
444 return GetSerializedGraph(binaryContent);
445}
446
// Reads the whole stream into a local string and returns a flatbuffers view
// over that buffer.
// NOTE(review): 'content' is destroyed when this function returns, so the
// returned GraphPtr points into freed memory — the buffer needs to outlive the
// graph (e.g. be stored in a member) for this to be safe. Also note this path
// skips the flatbuffers verification done by the pointer/length overload.
DeserializeParser::GraphPtr DeserializeParser::LoadGraphFromBinary(std::istream& binaryContent)
{
    std::string content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    return GetSerializedGraph(content.data());
}
452
// Builds the armnn network from the loaded graph in three phases:
//   1. dispatch every non-input/output layer to its registered parse function,
//   2. create the armnn input/output layers,
//   3. connect each registered producer output slot to all consumer input
//      slots recorded against the same connection (source-layer) index.
// Returns ownership of the built INetwork.
INetworkPtr DeserializeParser::CreateNetworkFromGraph()
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(m_Graph != nullptr);
    unsigned int layerIndex = 0;
    // One Slots entry per layer; Parse*/Setup* calls below fill these in via
    // RegisterInputSlots/RegisterOutputSlots.
    m_GraphConnections.emplace_back(m_Graph->layers()->size());
    for (AnyLayer const* layer : *m_Graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers();
    SetupOutputLayers();

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (size_t connectionIndex = 0; connectionIndex < m_GraphConnections[0].size(); ++connectionIndex)
    {
        if (m_GraphConnections[0][connectionIndex].outputSlot != nullptr)
        {
            for (size_t inputSlotIdx = 0;
                inputSlotIdx < m_GraphConnections[0][connectionIndex].inputSlots.size();
                ++inputSlotIdx)
            {
                m_GraphConnections[0][connectionIndex].outputSlot->Connect(
                    *(m_GraphConnections[0][connectionIndex].inputSlots[inputSlotIdx]));
            }
        }
    }

    // Transfer network ownership to the caller; the parser keeps only the
    // (now null) member.
    return std::move(m_Network);
}
491
// Finds the graph input whose layer name equals 'name' and returns its binding
// id together with the TensorInfo of the selected output slot.
// NOTE(review): 'layerIndex' is used as the output-slot index on the matched
// input layer (outputSlots()->Get(layerIndex)); an input layer normally has a
// single output slot, so slot 0 looks intended — confirm.
BindingPointInfo DeserializeParser::GetNetworkInputBindingInfo(unsigned int layerIndex,
                                                               const std::string& name) const
{
    CHECK_LAYERS(m_Graph, 0, layerIndex);
    auto inputs = GetGraphInputs(m_Graph);

    for (auto const& input : inputs)
    {
        if (input->layerName()->c_str() == name)
        {
            int bindingId = reinterpret_cast<armnn::LayerBindingId>(GetBindingLayerInfo(m_Graph, input->index()));
            auto layerBase = GetBaseLayer(m_Graph,input->index())->outputSlots()->Get(layerIndex);
            return std::make_pair(bindingId, ToTensorInfo(layerBase->tensorInfo()));
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No input binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}
513
// Finds the graph output whose layer name equals 'name' and returns its
// binding id together with the TensorInfo of the tensor feeding it. The info
// is read from the producing layer's output slot 0, since output layers carry
// no tensor info of their own.
BindingPointInfo DeserializeParser::GetNetworkOutputBindingInfo(unsigned int layerIndex,
                                                                const std::string& name) const
{
    CHECK_LAYERS(m_Graph, 0, layerIndex);
    auto outputs = GetGraphOutputs(m_Graph);

    for (auto const& output : outputs)
    {
        if (output->layerName()->c_str() == name)
        {
            int bindingId = reinterpret_cast<armnn::LayerBindingId>(GetBindingLayerInfo(m_Graph, output->index()));
            auto layer = GetBaseLayer(m_Graph, output->index());
            auto sourceLayerIndex = layer->inputSlots()->Get(0)->connection()->sourceLayerIndex();
            auto sourceLayer = GetBaseLayer(m_Graph, sourceLayerIndex);
            return std::make_pair(bindingId, ToTensorInfo(sourceLayer->outputSlots()->Get(0)->tensorInfo()));
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No output binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}
537
// Creates an armnn input layer (with its serialized binding id and name) for
// every graph input, sets the output tensor info, and registers the output
// slot for the connection pass in CreateNetworkFromGraph.
void DeserializeParser::SetupInputLayers()
{
    CHECK_GRAPH(m_Graph, 0);
    auto inputs = GetGraphInputs(m_Graph);
    for (auto const& input : inputs)
    {
        IConnectableLayer* layer =
            m_Network->AddInputLayer(GetBindingLayerInfo(m_Graph, input->index()), input->layerName()->c_str());

        auto tensorInfo = ToTensorInfo(input->outputSlots()->Get(0)->tensorInfo());
        layer->GetOutputSlot(0).SetTensorInfo(tensorInfo);

        RegisterOutputSlots(input->index(), layer);
    }
}
553
// Creates an armnn output layer (with its serialized binding id and name) for
// every graph output and registers its input slot for the connection pass.
// No tensor info is set here: output layers take it from their producer.
void DeserializeParser::SetupOutputLayers()
{
    CHECK_GRAPH(m_Graph, 0);
    auto outputs = GetGraphOutputs(m_Graph);
    for (auto const& output : outputs)
    {
        IConnectableLayer* layer =
            m_Network->AddOutputLayer(GetBindingLayerInfo(m_Graph, output->index()), output->layerName()->c_str());

        RegisterInputSlots(output->index(), layer);
    }
}
566
// Records each output slot of 'layer' in the connection registry under
// 'layerIndex' (the producing layer's index). Throws ParseException if the
// armnn layer's slot count disagrees with the serialized layer's.
void DeserializeParser::RegisterOutputSlots(uint32_t layerIndex,
                                            IConnectableLayer* layer)
{
    CHECK_LAYERS(m_Graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(m_Graph, layerIndex);
    if (parsedLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index: %3% %4%") %
                       parsedLayer->outputSlots()->size() %
                       layer->GetNumOutputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumOutputSlots(); ++slotIndex)
    {
        armnn::IOutputSlot* slot = &(layer->GetOutputSlot(slotIndex));
        RegisterOutputSlotOfConnection(layerIndex, slot);
    }
}
590
// Records each input slot of 'layer' in the connection registry under the
// index of the layer producing its input (sourceLayerIndex from the schema).
// Throws ParseException if the armnn layer's slot count disagrees with the
// serialized layer's.
void DeserializeParser::RegisterInputSlots(uint32_t layerIndex,
                                           armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(m_Graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(m_Graph, layerIndex);
    if (parsedLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index:%3% %4%") %
                       parsedLayer->inputSlots()->size() %
                       layer->GetNumInputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumInputSlots(); ++slotIndex)
    {
        armnn::IInputSlot* slot = &(layer->GetInputSlot(slotIndex));
        uint32_t sourceLayerIndex = parsedLayer->inputSlots()->Get(slotIndex)->connection()->sourceLayerIndex();
        RegisterInputSlotOfConnection(sourceLayerIndex, slot);
    }
}
615
616void DeserializeParser::RegisterInputSlotOfConnection(uint32_t connectionIndex,
617 armnn::IInputSlot* slot)
618{
619 BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);
620
621 Slots& slots = m_GraphConnections[0][connectionIndex];
622 slots.inputSlots.push_back(slot);
623}
624
625void DeserializeParser::RegisterOutputSlotOfConnection(uint32_t connectionIndex,
626 armnn::IOutputSlot* slot)
627{
628 BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);
629
630 Slots& slots = m_GraphConnections[0][connectionIndex];
631
632 // assuming there is only one producer for that tensor
633 if (slots.outputSlot != nullptr)
634 {
635 throw ParseException(boost::str(
636 boost::format("Another layer has already registered itself as the producer of "
637 "connection:%1% / %2%") %
638 connectionIndex %
639 CHECK_LOCATION().AsString()));
640 }
641
642 slots.outputSlot = slot;
643}
644
// Deserializes an AdditionLayer: validates 2 inputs / 1 output, creates the
// armnn addition layer, sets its output tensor info, and registers its slots
// for the connection pass.
void DeserializeParser::ParseAdd(unsigned int layerIndex)
{
    CHECK_LAYERS(m_Graph, 0, layerIndex);
    auto inputs = GetInputs(m_Graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(m_Graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    // m_layerName is a member shared with other Parse* functions.
    m_layerName = boost::str(boost::format("Addition:%1%") % layerIndex);
    IConnectableLayer* layer = m_Network->AddAdditionLayer(m_layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(layerIndex, layer);
    RegisterOutputSlots(layerIndex, layer);
}
664
Mike Kellya0766c32019-02-19 17:22:07 +0000665void DeserializeParser::ParseConvolution2d(unsigned int layerIndex)
666{
667 CHECK_LAYERS(m_Graph, 0, layerIndex);
668 auto inputs = GetInputs(m_Graph, layerIndex);
669 CHECK_LOCATION();
670 CHECK_VALID_SIZE(inputs.size(), 1);
671
672 auto outputs = GetOutputs(m_Graph, layerIndex);
673 CHECK_VALID_SIZE(outputs.size(), 1);
674
675 auto layerName = boost::str(boost::format("Convolution2d:%1%") % layerIndex);
676
677 auto serializerLayer = m_Graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
678 auto serializerDescriptor = serializerLayer->descriptor();
679
680 armnn::Convolution2dDescriptor descriptor;
681 descriptor.m_PadLeft = serializerDescriptor->padLeft();
682 descriptor.m_PadRight = serializerDescriptor->padRight();
683 descriptor.m_PadTop = serializerDescriptor->padTop();
684 descriptor.m_PadBottom = serializerDescriptor->padBottom();
685 descriptor.m_StrideX = serializerDescriptor->strideX();
686 descriptor.m_StrideY = serializerDescriptor->strideY();;
687 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
688 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
689
690 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
691 armnn::ConstTensor biases;
692
693 if (descriptor.m_BiasEnabled)
694 {
695 biases = ToConstTensor(serializerLayer->biases());
696 }
697 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
698 weights,
699 biases,
700 layerName.c_str());
701 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
702 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
703
704 RegisterInputSlots(layerIndex, layer);
705 RegisterOutputSlots(layerIndex, layer);
706}
707
// Deserializes a MultiplicationLayer: validates 2 inputs / 1 output, creates
// the armnn multiplication layer, sets its output tensor info, and registers
// its slots for the connection pass.
void DeserializeParser::ParseMultiplication(unsigned int layerIndex)
{
    CHECK_LAYERS(m_Graph, 0, layerIndex);
    auto inputs = GetInputs(m_Graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(m_Graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    // m_layerName is a member shared with other Parse* functions.
    m_layerName = boost::str(boost::format("Multiplication:%1%") % layerIndex);
    IConnectableLayer* layer = m_Network->AddMultiplicationLayer(m_layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(layerIndex, layer);
    RegisterOutputSlots(layerIndex, layer);
}
727
// Translates a serialized Pooling2dDescriptor into an armnn one, mapping each
// enum field and copying the numeric parameters.
// Side effect: sets m_layerName ("AveragePool2D:<n>" / "MaxPool2D:<n>") which
// ParsePooling2d then uses when creating the layer.
// Unknown enum values hit BOOST_ASSERT_MSG and otherwise fall through with the
// descriptor field left at its default.
armnn::Pooling2dDescriptor DeserializeParser::GetPoolingDescriptor(DeserializeParser::PoolingDescriptor pooling2dDesc,
                                                                   unsigned int layerIndex)
{
    armnn::Pooling2dDescriptor desc;

    // Pool type also determines the generated layer name.
    switch (pooling2dDesc->poolType())
    {
        case PoolingAlgorithm_Average:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Average;
            m_layerName = boost::str(boost::format("AveragePool2D:%1%") % layerIndex);
            break;
        }
        case PoolingAlgorithm_Max:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Max;
            m_layerName = boost::str(boost::format("MaxPool2D:%1%") % layerIndex);
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
        }
    }

    switch (pooling2dDesc->outputShapeRounding())
    {
        case OutputShapeRounding_Floor:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
            break;
        }
        case OutputShapeRounding_Ceiling:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
        }
    }

    switch (pooling2dDesc->paddingMethod())
    {
        case PaddingMethod_Exclude:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
            break;
        }
        case PaddingMethod_IgnoreValue:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported padding method");
        }
    }

    switch (pooling2dDesc->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    // Numeric parameters are copied verbatim from the schema.
    desc.m_PadRight   = pooling2dDesc->padRight();
    desc.m_PadLeft    = pooling2dDesc->padLeft();
    desc.m_PadBottom  = pooling2dDesc->padBottom();
    desc.m_PadTop     = pooling2dDesc->padTop();
    desc.m_StrideX    = pooling2dDesc->strideX();
    desc.m_StrideY    = pooling2dDesc->strideY();
    desc.m_PoolWidth  = pooling2dDesc->poolWidth();
    desc.m_PoolHeight = pooling2dDesc->poolHeight();

    return desc;
}
818
// Deserializes a Pooling2dLayer: validates 1 input / 1 output, converts the
// descriptor (which also sets m_layerName), creates the armnn pooling layer,
// and registers its slots for the connection pass.
void DeserializeParser::ParsePooling2d(unsigned int layerIndex)
{
    CHECK_LAYERS(m_Graph, 0, layerIndex);

    auto pooling2dDes = m_Graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();

    auto inputs = GetInputs(m_Graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(m_Graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    // Also sets m_layerName as a side effect.
    auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);

    IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, m_layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(layerIndex, layer);
    RegisterOutputSlots(layerIndex, layer);
}
840
// Computes the concrete output TensorInfo for a reshape. targetDimsIn may
// contain a single -1 ("stretch") dimension whose extent is inferred from the
// input's element count; data type and quantization are copied from the input.
armnn::TensorInfo DeserializeParser::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                          const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        // Only one dimension may be left for the parser to infer.
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(boost::str(
                boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
        }

        // Starting the product at -1 cancels the single -1 stretch entry, so
        // this yields the product of all explicitly given dimensions.
        auto targetNumElements =
            boost::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        // NOTE(review): an input element count that is not an exact multiple
        // of targetNumElements truncates silently here — confirm callers
        // validate the resulting shape (ParseReshape compares via CheckShape).
        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    // Only the shape changes; type/quantization come from the input.
    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}
870
// Deserializes a ReshapeLayer: resolves the target shape (including an
// inferred -1 dimension) from the descriptor, optionally validates it against
// the serialized output shape, then creates the armnn reshape layer.
void DeserializeParser::ParseReshape(unsigned int layerIndex)
{
    CHECK_LAYERS(m_Graph, 0, layerIndex);
    auto inputs = GetInputs(m_Graph, layerIndex);

    auto outputs = GetOutputs(m_Graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = m_Graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    armnn::TensorInfo reshapeOutputTensorInfo = DeserializeParser::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    // NOTE(review): the shape consistency check only runs when the layer has
    // more than one input; a serialized reshape appears to have a single input
    // here, which would make this validation dead — confirm intent.
    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = boost::str(boost::format("Reshape:%1%") % layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(layerIndex, layer);
    RegisterOutputSlots(layerIndex, layer);
}
913
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +0000914void DeserializeParser::ParseSoftmax(unsigned int layerIndex)
915{
916 CHECK_LAYERS(m_Graph, 0, layerIndex);
917
918 DeserializeParser::TensorRawPtrVector inputs = GetInputs(m_Graph, layerIndex);
919 CHECK_VALID_SIZE(inputs.size(), 1);
920
921 DeserializeParser::TensorRawPtrVector outputs = GetOutputs(m_Graph, layerIndex);
922 CHECK_VALID_SIZE(outputs.size(), 1);
923
924 armnn::SoftmaxDescriptor descriptor;
925 descriptor.m_Beta = m_Graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
926
927 const std::string layerName = boost::str(boost::format("Softmax:%1%") % layerIndex);
928 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
929
930 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
931 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
932
933 RegisterInputSlots(layerIndex, layer);
934 RegisterOutputSlots(layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +0000935}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +0000936
937} // namespace armnnDeserializeParser