blob: 2462061190314400f9c08d03163dedf6c53e484f [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
8#include <armnn/ArmNN.hpp>
9#include <armnn/Exceptions.hpp>
10
11#include <ParserHelper.hpp>
12#include <Permute.hpp>
13#include <VerificationHelpers.hpp>
14
15#include <boost/filesystem.hpp>
16#include <boost/format.hpp>
17#include <boost/core/ignore_unused.hpp>
18#include <boost/assert.hpp>
19#include <boost/format.hpp>
20#include <boost/log/trivial.hpp>
21
22// The generated code based on the Serialize schema:
23#include <Schema_generated.h>
24
25#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000026#include <algorithm>
27#include <limits>
28#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000029
30using armnn::ParseException;
31using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000032using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000033
Derek Lamberti0028d1b2019-02-20 13:57:42 +000034namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000035{
Kevin May43a799c2019-02-08 16:31:42 +000036
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000037namespace
38{
Kevin May43a799c2019-02-08 16:31:42 +000039
// Sentinel layer index meaning "no real layer in the serialized graph";
// CheckLayers accepts it as valid so virtual/binding layers can pass checks.
const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
41
Derek Lamberti0028d1b2019-02-20 13:57:42 +000042 void CheckGraph(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000043 unsigned int layersIndex,
44 const CheckLocation& location)
45{
46 if (graph->layers() == nullptr)
47 {
48 throw ParseException(
49 boost::str(
50 boost::format("%1% was called with invalid (null) graph. "
51 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
52 "layers:%2% at %3%") %
53 location.m_Function %
54 layersIndex %
55 location.FileLine()));
56 }
57 else if (layersIndex >= graph->layers()->size())
58 {
59 throw ParseException(
60 boost::str(
61 boost::format("%1% was called with an invalid layers index. "
62 "layers:%2% at %3%") %
63 location.m_Function %
64 layersIndex %
65 location.FileLine()));
66 }
67}
68
Derek Lamberti0028d1b2019-02-20 13:57:42 +000069void CheckLayers(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000070 unsigned int layersIndex,
71 unsigned int layerIndex,
72 const CheckLocation& location)
73{
74 if (graph->layers() == nullptr)
75 {
76 throw ParseException(
77 boost::str(
78 boost::format("%1% was called with invalid (null) graph. "
79 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000080 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000081 location.m_Function %
82 layersIndex %
83 location.FileLine()));
84 }
85 else if (layersIndex >= graph->layers()->size())
86 {
87 throw ParseException(
88 boost::str(
89 boost::format("%1% was called with an invalid layers index. "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000090 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000091 location.m_Function %
92 layersIndex %
93 location.FileLine()));
94 }
95 else if (layerIndex >= graph->layers()[layersIndex].size()
96 && layerIndex != VIRTUAL_LAYER_ID)
97 {
98 throw ParseException(
99 boost::str(
100 boost::format("%1% was called with an invalid layer index. "
101 "layers:%2% layer:%3% at %4%") %
102 location.m_Function %
103 layersIndex %
104 layerIndex %
105 location.FileLine()));
106 }
107}
108
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000109void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000110 const CheckLocation& location)
111{
112 if (rawPtr == nullptr)
113 {
114 throw ParseException(
115 boost::str(
116 boost::format("%1% was called with a null tensor pointer. "
117 "at %2%") %
118 location.m_Function %
119 location.FileLine()));
120
121 }
122}
123
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000124void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000125 const CheckLocation& location)
126{
127 if (rawPtr == nullptr)
128 {
129 throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
130 location.m_Function %
131 location.FileLine()));
132 }
133}
134
// Convenience wrappers: each forwards to the corresponding Check* helper,
// capturing the call site via CHECK_LOCATION() so the ParseException message
// reports where the check was made.
#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
146}
147
Saoirse Stewart263829c2019-02-19 15:54:14 +0000148bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
149{
150 const unsigned int actualSize = actual.GetNumDimensions();
151 if (actualSize != expected.size())
152 {
153 return false;
154 }
155
156 for (unsigned int i = 0u; i < actualSize; i++)
157 {
158 if (actual[i] != static_cast<unsigned int>(expected[i]))
159 {
160 return false;
161 }
162 }
163
164 return true;
165}
166
// Constructs a Deserializer with an empty (null) network and a parser table
// with one slot per serialized Layer enum value. Every slot defaults to
// ParseUnsupportedLayer so unregistered layer types fail with a clear error;
// supported layer types are then registered explicitly below.
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_ActivationLayer]             = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer]               = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_Convolution2dLayer]          = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_MultiplicationLayer]         = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_Pooling2dLayer]              = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_ReshapeLayer]                = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_SoftmaxLayer]                = &Deserializer::ParseSoftmax;
}
182
// Returns the common LayerBase of the layer at layerIndex, dispatching on the
// flatbuffers union tag. Note Input/Output layers wrap their base in a
// bindable base, hence the extra ->base() hop for those two cases.
// Throws ParseException for Layer_NONE or any unhandled layer type.
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                  boost::format("Layer must have a type %1%") %
                  Layer::Layer_NONE));
    }
}
216
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000217int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000218{
219 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
220
221 if (layerType == Layer::Layer_InputLayer)
222 {
223 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
224 }
225 else if ( layerType == Layer::Layer_OutputLayer )
226 {
227 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
228 }
229 return 0;
230}
231
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000232armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000233{
234 switch (dataLayout)
235 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000236 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000237 return armnn::DataLayout::NHWC;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000238 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000239 default:
240 return armnn::DataLayout::NCHW;
241 }
242}
243
Mike Kellyaf484012019-02-20 16:53:11 +0000244armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
245{
246 switch (function)
247 {
248 case armnnSerializer::ActivationFunction_Sigmoid:
249 return armnn::ActivationFunction::Sigmoid;
250 case armnnSerializer::ActivationFunction_TanH:
251 return armnn::ActivationFunction::TanH;
252 case armnnSerializer::ActivationFunction_Linear:
253 return armnn::ActivationFunction::Linear;
254 case armnnSerializer::ActivationFunction_ReLu:
255 return armnn::ActivationFunction::ReLu;
256 case armnnSerializer::ActivationFunction_BoundedReLu:
257 return armnn::ActivationFunction::BoundedReLu;
258 case armnnSerializer::ActivationFunction_LeakyReLu:
259 return armnn::ActivationFunction::LeakyReLu;
260 case armnnSerializer::ActivationFunction_Abs:
261 return armnn::ActivationFunction::Abs;
262 case armnnSerializer::ActivationFunction_Sqrt:
263 return armnn::ActivationFunction::Sqrt;
264 case armnnSerializer::ActivationFunction_Square:
265 return armnn::ActivationFunction::Square;
266 default:
267 return armnn::ActivationFunction::Sigmoid;
268 }
269}
270
// Builds an armnn::TensorInfo from a serialized TensorInfo table: maps the
// data type, copies the dimensions, and carries over the quantization
// scale/offset. Throws ParseException for a null pointer (via the macro)
// or an unsupported data type.
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
            type = armnn::DataType::QuantisedAsymm8;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    // Copy the flatbuffers dimensions into a contiguous local buffer that
    // the TensorInfo constructor can consume.
    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}
319
// Builds an armnn::ConstTensor wrapping the raw payload of a serialized
// ConstTensor, dispatching on the payload union (byte/short/int/long data).
// The returned ConstTensor points into the flatbuffer, so the buffer must
// outlive it. Throws ParseException for null input or an unknown data type.
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
            return armnn::ConstTensor(tensorInfo, constTensorPtr->data_as_ByteData()->data()->data());
        case ConstTensorData_ShortData:
            return armnn::ConstTensor(tensorInfo, constTensorPtr->data_as_ShortData()->data()->data());
        case ConstTensorData_IntData:
            return armnn::ConstTensor(tensorInfo, constTensorPtr->data_as_IntData()->data()->data());
        case ConstTensorData_LongData:
            return armnn::ConstTensor(tensorInfo, constTensorPtr->data_as_LongData()->data()->data());
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}
346
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000347Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphInputs(const GraphPtr& graphPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000348{
349
350 CHECK_GRAPH(graphPtr, 0);
351 const auto& numInputs = graphPtr->inputIds()->size();
352
353 LayerBaseRawPtrVector result(numInputs);
354
355 for (unsigned int i=0; i<numInputs; ++i)
356 {
Mike Kelly8c1701a2019-02-11 17:01:27 +0000357 uint32_t inputId = graphPtr->inputIds()->Get(i);
Kevin May43a799c2019-02-08 16:31:42 +0000358 result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(inputId));
359 }
360 return result;
361}
362
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000363Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphOutputs(const GraphPtr& graphPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000364{
365 CHECK_GRAPH(graphPtr, 0);
366 const auto& numOutputs = graphPtr->outputIds()->size();
Kevin May43a799c2019-02-08 16:31:42 +0000367 LayerBaseRawPtrVector result(numOutputs);
368
369 for (unsigned int i=0; i<numOutputs; ++i)
370 {
Mike Kelly8c1701a2019-02-11 17:01:27 +0000371 uint32_t outputId = graphPtr->outputIds()->Get(i);
Saoirse Stewart263829c2019-02-19 15:54:14 +0000372
Kevin May43a799c2019-02-08 16:31:42 +0000373 result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(outputId));
374 }
375 return result;
376}
377
// Returns the TensorInfo of every input of the layer at layerIndex, looked up
// by following each input slot's connection back to its source layer.
Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
                                                         unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        // CHECKED_NON_NEGATIVE guards against a corrupt/negative source index.
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
            (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        // NOTE(review): always reads output slot 0 of the source layer; the
        // connection's own output-slot index is ignored — assumes single-output
        // source layers. TODO confirm for multi-output layers.
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}
395
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000396Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000397 unsigned int layerIndex)
398{
399 CHECK_LAYERS(graphPtr, 0, layerIndex);
400 auto layer = GetBaseLayer(graphPtr, layerIndex);
401 const auto& numOutputs = layer->outputSlots()->size();
402
403 TensorRawPtrVector result(numOutputs);
404
405 for (unsigned int i=0; i<numOutputs; ++i)
406 {
407 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
408 }
409 return result;
410}
411
// Default entry in m_ParserFunctions: always throws, reporting the index and
// name of the layer whose type has no registered parser.
void Deserializer::ParseUnsupportedLayer(unsigned int layerIndex)
{
    CHECK_LAYERS(m_Graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(m_Graph, layerIndex)->layerName()->c_str();
    throw ParseException(
        boost::str(
            boost::format("Layer not supported. "
                          "layerIndex: %1% "
                          "layerName: %2% / %3%") %
            layerIndex %
            layerName %
            CHECK_LOCATION().AsString()));
}
425
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000426void Deserializer::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000427{
428 m_Network = armnn::INetworkPtr(nullptr, nullptr);
429 m_Graph = nullptr;
430}
431
// Factory returning an owning raw pointer; callers must release it via
// IDeserializer::Destroy (or use Create() for RAII).
IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}
436
// Preferred factory: wraps CreateRaw() in a smart pointer whose deleter is
// IDeserializer::Destroy, so deletion happens in this library.
IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}
441
// Counterpart of CreateRaw(): deletes a parser allocated by this library.
void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}
446
// Deserializes a network from an in-memory serialized buffer: resets parser
// state, verifies/loads the flatbuffer graph, then builds the INetwork.
// The graph keeps pointing into binaryContent, which must outlive the parse.
INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    m_Graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph();
}
453
// Stream overload: resets parser state, loads the graph from the stream
// (see LoadGraphFromBinary(std::istream&) and the lifetime caveat there),
// then builds the INetwork.
armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    m_Graph = LoadGraphFromBinary(binaryContent);
    return CreateNetworkFromGraph();
}
460
// Verifies that the buffer is a well-formed SerializedGraph flatbuffer and
// returns the unpacked graph pointer (a view into binaryContent — the buffer
// must outlive the returned GraphPtr).
// Throws InvalidArgumentException for a null buffer and ParseException when
// flatbuffers verification fails.
Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
                                                  CHECK_LOCATION().AsString()));
    }
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(
            boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
                                     "flatbuffers format. size:%1% %2%") %
                       len %
                       CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}
479
// Reads the whole stream into a temporary string and unpacks it as a
// SerializedGraph.
// FIXME(review): 'content' is a local — GetSerializedGraph returns a view
// into its buffer, so the returned GraphPtr dangles as soon as this function
// returns. The buffer must be kept alive (e.g. as a member) for the graph's
// lifetime. Also note: unlike the pointer overload, no flatbuffers
// verification is performed on the stream contents.
Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(std::istream& binaryContent)
{
    std::string content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    return GetSerializedGraph(content.data());
}
485
// Builds the INetwork from the already-loaded m_Graph:
//  1. runs the registered parser for every non-Input/Output layer (in graph
//     order, so layerIndex matches the flatbuffers position),
//  2. adds the Input/Output layers and records their slots,
//  3. wires every recorded output slot to all input slots registered against
//     the same connection index.
INetworkPtr Deserializer::CreateNetworkFromGraph()
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(m_Graph != nullptr);
    unsigned int layerIndex = 0;
    // One Slots entry per layer; all lookups use bucket 0 of m_GraphConnections.
    m_GraphConnections.emplace_back(m_Graph->layers()->size());
    for (AnyLayer const* layer : *m_Graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers();
    SetupOutputLayers();

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (size_t connectionIndex = 0; connectionIndex < m_GraphConnections[0].size(); ++connectionIndex)
    {
        if (m_GraphConnections[0][connectionIndex].outputSlot != nullptr)
        {
            for (size_t inputSlotIdx = 0;
                 inputSlotIdx < m_GraphConnections[0][connectionIndex].inputSlots.size();
                 ++inputSlotIdx)
            {
                m_GraphConnections[0][connectionIndex].outputSlot->Connect(
                    *(m_GraphConnections[0][connectionIndex].inputSlots[inputSlotIdx]));
            }
        }
    }

    // std::move is required here: m_Network is a member, so NRVO cannot apply.
    return std::move(m_Network);
}
524
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000525BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000526 const std::string& name) const
527{
528 CHECK_LAYERS(m_Graph, 0, layerIndex);
529 auto inputs = GetGraphInputs(m_Graph);
530
531 for (auto const& input : inputs)
532 {
533 if (input->layerName()->c_str() == name)
534 {
535 int bindingId = reinterpret_cast<armnn::LayerBindingId>(GetBindingLayerInfo(m_Graph, input->index()));
536 auto layerBase = GetBaseLayer(m_Graph,input->index())->outputSlots()->Get(layerIndex);
537 return std::make_pair(bindingId, ToTensorInfo(layerBase->tensorInfo()));
538 }
539 }
540 throw ParseException(
541 boost::str(
542 boost::format("No input binding found for layer:%1% / %2%") %
543 name %
544 CHECK_LOCATION().AsString()));
545}
546
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000547BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000548 const std::string& name) const
549{
550 CHECK_LAYERS(m_Graph, 0, layerIndex);
551 auto outputs = GetGraphOutputs(m_Graph);
552
553 for (auto const& output : outputs)
554 {
555 if (output->layerName()->c_str() == name)
556 {
557 int bindingId = reinterpret_cast<armnn::LayerBindingId>(GetBindingLayerInfo(m_Graph, output->index()));
558 auto layer = GetBaseLayer(m_Graph, output->index());
559 auto sourceLayerIndex = layer->inputSlots()->Get(0)->connection()->sourceLayerIndex();
560 auto sourceLayer = GetBaseLayer(m_Graph, sourceLayerIndex);
561 return std::make_pair(bindingId, ToTensorInfo(sourceLayer->outputSlots()->Get(0)->tensorInfo()));
562 }
563 }
564 throw ParseException(
565 boost::str(
566 boost::format("No output binding found for layer:%1% / %2%") %
567 name %
568 CHECK_LOCATION().AsString()));
569}
570
// Adds an armnn InputLayer for every graph input, propagates the serialized
// tensor info onto its output slot, and records the slot so the connection
// pass in CreateNetworkFromGraph can wire it to consumers.
void Deserializer::SetupInputLayers()
{
    CHECK_GRAPH(m_Graph, 0);
    auto inputs = GetGraphInputs(m_Graph);
    for (auto const& input : inputs)
    {
        IConnectableLayer* layer =
            m_Network->AddInputLayer(GetBindingLayerInfo(m_Graph, input->index()), input->layerName()->c_str());

        auto tensorInfo = ToTensorInfo(input->outputSlots()->Get(0)->tensorInfo());
        layer->GetOutputSlot(0).SetTensorInfo(tensorInfo);

        RegisterOutputSlots(input->index(), layer);
    }
}
586
// Adds an armnn OutputLayer for every graph output and records its input
// slot so the connection pass can wire the producing layer to it.
void Deserializer::SetupOutputLayers()
{
    CHECK_GRAPH(m_Graph, 0);
    auto outputs = GetGraphOutputs(m_Graph);
    for (auto const& output : outputs)
    {
        IConnectableLayer* layer =
            m_Network->AddOutputLayer(GetBindingLayerInfo(m_Graph, output->index()), output->layerName()->c_str());

        RegisterInputSlots(output->index(), layer);
    }
}
599
// Records every output slot of 'layer' against connection index 'layerIndex'
// for the later wiring pass. Throws ParseException when the created layer's
// output-slot count disagrees with the serialized layer's.
void Deserializer::RegisterOutputSlots(uint32_t layerIndex,
                                       IConnectableLayer* layer)
{
    CHECK_LAYERS(m_Graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(m_Graph, layerIndex);
    if (parsedLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index: %3% %4%") %
                       parsedLayer->outputSlots()->size() %
                       layer->GetNumOutputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumOutputSlots(); ++slotIndex)
    {
        armnn::IOutputSlot* slot = &(layer->GetOutputSlot(slotIndex));
        RegisterOutputSlotOfConnection(layerIndex, slot);
    }
}
623
// Records every input slot of 'layer' against the connection index of the
// layer that feeds it (the serialized connection's sourceLayerIndex), for the
// later wiring pass. Throws ParseException when the created layer's
// input-slot count disagrees with the serialized layer's.
void Deserializer::RegisterInputSlots(uint32_t layerIndex,
                                      armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(m_Graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    auto parsedLayer = GetBaseLayer(m_Graph, layerIndex);
    if (parsedLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index:%3% %4%") %
                       parsedLayer->inputSlots()->size() %
                       layer->GetNumInputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int slotIndex = 0; slotIndex < layer->GetNumInputSlots(); ++slotIndex)
    {
        armnn::IInputSlot* slot = &(layer->GetInputSlot(slotIndex));
        uint32_t sourceLayerIndex = parsedLayer->inputSlots()->Get(slotIndex)->connection()->sourceLayerIndex();
        RegisterInputSlotOfConnection(sourceLayerIndex, slot);
    }
}
648
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000649void Deserializer::RegisterInputSlotOfConnection(uint32_t connectionIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000650 armnn::IInputSlot* slot)
651{
652 BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);
653
654 Slots& slots = m_GraphConnections[0][connectionIndex];
655 slots.inputSlots.push_back(slot);
656}
657
// Records 'slot' as the single producer for connectionIndex. Throws
// ParseException if a producer was already registered for that connection.
void Deserializer::RegisterOutputSlotOfConnection(uint32_t connectionIndex,
                                                 armnn::IOutputSlot* slot)
{
    BOOST_ASSERT(m_GraphConnections[0].size() > connectionIndex);

    Slots& slots = m_GraphConnections[0][connectionIndex];

    // assuming there is only one producer for that tensor
    if (slots.outputSlot != nullptr)
    {
        throw ParseException(boost::str(
            boost::format("Another layer has already registered itself as the producer of "
                          "connection:%1% / %2%") %
            connectionIndex %
            CHECK_LOCATION().AsString()));
    }

    slots.outputSlot = slot;
}
677
// Deserializes an ActivationLayer at layerIndex: rebuilds the
// ActivationDescriptor (function, a, b), adds the layer to the network,
// sets its output tensor info and registers its slot connections.
void Deserializer::ParseActivation(unsigned int layerIndex)
{
    CHECK_LAYERS(m_Graph, 0, layerIndex);
    auto inputs = GetInputs(m_Graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(m_Graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = boost::str(boost::format("Activation:%1%") % layerIndex);

    auto serializerLayer = m_Graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::ActivationDescriptor descriptor;
    descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
    descriptor.m_A = serializerDescriptor->a();
    descriptor.m_B = serializerDescriptor->b();

    IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
                                                             layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(layerIndex, layer);
    RegisterOutputSlots(layerIndex, layer);
}
706
// Deserializes an AdditionLayer at layerIndex: adds a two-input addition
// layer, sets its output tensor info and registers its slot connections.
void Deserializer::ParseAdd(unsigned int layerIndex)
{
    CHECK_LAYERS(m_Graph, 0, layerIndex);
    auto inputs = GetInputs(m_Graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(m_Graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    // NOTE(review): unlike ParseActivation, the generated name is stored in
    // the m_layerName member rather than a local — presumably only needed for
    // the c_str() call below; confirm nothing else relies on m_layerName.
    m_layerName = boost::str(boost::format("Addition:%1%") % layerIndex);
    IConnectableLayer* layer = m_Network->AddAdditionLayer(m_layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(layerIndex, layer);
    RegisterOutputSlots(layerIndex, layer);
}
726
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000727void Deserializer::ParseConvolution2d(unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +0000728{
729 CHECK_LAYERS(m_Graph, 0, layerIndex);
730 auto inputs = GetInputs(m_Graph, layerIndex);
731 CHECK_LOCATION();
732 CHECK_VALID_SIZE(inputs.size(), 1);
733
734 auto outputs = GetOutputs(m_Graph, layerIndex);
735 CHECK_VALID_SIZE(outputs.size(), 1);
736
737 auto layerName = boost::str(boost::format("Convolution2d:%1%") % layerIndex);
738
739 auto serializerLayer = m_Graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
740 auto serializerDescriptor = serializerLayer->descriptor();
741
742 armnn::Convolution2dDescriptor descriptor;
743 descriptor.m_PadLeft = serializerDescriptor->padLeft();
744 descriptor.m_PadRight = serializerDescriptor->padRight();
745 descriptor.m_PadTop = serializerDescriptor->padTop();
746 descriptor.m_PadBottom = serializerDescriptor->padBottom();
747 descriptor.m_StrideX = serializerDescriptor->strideX();
748 descriptor.m_StrideY = serializerDescriptor->strideY();;
749 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
750 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
751
752 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
753 armnn::ConstTensor biases;
754
755 if (descriptor.m_BiasEnabled)
756 {
757 biases = ToConstTensor(serializerLayer->biases());
758 }
759 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
760 weights,
761 biases,
762 layerName.c_str());
763 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
764 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
765
766 RegisterInputSlots(layerIndex, layer);
767 RegisterOutputSlots(layerIndex, layer);
768}
769
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000770void Deserializer::ParseDepthwiseConvolution2d(unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000771{
772 CHECK_LAYERS(m_Graph, 0, layerIndex);
773 auto inputs = GetInputs(m_Graph, layerIndex);
774 CHECK_LOCATION();
775 CHECK_VALID_SIZE(inputs.size(), 1);
776
777 auto outputs = GetOutputs(m_Graph, layerIndex);
778 CHECK_VALID_SIZE(outputs.size(), 1);
779
780 auto layerName = boost::str(boost::format("DepthwiseConvolution2d:%1%") % layerIndex);
781
782 auto serializerLayer = m_Graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
783 auto serializerDescriptor = serializerLayer->descriptor();
784
785 armnn::DepthwiseConvolution2dDescriptor descriptor;
786 descriptor.m_PadLeft = serializerDescriptor->padLeft();
787 descriptor.m_PadRight = serializerDescriptor->padRight();
788 descriptor.m_PadTop = serializerDescriptor->padTop();
789 descriptor.m_PadBottom = serializerDescriptor->padBottom();
790 descriptor.m_StrideX = serializerDescriptor->strideX();
791 descriptor.m_StrideY = serializerDescriptor->strideY();;
792 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
793 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
794
795 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
796 armnn::ConstTensor biases;
797
798 if (descriptor.m_BiasEnabled)
799 {
800 biases = ToConstTensor(serializerLayer->biases());
801 }
802 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
803 weights,
804 biases,
805 layerName.c_str());
806
807 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
808 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
809
810 RegisterInputSlots(layerIndex, layer);
811 RegisterOutputSlots(layerIndex, layer);
812}
813
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000814void Deserializer::ParseMultiplication(unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +0000815{
816 CHECK_LAYERS(m_Graph, 0, layerIndex);
817 auto inputs = GetInputs(m_Graph, layerIndex);
818 CHECK_LOCATION();
819 CHECK_VALID_SIZE(inputs.size(), 2);
820
821 auto outputs = GetOutputs(m_Graph, layerIndex);
822 CHECK_VALID_SIZE(outputs.size(), 1);
823
Saoirse Stewart3166c3e2019-02-18 15:24:53 +0000824 m_layerName = boost::str(boost::format("Multiplication:%1%") % layerIndex);
825 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(m_layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +0000826
827 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
828 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
829
830 RegisterInputSlots(layerIndex, layer);
831 RegisterOutputSlots(layerIndex, layer);
832}
833
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000834armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
Saoirse Stewart3166c3e2019-02-18 15:24:53 +0000835 unsigned int layerIndex)
836{
837 armnn::Pooling2dDescriptor desc;
838
839 switch (pooling2dDesc->poolType())
840 {
841 case PoolingAlgorithm_Average:
842 {
843 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
844 m_layerName = boost::str(boost::format("AveragePool2D:%1%") % layerIndex);
845 break;
846 }
847 case PoolingAlgorithm_Max:
848 {
849 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
850 m_layerName = boost::str(boost::format("MaxPool2D:%1%") % layerIndex);
851 break;
852 }
853 default:
854 {
855 BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
856 }
857 }
858
859 switch (pooling2dDesc->outputShapeRounding())
860 {
861 case OutputShapeRounding_Floor:
862 {
863 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
864 break;
865 }
866 case OutputShapeRounding_Ceiling:
867 {
868 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
869 break;
870 }
871 default:
872 {
873 BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
874 }
875 }
876
877 switch (pooling2dDesc->paddingMethod())
878 {
879 case PaddingMethod_Exclude:
880 {
881 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
882 break;
883 }
884 case PaddingMethod_IgnoreValue:
885 {
886 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
887 break;
888 }
889 default:
890 {
891 BOOST_ASSERT_MSG(false, "Unsupported padding method");
892 }
893 }
894
895 switch (pooling2dDesc->dataLayout())
896 {
897 case DataLayout_NCHW:
898 {
899 desc.m_DataLayout = armnn::DataLayout::NCHW;
900 break;
901 }
902 case DataLayout_NHWC:
903 {
904 desc.m_DataLayout = armnn::DataLayout::NHWC;
905 break;
906 }
907 default:
908 {
909 BOOST_ASSERT_MSG(false, "Unsupported data layout");
910 }
911 }
912
913 desc.m_PadRight = pooling2dDesc->padRight();
914 desc.m_PadLeft = pooling2dDesc->padLeft();
915 desc.m_PadBottom = pooling2dDesc->padBottom();
916 desc.m_PadTop = pooling2dDesc->padTop();
917 desc.m_StrideX = pooling2dDesc->strideX();
918 desc.m_StrideY = pooling2dDesc->strideY();
919 desc.m_PoolWidth = pooling2dDesc->poolWidth();
920 desc.m_PoolHeight = pooling2dDesc->poolHeight();
921
922 return desc;
923}
924
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000925void Deserializer::ParsePooling2d(unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +0000926{
927 CHECK_LAYERS(m_Graph, 0, layerIndex);
928
929 auto pooling2dDes = m_Graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
930
931 auto inputs = GetInputs(m_Graph, layerIndex);
932 CHECK_VALID_SIZE(inputs.size(), 1);
933
934 auto outputs = GetOutputs(m_Graph, layerIndex);
935 CHECK_VALID_SIZE(outputs.size(), 1);
936 auto outputInfo = ToTensorInfo(outputs[0]);
937
938 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
939
940 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, m_layerName.c_str());
941 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
942
943 RegisterInputSlots(layerIndex, layer);
944 RegisterOutputSlots(layerIndex, layer);
945}
946
// Computes the concrete output TensorInfo of a reshape, given the input
// TensorInfo and the target dimensions. At most one target dimension may be
// the wildcard -1 (stored as 0xFFFFFFFF in the uint32_t vector); that
// dimension is inferred so the element count matches the input tensor.
// Quantization and data type are inherited from inputTensorInfo.
// Throws ParseException if more than one wildcard dimension is present.
armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                     const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    // -1 converts to 0xFFFFFFFF when compared against uint32_t elements, so
    // this locates the wildcard dimension, if any.
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        // A second wildcard after the first makes the shape ambiguous.
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(boost::str(
                boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
        }

        // Accumulating with initial value -1 multiplies in the wildcard's own
        // -1 as well, i.e. (-1) * (-1) * (other dims) = product of the known
        // dims. The cast to unsigned is then safe.
        auto targetNumElements =
            boost::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        // Infer the wildcard so total element count matches the input.
        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    // Copy, then override only the shape — keeps dtype/quantization info.
    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}
976
// Deserializes a ReshapeLayer from the flatbuffer graph at layerIndex and
// adds it to m_Network. The output shape is computed from the descriptor's
// targetShape (which may contain one -1 wildcard — see OutputShapeOfReshape)
// and validated against the declared output tensor dimensions.
void Deserializer::ParseReshape(unsigned int layerIndex)
{
    CHECK_LAYERS(m_Graph, 0, layerIndex);
    auto inputs = GetInputs(m_Graph, layerIndex);

    auto outputs = GetOutputs(m_Graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = m_Graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    // Resolve any -1 wildcard in the target shape against the input size.
    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    // NOTE(review): the inputs.size() > 1 guard makes this validation dead
    // for the ordinary one-input reshape — presumably intended for a variant
    // where the shape arrives as a second input. Verify whether the check
    // should run unconditionally.
    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = boost::str(boost::format("Reshape:%1%") % layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    // The layer reports the computed (wildcard-resolved) output shape.
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(layerIndex, layer);
    RegisterOutputSlots(layerIndex, layer);
}
1019
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001020void Deserializer::ParseSoftmax(unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001021{
1022 CHECK_LAYERS(m_Graph, 0, layerIndex);
1023
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001024 Deserializer::TensorRawPtrVector inputs = GetInputs(m_Graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001025 CHECK_VALID_SIZE(inputs.size(), 1);
1026
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001027 Deserializer::TensorRawPtrVector outputs = GetOutputs(m_Graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001028 CHECK_VALID_SIZE(outputs.size(), 1);
1029
1030 armnn::SoftmaxDescriptor descriptor;
1031 descriptor.m_Beta = m_Graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
1032
1033 const std::string layerName = boost::str(boost::format("Softmax:%1%") % layerIndex);
1034 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
1035
1036 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1037 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1038
1039 RegisterInputSlots(layerIndex, layer);
1040 RegisterOutputSlots(layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001041}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001042
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001043} // namespace armnnDeserializer