blob: 152a5b4c939d39448cc17a535d69c152f5016cf5 [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
8#include <armnn/ArmNN.hpp>
9#include <armnn/Exceptions.hpp>
10
11#include <ParserHelper.hpp>
12#include <Permute.hpp>
13#include <VerificationHelpers.hpp>
14
15#include <boost/filesystem.hpp>
16#include <boost/format.hpp>
17#include <boost/core/ignore_unused.hpp>
18#include <boost/assert.hpp>
19#include <boost/format.hpp>
20#include <boost/log/trivial.hpp>
Jim Flynn18ce3382019-03-08 11:08:30 +000021#include <boost/polymorphic_cast.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000022
23// The generated code based on the Serialize schema:
Matthew Bentham268509a2019-02-25 13:58:24 +000024#include <ArmnnSchema_generated.h>
Kevin May43a799c2019-02-08 16:31:42 +000025
26#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000027#include <algorithm>
28#include <limits>
29#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000030
31using armnn::ParseException;
32using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000033using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000034
Derek Lamberti0028d1b2019-02-20 13:57:42 +000035namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000036{
Kevin May43a799c2019-02-08 16:31:42 +000037
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000038namespace
39{
Kevin May43a799c2019-02-08 16:31:42 +000040
41const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
42
Derek Lamberti0028d1b2019-02-20 13:57:42 +000043 void CheckGraph(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000044 unsigned int layersIndex,
45 const CheckLocation& location)
46{
47 if (graph->layers() == nullptr)
48 {
49 throw ParseException(
50 boost::str(
51 boost::format("%1% was called with invalid (null) graph. "
52 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
53 "layers:%2% at %3%") %
54 location.m_Function %
55 layersIndex %
56 location.FileLine()));
57 }
58 else if (layersIndex >= graph->layers()->size())
59 {
60 throw ParseException(
61 boost::str(
62 boost::format("%1% was called with an invalid layers index. "
63 "layers:%2% at %3%") %
64 location.m_Function %
65 layersIndex %
66 location.FileLine()));
67 }
68}
69
Derek Lamberti0028d1b2019-02-20 13:57:42 +000070void CheckLayers(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000071 unsigned int layersIndex,
72 unsigned int layerIndex,
73 const CheckLocation& location)
74{
75 if (graph->layers() == nullptr)
76 {
77 throw ParseException(
78 boost::str(
79 boost::format("%1% was called with invalid (null) graph. "
80 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000081 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000082 location.m_Function %
83 layersIndex %
84 location.FileLine()));
85 }
86 else if (layersIndex >= graph->layers()->size())
87 {
88 throw ParseException(
89 boost::str(
90 boost::format("%1% was called with an invalid layers index. "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000091 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000092 location.m_Function %
93 layersIndex %
94 location.FileLine()));
95 }
96 else if (layerIndex >= graph->layers()[layersIndex].size()
97 && layerIndex != VIRTUAL_LAYER_ID)
98 {
99 throw ParseException(
100 boost::str(
101 boost::format("%1% was called with an invalid layer index. "
102 "layers:%2% layer:%3% at %4%") %
103 location.m_Function %
104 layersIndex %
105 layerIndex %
106 location.FileLine()));
107 }
108}
109
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000110void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000111 const CheckLocation& location)
112{
113 if (rawPtr == nullptr)
114 {
115 throw ParseException(
116 boost::str(
117 boost::format("%1% was called with a null tensor pointer. "
118 "at %2%") %
119 location.m_Function %
120 location.FileLine()));
121
122 }
123}
124
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000125void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000126 const CheckLocation& location)
127{
128 if (rawPtr == nullptr)
129 {
130 throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
131 location.m_Function %
132 location.FileLine()));
133 }
134}
135
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000136void CheckConstTensorSize(const unsigned int constTensorSize,
137 const unsigned int tensorSize,
138 const CheckLocation& location)
139{
140 if (constTensorSize != tensorSize)
141 {
142 throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
143 location.m_Function %
144 location.FileLine()));
145 }
146}
147
Kevin May43a799c2019-02-08 16:31:42 +0000148#define CHECK_TENSOR_PTR(TENSOR_PTR) \
149 CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())
150
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000151#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
152 CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())
153
Mike Kellya0766c32019-02-19 17:22:07 +0000154#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
155 CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())
156
Kevin May43a799c2019-02-08 16:31:42 +0000157#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
158 CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())
159
160#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
161 CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
162}
163
Saoirse Stewart263829c2019-02-19 15:54:14 +0000164bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
165{
166 const unsigned int actualSize = actual.GetNumDimensions();
167 if (actualSize != expected.size())
168 {
169 return false;
170 }
171
172 for (unsigned int i = 0u; i < actualSize; i++)
173 {
174 if (actual[i] != static_cast<unsigned int>(expected[i]))
175 {
176 return false;
177 }
178 }
179
180 return true;
181}
182
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000183Deserializer::Deserializer()
Kevin May43a799c2019-02-08 16:31:42 +0000184: m_Network(nullptr, nullptr),
185//May require LayerType_Max to be included
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000186m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
Kevin May43a799c2019-02-08 16:31:42 +0000187{
188 // register supported layers
Mike Kellyaf484012019-02-20 16:53:11 +0000189 m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000190 m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000191 m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &Deserializer::ParseBatchToSpaceNd;
ruoyan018e7fa232019-02-28 15:09:07 +0000192 m_ParserFunctions[Layer_BatchNormalizationLayer] = &Deserializer::ParseBatchNormalization;
Conor Kennedy76277882019-02-26 08:29:54 +0000193 m_ParserFunctions[Layer_ConstantLayer] = &Deserializer::ParseConstant;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000194 m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
195 m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +0000196 m_ParserFunctions[Layer_DetectionPostProcessLayer] = &Deserializer::ParseDetectionPostProcess;
Éanna Ó Catháin58885892019-02-27 16:16:39 +0000197 m_ParserFunctions[Layer_DivisionLayer] = &Deserializer::ParseDivision;
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +0000198 m_ParserFunctions[Layer_EqualLayer] = &Deserializer::ParseEqual;
Sadik Armagandbb0c0c2019-02-21 09:01:41 +0000199 m_ParserFunctions[Layer_FullyConnectedLayer] = &Deserializer::ParseFullyConnected;
Finn Williamsdd2ba7e2019-03-01 11:51:52 +0000200 m_ParserFunctions[Layer_FloorLayer] = &Deserializer::ParseFloor;
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000201 m_ParserFunctions[Layer_GatherLayer] = &Deserializer::ParseGather;
Conor Kennedy79ffdf52019-03-01 14:24:54 +0000202 m_ParserFunctions[Layer_GreaterLayer] = &Deserializer::ParseGreater;
Narumol Prangnawarat495701f2019-03-07 17:31:34 +0000203 m_ParserFunctions[Layer_L2NormalizationLayer] = &Deserializer::ParseL2Normalization;
Aron Virginas-Tar377351e2019-02-27 14:42:31 +0000204 m_ParserFunctions[Layer_MaximumLayer] = &Deserializer::ParseMaximum;
Sadik Armaganac97c8c2019-03-04 17:44:21 +0000205 m_ParserFunctions[Layer_MeanLayer] = &Deserializer::ParseMean;
206 m_ParserFunctions[Layer_MinimumLayer] = &Deserializer::ParseMinimum;
Jim Flynnac25a1b2019-02-28 10:40:49 +0000207 m_ParserFunctions[Layer_MergerLayer] = &Deserializer::ParseMerger;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000208 m_ParserFunctions[Layer_MultiplicationLayer] = &Deserializer::ParseMultiplication;
Nina Drozd57728782019-02-27 10:53:27 +0000209 m_ParserFunctions[Layer_NormalizationLayer] = &Deserializer::ParseNormalization;
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +0000210 m_ParserFunctions[Layer_PadLayer] = &Deserializer::ParsePad;
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +0000211 m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000212 m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
213 m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +0000214 m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
Sadik Armagan8b42a382019-03-01 14:24:49 +0000215 m_ParserFunctions[Layer_RsqrtLayer] = &Deserializer::ParseRsqrt;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000216 m_ParserFunctions[Layer_SoftmaxLayer] = &Deserializer::ParseSoftmax;
Nattapat Chaimanowong45286992019-02-26 15:53:02 +0000217 m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &Deserializer::ParseSpaceToBatchNd;
Jim Flynn18ce3382019-03-08 11:08:30 +0000218 m_ParserFunctions[Layer_SplitterLayer] = &Deserializer::ParseSplitter;
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +0000219 m_ParserFunctions[Layer_StridedSliceLayer] = &Deserializer::ParseStridedSlice;
Conor Kennedyda1f9752019-03-01 14:37:12 +0000220 m_ParserFunctions[Layer_SubtractionLayer] = &Deserializer::ParseSubtraction;
Kevin May43a799c2019-02-08 16:31:42 +0000221}
222
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000223Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000224{
225 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
226
227 switch(layerType)
228 {
Mike Kellyaf484012019-02-20 16:53:11 +0000229 case Layer::Layer_ActivationLayer:
230 return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000231 case Layer::Layer_AdditionLayer:
232 return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000233 case Layer::Layer_BatchToSpaceNdLayer:
234 return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
ruoyan018e7fa232019-02-28 15:09:07 +0000235 case Layer::Layer_BatchNormalizationLayer:
236 return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
Conor Kennedy76277882019-02-26 08:29:54 +0000237 case Layer::Layer_ConstantLayer:
238 return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
Mike Kellya0766c32019-02-19 17:22:07 +0000239 case Layer::Layer_Convolution2dLayer:
240 return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000241 case Layer::Layer_DepthwiseConvolution2dLayer:
242 return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +0000243 case Layer::Layer_DetectionPostProcessLayer:
244 return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
Éanna Ó Catháin58885892019-02-27 16:16:39 +0000245 case Layer::Layer_DivisionLayer:
246 return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +0000247 case Layer::Layer_EqualLayer:
248 return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +0000249 case Layer::Layer_FullyConnectedLayer:
250 return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
Finn Williamsdd2ba7e2019-03-01 11:51:52 +0000251 case Layer::Layer_FloorLayer:
252 return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000253 case Layer::Layer_GatherLayer:
254 return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
Conor Kennedy79ffdf52019-03-01 14:24:54 +0000255 case Layer::Layer_GreaterLayer:
256 return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000257 case Layer::Layer_InputLayer:
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +0000258 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +0000259 case Layer::Layer_L2NormalizationLayer:
260 return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
Sadik Armaganac97c8c2019-03-04 17:44:21 +0000261 case Layer::Layer_MeanLayer:
262 return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +0000263 case Layer::Layer_MinimumLayer:
264 return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
Aron Virginas-Tar377351e2019-02-27 14:42:31 +0000265 case Layer::Layer_MaximumLayer:
266 return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
Jim Flynnac25a1b2019-02-28 10:40:49 +0000267 case Layer::Layer_MergerLayer:
268 return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
Sadik Armagan5f450272019-02-12 14:31:45 +0000269 case Layer::Layer_MultiplicationLayer:
270 return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
Nina Drozd57728782019-02-27 10:53:27 +0000271 case Layer::Layer_NormalizationLayer:
272 return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000273 case Layer::Layer_OutputLayer:
274 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +0000275 case Layer::Layer_PadLayer:
276 return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +0000277 case Layer::Layer_PermuteLayer:
278 return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
Saoirse Stewart3166c3e2019-02-18 15:24:53 +0000279 case Layer::Layer_Pooling2dLayer:
280 return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
Saoirse Stewart263829c2019-02-19 15:54:14 +0000281 case Layer::Layer_ReshapeLayer:
282 return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +0000283 case Layer::Layer_ResizeBilinearLayer:
284 return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
Sadik Armagan8b42a382019-03-01 14:24:49 +0000285 case Layer::Layer_RsqrtLayer:
286 return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +0000287 case Layer::Layer_SoftmaxLayer:
288 return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
Nattapat Chaimanowong45286992019-02-26 15:53:02 +0000289 case Layer::Layer_SpaceToBatchNdLayer:
290 return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
Jim Flynn18ce3382019-03-08 11:08:30 +0000291 case Layer::Layer_SplitterLayer:
292 return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +0000293 case Layer::Layer_StridedSliceLayer:
294 return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
Conor Kennedyda1f9752019-03-01 14:37:12 +0000295 case Layer::Layer_SubtractionLayer:
296 return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000297 case Layer::Layer_NONE:
298 default:
299 throw ParseException(boost::str(
300 boost::format("Layer must have a type %1%") %
301 Layer::Layer_NONE));
302 }
303}
304
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000305std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
306{
307 auto layer = GetBaseLayer(graph, index);
308 assert(layer);
309 return layer->layerName()->str();
310}
311
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000312int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000313{
314 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
315
316 if (layerType == Layer::Layer_InputLayer)
317 {
318 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
319 }
320 else if ( layerType == Layer::Layer_OutputLayer )
321 {
322 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
323 }
324 return 0;
325}
326
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000327armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000328{
329 switch (dataLayout)
330 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000331 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000332 return armnn::DataLayout::NHWC;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000333 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000334 default:
335 return armnn::DataLayout::NCHW;
336 }
337}
338
Mike Kellyaf484012019-02-20 16:53:11 +0000339armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
340{
341 switch (function)
342 {
343 case armnnSerializer::ActivationFunction_Sigmoid:
344 return armnn::ActivationFunction::Sigmoid;
345 case armnnSerializer::ActivationFunction_TanH:
346 return armnn::ActivationFunction::TanH;
347 case armnnSerializer::ActivationFunction_Linear:
348 return armnn::ActivationFunction::Linear;
349 case armnnSerializer::ActivationFunction_ReLu:
350 return armnn::ActivationFunction::ReLu;
351 case armnnSerializer::ActivationFunction_BoundedReLu:
352 return armnn::ActivationFunction::BoundedReLu;
353 case armnnSerializer::ActivationFunction_LeakyReLu:
354 return armnn::ActivationFunction::LeakyReLu;
355 case armnnSerializer::ActivationFunction_Abs:
356 return armnn::ActivationFunction::Abs;
357 case armnnSerializer::ActivationFunction_Sqrt:
358 return armnn::ActivationFunction::Sqrt;
359 case armnnSerializer::ActivationFunction_Square:
360 return armnn::ActivationFunction::Square;
361 default:
362 return armnn::ActivationFunction::Sigmoid;
363 }
364}
365
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000366armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000367{
368 armnn::DataType type;
369 CHECK_TENSOR_PTR(tensorPtr);
370
371 switch (tensorPtr->dataType())
372 {
373 case DataType_QuantisedAsymm8:
374 type = armnn::DataType::QuantisedAsymm8;
375 break;
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000376 case DataType_QuantisedSymm16:
377 type = armnn::DataType::QuantisedSymm16;
378 break;
Mike Kellya0766c32019-02-19 17:22:07 +0000379 case DataType_Signed32:
380 type = armnn::DataType::Signed32;
381 break;
Kevin May43a799c2019-02-08 16:31:42 +0000382 case DataType_Float32:
383 type = armnn::DataType::Float32;
384 break;
385 case DataType_Float16:
386 type = armnn::DataType::Float16;
387 break;
388 case DataType_Boolean:
389 type = armnn::DataType::Boolean;
390 break;
391 default:
392 {
393 CheckLocation location = CHECK_LOCATION();
394 throw ParseException(
395 boost::str(
396 boost::format("Unsupported data type %1% = %2%. %3%") %
397 tensorPtr->dataType() %
398 EnumNameDataType(tensorPtr->dataType()) %
399 location.AsString()));
400 }
401 }
402 float quantizationScale = tensorPtr->quantizationScale();
403 int32_t quantizationOffset = tensorPtr->quantizationOffset();
404
405 auto dimensions = tensorPtr->dimensions();
406 unsigned int size = dimensions->size();
407 std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
408
409 // two statements (on purpose) for easier debugging:
410 armnn::TensorInfo result(size,
411 outputDims.data(),
412 type,
413 quantizationScale,
414 quantizationOffset);
415 return result;
416}
417
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000418armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
Mike Kellya0766c32019-02-19 17:22:07 +0000419{
420 CHECK_CONST_TENSOR_PTR(constTensorPtr);
421 armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());
422
423 switch (constTensorPtr->data_type())
424 {
425 case ConstTensorData_ByteData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000426 {
427 auto byteData = constTensorPtr->data_as_ByteData()->data();
428 CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
429 return armnn::ConstTensor(tensorInfo, byteData->data());
430 }
Mike Kellya0766c32019-02-19 17:22:07 +0000431 case ConstTensorData_ShortData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000432 {
433 auto shortData = constTensorPtr->data_as_ShortData()->data();
434 CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
435 return armnn::ConstTensor(tensorInfo, shortData->data());
436 }
Mike Kellya0766c32019-02-19 17:22:07 +0000437 case ConstTensorData_IntData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000438 {
439 auto intData = constTensorPtr->data_as_IntData()->data();
440 CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
441 return armnn::ConstTensor(tensorInfo, intData->data());
442 }
Mike Kellya0766c32019-02-19 17:22:07 +0000443 case ConstTensorData_LongData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000444 {
445 auto longData = constTensorPtr->data_as_LongData()->data();
446 CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
447 return armnn::ConstTensor(tensorInfo, longData->data());
448 }
Mike Kellya0766c32019-02-19 17:22:07 +0000449 default:
450 {
451 CheckLocation location = CHECK_LOCATION();
452 throw ParseException(
453 boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
454 constTensorPtr->data_type() %
455 EnumNameConstTensorData(constTensorPtr->data_type()) %
456 location.AsString()));
457 }
458 }
459}
460
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000461Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphInputs(const GraphPtr& graphPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000462{
463
464 CHECK_GRAPH(graphPtr, 0);
465 const auto& numInputs = graphPtr->inputIds()->size();
466
467 LayerBaseRawPtrVector result(numInputs);
468
469 for (unsigned int i=0; i<numInputs; ++i)
470 {
Mike Kelly8c1701a2019-02-11 17:01:27 +0000471 uint32_t inputId = graphPtr->inputIds()->Get(i);
Kevin May43a799c2019-02-08 16:31:42 +0000472 result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(inputId));
473 }
474 return result;
475}
476
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000477Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphOutputs(const GraphPtr& graphPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000478{
479 CHECK_GRAPH(graphPtr, 0);
480 const auto& numOutputs = graphPtr->outputIds()->size();
Kevin May43a799c2019-02-08 16:31:42 +0000481 LayerBaseRawPtrVector result(numOutputs);
482
483 for (unsigned int i=0; i<numOutputs; ++i)
484 {
Mike Kelly8c1701a2019-02-11 17:01:27 +0000485 uint32_t outputId = graphPtr->outputIds()->Get(i);
Saoirse Stewart263829c2019-02-19 15:54:14 +0000486
Kevin May43a799c2019-02-08 16:31:42 +0000487 result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(outputId));
488 }
489 return result;
490}
491
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000492Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000493 unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000494{
495 CHECK_LAYERS(graphPtr, 0, layerIndex);
496 auto layer = GetBaseLayer(graphPtr, layerIndex);
497 const auto& numInputs = layer->inputSlots()->size();
498
499 TensorRawPtrVector result(numInputs);
500
501 for (unsigned int i=0; i<numInputs; ++i)
502 {
503 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
504 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
505 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
506 }
507 return result;
508}
509
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000510Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000511 unsigned int layerIndex)
512{
513 CHECK_LAYERS(graphPtr, 0, layerIndex);
514 auto layer = GetBaseLayer(graphPtr, layerIndex);
515 const auto& numOutputs = layer->outputSlots()->size();
516
517 TensorRawPtrVector result(numOutputs);
518
519 for (unsigned int i=0; i<numOutputs; ++i)
520 {
521 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
522 }
523 return result;
524}
525
Derek Lamberti8ddae332019-02-21 16:29:43 +0000526void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000527{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000528 CHECK_LAYERS(graph, 0, layerIndex);
529 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Kevin May43a799c2019-02-08 16:31:42 +0000530 throw ParseException(
531 boost::str(
532 boost::format("Layer not supported. "
533 "layerIndex: %1% "
534 "layerName: %2% / %3%") %
535 layerIndex %
536 layerName %
537 CHECK_LOCATION().AsString()));
538}
539
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000540void Deserializer::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000541{
542 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000543 m_InputBindings.clear();
544 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000545}
546
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000547IDeserializer* IDeserializer::CreateRaw()
Kevin May43a799c2019-02-08 16:31:42 +0000548{
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000549 return new Deserializer();
Kevin May43a799c2019-02-08 16:31:42 +0000550}
551
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000552IDeserializerPtr IDeserializer::Create()
Kevin May43a799c2019-02-08 16:31:42 +0000553{
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000554 return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
Kevin May43a799c2019-02-08 16:31:42 +0000555}
556
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000557void IDeserializer::Destroy(IDeserializer* parser)
Kevin May43a799c2019-02-08 16:31:42 +0000558{
559 delete parser;
560}
561
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000562INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000563{
564 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000565 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
566 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000567}
568
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000569armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000570{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000571 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000572 std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
573 GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
574 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000575}
576
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000577Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000578{
579 if (binaryContent == nullptr)
580 {
581 throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
582 CHECK_LOCATION().AsString()));
583 }
584 flatbuffers::Verifier verifier(binaryContent, len);
585 if (verifier.VerifyBuffer<SerializedGraph>() == false)
586 {
587 throw ParseException(
588 boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
589 "flatbuffers format. size:%1% %2%") %
590 len %
591 CHECK_LOCATION().AsString()));
592 }
593 return GetSerializedGraph(binaryContent);
594}
595
Derek Lamberti8ddae332019-02-21 16:29:43 +0000596INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000597{
598 m_Network = INetwork::Create();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000599 BOOST_ASSERT(graph != nullptr);
Kevin May43a799c2019-02-08 16:31:42 +0000600 unsigned int layerIndex = 0;
Derek Lamberti8ddae332019-02-21 16:29:43 +0000601 m_GraphConnections.emplace_back(graph->layers()->size());
602 for (AnyLayer const* layer : *graph->layers())
Kevin May43a799c2019-02-08 16:31:42 +0000603 {
604 if (layer->layer_type() != Layer_InputLayer &&
605 layer->layer_type() != Layer_OutputLayer)
606 {
607 // lookup and call the parser function
608 auto& parserFunction = m_ParserFunctions[layer->layer_type()];
Derek Lamberti8ddae332019-02-21 16:29:43 +0000609 (this->*parserFunction)(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000610 }
611 ++layerIndex;
612 }
613
Derek Lamberti8ddae332019-02-21 16:29:43 +0000614 SetupInputLayers(graph);
615 SetupOutputLayers(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000616
617 // establish the connections from the layer outputs to the inputs of the subsequent layers
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000618 for (size_t connectionsIndex = 0; connectionsIndex < m_GraphConnections[0].size(); ++connectionsIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000619 {
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000620 SlotsMap& slotsMap = m_GraphConnections[0][connectionsIndex];
621 for (unsigned int outputSlotIndex = 0; outputSlotIndex < slotsMap.outputSlots.size(); outputSlotIndex++)
Kevin May43a799c2019-02-08 16:31:42 +0000622 {
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000623 if (slotsMap.inputSlots.find(outputSlotIndex) != slotsMap.inputSlots.end())
Kevin May43a799c2019-02-08 16:31:42 +0000624 {
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000625 for (armnn::IInputSlot* inputSlot : slotsMap.inputSlots[outputSlotIndex])
626 {
627 slotsMap.outputSlots[outputSlotIndex]->Connect(*inputSlot);
628 }
Kevin May43a799c2019-02-08 16:31:42 +0000629 }
630 }
631 }
632
633 return std::move(m_Network);
634}
635
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000636BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000637 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000638{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000639 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000640 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000641 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000642 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000643 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000644 }
645 }
646 throw ParseException(
647 boost::str(
648 boost::format("No input binding found for layer:%1% / %2%") %
649 name %
650 CHECK_LOCATION().AsString()));
651}
652
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000653BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000654 const std::string& name) const
655{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000656 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000657 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000658 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000659 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000660 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000661 }
662 }
663 throw ParseException(
664 boost::str(
665 boost::format("No output binding found for layer:%1% / %2%") %
666 name %
667 CHECK_LOCATION().AsString()));
668}
669
Derek Lamberti8ddae332019-02-21 16:29:43 +0000670void Deserializer::SetupInputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000671{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000672 CHECK_GRAPH(graph, 0);
673 auto inputs = GetGraphInputs(graph);
674 m_InputBindings.clear();
675 m_InputBindings.reserve(inputs.size());
Kevin May43a799c2019-02-08 16:31:42 +0000676 for (auto const& input : inputs)
677 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000678 LayerBindingId bindingId = GetBindingLayerInfo(graph, input->index());
Kevin May43a799c2019-02-08 16:31:42 +0000679 IConnectableLayer* layer =
Derek Lamberti8ddae332019-02-21 16:29:43 +0000680 m_Network->AddInputLayer(bindingId, input->layerName()->c_str());
Kevin May43a799c2019-02-08 16:31:42 +0000681
682 auto tensorInfo = ToTensorInfo(input->outputSlots()->Get(0)->tensorInfo());
683 layer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
684
Derek Lamberti8ddae332019-02-21 16:29:43 +0000685 RegisterOutputSlots(graph, input->index(), layer);
686
687 BOOST_ASSERT_MSG(input->layerName()->c_str(), "Input has no name.");
688 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
689 m_InputBindings.push_back(std::make_pair(input->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000690 }
691}
692
Derek Lamberti8ddae332019-02-21 16:29:43 +0000693void Deserializer::SetupOutputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000694{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000695 CHECK_GRAPH(graph, 0);
696 auto outputs = GetGraphOutputs(graph);
697 m_OutputBindings.clear();
698 m_OutputBindings.reserve(outputs.size());
Kevin May43a799c2019-02-08 16:31:42 +0000699 for (auto const& output : outputs)
700 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000701 LayerBindingId bindingId = GetBindingLayerInfo(graph, output->index());
Kevin May43a799c2019-02-08 16:31:42 +0000702 IConnectableLayer* layer =
Derek Lamberti8ddae332019-02-21 16:29:43 +0000703 m_Network->AddOutputLayer(bindingId, output->layerName()->c_str());
Kevin May43a799c2019-02-08 16:31:42 +0000704
Derek Lamberti8ddae332019-02-21 16:29:43 +0000705 RegisterInputSlots(graph, output->index(), layer);
706
707 auto baseLayer = GetBaseLayer(graph, output->index());
708 auto sourceLayerIndex = baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex();
709 auto sourceLayer = GetBaseLayer(graph, sourceLayerIndex);
710 auto tensorInfo = ToTensorInfo(sourceLayer->outputSlots()->Get(0)->tensorInfo());
711
712 BOOST_ASSERT_MSG(output->layerName()->c_str(), "Output has no name.");
713 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
714 m_OutputBindings.push_back(std::make_pair(output->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000715 }
716}
717
Derek Lamberti8ddae332019-02-21 16:29:43 +0000718void Deserializer::RegisterOutputSlots(GraphPtr graph,
719 uint32_t layerIndex,
720 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000721{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000722 CHECK_LAYERS(graph, 0, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000723 BOOST_ASSERT(layer != nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000724 auto parsedLayer = GetBaseLayer(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000725 if (parsedLayer->outputSlots()->size() != layer->GetNumOutputSlots())
726 {
727 throw ParseException(
728 boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
729 " for layer index: %3% %4%") %
730 parsedLayer->outputSlots()->size() %
731 layer->GetNumOutputSlots() %
732 layerIndex %
733 CHECK_LOCATION().AsString()));
734 }
735
736 for (unsigned int slotIndex = 0; slotIndex < layer->GetNumOutputSlots(); ++slotIndex)
737 {
738 armnn::IOutputSlot* slot = &(layer->GetOutputSlot(slotIndex));
739 RegisterOutputSlotOfConnection(layerIndex, slot);
740 }
741}
742
Derek Lamberti8ddae332019-02-21 16:29:43 +0000743void Deserializer::RegisterInputSlots(GraphPtr graph,
744 uint32_t layerIndex,
745 armnn::IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000746{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000747 CHECK_LAYERS(graph, 0, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000748 BOOST_ASSERT(layer != nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000749 auto parsedLayer = GetBaseLayer(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000750 if (parsedLayer->inputSlots()->size() != layer->GetNumInputSlots())
751 {
752 throw ParseException(
753 boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
754 " for layer index:%3% %4%") %
755 parsedLayer->inputSlots()->size() %
756 layer->GetNumInputSlots() %
757 layerIndex %
758 CHECK_LOCATION().AsString()));
759 }
760
761 for (unsigned int slotIndex = 0; slotIndex < layer->GetNumInputSlots(); ++slotIndex)
762 {
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000763 auto fbConnection = parsedLayer->inputSlots()->Get(slotIndex)->connection();
Kevin May43a799c2019-02-08 16:31:42 +0000764 armnn::IInputSlot* slot = &(layer->GetInputSlot(slotIndex));
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000765
766 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), slot);
Kevin May43a799c2019-02-08 16:31:42 +0000767 }
768}
769
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000770void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
771 uint32_t outputSlotIndex,
772 armnn::IInputSlot* slot)
Kevin May43a799c2019-02-08 16:31:42 +0000773{
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000774 BOOST_ASSERT(m_GraphConnections[0].size() > sourceLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000775
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000776 SlotsMap& slotsMap = m_GraphConnections[0][sourceLayerIndex];
777 if (slotsMap.inputSlots.find(outputSlotIndex) == slotsMap.inputSlots.end())
Kevin May43a799c2019-02-08 16:31:42 +0000778 {
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000779 slotsMap.inputSlots[outputSlotIndex] = {slot};
Kevin May43a799c2019-02-08 16:31:42 +0000780 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000781 else
782 {
783 slotsMap.inputSlots[outputSlotIndex].push_back(slot);
784 }
785}
Kevin May43a799c2019-02-08 16:31:42 +0000786
// Records an output slot of the layer at sourceLayerIndex so later passes can
// connect it to the input slots registered for the same layer.
void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
                                                  armnn::IOutputSlot* slot)
{
    // The connection table must already have an entry per layer.
    BOOST_ASSERT(m_GraphConnections[0].size() > sourceLayerIndex);
    m_GraphConnections[0][sourceLayerIndex].outputSlots.push_back(slot);
}
793
Derek Lamberti8ddae332019-02-21 16:29:43 +0000794void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +0000795{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000796 CHECK_LAYERS(graph, 0, layerIndex);
797 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000798 CHECK_LOCATION();
799 CHECK_VALID_SIZE(inputs.size(), 1);
800
Derek Lamberti8ddae332019-02-21 16:29:43 +0000801 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000802 CHECK_VALID_SIZE(outputs.size(), 1);
803
Derek Lamberti8ddae332019-02-21 16:29:43 +0000804 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000805 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000806 auto serializerDescriptor = serializerLayer->descriptor();
807
808 armnn::ActivationDescriptor descriptor;
809 descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
810 descriptor.m_A = serializerDescriptor->a();
811 descriptor.m_B = serializerDescriptor->b();
812
813 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
814 layerName.c_str());
815 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
816 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
817
Derek Lamberti8ddae332019-02-21 16:29:43 +0000818 RegisterInputSlots(graph, layerIndex, layer);
819 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +0000820}
821
Derek Lamberti8ddae332019-02-21 16:29:43 +0000822void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000823{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000824 CHECK_LAYERS(graph, 0, layerIndex);
825 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000826 CHECK_LOCATION();
827 CHECK_VALID_SIZE(inputs.size(), 2);
828
Derek Lamberti8ddae332019-02-21 16:29:43 +0000829 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000830 CHECK_VALID_SIZE(outputs.size(), 1);
831
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000832 auto layerName = GetLayerName(graph, layerIndex);
833 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +0000834
835 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
836 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
837
Derek Lamberti8ddae332019-02-21 16:29:43 +0000838 RegisterInputSlots(graph, layerIndex, layer);
839 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +0000840}
841
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000842void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
843{
844 CHECK_LAYERS(graph, 0, layerIndex);
845
846 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
847 CHECK_VALID_SIZE(inputs.size(), 1);
848
849 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
850 CHECK_VALID_SIZE(outputs.size(), 1);
851
852 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
853 auto flatBufferCrops = flatBufferDescriptor->crops();
854 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
855
856 if (flatBufferCrops->Length() % 2 != 0)
857 {
858 throw ParseException(boost::str(
859 boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
860 }
861
862 std::vector<std::pair<unsigned int, unsigned int>> crops;
863 crops.reserve(flatBufferCrops->Length() / 2);
864 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
865 {
866 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
867 }
868
869 armnn::BatchToSpaceNdDescriptor descriptor;
870 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
871 descriptor.m_BlockShape =
872 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
873 descriptor.m_Crops = crops;
874
875 auto layerName = GetLayerName(graph, layerIndex);
876 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
877
878 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
879 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
880
881 RegisterInputSlots(graph, layerIndex, layer);
882 RegisterOutputSlots(graph, layerIndex, layer);
883}
884
ruoyan018e7fa232019-02-28 15:09:07 +0000885void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
886{
887 CHECK_LAYERS(graph, 0, layerIndex);
888
889 auto inputs = GetInputs(graph, layerIndex);
890 CHECK_VALID_SIZE(inputs.size(), 1);
891
892 auto outputs = GetOutputs(graph, layerIndex);
893 CHECK_VALID_SIZE(outputs.size(), 1);
894 auto outputInfo = ToTensorInfo(outputs[0]);
895
ruoyan015c7ab052019-03-04 14:48:02 +0000896 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +0000897
898 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
899 auto serializerDescriptor = serializerLayer->descriptor();
900
901 armnn::BatchNormalizationDescriptor descriptor;
902 descriptor.m_Eps = serializerDescriptor->eps();
903 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
904
905 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
906 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
907 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
908 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
909
910 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
911 mean,
912 variance,
913 beta,
914 gamma,
915 layerName.c_str());
916 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
917
918 RegisterInputSlots(graph, layerIndex, layer);
919 RegisterOutputSlots(graph, layerIndex, layer);
920}
921
Conor Kennedy76277882019-02-26 08:29:54 +0000922void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
923{
924 CHECK_LAYERS(graph, 0, layerIndex);
925 CHECK_LOCATION();
926
927 auto outputs = GetOutputs(graph, layerIndex);
928 CHECK_VALID_SIZE(outputs.size(), 1);
929
930 auto layerName = GetLayerName(graph, layerIndex);
931
932 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
933 auto serializerInput = serializerLayer->input();
934
935 armnn::ConstTensor input = ToConstTensor(serializerInput);
936
937 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
938
939 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
940 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
941
942 RegisterOutputSlots(graph, layerIndex, layer);
943}
944
Derek Lamberti8ddae332019-02-21 16:29:43 +0000945void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +0000946{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000947 CHECK_LAYERS(graph, 0, layerIndex);
948 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +0000949 CHECK_LOCATION();
950 CHECK_VALID_SIZE(inputs.size(), 1);
951
Derek Lamberti8ddae332019-02-21 16:29:43 +0000952 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +0000953 CHECK_VALID_SIZE(outputs.size(), 1);
954
Derek Lamberti8ddae332019-02-21 16:29:43 +0000955 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000956 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +0000957 auto serializerDescriptor = serializerLayer->descriptor();
958
959 armnn::Convolution2dDescriptor descriptor;
960 descriptor.m_PadLeft = serializerDescriptor->padLeft();
961 descriptor.m_PadRight = serializerDescriptor->padRight();
962 descriptor.m_PadTop = serializerDescriptor->padTop();
963 descriptor.m_PadBottom = serializerDescriptor->padBottom();
964 descriptor.m_StrideX = serializerDescriptor->strideX();
965 descriptor.m_StrideY = serializerDescriptor->strideY();;
966 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
967 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
968
969 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
970 armnn::ConstTensor biases;
971
972 if (descriptor.m_BiasEnabled)
973 {
974 biases = ToConstTensor(serializerLayer->biases());
975 }
976 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
977 weights,
978 biases,
979 layerName.c_str());
980 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
981 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
982
Derek Lamberti8ddae332019-02-21 16:29:43 +0000983 RegisterInputSlots(graph, layerIndex, layer);
984 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +0000985}
986
Derek Lamberti8ddae332019-02-21 16:29:43 +0000987void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000988{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000989 CHECK_LAYERS(graph, 0, layerIndex);
990 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000991 CHECK_LOCATION();
992 CHECK_VALID_SIZE(inputs.size(), 1);
993
Derek Lamberti8ddae332019-02-21 16:29:43 +0000994 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000995 CHECK_VALID_SIZE(outputs.size(), 1);
996
Derek Lamberti8ddae332019-02-21 16:29:43 +0000997 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000998 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000999 auto serializerDescriptor = serializerLayer->descriptor();
1000
1001 armnn::DepthwiseConvolution2dDescriptor descriptor;
1002 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1003 descriptor.m_PadRight = serializerDescriptor->padRight();
1004 descriptor.m_PadTop = serializerDescriptor->padTop();
1005 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1006 descriptor.m_StrideX = serializerDescriptor->strideX();
1007 descriptor.m_StrideY = serializerDescriptor->strideY();;
1008 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1009 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1010
1011 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1012 armnn::ConstTensor biases;
1013
1014 if (descriptor.m_BiasEnabled)
1015 {
1016 biases = ToConstTensor(serializerLayer->biases());
1017 }
1018 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1019 weights,
1020 biases,
1021 layerName.c_str());
1022
1023 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1024 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1025
Derek Lamberti8ddae332019-02-21 16:29:43 +00001026 RegisterInputSlots(graph, layerIndex, layer);
1027 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001028}
1029
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001030void Deserializer::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
1031{
1032 CHECK_LAYERS(graph, 0, layerIndex);
1033 auto inputs = GetInputs(graph, layerIndex);
1034 CHECK_LOCATION();
1035 CHECK_VALID_SIZE(inputs.size(), 2);
1036
1037 auto outputs = GetOutputs(graph, layerIndex);
1038 CHECK_VALID_SIZE(outputs.size(), 4);
1039
1040 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1041 auto layerName = GetLayerName(graph, layerIndex);
1042 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1043
1044 armnn::DetectionPostProcessDescriptor descriptor;
1045 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1046 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1047 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1048 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1049 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1050 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1051 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1052 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1053 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1054 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1055 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1056
1057 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1058
1059 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1060 anchors,
1061 layerName.c_str());
1062
1063 for (unsigned int i = 0; i < 4; i++)
1064 {
1065 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1066 }
1067
1068 RegisterInputSlots(graph, layerIndex, layer);
1069 RegisterOutputSlots(graph, layerIndex, layer);
1070}
1071
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001072void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
1073{
1074 CHECK_LAYERS(graph, 0, layerIndex);
1075 auto inputs = GetInputs(graph, layerIndex);
1076 CHECK_LOCATION();
1077 CHECK_VALID_SIZE(inputs.size(), 2);
1078
1079 auto outputs = GetOutputs(graph, layerIndex);
1080 CHECK_VALID_SIZE(outputs.size(), 1);
1081
1082 auto layerName = GetLayerName(graph, layerIndex);
1083 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1084
1085 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1086 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1087
1088 RegisterInputSlots(graph, layerIndex, layer);
1089 RegisterOutputSlots(graph, layerIndex, layer);
1090}
1091
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001092void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
1093{
1094 CHECK_LAYERS(graph, 0, layerIndex);
1095 auto inputs = GetInputs(graph, layerIndex);
1096 CHECK_LOCATION();
1097 CHECK_VALID_SIZE(inputs.size(), 2);
1098
1099 auto outputs = GetOutputs(graph, layerIndex);
1100 CHECK_VALID_SIZE(outputs.size(), 1);
1101
1102 auto layerName = GetLayerName(graph, layerIndex);
1103 IConnectableLayer* layer = m_Network->AddEqualLayer(layerName.c_str());
1104
1105 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1106 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1107
1108 RegisterInputSlots(graph, layerIndex, layer);
1109 RegisterOutputSlots(graph, layerIndex, layer);
1110}
1111
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001112void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
1113{
1114 CHECK_LAYERS(graph, 0, layerIndex);
1115 auto inputs = GetInputs(graph, layerIndex);
1116 CHECK_LOCATION();
1117 CHECK_VALID_SIZE(inputs.size(), 2);
1118
1119 auto outputs = GetOutputs(graph, layerIndex);
1120 CHECK_VALID_SIZE(outputs.size(), 1);
1121
1122 auto layerName = GetLayerName(graph, layerIndex);
1123 IConnectableLayer* layer = m_Network->AddGreaterLayer(layerName.c_str());
1124
1125 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1126 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1127
1128 RegisterInputSlots(graph, layerIndex, layer);
1129 RegisterOutputSlots(graph, layerIndex, layer);
1130}
1131
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001132void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
1133{
1134 CHECK_LAYERS(graph, 0, layerIndex);
1135
1136 auto inputs = GetInputs(graph, layerIndex);
1137 CHECK_VALID_SIZE(inputs.size(), 1);
1138
1139 auto outputs = GetOutputs(graph, layerIndex);
1140 CHECK_VALID_SIZE(outputs.size(), 1);
1141 auto outputInfo = ToTensorInfo(outputs[0]);
1142
1143 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1144 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1145
1146 auto layerName = GetLayerName(graph, layerIndex);
1147 armnn::L2NormalizationDescriptor descriptor;
1148 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1149
1150 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1151 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1152
1153 RegisterInputSlots(graph, layerIndex, layer);
1154 RegisterOutputSlots(graph, layerIndex, layer);
1155}
1156
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001157void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
1158{
1159 CHECK_LAYERS(graph, 0, layerIndex);
1160 auto inputs = GetInputs(graph, layerIndex);
1161 CHECK_LOCATION();
1162 CHECK_VALID_SIZE(inputs.size(), 2);
1163
1164 auto outputs = GetOutputs(graph, layerIndex);
1165 CHECK_VALID_SIZE(outputs.size(), 1);
1166
1167 auto layerName = GetLayerName(graph, layerIndex);
1168 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1169
1170 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1171 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1172
1173 RegisterInputSlots(graph, layerIndex, layer);
1174 RegisterOutputSlots(graph, layerIndex, layer);
1175}
1176
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001177void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
1178{
1179 CHECK_LAYERS(graph, 0, layerIndex);
1180 auto inputs = GetInputs(graph, layerIndex);
1181 CHECK_LOCATION();
1182 CHECK_VALID_SIZE(inputs.size(), 2);
1183
1184 auto outputs = GetOutputs(graph, layerIndex);
1185 CHECK_VALID_SIZE(outputs.size(), 1);
1186
1187 auto layerName = GetLayerName(graph, layerIndex);
1188 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1189
1190 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1191 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1192
1193 RegisterInputSlots(graph, layerIndex, layer);
1194 RegisterOutputSlots(graph, layerIndex, layer);
1195}
1196
Jim Flynnac25a1b2019-02-28 10:40:49 +00001197void Deserializer::ParseMerger(GraphPtr graph, unsigned int layerIndex)
1198{
1199 CHECK_LAYERS(graph, 0, layerIndex);
1200 CHECK_LOCATION();
1201
1202 auto outputs = GetOutputs(graph, layerIndex);
1203 CHECK_VALID_SIZE(outputs.size(), 1);
1204
1205 auto mergerLayer = graph->layers()->Get(layerIndex)->layer_as_MergerLayer();
1206 auto layerName = GetLayerName(graph, layerIndex);
1207 auto mergerDescriptor = mergerLayer->descriptor();
1208 unsigned int numViews = mergerDescriptor->numViews();
1209 unsigned int numDimensions = mergerDescriptor->numDimensions();
1210
1211 // can now check the number of inputs == number of views
1212 auto inputs = GetInputs(graph, layerIndex);
1213 CHECK_VALID_SIZE(inputs.size(), numViews);
1214
1215 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
1216 auto originsPtr = mergerDescriptor->viewOrigins();
1217 for (unsigned int v = 0; v < numViews; ++v)
1218 {
1219 auto originPtr = originsPtr->Get(v);
1220 for (unsigned int d = 0; d < numDimensions; ++d)
1221 {
1222 uint32_t value = originPtr->data()->Get(d);
1223 descriptor.SetViewOriginCoord(v, d, value);
1224 }
1225 }
1226 descriptor.SetConcatAxis(mergerDescriptor->concatAxis());
1227
1228 IConnectableLayer* layer = m_Network->AddMergerLayer(descriptor, layerName.c_str());
1229 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1230 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1231
1232 RegisterInputSlots(graph, layerIndex, layer);
1233 RegisterOutputSlots(graph, layerIndex, layer);
1234}
1235
Derek Lamberti8ddae332019-02-21 16:29:43 +00001236void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001237{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001238 CHECK_LAYERS(graph, 0, layerIndex);
1239 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001240 CHECK_LOCATION();
1241 CHECK_VALID_SIZE(inputs.size(), 2);
1242
Derek Lamberti8ddae332019-02-21 16:29:43 +00001243 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001244 CHECK_VALID_SIZE(outputs.size(), 1);
1245
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001246 auto layerName = GetLayerName(graph, layerIndex);
1247 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001248
1249 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1250 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1251
Derek Lamberti8ddae332019-02-21 16:29:43 +00001252 RegisterInputSlots(graph, layerIndex, layer);
1253 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001254}
1255
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001256void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
1257{
1258 CHECK_LAYERS(graph, 0, layerIndex);
1259 CHECK_LOCATION();
1260
1261 auto inputs = GetInputs(graph, layerIndex);
1262 CHECK_VALID_SIZE(inputs.size(), 1);
1263
1264 auto outputs = GetOutputs(graph, layerIndex);
1265 CHECK_VALID_SIZE(outputs.size(), 1);
1266
1267 auto layerName = GetLayerName(graph, layerIndex);
1268
1269 armnn::IConnectableLayer* layer;
1270
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001271 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001272
1273 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1274 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1275
1276 RegisterInputSlots(graph, layerIndex, layer);
1277 RegisterOutputSlots(graph, layerIndex, layer);
1278}
1279
Derek Lamberti8ddae332019-02-21 16:29:43 +00001280void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001281{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001282 CHECK_LAYERS(graph, 0, layerIndex);
1283 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001284 CHECK_LOCATION();
1285 CHECK_VALID_SIZE(inputs.size(), 1);
1286
Derek Lamberti8ddae332019-02-21 16:29:43 +00001287 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001288 CHECK_VALID_SIZE(outputs.size(), 1);
1289
Derek Lamberti8ddae332019-02-21 16:29:43 +00001290 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001291 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001292 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1293
1294 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1295 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1296 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1297
1298 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1299
1300 armnn::IConnectableLayer* layer;
1301 if (flatBufferDescriptor->biasEnabled())
1302 {
1303 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
1304 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1305 weightsTensor,
1306 biasTensorData,
1307 layerName.c_str());
1308 }
1309 else
1310 {
1311 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1312 weightsTensor,
1313 layerName.c_str());
1314 }
1315
1316 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1317 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1318
Derek Lamberti8ddae332019-02-21 16:29:43 +00001319 RegisterInputSlots(graph, layerIndex, layer);
1320 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001321}
1322
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001323void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
1324{
1325 CHECK_LAYERS(graph, 0, layerIndex);
1326
1327 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1328 CHECK_VALID_SIZE(inputs.size(), 1);
1329
1330 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1331 CHECK_VALID_SIZE(outputs.size(), 1);
1332
1333 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1334 auto flatBufferPadList = flatBufferDescriptor->padList();
1335
1336 if (flatBufferPadList->Length() % 2 != 0)
1337 {
1338 throw ParseException(boost::str(
1339 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1340 }
1341
1342 std::vector<std::pair<unsigned int, unsigned int>> padList;
1343 padList.reserve(flatBufferPadList->Length() / 2);
1344 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1345 {
1346 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1347 }
1348
1349 armnn::PadDescriptor descriptor(padList);
1350
1351 auto layerName = GetLayerName(graph, layerIndex);
1352 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1353
1354 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1355 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1356
1357 RegisterInputSlots(graph, layerIndex, layer);
1358 RegisterOutputSlots(graph, layerIndex, layer);
1359}
1360
Derek Lamberti8ddae332019-02-21 16:29:43 +00001361void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001362{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001363 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001364
1365 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001366 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001367
Derek Lamberti8ddae332019-02-21 16:29:43 +00001368 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001369 CHECK_VALID_SIZE(inputs.size(), 1);
1370
Derek Lamberti8ddae332019-02-21 16:29:43 +00001371 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001372 CHECK_VALID_SIZE(outputs.size(), 1);
1373 auto outputInfo = ToTensorInfo(outputs[0]);
1374
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001375 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001376 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1377
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001378 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001379 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1380
Derek Lamberti8ddae332019-02-21 16:29:43 +00001381 RegisterInputSlots(graph, layerIndex, layer);
1382 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001383}
1384
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001385armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001386 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001387{
1388 armnn::Pooling2dDescriptor desc;
1389
1390 switch (pooling2dDesc->poolType())
1391 {
1392 case PoolingAlgorithm_Average:
1393 {
1394 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001395 break;
1396 }
1397 case PoolingAlgorithm_Max:
1398 {
1399 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001400 break;
1401 }
1402 default:
1403 {
1404 BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
1405 }
1406 }
1407
1408 switch (pooling2dDesc->outputShapeRounding())
1409 {
1410 case OutputShapeRounding_Floor:
1411 {
1412 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1413 break;
1414 }
1415 case OutputShapeRounding_Ceiling:
1416 {
1417 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
1418 break;
1419 }
1420 default:
1421 {
1422 BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
1423 }
1424 }
1425
1426 switch (pooling2dDesc->paddingMethod())
1427 {
1428 case PaddingMethod_Exclude:
1429 {
1430 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1431 break;
1432 }
1433 case PaddingMethod_IgnoreValue:
1434 {
1435 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
1436 break;
1437 }
1438 default:
1439 {
1440 BOOST_ASSERT_MSG(false, "Unsupported padding method");
1441 }
1442 }
1443
1444 switch (pooling2dDesc->dataLayout())
1445 {
1446 case DataLayout_NCHW:
1447 {
1448 desc.m_DataLayout = armnn::DataLayout::NCHW;
1449 break;
1450 }
1451 case DataLayout_NHWC:
1452 {
1453 desc.m_DataLayout = armnn::DataLayout::NHWC;
1454 break;
1455 }
1456 default:
1457 {
1458 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1459 }
1460 }
1461
1462 desc.m_PadRight = pooling2dDesc->padRight();
1463 desc.m_PadLeft = pooling2dDesc->padLeft();
1464 desc.m_PadBottom = pooling2dDesc->padBottom();
1465 desc.m_PadTop = pooling2dDesc->padTop();
1466 desc.m_StrideX = pooling2dDesc->strideX();
1467 desc.m_StrideY = pooling2dDesc->strideY();
1468 desc.m_PoolWidth = pooling2dDesc->poolWidth();
1469 desc.m_PoolHeight = pooling2dDesc->poolHeight();
1470
1471 return desc;
1472}
1473
Derek Lamberti8ddae332019-02-21 16:29:43 +00001474void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001475{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001476 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001477
Derek Lamberti8ddae332019-02-21 16:29:43 +00001478 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001479 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001480 CHECK_VALID_SIZE(inputs.size(), 1);
1481
Derek Lamberti8ddae332019-02-21 16:29:43 +00001482 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001483 CHECK_VALID_SIZE(outputs.size(), 1);
1484 auto outputInfo = ToTensorInfo(outputs[0]);
1485
1486 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001487 auto layerName = GetLayerName(graph, layerIndex);
1488 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001489 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1490
Derek Lamberti8ddae332019-02-21 16:29:43 +00001491 RegisterInputSlots(graph, layerIndex, layer);
1492 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001493}
1494
armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                     const std::vector<uint32_t>& targetDimsIn)
{
    // Computes the concrete output TensorInfo for a Reshape layer, resolving at
    // most one "-1" wildcard dimension (stored as 0xFFFFFFFF in the unsigned dims).
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    // Comparing uint32_t elements against -1 relies on the usual arithmetic
    // conversions: -1 converts to 0xFFFFFFFF, which is the wildcard marker.
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        // Only a single wildcard dimension is permitted.
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(boost::str(
                boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
        }

        // Multiply all target dims with an initial value of -1: the wildcard
        // element itself converts back to -1 as int32_t, so the two -1 factors
        // cancel and the result is the product of the explicit dimensions only.
        auto targetNumElements =
            boost::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        // The wildcard dimension absorbs whatever remains of the element count.
        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    // Preserve data type / quantization info from the input; only the shape changes.
    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}
1524
void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    // Deserialize a Reshape layer, resolving any wildcard in the target shape.
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    // NOTE(review): unlike the other parsers, the input count is not validated
    // here (no CHECK_VALID_SIZE) before inputs[0] is used — confirm intentional.

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    // Target shape as serialized in the descriptor (may contain a -1 wildcard).
    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    // Resolve the wildcard dimension against the input's element count.
    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    // NOTE(review): the shape consistency check only fires when there is more
    // than one input; presumably single-input reshapes trust the descriptor —
    // confirm this guard is intended and not a leftover.
    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1567
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001568void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
1569{
1570 CHECK_LAYERS(graph, 0, layerIndex);
1571
1572 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1573 CHECK_VALID_SIZE(inputs.size(), 1);
1574
1575 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1576 CHECK_VALID_SIZE(outputs.size(), 1);
1577
1578 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
1579
1580 armnn::ResizeBilinearDescriptor descriptor;
1581 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
1582 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
1583 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1584
1585 auto layerName = GetLayerName(graph, layerIndex);
1586 IConnectableLayer* layer = m_Network->AddResizeBilinearLayer(descriptor, layerName.c_str());
1587
1588 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1589 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1590
1591 RegisterInputSlots(graph, layerIndex, layer);
1592 RegisterOutputSlots(graph, layerIndex, layer);
1593}
1594
Derek Lamberti8ddae332019-02-21 16:29:43 +00001595void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001596{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001597 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001598
Derek Lamberti8ddae332019-02-21 16:29:43 +00001599 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001600 CHECK_VALID_SIZE(inputs.size(), 1);
1601
Derek Lamberti8ddae332019-02-21 16:29:43 +00001602 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001603 CHECK_VALID_SIZE(outputs.size(), 1);
1604
1605 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00001606 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001607 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001608
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001609 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
1610
1611 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1612 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1613
Derek Lamberti8ddae332019-02-21 16:29:43 +00001614 RegisterInputSlots(graph, layerIndex, layer);
1615 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001616}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001617
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00001618void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
1619{
1620 CHECK_LAYERS(graph, 0, layerIndex);
1621
1622 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1623 CHECK_VALID_SIZE(inputs.size(), 1);
1624
1625 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1626 CHECK_VALID_SIZE(outputs.size(), 1);
1627
1628 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
1629 auto flatBufferPadList = flatBufferDescriptor->padList();
1630 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1631
1632 if (flatBufferPadList->Length() % 2 != 0)
1633 {
1634 throw ParseException(boost::str(
1635 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1636 }
1637
1638 std::vector<std::pair<unsigned int, unsigned int>> padList;
1639 padList.reserve(flatBufferPadList->Length() / 2);
1640 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1641 {
1642 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1643 }
1644
1645 armnn::SpaceToBatchNdDescriptor descriptor;
1646 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1647 descriptor.m_BlockShape =
1648 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1649 descriptor.m_PadList = padList;
1650
1651 auto layerName = GetLayerName(graph, layerIndex);
1652 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
1653
1654 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1655 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1656
1657 RegisterInputSlots(graph, layerIndex, layer);
1658 RegisterOutputSlots(graph, layerIndex, layer);
1659}
1660
Nina Drozd57728782019-02-27 10:53:27 +00001661armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
1662 Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
1663 unsigned int layerIndex)
1664{
1665 armnn::NormalizationDescriptor desc;
1666
1667 switch (normalizationDescriptor->normChannelType())
1668 {
1669 case NormalizationAlgorithmChannel_Across:
1670 {
1671 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
1672 break;
1673 }
1674 case NormalizationAlgorithmChannel_Within:
1675 {
1676 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
1677 break;
1678 }
1679 default:
1680 {
1681 BOOST_ASSERT_MSG(false, "Unsupported normalization channel type");
1682 }
1683 }
1684
1685 switch (normalizationDescriptor->normMethodType())
1686 {
1687 case NormalizationAlgorithmMethod_LocalBrightness:
1688 {
1689 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
1690 break;
1691 }
1692 case NormalizationAlgorithmMethod_LocalContrast:
1693 {
1694 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
1695 break;
1696 }
1697 default:
1698 {
1699 BOOST_ASSERT_MSG(false, "Unsupported normalization method type");
1700 }
1701 }
1702
1703 switch (normalizationDescriptor->dataLayout())
1704 {
1705 case DataLayout_NCHW:
1706 {
1707 desc.m_DataLayout = armnn::DataLayout::NCHW;
1708 break;
1709 }
1710 case DataLayout_NHWC:
1711 {
1712 desc.m_DataLayout = armnn::DataLayout::NHWC;
1713 break;
1714 }
1715 default:
1716 {
1717 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1718 }
1719 }
1720
1721 desc.m_Alpha = normalizationDescriptor->alpha();
1722 desc.m_Beta = normalizationDescriptor->beta();
1723 desc.m_K = normalizationDescriptor->k();
1724 desc.m_NormSize = normalizationDescriptor->normSize();
1725
1726 return desc;
1727}
1728
1729void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
1730{
1731 CHECK_LAYERS(graph, 0, layerIndex);
1732
1733 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
1734
1735 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1736 CHECK_VALID_SIZE(inputs.size(), 1);
1737
1738 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1739 CHECK_VALID_SIZE(outputs.size(), 1);
1740
1741 auto outputInfo = ToTensorInfo(outputs[0]);
1742
1743 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
1744 auto layerName = GetLayerName(graph, layerIndex);
1745
1746 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
1747 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1748
1749 RegisterInputSlots(graph, layerIndex, layer);
1750 RegisterOutputSlots(graph, layerIndex, layer);
1751}
1752
Sadik Armagan8b42a382019-03-01 14:24:49 +00001753void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
1754{
1755 CHECK_LAYERS(graph, 0, layerIndex);
1756 auto inputs = GetInputs(graph, layerIndex);
1757 CHECK_LOCATION();
1758 CHECK_VALID_SIZE(inputs.size(), 1);
1759
1760 auto outputs = GetOutputs(graph, layerIndex);
1761 CHECK_VALID_SIZE(outputs.size(), 1);
1762
1763 auto layerName = GetLayerName(graph, layerIndex);
1764 IConnectableLayer* layer = m_Network->AddRsqrtLayer(layerName.c_str());
1765
1766 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1767 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1768
1769 RegisterInputSlots(graph, layerIndex, layer);
1770 RegisterOutputSlots(graph, layerIndex, layer);
1771}
1772
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00001773void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
1774{
1775 CHECK_LAYERS(graph, 0, layerIndex);
1776
1777 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1778 CHECK_VALID_SIZE(inputs.size(), 1);
1779
1780 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1781 CHECK_VALID_SIZE(outputs.size(), 1);
1782
1783 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
1784
1785 auto flatBufferBegin = flatBufferDescriptor->begin();
1786 auto flatBufferEnd = flatBufferDescriptor->end();
1787 auto flatBufferStride = flatBufferDescriptor->stride();
1788
1789 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
1790 flatBufferBegin->Length() == flatBufferStride->Length()))
1791 {
1792 throw ParseException(boost::str(
1793 boost::format("The size of the begin, end, and stride must be equal %1%") % CHECK_LOCATION().AsString()));
1794 }
1795
1796 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
1797 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
1798 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
1799
1800 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
1801 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
1802 descriptor.m_EndMask = flatBufferDescriptor->endMask();
1803 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
1804 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
1805 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
1806 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1807
1808 auto layerName = GetLayerName(graph, layerIndex);
1809 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
1810
1811 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1812 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1813
1814 RegisterInputSlots(graph, layerIndex, layer);
1815 RegisterOutputSlots(graph, layerIndex, layer);
1816}
1817
Conor Kennedyda1f9752019-03-01 14:37:12 +00001818void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
1819{
1820 CHECK_LAYERS(graph, 0, layerIndex);
1821 auto inputs = GetInputs(graph, layerIndex);
1822 CHECK_LOCATION();
1823 CHECK_VALID_SIZE(inputs.size(), 2);
1824
1825 auto outputs = GetOutputs(graph, layerIndex);
1826 CHECK_VALID_SIZE(outputs.size(), 1);
1827
1828 auto layerName = GetLayerName(graph, layerIndex);
1829 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
1830
1831 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1832 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1833
1834 RegisterInputSlots(graph, layerIndex, layer);
1835 RegisterOutputSlots(graph, layerIndex, layer);
1836}
1837
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001838void Deserializer::ParseGather(GraphPtr graph, unsigned int layerIndex)
1839{
1840 CHECK_LAYERS(graph, 0, layerIndex);
1841
1842 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1843 CHECK_VALID_SIZE(inputs.size(), 2);
1844
1845 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1846 CHECK_VALID_SIZE(outputs.size(), 1);
1847
1848 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001849 IConnectableLayer* layer = m_Network->AddGatherLayer(layerName.c_str());
1850
1851 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001852 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1853
1854 RegisterInputSlots(graph, layerIndex, layer);
1855 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001856}
1857
Sadik Armaganac97c8c2019-03-04 17:44:21 +00001858void Deserializer::ParseMean(GraphPtr graph, unsigned int layerIndex)
1859{
1860 CHECK_LAYERS(graph, 0, layerIndex);
1861
1862 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1863 CHECK_VALID_SIZE(inputs.size(), 1);
1864
1865 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1866 CHECK_VALID_SIZE(outputs.size(), 1);
1867
1868 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
1869 auto flatBufferAxis = flatBufferDescriptor->axis();
1870 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
1871
1872 armnn::MeanDescriptor descriptor;
1873 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
1874 descriptor.m_KeepDims = flatBufferKeepDims;
1875
1876 auto layerName = GetLayerName(graph, layerIndex);
1877 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
1878
1879 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1880 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1881
1882 RegisterInputSlots(graph, layerIndex, layer);
1883 RegisterOutputSlots(graph, layerIndex, layer);
1884}
1885
Jim Flynn18ce3382019-03-08 11:08:30 +00001886void Deserializer::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
1887{
1888 CHECK_LAYERS(graph, 0, layerIndex);
1889
1890 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1891 CHECK_VALID_SIZE(inputs.size(), 1);
1892
1893 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1894
1895 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
1896 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
1897 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
1898 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
1899 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
1900 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
1901
1902 // Check numViews and numDimensions corresponds to the ones already serialized ...
1903 // numViews == flatBufferViewSizes.size();
1904 // foreach: numDimensions == flatBufferViewSizes[x].size();
1905
1906 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
1907 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
1908 {
1909 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
1910 {
1911 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
1912 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
1913 }
1914 }
1915
1916 auto layerName = GetLayerName(graph, layerIndex);
1917 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
1918
1919 // I could have as many outputs as views ...
1920 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
1921 {
1922 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
1923 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
1924 }
1925
1926 RegisterInputSlots(graph, layerIndex, layer);
1927 RegisterOutputSlots(graph, layerIndex, layer);
1928}
1929
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001930} // namespace armnnDeserializer