blob: 90ca6d3f9ac931ab6a1ed7f34cdc9d7ad2f0f1f6 [file] [log] [blame]
//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
8#include <armnn/ArmNN.hpp>
9#include <armnn/Exceptions.hpp>
10
11#include <ParserHelper.hpp>
12#include <Permute.hpp>
13#include <VerificationHelpers.hpp>
14
15#include <boost/filesystem.hpp>
16#include <boost/format.hpp>
17#include <boost/core/ignore_unused.hpp>
18#include <boost/assert.hpp>
19#include <boost/format.hpp>
20#include <boost/log/trivial.hpp>
Aron Virginas-Tard4f0fea2019-04-09 14:08:06 +010021#include <boost/format.hpp>
22#include <boost/numeric/conversion/cast.hpp>
Jim Flynn18ce3382019-03-08 11:08:30 +000023#include <boost/polymorphic_cast.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000024
Kevin May43a799c2019-02-08 16:31:42 +000025#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000026#include <algorithm>
27#include <limits>
28#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000029
30using armnn::ParseException;
31using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000032using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000033
Derek Lamberti0028d1b2019-02-20 13:57:42 +000034namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000035{
Kevin May43a799c2019-02-08 16:31:42 +000036
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000037namespace
38{
Kevin May43a799c2019-02-08 16:31:42 +000039
40const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
41
Derek Lamberti0028d1b2019-02-20 13:57:42 +000042 void CheckGraph(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000043 unsigned int layersIndex,
44 const CheckLocation& location)
45{
46 if (graph->layers() == nullptr)
47 {
48 throw ParseException(
49 boost::str(
50 boost::format("%1% was called with invalid (null) graph. "
51 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
52 "layers:%2% at %3%") %
53 location.m_Function %
54 layersIndex %
55 location.FileLine()));
56 }
57 else if (layersIndex >= graph->layers()->size())
58 {
59 throw ParseException(
60 boost::str(
61 boost::format("%1% was called with an invalid layers index. "
62 "layers:%2% at %3%") %
63 location.m_Function %
64 layersIndex %
65 location.FileLine()));
66 }
67}
68
// Validates that the graph has been unpacked, that layersIndex is a valid
// index, and that layerIndex addresses an existing layer. A layerIndex equal
// to VIRTUAL_LAYER_ID (uint32 max sentinel) is always accepted by the third
// check. Throws ParseException, annotated with the caller's location,
// on failure.
void CheckLayers(const Deserializer::GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    // NOTE(review): graph->layers() returns a pointer, so layers()[layersIndex]
    // is pointer arithmetic, not element access; it denotes the layers table
    // only when layersIndex == 0, which is what every call site in this file
    // passes. Confirm before calling with a non-zero layersIndex.
    else if (layerIndex >= graph->layers()[layersIndex].size()
        && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layer index. "
                              "layers:%2% layer:%3% at %4%") %
                location.m_Function %
                layersIndex %
                layerIndex %
                location.FileLine()));
    }
}
108
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000109void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000110 const CheckLocation& location)
111{
112 if (rawPtr == nullptr)
113 {
114 throw ParseException(
115 boost::str(
116 boost::format("%1% was called with a null tensor pointer. "
117 "at %2%") %
118 location.m_Function %
119 location.FileLine()));
120
121 }
122}
123
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000124void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000125 const CheckLocation& location)
126{
127 if (rawPtr == nullptr)
128 {
129 throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
130 location.m_Function %
131 location.FileLine()));
132 }
133}
134
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000135void CheckConstTensorSize(const unsigned int constTensorSize,
136 const unsigned int tensorSize,
137 const CheckLocation& location)
138{
139 if (constTensorSize != tensorSize)
140 {
141 throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
142 location.m_Function %
143 location.FileLine()));
144 }
145}
146
Kevin May43a799c2019-02-08 16:31:42 +0000147#define CHECK_TENSOR_PTR(TENSOR_PTR) \
148 CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())
149
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000150#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
151 CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())
152
Mike Kellya0766c32019-02-19 17:22:07 +0000153#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
154 CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())
155
Kevin May43a799c2019-02-08 16:31:42 +0000156#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
157 CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())
158
159#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
160 CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
161}
162
Saoirse Stewart263829c2019-02-19 15:54:14 +0000163bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
164{
165 const unsigned int actualSize = actual.GetNumDimensions();
166 if (actualSize != expected.size())
167 {
168 return false;
169 }
170
171 for (unsigned int i = 0u; i < actualSize; i++)
172 {
173 if (actual[i] != static_cast<unsigned int>(expected[i]))
174 {
175 return false;
176 }
177 }
178
179 return true;
180}
181
// Builds the layer-type -> member-function dispatch table. Every slot is
// first filled with ParseUnsupportedLayer (which throws), then each layer
// type the deserializer understands is overwritten with its real handler,
// so an unregistered layer type fails loudly at parse time.
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &Deserializer::ParseBatchNormalization;
    m_ParserFunctions[Layer_ConcatLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer] = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &Deserializer::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &Deserializer::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_EqualLayer] = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_FloorLayer] = &Deserializer::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &Deserializer::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer] = &Deserializer::ParseGreater;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &Deserializer::ParseL2Normalization;
    m_ParserFunctions[Layer_LstmLayer] = &Deserializer::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &Deserializer::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer] = &Deserializer::ParseMerge;
    // Merger is the legacy name for Concat; both map to the same handler.
    m_ParserFunctions[Layer_MergerLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer] = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &Deserializer::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &Deserializer::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_PreluLayer] = &Deserializer::ParsePrelu;
    m_ParserFunctions[Layer_QuantizeLayer] = &Deserializer::ParseQuantize;
    m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
    m_ParserFunctions[Layer_RsqrtLayer] = &Deserializer::ParseRsqrt;
    m_ParserFunctions[Layer_SoftmaxLayer] = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &Deserializer::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer] = &Deserializer::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer] = &Deserializer::ParseSplitter;
    m_ParserFunctions[Layer_StridedSliceLayer] = &Deserializer::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &Deserializer::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer] = &Deserializer::ParseSwitch;
}
229
// Unwraps the flatbuffers AnyLayer union at layerIndex and returns the
// common LayerBase record shared by every concrete layer type.
// Input/Output layers carry an extra bindable wrapper, hence the double
// ->base()->base(). Throws ParseException for Layer_NONE or any
// unrecognised union tag.
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            // Input layer wraps a bindable base: unwrap twice.
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            // Output layer wraps a bindable base: unwrap twice.
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                boost::format("Layer must have a type %1%") %
                Layer::Layer_NONE));
    }
}
327
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000328std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
329{
330 auto layer = GetBaseLayer(graph, index);
331 assert(layer);
332 return layer->layerName()->str();
333}
334
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000335int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000336{
337 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
338
339 if (layerType == Layer::Layer_InputLayer)
340 {
341 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
342 }
343 else if ( layerType == Layer::Layer_OutputLayer )
344 {
345 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
346 }
347 return 0;
348}
349
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000350armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000351{
352 switch (dataLayout)
353 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000354 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000355 return armnn::DataLayout::NHWC;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000356 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000357 default:
358 return armnn::DataLayout::NCHW;
359 }
360}
361
Mike Kellyaf484012019-02-20 16:53:11 +0000362armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
363{
364 switch (function)
365 {
366 case armnnSerializer::ActivationFunction_Sigmoid:
367 return armnn::ActivationFunction::Sigmoid;
368 case armnnSerializer::ActivationFunction_TanH:
369 return armnn::ActivationFunction::TanH;
370 case armnnSerializer::ActivationFunction_Linear:
371 return armnn::ActivationFunction::Linear;
372 case armnnSerializer::ActivationFunction_ReLu:
373 return armnn::ActivationFunction::ReLu;
374 case armnnSerializer::ActivationFunction_BoundedReLu:
375 return armnn::ActivationFunction::BoundedReLu;
376 case armnnSerializer::ActivationFunction_LeakyReLu:
377 return armnn::ActivationFunction::LeakyReLu;
378 case armnnSerializer::ActivationFunction_Abs:
379 return armnn::ActivationFunction::Abs;
380 case armnnSerializer::ActivationFunction_Sqrt:
381 return armnn::ActivationFunction::Sqrt;
382 case armnnSerializer::ActivationFunction_Square:
383 return armnn::ActivationFunction::Square;
384 default:
385 return armnn::ActivationFunction::Sigmoid;
386 }
387}
388
// Converts a serialized TensorInfo record into an armnn::TensorInfo:
// data type, dimensions, and per-tensor quantization scale/offset.
// Throws ParseException for a null pointer (via CHECK_TENSOR_PTR) or an
// unrecognised serialized data type.
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    // Map the serialized data type onto armnn's enum; anything unknown
    // is a hard parse failure rather than a silent default.
    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
            type = armnn::DataType::QuantisedAsymm8;
            break;
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QuantisedSymm16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    // Copy the flatbuffer dimension list into a contiguous buffer that
    // the TensorInfo constructor can consume.
    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}
440
// Builds an armnn::ConstTensor over the serialized payload, after checking
// that the payload's element count matches the tensor info.
// NOTE(review): the ConstTensor is constructed directly on the flatbuffer's
// data pointer — presumably it does not copy, so the serialized buffer must
// outlive the returned tensor; confirm against ConstTensor's documentation.
// Throws ParseException on null input or an unrecognised payload type.
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    // Dispatch on the union tag of the serialized payload; each arm
    // validates the element count before wrapping the raw data.
    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}
483
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000484Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000485 unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000486{
487 CHECK_LAYERS(graphPtr, 0, layerIndex);
488 auto layer = GetBaseLayer(graphPtr, layerIndex);
489 const auto& numInputs = layer->inputSlots()->size();
490
491 TensorRawPtrVector result(numInputs);
492
493 for (unsigned int i=0; i<numInputs; ++i)
494 {
495 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
496 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
497 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
498 }
499 return result;
500}
501
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000502Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000503 unsigned int layerIndex)
504{
505 CHECK_LAYERS(graphPtr, 0, layerIndex);
506 auto layer = GetBaseLayer(graphPtr, layerIndex);
507 const auto& numOutputs = layer->outputSlots()->size();
508
509 TensorRawPtrVector result(numOutputs);
510
511 for (unsigned int i=0; i<numOutputs; ++i)
512 {
513 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
514 }
515 return result;
516}
517
Derek Lamberti8ddae332019-02-21 16:29:43 +0000518void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000519{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000520 CHECK_LAYERS(graph, 0, layerIndex);
521 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Kevin May43a799c2019-02-08 16:31:42 +0000522 throw ParseException(
523 boost::str(
524 boost::format("Layer not supported. "
525 "layerIndex: %1% "
526 "layerName: %2% / %3%") %
527 layerIndex %
528 layerName %
529 CHECK_LOCATION().AsString()));
530}
531
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000532void Deserializer::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000533{
534 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000535 m_InputBindings.clear();
536 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000537}
538
// Factory: heap-allocates a concrete Deserializer behind the IDeserializer
// interface. The caller owns the result and must release it via
// IDeserializer::Destroy.
IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}
543
// Factory: wraps CreateRaw() in a smart pointer whose deleter is
// IDeserializer::Destroy, so ownership is handled by RAII.
IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}
548
// Deleter counterpart to CreateRaw; deleting a null pointer is a no-op.
void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}
553
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000554INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000555{
556 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000557 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
558 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000559}
560
// Reads the whole stream into a memory buffer, then deserializes it.
armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    // Extra parentheses around the first iterator argument prevent the
    // "most vexing parse" (this being read as a function declaration).
    std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    // NOTE(review): 'graph' appears to be a flatbuffers view into 'content',
    // so it must be fully consumed before 'content' goes out of scope —
    // CreateNetworkFromGraph is called while the buffer is still alive.
    GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
    return CreateNetworkFromGraph(graph);
}
568
// Validates the raw buffer and returns the flatbuffers root accessor.
// The returned GraphPtr points into 'binaryContent' (flatbuffers does not
// copy), so the buffer must outlive it.
// Throws InvalidArgumentException for a null buffer, ParseException when
// the buffer fails flatbuffers verification.
Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
                                                  CHECK_LOCATION().AsString()));
    }
    // Structural verification guards against malformed/truncated input
    // before any accessor dereferences the buffer.
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(
            boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
                                     "flatbuffers format. size:%1% %2%") %
                       len %
                       CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}
587
// Walks the serialized graph and builds the armnn INetwork in three phases:
//   1. dispatch each non-Input/Output layer to its registered parser,
//   2. create the Input/Output layers and their bindings,
//   3. wire up the output-slot -> input-slot connections recorded by the
//      parsers in m_GraphConnections.
// Ownership of the built network is transferred to the caller.
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        // Input/Output layers are handled separately by SetupInput/OutputLayers.
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            // An output slot may legitimately have no consumers; only connect
            // when at least one input slot was registered against it.
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    // std::move is required here: m_Network is a data member, so NRVO does
    // not apply; this also leaves m_Network empty for the next parse.
    return std::move(m_Network);
}
628
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000629BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000630 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000631{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000632 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000633 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000634 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000635 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000636 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000637 }
638 }
639 throw ParseException(
640 boost::str(
641 boost::format("No input binding found for layer:%1% / %2%") %
642 name %
643 CHECK_LOCATION().AsString()));
644}
645
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000646BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000647 const std::string& name) const
648{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000649 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000650 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000651 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000652 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000653 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000654 }
655 }
656 throw ParseException(
657 boost::str(
658 boost::format("No output binding found for layer:%1% / %2%") %
659 name %
660 CHECK_LOCATION().AsString()));
661}
662
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100663unsigned int Deserializer::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
664{
665 for (unsigned int i = 0; i < graph->layers()->size(); i++)
666 {
667 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
668 if (layer->index() == targetIndex)
669 {
670 return i;
671 }
672 }
673 throw ParseException("Layer with given index not found");
674}
675
Derek Lamberti8ddae332019-02-21 16:29:43 +0000676void Deserializer::SetupInputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000677{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000678 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100679 const unsigned int numInputs = graph->inputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000680 m_InputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100681 m_InputBindings.reserve(numInputs);
682
683 for (unsigned int i = 0; i < numInputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000684 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100685 const unsigned int inputId = graph->inputIds()->Get(i);
686 const unsigned int inputLayerIndex = GetLayerIndexInVector(graph, inputId);
687 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000688
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100689 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
690 LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
691 BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Kevin May43a799c2019-02-08 16:31:42 +0000692
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100693 IConnectableLayer* inputLayer =
694 m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000695
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100696 const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
697 inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
698 RegisterOutputSlots(graph, inputLayerIndex, inputLayer);
699
Derek Lamberti8ddae332019-02-21 16:29:43 +0000700 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100701 m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000702 }
703}
704
Derek Lamberti8ddae332019-02-21 16:29:43 +0000705void Deserializer::SetupOutputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000706{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000707 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100708 const unsigned int numOutputs = graph->outputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000709 m_OutputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100710 m_OutputBindings.reserve(numOutputs);
711
712 for (unsigned int i = 0; i < numOutputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000713 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100714 const unsigned int outputId = graph->outputIds()->Get(i);
715 const unsigned int outputLayerIndex = GetLayerIndexInVector(graph, outputId);
716 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000717
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100718 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
719 LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
720 BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Derek Lamberti8ddae332019-02-21 16:29:43 +0000721
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100722 IConnectableLayer* outputLayer =
723 m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000724
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100725 RegisterInputSlots(graph, outputLayerIndex, outputLayer);
726
727 unsigned int sourceLayerIndex =
728 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
729 LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
730 const armnn::TensorInfo& tensorInfo = ToTensorInfo(sourceBaseLayer->outputSlots()->Get(0)->tensorInfo());
731
Derek Lamberti8ddae332019-02-21 16:29:43 +0000732 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100733 m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000734 }
735}
736
Derek Lamberti8ddae332019-02-21 16:29:43 +0000737void Deserializer::RegisterOutputSlots(GraphPtr graph,
738 uint32_t layerIndex,
739 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000740{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000741 CHECK_LAYERS(graph, 0, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000742 BOOST_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100743 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
744 if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
Kevin May43a799c2019-02-08 16:31:42 +0000745 {
746 throw ParseException(
747 boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
748 " for layer index: %3% %4%") %
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100749 baseLayer->outputSlots()->size() %
Kevin May43a799c2019-02-08 16:31:42 +0000750 layer->GetNumOutputSlots() %
751 layerIndex %
752 CHECK_LOCATION().AsString()));
753 }
754
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100755 for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +0000756 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100757 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
758 armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
759 // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
760 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
Kevin May43a799c2019-02-08 16:31:42 +0000761 }
762}
763
Derek Lamberti8ddae332019-02-21 16:29:43 +0000764void Deserializer::RegisterInputSlots(GraphPtr graph,
765 uint32_t layerIndex,
766 armnn::IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000767{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000768 CHECK_LAYERS(graph, 0, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000769 BOOST_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100770 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
771 if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
Kevin May43a799c2019-02-08 16:31:42 +0000772 {
773 throw ParseException(
774 boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
775 " for layer index:%3% %4%") %
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100776 baseLayer->inputSlots()->size() %
Kevin May43a799c2019-02-08 16:31:42 +0000777 layer->GetNumInputSlots() %
778 layerIndex %
779 CHECK_LOCATION().AsString()));
780 }
781
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100782 for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +0000783 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100784 auto fbInputSlot = baseLayer->inputSlots()->Get(i);
785 auto fbConnection = fbInputSlot->connection();
786 armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
787 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
Kevin May43a799c2019-02-08 16:31:42 +0000788 }
789}
790
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000791void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
792 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100793 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +0000794{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100795 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +0000796 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100797 m_GraphConnections[sourceLayerIndex] = Connections();
798 }
799
800 Connections& connections = m_GraphConnections[sourceLayerIndex];
801 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
802 {
803 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +0000804 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000805 else
806 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100807 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000808 }
809}
Kevin May43a799c2019-02-08 16:31:42 +0000810
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000811void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100812 uint32_t outputSlotIndex,
813 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000814{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100815 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
816 {
817 m_GraphConnections[sourceLayerIndex] = Connections();
818 }
819
820 Connections& connections = m_GraphConnections[sourceLayerIndex];
821 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
822 {
823 throw ParseException("Same output slot index processed twice");
824 }
825
826 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +0000827}
828
Derek Lamberti8ddae332019-02-21 16:29:43 +0000829void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +0000830{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000831 CHECK_LAYERS(graph, 0, layerIndex);
832 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000833 CHECK_LOCATION();
834 CHECK_VALID_SIZE(inputs.size(), 1);
835
Derek Lamberti8ddae332019-02-21 16:29:43 +0000836 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000837 CHECK_VALID_SIZE(outputs.size(), 1);
838
Derek Lamberti8ddae332019-02-21 16:29:43 +0000839 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000840 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000841 auto serializerDescriptor = serializerLayer->descriptor();
842
843 armnn::ActivationDescriptor descriptor;
844 descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
845 descriptor.m_A = serializerDescriptor->a();
846 descriptor.m_B = serializerDescriptor->b();
847
848 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
849 layerName.c_str());
850 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
851 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
852
Derek Lamberti8ddae332019-02-21 16:29:43 +0000853 RegisterInputSlots(graph, layerIndex, layer);
854 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +0000855}
856
Derek Lamberti8ddae332019-02-21 16:29:43 +0000857void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000858{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000859 CHECK_LAYERS(graph, 0, layerIndex);
860 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000861 CHECK_LOCATION();
862 CHECK_VALID_SIZE(inputs.size(), 2);
863
Derek Lamberti8ddae332019-02-21 16:29:43 +0000864 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000865 CHECK_VALID_SIZE(outputs.size(), 1);
866
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000867 auto layerName = GetLayerName(graph, layerIndex);
868 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +0000869
870 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
871 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
872
Derek Lamberti8ddae332019-02-21 16:29:43 +0000873 RegisterInputSlots(graph, layerIndex, layer);
874 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +0000875}
876
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000877void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
878{
879 CHECK_LAYERS(graph, 0, layerIndex);
880
881 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
882 CHECK_VALID_SIZE(inputs.size(), 1);
883
884 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
885 CHECK_VALID_SIZE(outputs.size(), 1);
886
887 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
888 auto flatBufferCrops = flatBufferDescriptor->crops();
889 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
890
891 if (flatBufferCrops->Length() % 2 != 0)
892 {
893 throw ParseException(boost::str(
894 boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
895 }
896
897 std::vector<std::pair<unsigned int, unsigned int>> crops;
898 crops.reserve(flatBufferCrops->Length() / 2);
899 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
900 {
901 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
902 }
903
904 armnn::BatchToSpaceNdDescriptor descriptor;
905 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
906 descriptor.m_BlockShape =
907 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
908 descriptor.m_Crops = crops;
909
910 auto layerName = GetLayerName(graph, layerIndex);
911 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
912
913 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
914 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
915
916 RegisterInputSlots(graph, layerIndex, layer);
917 RegisterOutputSlots(graph, layerIndex, layer);
918}
919
ruoyan018e7fa232019-02-28 15:09:07 +0000920void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
921{
922 CHECK_LAYERS(graph, 0, layerIndex);
923
924 auto inputs = GetInputs(graph, layerIndex);
925 CHECK_VALID_SIZE(inputs.size(), 1);
926
927 auto outputs = GetOutputs(graph, layerIndex);
928 CHECK_VALID_SIZE(outputs.size(), 1);
929 auto outputInfo = ToTensorInfo(outputs[0]);
930
ruoyan015c7ab052019-03-04 14:48:02 +0000931 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +0000932
933 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
934 auto serializerDescriptor = serializerLayer->descriptor();
935
936 armnn::BatchNormalizationDescriptor descriptor;
937 descriptor.m_Eps = serializerDescriptor->eps();
938 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
939
940 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
941 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
942 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
943 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
944
945 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
946 mean,
947 variance,
948 beta,
949 gamma,
950 layerName.c_str());
951 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
952
953 RegisterInputSlots(graph, layerIndex, layer);
954 RegisterOutputSlots(graph, layerIndex, layer);
955}
956
Conor Kennedy76277882019-02-26 08:29:54 +0000957void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
958{
959 CHECK_LAYERS(graph, 0, layerIndex);
960 CHECK_LOCATION();
961
962 auto outputs = GetOutputs(graph, layerIndex);
963 CHECK_VALID_SIZE(outputs.size(), 1);
964
965 auto layerName = GetLayerName(graph, layerIndex);
966
967 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
968 auto serializerInput = serializerLayer->input();
969
970 armnn::ConstTensor input = ToConstTensor(serializerInput);
971
972 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
973
974 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
975 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
976
977 RegisterOutputSlots(graph, layerIndex, layer);
978}
979
Derek Lamberti8ddae332019-02-21 16:29:43 +0000980void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +0000981{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000982 CHECK_LAYERS(graph, 0, layerIndex);
983 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +0000984 CHECK_LOCATION();
985 CHECK_VALID_SIZE(inputs.size(), 1);
986
Derek Lamberti8ddae332019-02-21 16:29:43 +0000987 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +0000988 CHECK_VALID_SIZE(outputs.size(), 1);
989
Derek Lamberti8ddae332019-02-21 16:29:43 +0000990 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000991 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +0000992 auto serializerDescriptor = serializerLayer->descriptor();
993
994 armnn::Convolution2dDescriptor descriptor;
995 descriptor.m_PadLeft = serializerDescriptor->padLeft();
996 descriptor.m_PadRight = serializerDescriptor->padRight();
997 descriptor.m_PadTop = serializerDescriptor->padTop();
998 descriptor.m_PadBottom = serializerDescriptor->padBottom();
999 descriptor.m_StrideX = serializerDescriptor->strideX();
1000 descriptor.m_StrideY = serializerDescriptor->strideY();;
Matthew Benthamacad04e2019-05-13 10:02:45 +01001001 descriptor.m_DilationX = serializerDescriptor->dilationX();
1002 descriptor.m_DilationY = serializerDescriptor->dilationY();;
Mike Kellya0766c32019-02-19 17:22:07 +00001003 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1004 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1005
1006 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1007 armnn::ConstTensor biases;
1008
Matteo Martincighfc598e12019-05-14 10:36:13 +01001009 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001010 if (descriptor.m_BiasEnabled)
1011 {
1012 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001013 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001014 }
1015 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1016 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001017 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001018 layerName.c_str());
1019 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1020 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1021
Derek Lamberti8ddae332019-02-21 16:29:43 +00001022 RegisterInputSlots(graph, layerIndex, layer);
1023 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001024}
1025
Derek Lamberti8ddae332019-02-21 16:29:43 +00001026void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001027{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001028 CHECK_LAYERS(graph, 0, layerIndex);
1029 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001030 CHECK_LOCATION();
1031 CHECK_VALID_SIZE(inputs.size(), 1);
1032
Derek Lamberti8ddae332019-02-21 16:29:43 +00001033 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001034 CHECK_VALID_SIZE(outputs.size(), 1);
1035
Derek Lamberti8ddae332019-02-21 16:29:43 +00001036 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001037 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001038 auto serializerDescriptor = serializerLayer->descriptor();
1039
1040 armnn::DepthwiseConvolution2dDescriptor descriptor;
1041 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1042 descriptor.m_PadRight = serializerDescriptor->padRight();
1043 descriptor.m_PadTop = serializerDescriptor->padTop();
1044 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1045 descriptor.m_StrideX = serializerDescriptor->strideX();
1046 descriptor.m_StrideY = serializerDescriptor->strideY();;
1047 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1048 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1049
1050 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1051 armnn::ConstTensor biases;
1052
Matteo Martincighfc598e12019-05-14 10:36:13 +01001053 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001054 if (descriptor.m_BiasEnabled)
1055 {
1056 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001057 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001058 }
1059 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1060 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001061 optionalBiases,
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001062 layerName.c_str());
1063
1064 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1065 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1066
Derek Lamberti8ddae332019-02-21 16:29:43 +00001067 RegisterInputSlots(graph, layerIndex, layer);
1068 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001069}
1070
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001071void Deserializer::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
1072{
1073 CHECK_LAYERS(graph, 0, layerIndex);
1074 auto inputs = GetInputs(graph, layerIndex);
1075 CHECK_LOCATION();
1076 CHECK_VALID_SIZE(inputs.size(), 2);
1077
1078 auto outputs = GetOutputs(graph, layerIndex);
1079 CHECK_VALID_SIZE(outputs.size(), 4);
1080
1081 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1082 auto layerName = GetLayerName(graph, layerIndex);
1083 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1084
1085 armnn::DetectionPostProcessDescriptor descriptor;
1086 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1087 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1088 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1089 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1090 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1091 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1092 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1093 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1094 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1095 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1096 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1097
1098 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1099
1100 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1101 anchors,
1102 layerName.c_str());
1103
1104 for (unsigned int i = 0; i < 4; i++)
1105 {
1106 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1107 }
1108
1109 RegisterInputSlots(graph, layerIndex, layer);
1110 RegisterOutputSlots(graph, layerIndex, layer);
1111}
1112
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001113void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
1114{
1115 CHECK_LAYERS(graph, 0, layerIndex);
1116 auto inputs = GetInputs(graph, layerIndex);
1117 CHECK_LOCATION();
1118 CHECK_VALID_SIZE(inputs.size(), 2);
1119
1120 auto outputs = GetOutputs(graph, layerIndex);
1121 CHECK_VALID_SIZE(outputs.size(), 1);
1122
1123 auto layerName = GetLayerName(graph, layerIndex);
1124 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1125
1126 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1127 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1128
1129 RegisterInputSlots(graph, layerIndex, layer);
1130 RegisterOutputSlots(graph, layerIndex, layer);
1131}
1132
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001133void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
1134{
1135 CHECK_LAYERS(graph, 0, layerIndex);
1136 auto inputs = GetInputs(graph, layerIndex);
1137 CHECK_LOCATION();
1138 CHECK_VALID_SIZE(inputs.size(), 2);
1139
1140 auto outputs = GetOutputs(graph, layerIndex);
1141 CHECK_VALID_SIZE(outputs.size(), 1);
1142
1143 auto layerName = GetLayerName(graph, layerIndex);
1144 IConnectableLayer* layer = m_Network->AddEqualLayer(layerName.c_str());
1145
1146 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1147 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1148
1149 RegisterInputSlots(graph, layerIndex, layer);
1150 RegisterOutputSlots(graph, layerIndex, layer);
1151}
1152
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001153void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
1154{
1155 CHECK_LAYERS(graph, 0, layerIndex);
1156 auto inputs = GetInputs(graph, layerIndex);
1157 CHECK_LOCATION();
1158 CHECK_VALID_SIZE(inputs.size(), 2);
1159
1160 auto outputs = GetOutputs(graph, layerIndex);
1161 CHECK_VALID_SIZE(outputs.size(), 1);
1162
1163 auto layerName = GetLayerName(graph, layerIndex);
1164 IConnectableLayer* layer = m_Network->AddGreaterLayer(layerName.c_str());
1165
1166 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1167 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1168
1169 RegisterInputSlots(graph, layerIndex, layer);
1170 RegisterOutputSlots(graph, layerIndex, layer);
1171}
1172
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001173void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
1174{
1175 CHECK_LAYERS(graph, 0, layerIndex);
1176
1177 auto inputs = GetInputs(graph, layerIndex);
1178 CHECK_VALID_SIZE(inputs.size(), 1);
1179
1180 auto outputs = GetOutputs(graph, layerIndex);
1181 CHECK_VALID_SIZE(outputs.size(), 1);
1182 auto outputInfo = ToTensorInfo(outputs[0]);
1183
1184 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1185 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1186
1187 auto layerName = GetLayerName(graph, layerIndex);
1188 armnn::L2NormalizationDescriptor descriptor;
1189 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001190 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001191
1192 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1193 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1194
1195 RegisterInputSlots(graph, layerIndex, layer);
1196 RegisterOutputSlots(graph, layerIndex, layer);
1197}
1198
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001199void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
1200{
1201 CHECK_LAYERS(graph, 0, layerIndex);
1202 auto inputs = GetInputs(graph, layerIndex);
1203 CHECK_LOCATION();
1204 CHECK_VALID_SIZE(inputs.size(), 2);
1205
1206 auto outputs = GetOutputs(graph, layerIndex);
1207 CHECK_VALID_SIZE(outputs.size(), 1);
1208
1209 auto layerName = GetLayerName(graph, layerIndex);
1210 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1211
1212 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1213 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1214
1215 RegisterInputSlots(graph, layerIndex, layer);
1216 RegisterOutputSlots(graph, layerIndex, layer);
1217}
1218
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001219void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
1220{
1221 CHECK_LAYERS(graph, 0, layerIndex);
1222 auto inputs = GetInputs(graph, layerIndex);
1223 CHECK_LOCATION();
1224 CHECK_VALID_SIZE(inputs.size(), 2);
1225
1226 auto outputs = GetOutputs(graph, layerIndex);
1227 CHECK_VALID_SIZE(outputs.size(), 1);
1228
1229 auto layerName = GetLayerName(graph, layerIndex);
1230 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1231
1232 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1233 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1234
1235 RegisterInputSlots(graph, layerIndex, layer);
1236 RegisterOutputSlots(graph, layerIndex, layer);
1237}
1238
Jim Flynne242f2d2019-05-22 14:24:13 +01001239const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1240 unsigned int layerIndex)
1241{
1242 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1243
1244 switch (layerType)
1245 {
1246 case Layer::Layer_ConcatLayer:
1247 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1248 case Layer::Layer_MergerLayer:
1249 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1250 default:
1251 throw armnn::Exception("unknown layer type, should be concat or merger");
1252 }
1253}
1254
Jim Flynn906f9462019-05-10 13:55:21 +01001255void Deserializer::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001256{
1257 CHECK_LAYERS(graph, 0, layerIndex);
1258 CHECK_LOCATION();
1259
1260 auto outputs = GetOutputs(graph, layerIndex);
1261 CHECK_VALID_SIZE(outputs.size(), 1);
1262
Jim Flynnac25a1b2019-02-28 10:40:49 +00001263 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001264 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1265 unsigned int numViews = originsDescriptor->numViews();
1266 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001267
1268 // can now check the number of inputs == number of views
1269 auto inputs = GetInputs(graph, layerIndex);
1270 CHECK_VALID_SIZE(inputs.size(), numViews);
1271
1272 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001273 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001274 for (unsigned int v = 0; v < numViews; ++v)
1275 {
1276 auto originPtr = originsPtr->Get(v);
1277 for (unsigned int d = 0; d < numDimensions; ++d)
1278 {
1279 uint32_t value = originPtr->data()->Get(d);
1280 descriptor.SetViewOriginCoord(v, d, value);
1281 }
1282 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001283 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001284
Jim Flynn906f9462019-05-10 13:55:21 +01001285 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001286 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1287 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1288
1289 RegisterInputSlots(graph, layerIndex, layer);
1290 RegisterOutputSlots(graph, layerIndex, layer);
1291}
1292
Derek Lamberti8ddae332019-02-21 16:29:43 +00001293void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001294{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001295 CHECK_LAYERS(graph, 0, layerIndex);
1296 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001297 CHECK_LOCATION();
1298 CHECK_VALID_SIZE(inputs.size(), 2);
1299
Derek Lamberti8ddae332019-02-21 16:29:43 +00001300 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001301 CHECK_VALID_SIZE(outputs.size(), 1);
1302
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001303 auto layerName = GetLayerName(graph, layerIndex);
1304 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001305
1306 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1307 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1308
Derek Lamberti8ddae332019-02-21 16:29:43 +00001309 RegisterInputSlots(graph, layerIndex, layer);
1310 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001311}
1312
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001313void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
1314{
1315 CHECK_LAYERS(graph, 0, layerIndex);
1316 CHECK_LOCATION();
1317
1318 auto inputs = GetInputs(graph, layerIndex);
1319 CHECK_VALID_SIZE(inputs.size(), 1);
1320
1321 auto outputs = GetOutputs(graph, layerIndex);
1322 CHECK_VALID_SIZE(outputs.size(), 1);
1323
1324 auto layerName = GetLayerName(graph, layerIndex);
1325
1326 armnn::IConnectableLayer* layer;
1327
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001328 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001329
1330 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1331 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1332
1333 RegisterInputSlots(graph, layerIndex, layer);
1334 RegisterOutputSlots(graph, layerIndex, layer);
1335}
1336
Derek Lamberti8ddae332019-02-21 16:29:43 +00001337void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001338{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001339 CHECK_LAYERS(graph, 0, layerIndex);
1340 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001341 CHECK_LOCATION();
1342 CHECK_VALID_SIZE(inputs.size(), 1);
1343
Derek Lamberti8ddae332019-02-21 16:29:43 +00001344 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001345 CHECK_VALID_SIZE(outputs.size(), 1);
1346
Derek Lamberti8ddae332019-02-21 16:29:43 +00001347 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001348 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001349 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1350
1351 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1352 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1353 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1354
1355 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1356
1357 armnn::IConnectableLayer* layer;
Matteo Martincighfc598e12019-05-14 10:36:13 +01001358 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001359 if (flatBufferDescriptor->biasEnabled())
1360 {
1361 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001362 optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001363 }
Matteo Martincighfc598e12019-05-14 10:36:13 +01001364 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1365 weightsTensor,
1366 optionalBiases,
1367 layerName.c_str());
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001368
1369 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1370 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1371
Derek Lamberti8ddae332019-02-21 16:29:43 +00001372 RegisterInputSlots(graph, layerIndex, layer);
1373 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001374}
1375
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001376void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
1377{
1378 CHECK_LAYERS(graph, 0, layerIndex);
1379
1380 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1381 CHECK_VALID_SIZE(inputs.size(), 1);
1382
1383 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1384 CHECK_VALID_SIZE(outputs.size(), 1);
1385
1386 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1387 auto flatBufferPadList = flatBufferDescriptor->padList();
1388
1389 if (flatBufferPadList->Length() % 2 != 0)
1390 {
1391 throw ParseException(boost::str(
1392 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1393 }
1394
1395 std::vector<std::pair<unsigned int, unsigned int>> padList;
1396 padList.reserve(flatBufferPadList->Length() / 2);
1397 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1398 {
1399 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1400 }
1401
1402 armnn::PadDescriptor descriptor(padList);
1403
1404 auto layerName = GetLayerName(graph, layerIndex);
1405 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1406
1407 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1408 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1409
1410 RegisterInputSlots(graph, layerIndex, layer);
1411 RegisterOutputSlots(graph, layerIndex, layer);
1412}
1413
Derek Lamberti8ddae332019-02-21 16:29:43 +00001414void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001415{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001416 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001417
1418 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001419 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001420
Derek Lamberti8ddae332019-02-21 16:29:43 +00001421 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001422 CHECK_VALID_SIZE(inputs.size(), 1);
1423
Derek Lamberti8ddae332019-02-21 16:29:43 +00001424 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001425 CHECK_VALID_SIZE(outputs.size(), 1);
1426 auto outputInfo = ToTensorInfo(outputs[0]);
1427
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001428 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001429 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1430
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001431 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001432 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1433
Derek Lamberti8ddae332019-02-21 16:29:43 +00001434 RegisterInputSlots(graph, layerIndex, layer);
1435 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001436}
1437
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001438armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001439 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001440{
1441 armnn::Pooling2dDescriptor desc;
1442
1443 switch (pooling2dDesc->poolType())
1444 {
1445 case PoolingAlgorithm_Average:
1446 {
1447 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001448 break;
1449 }
1450 case PoolingAlgorithm_Max:
1451 {
1452 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001453 break;
1454 }
1455 default:
1456 {
1457 BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
1458 }
1459 }
1460
1461 switch (pooling2dDesc->outputShapeRounding())
1462 {
1463 case OutputShapeRounding_Floor:
1464 {
1465 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1466 break;
1467 }
1468 case OutputShapeRounding_Ceiling:
1469 {
1470 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
1471 break;
1472 }
1473 default:
1474 {
1475 BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
1476 }
1477 }
1478
1479 switch (pooling2dDesc->paddingMethod())
1480 {
1481 case PaddingMethod_Exclude:
1482 {
1483 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1484 break;
1485 }
1486 case PaddingMethod_IgnoreValue:
1487 {
1488 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
1489 break;
1490 }
1491 default:
1492 {
1493 BOOST_ASSERT_MSG(false, "Unsupported padding method");
1494 }
1495 }
1496
1497 switch (pooling2dDesc->dataLayout())
1498 {
1499 case DataLayout_NCHW:
1500 {
1501 desc.m_DataLayout = armnn::DataLayout::NCHW;
1502 break;
1503 }
1504 case DataLayout_NHWC:
1505 {
1506 desc.m_DataLayout = armnn::DataLayout::NHWC;
1507 break;
1508 }
1509 default:
1510 {
1511 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1512 }
1513 }
1514
1515 desc.m_PadRight = pooling2dDesc->padRight();
1516 desc.m_PadLeft = pooling2dDesc->padLeft();
1517 desc.m_PadBottom = pooling2dDesc->padBottom();
1518 desc.m_PadTop = pooling2dDesc->padTop();
1519 desc.m_StrideX = pooling2dDesc->strideX();
1520 desc.m_StrideY = pooling2dDesc->strideY();
1521 desc.m_PoolWidth = pooling2dDesc->poolWidth();
1522 desc.m_PoolHeight = pooling2dDesc->poolHeight();
1523
1524 return desc;
1525}
1526
Derek Lamberti8ddae332019-02-21 16:29:43 +00001527void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001528{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001529 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001530
Derek Lamberti8ddae332019-02-21 16:29:43 +00001531 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001532 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001533 CHECK_VALID_SIZE(inputs.size(), 1);
1534
Derek Lamberti8ddae332019-02-21 16:29:43 +00001535 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001536 CHECK_VALID_SIZE(outputs.size(), 1);
1537 auto outputInfo = ToTensorInfo(outputs[0]);
1538
1539 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001540 auto layerName = GetLayerName(graph, layerIndex);
1541 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001542 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1543
Derek Lamberti8ddae332019-02-21 16:29:43 +00001544 RegisterInputSlots(graph, layerIndex, layer);
1545 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001546}
1547
Derek Lamberti87acb272019-03-27 16:51:31 +00001548void Deserializer::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
1549{
1550 CHECK_LAYERS(graph, 0, layerIndex);
1551
1552 auto inputs = GetInputs(graph, layerIndex);
1553 CHECK_VALID_SIZE(inputs.size(), 1);
1554
1555 auto outputs = GetOutputs(graph, layerIndex);
1556 CHECK_VALID_SIZE(outputs.size(), 1);
1557 auto outputInfo = ToTensorInfo(outputs[0]);
1558
1559 auto layerName = GetLayerName(graph, layerIndex);
1560 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
1561 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1562
1563 RegisterInputSlots(graph, layerIndex, layer);
1564 RegisterOutputSlots(graph, layerIndex, layer);
1565}
1566
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001567armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00001568 const std::vector<uint32_t>& targetDimsIn)
1569{
1570 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
1571 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
1572
1573 if (stretchDim != targetDimsIn.end())
1574 {
1575 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
1576 {
1577 throw ParseException(boost::str(
1578 boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
1579 }
1580
1581 auto targetNumElements =
1582 boost::numeric_cast<unsigned int>(
1583 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
1584
1585 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
1586 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
1587 }
1588
1589 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
1590
1591 armnn::TensorInfo reshapeInfo = inputTensorInfo;
1592 reshapeInfo.SetShape(outputShape);
1593
1594 return reshapeInfo;
1595}
1596
Derek Lamberti8ddae332019-02-21 16:29:43 +00001597void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart263829c2019-02-19 15:54:14 +00001598{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001599 CHECK_LAYERS(graph, 0, layerIndex);
1600 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001601
Derek Lamberti8ddae332019-02-21 16:29:43 +00001602 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001603 CHECK_VALID_SIZE(outputs.size(), 1);
1604
1605 armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
1606 armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);
1607
Derek Lamberti8ddae332019-02-21 16:29:43 +00001608 const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
Saoirse Stewart263829c2019-02-19 15:54:14 +00001609 std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());
1610
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001611 armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001612 const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
1613
1614 const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
1615 outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());
1616
1617 if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
1618 {
1619 std::stringstream ss;
1620 ss << "New shape defined in reshape parameters "
1621 << reshapeOutputTensorShape
1622 << " does not equal output shape "
1623 << actualOutputTensorInfo.GetShape()
1624 << ": "
1625 << CHECK_LOCATION().AsString();
1626 throw ParseException(ss.str());
1627 }
1628
1629 armnn::ReshapeDescriptor reshapeDesc;
1630 reshapeDesc.m_TargetShape = reshapeOutputTensorShape;
1631
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001632 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001633 IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
1634 layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);
1635
Derek Lamberti8ddae332019-02-21 16:29:43 +00001636 RegisterInputSlots(graph, layerIndex, layer);
1637 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001638}
1639
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001640void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
1641{
1642 CHECK_LAYERS(graph, 0, layerIndex);
1643
1644 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1645 CHECK_VALID_SIZE(inputs.size(), 1);
1646
1647 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1648 CHECK_VALID_SIZE(outputs.size(), 1);
1649
1650 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
1651
1652 armnn::ResizeBilinearDescriptor descriptor;
1653 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
1654 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
1655 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1656
1657 auto layerName = GetLayerName(graph, layerIndex);
1658 IConnectableLayer* layer = m_Network->AddResizeBilinearLayer(descriptor, layerName.c_str());
1659
1660 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1661 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1662
1663 RegisterInputSlots(graph, layerIndex, layer);
1664 RegisterOutputSlots(graph, layerIndex, layer);
1665}
1666
Derek Lamberti8ddae332019-02-21 16:29:43 +00001667void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001668{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001669 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001670
Derek Lamberti8ddae332019-02-21 16:29:43 +00001671 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001672 CHECK_VALID_SIZE(inputs.size(), 1);
1673
Derek Lamberti8ddae332019-02-21 16:29:43 +00001674 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001675 CHECK_VALID_SIZE(outputs.size(), 1);
1676
1677 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00001678 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001679 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001680
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001681 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
1682
1683 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1684 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1685
Derek Lamberti8ddae332019-02-21 16:29:43 +00001686 RegisterInputSlots(graph, layerIndex, layer);
1687 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001688}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001689
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00001690void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
1691{
1692 CHECK_LAYERS(graph, 0, layerIndex);
1693
1694 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1695 CHECK_VALID_SIZE(inputs.size(), 1);
1696
1697 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1698 CHECK_VALID_SIZE(outputs.size(), 1);
1699
1700 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
1701 auto flatBufferPadList = flatBufferDescriptor->padList();
1702 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1703
1704 if (flatBufferPadList->Length() % 2 != 0)
1705 {
1706 throw ParseException(boost::str(
1707 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1708 }
1709
1710 std::vector<std::pair<unsigned int, unsigned int>> padList;
1711 padList.reserve(flatBufferPadList->Length() / 2);
1712 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1713 {
1714 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1715 }
1716
1717 armnn::SpaceToBatchNdDescriptor descriptor;
1718 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1719 descriptor.m_BlockShape =
1720 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1721 descriptor.m_PadList = padList;
1722
1723 auto layerName = GetLayerName(graph, layerIndex);
1724 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
1725
1726 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1727 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1728
1729 RegisterInputSlots(graph, layerIndex, layer);
1730 RegisterOutputSlots(graph, layerIndex, layer);
1731}
1732
Aron Virginas-Taraa067142019-06-11 16:01:44 +01001733void Deserializer::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
1734{
1735 CHECK_LAYERS(graph, 0, layerIndex);
1736
1737 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1738 CHECK_VALID_SIZE(inputs.size(), 1);
1739
1740 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1741 CHECK_VALID_SIZE(outputs.size(), 1);
1742
1743 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
1744
1745 armnn::SpaceToDepthDescriptor descriptor;
1746 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
1747 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1748
1749 auto layerName = GetLayerName(graph, layerIndex);
1750 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
1751
1752 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1753 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1754
1755 RegisterInputSlots(graph, layerIndex, layer);
1756 RegisterOutputSlots(graph, layerIndex, layer);
1757}
1758
Nina Drozd57728782019-02-27 10:53:27 +00001759armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
1760 Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
1761 unsigned int layerIndex)
1762{
1763 armnn::NormalizationDescriptor desc;
1764
1765 switch (normalizationDescriptor->normChannelType())
1766 {
1767 case NormalizationAlgorithmChannel_Across:
1768 {
1769 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
1770 break;
1771 }
1772 case NormalizationAlgorithmChannel_Within:
1773 {
1774 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
1775 break;
1776 }
1777 default:
1778 {
1779 BOOST_ASSERT_MSG(false, "Unsupported normalization channel type");
1780 }
1781 }
1782
1783 switch (normalizationDescriptor->normMethodType())
1784 {
1785 case NormalizationAlgorithmMethod_LocalBrightness:
1786 {
1787 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
1788 break;
1789 }
1790 case NormalizationAlgorithmMethod_LocalContrast:
1791 {
1792 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
1793 break;
1794 }
1795 default:
1796 {
1797 BOOST_ASSERT_MSG(false, "Unsupported normalization method type");
1798 }
1799 }
1800
1801 switch (normalizationDescriptor->dataLayout())
1802 {
1803 case DataLayout_NCHW:
1804 {
1805 desc.m_DataLayout = armnn::DataLayout::NCHW;
1806 break;
1807 }
1808 case DataLayout_NHWC:
1809 {
1810 desc.m_DataLayout = armnn::DataLayout::NHWC;
1811 break;
1812 }
1813 default:
1814 {
1815 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1816 }
1817 }
1818
1819 desc.m_Alpha = normalizationDescriptor->alpha();
1820 desc.m_Beta = normalizationDescriptor->beta();
1821 desc.m_K = normalizationDescriptor->k();
1822 desc.m_NormSize = normalizationDescriptor->normSize();
1823
1824 return desc;
1825}
1826
1827void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
1828{
1829 CHECK_LAYERS(graph, 0, layerIndex);
1830
1831 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
1832
1833 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1834 CHECK_VALID_SIZE(inputs.size(), 1);
1835
1836 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1837 CHECK_VALID_SIZE(outputs.size(), 1);
1838
1839 auto outputInfo = ToTensorInfo(outputs[0]);
1840
1841 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
1842 auto layerName = GetLayerName(graph, layerIndex);
1843
1844 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
1845 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1846
1847 RegisterInputSlots(graph, layerIndex, layer);
1848 RegisterOutputSlots(graph, layerIndex, layer);
1849}
1850
Sadik Armagan8b42a382019-03-01 14:24:49 +00001851void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
1852{
1853 CHECK_LAYERS(graph, 0, layerIndex);
1854 auto inputs = GetInputs(graph, layerIndex);
1855 CHECK_LOCATION();
1856 CHECK_VALID_SIZE(inputs.size(), 1);
1857
1858 auto outputs = GetOutputs(graph, layerIndex);
1859 CHECK_VALID_SIZE(outputs.size(), 1);
1860
1861 auto layerName = GetLayerName(graph, layerIndex);
1862 IConnectableLayer* layer = m_Network->AddRsqrtLayer(layerName.c_str());
1863
1864 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1865 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1866
1867 RegisterInputSlots(graph, layerIndex, layer);
1868 RegisterOutputSlots(graph, layerIndex, layer);
1869}
1870
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00001871void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
1872{
1873 CHECK_LAYERS(graph, 0, layerIndex);
1874
1875 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1876 CHECK_VALID_SIZE(inputs.size(), 1);
1877
1878 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1879 CHECK_VALID_SIZE(outputs.size(), 1);
1880
1881 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
1882
1883 auto flatBufferBegin = flatBufferDescriptor->begin();
1884 auto flatBufferEnd = flatBufferDescriptor->end();
1885 auto flatBufferStride = flatBufferDescriptor->stride();
1886
1887 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
1888 flatBufferBegin->Length() == flatBufferStride->Length()))
1889 {
1890 throw ParseException(boost::str(
1891 boost::format("The size of the begin, end, and stride must be equal %1%") % CHECK_LOCATION().AsString()));
1892 }
1893
1894 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
1895 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
1896 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
1897
1898 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
1899 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
1900 descriptor.m_EndMask = flatBufferDescriptor->endMask();
1901 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
1902 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
1903 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
1904 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1905
1906 auto layerName = GetLayerName(graph, layerIndex);
1907 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
1908
1909 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1910 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1911
1912 RegisterInputSlots(graph, layerIndex, layer);
1913 RegisterOutputSlots(graph, layerIndex, layer);
1914}
1915
Conor Kennedyda1f9752019-03-01 14:37:12 +00001916void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
1917{
1918 CHECK_LAYERS(graph, 0, layerIndex);
1919 auto inputs = GetInputs(graph, layerIndex);
1920 CHECK_LOCATION();
1921 CHECK_VALID_SIZE(inputs.size(), 2);
1922
1923 auto outputs = GetOutputs(graph, layerIndex);
1924 CHECK_VALID_SIZE(outputs.size(), 1);
1925
1926 auto layerName = GetLayerName(graph, layerIndex);
1927 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
1928
1929 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1930 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1931
1932 RegisterInputSlots(graph, layerIndex, layer);
1933 RegisterOutputSlots(graph, layerIndex, layer);
1934}
1935
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001936void Deserializer::ParseGather(GraphPtr graph, unsigned int layerIndex)
1937{
1938 CHECK_LAYERS(graph, 0, layerIndex);
1939
1940 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1941 CHECK_VALID_SIZE(inputs.size(), 2);
1942
1943 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1944 CHECK_VALID_SIZE(outputs.size(), 1);
1945
1946 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001947 IConnectableLayer* layer = m_Network->AddGatherLayer(layerName.c_str());
1948
1949 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001950 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1951
1952 RegisterInputSlots(graph, layerIndex, layer);
1953 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001954}
1955
Sadik Armaganac97c8c2019-03-04 17:44:21 +00001956void Deserializer::ParseMean(GraphPtr graph, unsigned int layerIndex)
1957{
1958 CHECK_LAYERS(graph, 0, layerIndex);
1959
1960 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1961 CHECK_VALID_SIZE(inputs.size(), 1);
1962
1963 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1964 CHECK_VALID_SIZE(outputs.size(), 1);
1965
1966 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
1967 auto flatBufferAxis = flatBufferDescriptor->axis();
1968 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
1969
1970 armnn::MeanDescriptor descriptor;
1971 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
1972 descriptor.m_KeepDims = flatBufferKeepDims;
1973
1974 auto layerName = GetLayerName(graph, layerIndex);
1975 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
1976
1977 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1978 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1979
1980 RegisterInputSlots(graph, layerIndex, layer);
1981 RegisterOutputSlots(graph, layerIndex, layer);
1982}
1983
Jim Flynn18ce3382019-03-08 11:08:30 +00001984void Deserializer::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
1985{
1986 CHECK_LAYERS(graph, 0, layerIndex);
1987
1988 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1989 CHECK_VALID_SIZE(inputs.size(), 1);
1990
1991 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1992
1993 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
1994 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
1995 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
1996 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
1997 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
1998 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
1999
2000 // Check numViews and numDimensions corresponds to the ones already serialized ...
2001 // numViews == flatBufferViewSizes.size();
2002 // foreach: numDimensions == flatBufferViewSizes[x].size();
2003
2004 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
2005 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2006 {
2007 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
2008 {
2009 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
2010 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
2011 }
2012 }
2013
2014 auto layerName = GetLayerName(graph, layerIndex);
2015 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
2016
2017 // I could have as many outputs as views ...
2018 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2019 {
2020 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
2021 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
2022 }
2023
2024 RegisterInputSlots(graph, layerIndex, layer);
2025 RegisterOutputSlots(graph, layerIndex, layer);
2026}
2027
Jim Flynn11af3752019-03-19 17:22:29 +00002028armnn::LstmDescriptor Deserializer::GetLstmDescriptor(Deserializer::LstmDescriptorPtr lstmDescriptor)
2029{
2030 armnn::LstmDescriptor desc;
2031
2032 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
2033 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
2034 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
2035 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
2036 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
2037 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
2038
2039 return desc;
2040}
2041
// Restores an LSTM layer: rebuilds the descriptor, gathers the mandatory and
// (depending on the descriptor flags) optional weight/bias tensors into an
// LstmInputParams, then wires up the 3 inputs and 4 outputs.
void Deserializer::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    // NOTE(review): LstmInputParams stores raw pointers to the local
    // ConstTensors declared below; this presumably relies on AddLstmLayer
    // copying the tensor data before this function returns — confirm.
    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters — always present in the serialized layer.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Optional parameters. Each group's ConstTensors are declared OUTSIDE the
    // `if` so that the pointers stored in lstmInputParams stay valid until the
    // AddLstmLayer call below — do not move these declarations into the branches.

    // Input-gate tensors: only serialized when CIFG is disabled.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Projection tensors: only serialized when projection is enabled.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Peephole tensors: only serialized when peephole connections are enabled.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    // Four output slots, each restored from the serialized tensor infos.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2137
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002138void Deserializer::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
2139{
2140 CHECK_LAYERS(graph, 0, layerIndex);
2141
2142 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2143 CHECK_VALID_SIZE(inputs.size(), 1);
2144
2145 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2146 CHECK_VALID_SIZE(outputs.size(), 1);
2147
2148 const std::string layerName = GetLayerName(graph, layerIndex);
2149 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
2150
2151 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2152 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2153
2154 RegisterInputSlots(graph, layerIndex, layer);
2155 RegisterOutputSlots(graph, layerIndex, layer);
2156}
2157
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01002158void Deserializer::ParseMerge(GraphPtr graph, unsigned int layerIndex)
2159{
2160 CHECK_LAYERS(graph, 0, layerIndex);
2161
2162 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2163 CHECK_VALID_SIZE(inputs.size(), 2);
2164
2165 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2166 CHECK_VALID_SIZE(outputs.size(), 1);
2167
2168 const std::string layerName = GetLayerName(graph, layerIndex);
2169 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
2170
2171 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2172 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2173
2174 RegisterInputSlots(graph, layerIndex, layer);
2175 RegisterOutputSlots(graph, layerIndex, layer);
2176}
2177
Sadik Armaganeff363d2019-04-05 15:25:46 +01002178void Deserializer::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
2179{
2180 CHECK_LAYERS(graph, 0, layerIndex);
2181 auto inputs = GetInputs(graph, layerIndex);
2182 CHECK_LOCATION();
2183 CHECK_VALID_SIZE(inputs.size(), 2);
2184
2185 auto outputs = GetOutputs(graph, layerIndex);
2186 CHECK_VALID_SIZE(outputs.size(), 2);
2187
2188 auto layerName = GetLayerName(graph, layerIndex);
2189 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
2190
2191 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
2192 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
2193
2194 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
2195 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
2196
2197 RegisterInputSlots(graph, layerIndex, layer);
2198 RegisterOutputSlots(graph, layerIndex, layer);
2199}
2200
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01002201void Deserializer::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
2202{
2203 CHECK_LAYERS(graph, 0, layerIndex);
2204 auto inputs = GetInputs(graph, layerIndex);
2205 CHECK_LOCATION();
2206 CHECK_VALID_SIZE(inputs.size(), 2);
2207
2208 auto outputs = GetOutputs(graph, layerIndex);
2209 CHECK_VALID_SIZE(outputs.size(), 1);
2210
2211 auto layerName = GetLayerName(graph, layerIndex);
2212 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
2213
2214 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2215 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2216
2217 RegisterInputSlots(graph, layerIndex, layer);
2218 RegisterOutputSlots(graph, layerIndex, layer);
2219}
2220
Derek Lamberti0028d1b2019-02-20 13:57:42 +00002221} // namespace armnnDeserializer