blob: 75c258b7ab7c7647dafbfe3bb09b774c539a9453 [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
8#include <armnn/ArmNN.hpp>
9#include <armnn/Exceptions.hpp>
10
11#include <ParserHelper.hpp>
12#include <Permute.hpp>
13#include <VerificationHelpers.hpp>
14
15#include <boost/filesystem.hpp>
16#include <boost/format.hpp>
17#include <boost/core/ignore_unused.hpp>
18#include <boost/assert.hpp>
19#include <boost/format.hpp>
20#include <boost/log/trivial.hpp>
Aron Virginas-Tard4f0fea2019-04-09 14:08:06 +010021#include <boost/format.hpp>
22#include <boost/numeric/conversion/cast.hpp>
Jim Flynn18ce3382019-03-08 11:08:30 +000023#include <boost/polymorphic_cast.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000024
25// The generated code based on the Serialize schema:
Matthew Bentham268509a2019-02-25 13:58:24 +000026#include <ArmnnSchema_generated.h>
Kevin May43a799c2019-02-08 16:31:42 +000027
28#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000029#include <algorithm>
30#include <limits>
31#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000032
33using armnn::ParseException;
34using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000035using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000036
Derek Lamberti0028d1b2019-02-20 13:57:42 +000037namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000038{
Kevin May43a799c2019-02-08 16:31:42 +000039
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000040namespace
41{
Kevin May43a799c2019-02-08 16:31:42 +000042
// Sentinel layer id that is exempted from layer-index range checks (see CheckLayers);
// presumably marks implicit/virtual layers that have no slot in the serialized graph — TODO confirm.
const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
44
Derek Lamberti0028d1b2019-02-20 13:57:42 +000045 void CheckGraph(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000046 unsigned int layersIndex,
47 const CheckLocation& location)
48{
49 if (graph->layers() == nullptr)
50 {
51 throw ParseException(
52 boost::str(
53 boost::format("%1% was called with invalid (null) graph. "
54 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
55 "layers:%2% at %3%") %
56 location.m_Function %
57 layersIndex %
58 location.FileLine()));
59 }
60 else if (layersIndex >= graph->layers()->size())
61 {
62 throw ParseException(
63 boost::str(
64 boost::format("%1% was called with an invalid layers index. "
65 "layers:%2% at %3%") %
66 location.m_Function %
67 layersIndex %
68 location.FileLine()));
69 }
70}
71
// Validates that 'graph' has been unpacked, that 'layersIndex' is a valid index
// into the layer collection, and that 'layerIndex' addresses a valid layer within
// it (or equals the VIRTUAL_LAYER_ID sentinel, which is always accepted).
// Throws armnn::ParseException on failure.
void CheckLayers(const Deserializer::GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    // NOTE(review): graph->layers() returns a pointer, so graph->layers()[layersIndex]
    // performs pointer arithmetic rather than element access; this is only well-defined
    // for layersIndex == 0 (the value every visible call site passes via CHECK_LAYERS).
    // Confirm intent — graph->layers()->size() may have been meant.
    else if (layerIndex >= graph->layers()[layersIndex].size()
        && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layer index. "
                              "layers:%2% layer:%3% at %4%") %
                location.m_Function %
                layersIndex %
                layerIndex %
                location.FileLine()));
    }
}
111
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000112void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000113 const CheckLocation& location)
114{
115 if (rawPtr == nullptr)
116 {
117 throw ParseException(
118 boost::str(
119 boost::format("%1% was called with a null tensor pointer. "
120 "at %2%") %
121 location.m_Function %
122 location.FileLine()));
123
124 }
125}
126
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000127void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000128 const CheckLocation& location)
129{
130 if (rawPtr == nullptr)
131 {
132 throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
133 location.m_Function %
134 location.FileLine()));
135 }
136}
137
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000138void CheckConstTensorSize(const unsigned int constTensorSize,
139 const unsigned int tensorSize,
140 const CheckLocation& location)
141{
142 if (constTensorSize != tensorSize)
143 {
144 throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
145 location.m_Function %
146 location.FileLine()));
147 }
148}
149
Kevin May43a799c2019-02-08 16:31:42 +0000150#define CHECK_TENSOR_PTR(TENSOR_PTR) \
151 CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())
152
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000153#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
154 CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())
155
Mike Kellya0766c32019-02-19 17:22:07 +0000156#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
157 CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())
158
Kevin May43a799c2019-02-08 16:31:42 +0000159#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
160 CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())
161
162#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
163 CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
164}
165
Saoirse Stewart263829c2019-02-19 15:54:14 +0000166bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
167{
168 const unsigned int actualSize = actual.GetNumDimensions();
169 if (actualSize != expected.size())
170 {
171 return false;
172 }
173
174 for (unsigned int i = 0u; i < actualSize; i++)
175 {
176 if (actual[i] != static_cast<unsigned int>(expected[i]))
177 {
178 return false;
179 }
180 }
181
182 return true;
183}
184
// Constructs an empty Deserializer: no network yet, and a dispatch table with one
// slot per flatbuffer Layer enum value, each defaulting to ParseUnsupportedLayer
// so unregistered layer types fail with a clear error.
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &Deserializer::ParseBatchNormalization;
    m_ParserFunctions[Layer_ConcatLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer] = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &Deserializer::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &Deserializer::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_EqualLayer] = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_FloorLayer] = &Deserializer::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &Deserializer::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer] = &Deserializer::ParseGreater;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &Deserializer::ParseL2Normalization;
    m_ParserFunctions[Layer_LstmLayer] = &Deserializer::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &Deserializer::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer] = &Deserializer::ParseMerge;
    // The deprecated Merger layer is parsed by the Concat handler.
    m_ParserFunctions[Layer_MergerLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer] = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &Deserializer::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &Deserializer::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_QuantizeLayer] = &Deserializer::ParseQuantize;
    m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
    m_ParserFunctions[Layer_RsqrtLayer] = &Deserializer::ParseRsqrt;
    m_ParserFunctions[Layer_SoftmaxLayer] = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &Deserializer::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SplitterLayer] = &Deserializer::ParseSplitter;
    m_ParserFunctions[Layer_StridedSliceLayer] = &Deserializer::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &Deserializer::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer] = &Deserializer::ParseSwitch;
}
230
// Returns the common LayerBase of the layer at 'layerIndex' by switching on its
// flatbuffer union type. Input and Output layers wrap their base one level deeper
// (base()->base()). Throws armnn::ParseException for Layer_NONE or unknown types.
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        // Input layers carry a BindableLayerBase, hence the extra ->base() hop.
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        // Output layers carry a BindableLayerBase, hence the extra ->base() hop.
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                boost::format("Layer must have a type %1%") %
                Layer::Layer_NONE));
    }
}
324
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000325std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
326{
327 auto layer = GetBaseLayer(graph, index);
328 assert(layer);
329 return layer->layerName()->str();
330}
331
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000332int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000333{
334 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
335
336 if (layerType == Layer::Layer_InputLayer)
337 {
338 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
339 }
340 else if ( layerType == Layer::Layer_OutputLayer )
341 {
342 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
343 }
344 return 0;
345}
346
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000347armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000348{
349 switch (dataLayout)
350 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000351 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000352 return armnn::DataLayout::NHWC;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000353 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000354 default:
355 return armnn::DataLayout::NCHW;
356 }
357}
358
Mike Kellyaf484012019-02-20 16:53:11 +0000359armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
360{
361 switch (function)
362 {
363 case armnnSerializer::ActivationFunction_Sigmoid:
364 return armnn::ActivationFunction::Sigmoid;
365 case armnnSerializer::ActivationFunction_TanH:
366 return armnn::ActivationFunction::TanH;
367 case armnnSerializer::ActivationFunction_Linear:
368 return armnn::ActivationFunction::Linear;
369 case armnnSerializer::ActivationFunction_ReLu:
370 return armnn::ActivationFunction::ReLu;
371 case armnnSerializer::ActivationFunction_BoundedReLu:
372 return armnn::ActivationFunction::BoundedReLu;
373 case armnnSerializer::ActivationFunction_LeakyReLu:
374 return armnn::ActivationFunction::LeakyReLu;
375 case armnnSerializer::ActivationFunction_Abs:
376 return armnn::ActivationFunction::Abs;
377 case armnnSerializer::ActivationFunction_Sqrt:
378 return armnn::ActivationFunction::Sqrt;
379 case armnnSerializer::ActivationFunction_Square:
380 return armnn::ActivationFunction::Square;
381 default:
382 return armnn::ActivationFunction::Sigmoid;
383 }
384}
385
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000386armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000387{
388 armnn::DataType type;
389 CHECK_TENSOR_PTR(tensorPtr);
390
391 switch (tensorPtr->dataType())
392 {
393 case DataType_QuantisedAsymm8:
394 type = armnn::DataType::QuantisedAsymm8;
395 break;
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000396 case DataType_QuantisedSymm16:
397 type = armnn::DataType::QuantisedSymm16;
398 break;
Mike Kellya0766c32019-02-19 17:22:07 +0000399 case DataType_Signed32:
400 type = armnn::DataType::Signed32;
401 break;
Kevin May43a799c2019-02-08 16:31:42 +0000402 case DataType_Float32:
403 type = armnn::DataType::Float32;
404 break;
405 case DataType_Float16:
406 type = armnn::DataType::Float16;
407 break;
408 case DataType_Boolean:
409 type = armnn::DataType::Boolean;
410 break;
411 default:
412 {
413 CheckLocation location = CHECK_LOCATION();
414 throw ParseException(
415 boost::str(
416 boost::format("Unsupported data type %1% = %2%. %3%") %
417 tensorPtr->dataType() %
418 EnumNameDataType(tensorPtr->dataType()) %
419 location.AsString()));
420 }
421 }
422 float quantizationScale = tensorPtr->quantizationScale();
423 int32_t quantizationOffset = tensorPtr->quantizationOffset();
424
425 auto dimensions = tensorPtr->dimensions();
426 unsigned int size = dimensions->size();
427 std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
428
429 // two statements (on purpose) for easier debugging:
430 armnn::TensorInfo result(size,
431 outputDims.data(),
432 type,
433 quantizationScale,
434 quantizationOffset);
435 return result;
436}
437
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000438armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
Mike Kellya0766c32019-02-19 17:22:07 +0000439{
440 CHECK_CONST_TENSOR_PTR(constTensorPtr);
441 armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());
442
443 switch (constTensorPtr->data_type())
444 {
445 case ConstTensorData_ByteData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000446 {
447 auto byteData = constTensorPtr->data_as_ByteData()->data();
448 CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
449 return armnn::ConstTensor(tensorInfo, byteData->data());
450 }
Mike Kellya0766c32019-02-19 17:22:07 +0000451 case ConstTensorData_ShortData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000452 {
453 auto shortData = constTensorPtr->data_as_ShortData()->data();
454 CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
455 return armnn::ConstTensor(tensorInfo, shortData->data());
456 }
Mike Kellya0766c32019-02-19 17:22:07 +0000457 case ConstTensorData_IntData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000458 {
459 auto intData = constTensorPtr->data_as_IntData()->data();
460 CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
461 return armnn::ConstTensor(tensorInfo, intData->data());
462 }
Mike Kellya0766c32019-02-19 17:22:07 +0000463 case ConstTensorData_LongData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000464 {
465 auto longData = constTensorPtr->data_as_LongData()->data();
466 CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
467 return armnn::ConstTensor(tensorInfo, longData->data());
468 }
Mike Kellya0766c32019-02-19 17:22:07 +0000469 default:
470 {
471 CheckLocation location = CHECK_LOCATION();
472 throw ParseException(
473 boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
474 constTensorPtr->data_type() %
475 EnumNameConstTensorData(constTensorPtr->data_type()) %
476 location.AsString()));
477 }
478 }
479}
480
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000481Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000482 unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000483{
484 CHECK_LAYERS(graphPtr, 0, layerIndex);
485 auto layer = GetBaseLayer(graphPtr, layerIndex);
486 const auto& numInputs = layer->inputSlots()->size();
487
488 TensorRawPtrVector result(numInputs);
489
490 for (unsigned int i=0; i<numInputs; ++i)
491 {
492 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
493 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
494 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
495 }
496 return result;
497}
498
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000499Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000500 unsigned int layerIndex)
501{
502 CHECK_LAYERS(graphPtr, 0, layerIndex);
503 auto layer = GetBaseLayer(graphPtr, layerIndex);
504 const auto& numOutputs = layer->outputSlots()->size();
505
506 TensorRawPtrVector result(numOutputs);
507
508 for (unsigned int i=0; i<numOutputs; ++i)
509 {
510 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
511 }
512 return result;
513}
514
Derek Lamberti8ddae332019-02-21 16:29:43 +0000515void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000516{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000517 CHECK_LAYERS(graph, 0, layerIndex);
518 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Kevin May43a799c2019-02-08 16:31:42 +0000519 throw ParseException(
520 boost::str(
521 boost::format("Layer not supported. "
522 "layerIndex: %1% "
523 "layerName: %2% / %3%") %
524 layerIndex %
525 layerName %
526 CHECK_LOCATION().AsString()));
527}
528
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000529void Deserializer::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000530{
531 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000532 m_InputBindings.clear();
533 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000534}
535
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000536IDeserializer* IDeserializer::CreateRaw()
Kevin May43a799c2019-02-08 16:31:42 +0000537{
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000538 return new Deserializer();
Kevin May43a799c2019-02-08 16:31:42 +0000539}
540
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000541IDeserializerPtr IDeserializer::Create()
Kevin May43a799c2019-02-08 16:31:42 +0000542{
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000543 return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
Kevin May43a799c2019-02-08 16:31:42 +0000544}
545
// Releases a parser previously obtained from CreateRaw(); also used as the
// deleter of the handle returned by Create().
void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}
550
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000551INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000552{
553 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000554 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
555 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000556}
557
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000558armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000559{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000560 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000561 std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
562 GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
563 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000564}
565
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000566Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000567{
568 if (binaryContent == nullptr)
569 {
570 throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
571 CHECK_LOCATION().AsString()));
572 }
573 flatbuffers::Verifier verifier(binaryContent, len);
574 if (verifier.VerifyBuffer<SerializedGraph>() == false)
575 {
576 throw ParseException(
577 boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
578 "flatbuffers format. size:%1% %2%") %
579 len %
580 CHECK_LOCATION().AsString()));
581 }
582 return GetSerializedGraph(binaryContent);
583}
584
// Builds an armnn INetwork from a verified SerializedGraph:
//  1. dispatches every non-Input/Output layer to its registered parse function
//     (parse functions populate the network and record slot connections);
//  2. creates the Input/Output layers;
//  3. wires each recorded output slot to all input slots registered against it.
// Ownership of the built network is moved to the caller.
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        // Input/Output layers are handled separately by SetupInput/OutputLayers below.
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            // An output slot may fan out to several input slots; connect each one.
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    // Transfer ownership of the network out of the parser.
    return std::move(m_Network);
}
625
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000626BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000627 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000628{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000629 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000630 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000631 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000632 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000633 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000634 }
635 }
636 throw ParseException(
637 boost::str(
638 boost::format("No input binding found for layer:%1% / %2%") %
639 name %
640 CHECK_LOCATION().AsString()));
641}
642
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000643BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000644 const std::string& name) const
645{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000646 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000647 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000648 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000649 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000650 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000651 }
652 }
653 throw ParseException(
654 boost::str(
655 boost::format("No output binding found for layer:%1% / %2%") %
656 name %
657 CHECK_LOCATION().AsString()));
658}
659
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100660unsigned int Deserializer::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
661{
662 for (unsigned int i = 0; i < graph->layers()->size(); i++)
663 {
664 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
665 if (layer->index() == targetIndex)
666 {
667 return i;
668 }
669 }
670 throw ParseException("Layer with given index not found");
671}
672
Derek Lamberti8ddae332019-02-21 16:29:43 +0000673void Deserializer::SetupInputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000674{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000675 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100676 const unsigned int numInputs = graph->inputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000677 m_InputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100678 m_InputBindings.reserve(numInputs);
679
680 for (unsigned int i = 0; i < numInputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000681 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100682 const unsigned int inputId = graph->inputIds()->Get(i);
683 const unsigned int inputLayerIndex = GetLayerIndexInVector(graph, inputId);
684 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000685
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100686 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
687 LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
688 BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Kevin May43a799c2019-02-08 16:31:42 +0000689
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100690 IConnectableLayer* inputLayer =
691 m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000692
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100693 const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
694 inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
695 RegisterOutputSlots(graph, inputLayerIndex, inputLayer);
696
Derek Lamberti8ddae332019-02-21 16:29:43 +0000697 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100698 m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000699 }
700}
701
Derek Lamberti8ddae332019-02-21 16:29:43 +0000702void Deserializer::SetupOutputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000703{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000704 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100705 const unsigned int numOutputs = graph->outputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000706 m_OutputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100707 m_OutputBindings.reserve(numOutputs);
708
709 for (unsigned int i = 0; i < numOutputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000710 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100711 const unsigned int outputId = graph->outputIds()->Get(i);
712 const unsigned int outputLayerIndex = GetLayerIndexInVector(graph, outputId);
713 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000714
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100715 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
716 LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
717 BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Derek Lamberti8ddae332019-02-21 16:29:43 +0000718
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100719 IConnectableLayer* outputLayer =
720 m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000721
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100722 RegisterInputSlots(graph, outputLayerIndex, outputLayer);
723
724 unsigned int sourceLayerIndex =
725 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
726 LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
727 const armnn::TensorInfo& tensorInfo = ToTensorInfo(sourceBaseLayer->outputSlots()->Get(0)->tensorInfo());
728
Derek Lamberti8ddae332019-02-21 16:29:43 +0000729 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100730 m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000731 }
732}
733
Derek Lamberti8ddae332019-02-21 16:29:43 +0000734void Deserializer::RegisterOutputSlots(GraphPtr graph,
735 uint32_t layerIndex,
736 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000737{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000738 CHECK_LAYERS(graph, 0, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000739 BOOST_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100740 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
741 if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
Kevin May43a799c2019-02-08 16:31:42 +0000742 {
743 throw ParseException(
744 boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
745 " for layer index: %3% %4%") %
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100746 baseLayer->outputSlots()->size() %
Kevin May43a799c2019-02-08 16:31:42 +0000747 layer->GetNumOutputSlots() %
748 layerIndex %
749 CHECK_LOCATION().AsString()));
750 }
751
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100752 for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +0000753 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100754 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
755 armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
756 // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
757 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
Kevin May43a799c2019-02-08 16:31:42 +0000758 }
759}
760
Derek Lamberti8ddae332019-02-21 16:29:43 +0000761void Deserializer::RegisterInputSlots(GraphPtr graph,
762 uint32_t layerIndex,
763 armnn::IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000764{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000765 CHECK_LAYERS(graph, 0, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000766 BOOST_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100767 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
768 if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
Kevin May43a799c2019-02-08 16:31:42 +0000769 {
770 throw ParseException(
771 boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
772 " for layer index:%3% %4%") %
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100773 baseLayer->inputSlots()->size() %
Kevin May43a799c2019-02-08 16:31:42 +0000774 layer->GetNumInputSlots() %
775 layerIndex %
776 CHECK_LOCATION().AsString()));
777 }
778
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100779 for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +0000780 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100781 auto fbInputSlot = baseLayer->inputSlots()->Get(i);
782 auto fbConnection = fbInputSlot->connection();
783 armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
784 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
Kevin May43a799c2019-02-08 16:31:42 +0000785 }
786}
787
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000788void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
789 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100790 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +0000791{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100792 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +0000793 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100794 m_GraphConnections[sourceLayerIndex] = Connections();
795 }
796
797 Connections& connections = m_GraphConnections[sourceLayerIndex];
798 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
799 {
800 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +0000801 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000802 else
803 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100804 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000805 }
806}
Kevin May43a799c2019-02-08 16:31:42 +0000807
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000808void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100809 uint32_t outputSlotIndex,
810 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000811{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100812 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
813 {
814 m_GraphConnections[sourceLayerIndex] = Connections();
815 }
816
817 Connections& connections = m_GraphConnections[sourceLayerIndex];
818 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
819 {
820 throw ParseException("Same output slot index processed twice");
821 }
822
823 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +0000824}
825
Derek Lamberti8ddae332019-02-21 16:29:43 +0000826void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +0000827{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000828 CHECK_LAYERS(graph, 0, layerIndex);
829 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000830 CHECK_LOCATION();
831 CHECK_VALID_SIZE(inputs.size(), 1);
832
Derek Lamberti8ddae332019-02-21 16:29:43 +0000833 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000834 CHECK_VALID_SIZE(outputs.size(), 1);
835
Derek Lamberti8ddae332019-02-21 16:29:43 +0000836 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000837 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000838 auto serializerDescriptor = serializerLayer->descriptor();
839
840 armnn::ActivationDescriptor descriptor;
841 descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
842 descriptor.m_A = serializerDescriptor->a();
843 descriptor.m_B = serializerDescriptor->b();
844
845 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
846 layerName.c_str());
847 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
848 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
849
Derek Lamberti8ddae332019-02-21 16:29:43 +0000850 RegisterInputSlots(graph, layerIndex, layer);
851 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +0000852}
853
Derek Lamberti8ddae332019-02-21 16:29:43 +0000854void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000855{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000856 CHECK_LAYERS(graph, 0, layerIndex);
857 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000858 CHECK_LOCATION();
859 CHECK_VALID_SIZE(inputs.size(), 2);
860
Derek Lamberti8ddae332019-02-21 16:29:43 +0000861 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000862 CHECK_VALID_SIZE(outputs.size(), 1);
863
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000864 auto layerName = GetLayerName(graph, layerIndex);
865 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +0000866
867 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
868 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
869
Derek Lamberti8ddae332019-02-21 16:29:43 +0000870 RegisterInputSlots(graph, layerIndex, layer);
871 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +0000872}
873
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000874void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
875{
876 CHECK_LAYERS(graph, 0, layerIndex);
877
878 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
879 CHECK_VALID_SIZE(inputs.size(), 1);
880
881 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
882 CHECK_VALID_SIZE(outputs.size(), 1);
883
884 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
885 auto flatBufferCrops = flatBufferDescriptor->crops();
886 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
887
888 if (flatBufferCrops->Length() % 2 != 0)
889 {
890 throw ParseException(boost::str(
891 boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
892 }
893
894 std::vector<std::pair<unsigned int, unsigned int>> crops;
895 crops.reserve(flatBufferCrops->Length() / 2);
896 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
897 {
898 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
899 }
900
901 armnn::BatchToSpaceNdDescriptor descriptor;
902 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
903 descriptor.m_BlockShape =
904 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
905 descriptor.m_Crops = crops;
906
907 auto layerName = GetLayerName(graph, layerIndex);
908 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
909
910 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
911 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
912
913 RegisterInputSlots(graph, layerIndex, layer);
914 RegisterOutputSlots(graph, layerIndex, layer);
915}
916
ruoyan018e7fa232019-02-28 15:09:07 +0000917void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
918{
919 CHECK_LAYERS(graph, 0, layerIndex);
920
921 auto inputs = GetInputs(graph, layerIndex);
922 CHECK_VALID_SIZE(inputs.size(), 1);
923
924 auto outputs = GetOutputs(graph, layerIndex);
925 CHECK_VALID_SIZE(outputs.size(), 1);
926 auto outputInfo = ToTensorInfo(outputs[0]);
927
ruoyan015c7ab052019-03-04 14:48:02 +0000928 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +0000929
930 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
931 auto serializerDescriptor = serializerLayer->descriptor();
932
933 armnn::BatchNormalizationDescriptor descriptor;
934 descriptor.m_Eps = serializerDescriptor->eps();
935 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
936
937 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
938 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
939 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
940 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
941
942 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
943 mean,
944 variance,
945 beta,
946 gamma,
947 layerName.c_str());
948 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
949
950 RegisterInputSlots(graph, layerIndex, layer);
951 RegisterOutputSlots(graph, layerIndex, layer);
952}
953
Conor Kennedy76277882019-02-26 08:29:54 +0000954void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
955{
956 CHECK_LAYERS(graph, 0, layerIndex);
957 CHECK_LOCATION();
958
959 auto outputs = GetOutputs(graph, layerIndex);
960 CHECK_VALID_SIZE(outputs.size(), 1);
961
962 auto layerName = GetLayerName(graph, layerIndex);
963
964 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
965 auto serializerInput = serializerLayer->input();
966
967 armnn::ConstTensor input = ToConstTensor(serializerInput);
968
969 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
970
971 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
972 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
973
974 RegisterOutputSlots(graph, layerIndex, layer);
975}
976
Derek Lamberti8ddae332019-02-21 16:29:43 +0000977void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +0000978{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000979 CHECK_LAYERS(graph, 0, layerIndex);
980 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +0000981 CHECK_LOCATION();
982 CHECK_VALID_SIZE(inputs.size(), 1);
983
Derek Lamberti8ddae332019-02-21 16:29:43 +0000984 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +0000985 CHECK_VALID_SIZE(outputs.size(), 1);
986
Derek Lamberti8ddae332019-02-21 16:29:43 +0000987 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000988 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +0000989 auto serializerDescriptor = serializerLayer->descriptor();
990
991 armnn::Convolution2dDescriptor descriptor;
992 descriptor.m_PadLeft = serializerDescriptor->padLeft();
993 descriptor.m_PadRight = serializerDescriptor->padRight();
994 descriptor.m_PadTop = serializerDescriptor->padTop();
995 descriptor.m_PadBottom = serializerDescriptor->padBottom();
996 descriptor.m_StrideX = serializerDescriptor->strideX();
997 descriptor.m_StrideY = serializerDescriptor->strideY();;
Matthew Benthamacad04e2019-05-13 10:02:45 +0100998 descriptor.m_DilationX = serializerDescriptor->dilationX();
999 descriptor.m_DilationY = serializerDescriptor->dilationY();;
Mike Kellya0766c32019-02-19 17:22:07 +00001000 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1001 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1002
1003 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1004 armnn::ConstTensor biases;
1005
Matteo Martincighfc598e12019-05-14 10:36:13 +01001006 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001007 if (descriptor.m_BiasEnabled)
1008 {
1009 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001010 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001011 }
1012 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1013 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001014 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001015 layerName.c_str());
1016 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1017 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1018
Derek Lamberti8ddae332019-02-21 16:29:43 +00001019 RegisterInputSlots(graph, layerIndex, layer);
1020 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001021}
1022
Derek Lamberti8ddae332019-02-21 16:29:43 +00001023void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001024{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001025 CHECK_LAYERS(graph, 0, layerIndex);
1026 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001027 CHECK_LOCATION();
1028 CHECK_VALID_SIZE(inputs.size(), 1);
1029
Derek Lamberti8ddae332019-02-21 16:29:43 +00001030 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001031 CHECK_VALID_SIZE(outputs.size(), 1);
1032
Derek Lamberti8ddae332019-02-21 16:29:43 +00001033 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001034 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001035 auto serializerDescriptor = serializerLayer->descriptor();
1036
1037 armnn::DepthwiseConvolution2dDescriptor descriptor;
1038 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1039 descriptor.m_PadRight = serializerDescriptor->padRight();
1040 descriptor.m_PadTop = serializerDescriptor->padTop();
1041 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1042 descriptor.m_StrideX = serializerDescriptor->strideX();
1043 descriptor.m_StrideY = serializerDescriptor->strideY();;
1044 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1045 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1046
1047 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1048 armnn::ConstTensor biases;
1049
Matteo Martincighfc598e12019-05-14 10:36:13 +01001050 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001051 if (descriptor.m_BiasEnabled)
1052 {
1053 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001054 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001055 }
1056 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1057 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001058 optionalBiases,
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001059 layerName.c_str());
1060
1061 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1062 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1063
Derek Lamberti8ddae332019-02-21 16:29:43 +00001064 RegisterInputSlots(graph, layerIndex, layer);
1065 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001066}
1067
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001068void Deserializer::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
1069{
1070 CHECK_LAYERS(graph, 0, layerIndex);
1071 auto inputs = GetInputs(graph, layerIndex);
1072 CHECK_LOCATION();
1073 CHECK_VALID_SIZE(inputs.size(), 2);
1074
1075 auto outputs = GetOutputs(graph, layerIndex);
1076 CHECK_VALID_SIZE(outputs.size(), 4);
1077
1078 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1079 auto layerName = GetLayerName(graph, layerIndex);
1080 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1081
1082 armnn::DetectionPostProcessDescriptor descriptor;
1083 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1084 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1085 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1086 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1087 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1088 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1089 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1090 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1091 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1092 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1093 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1094
1095 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1096
1097 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1098 anchors,
1099 layerName.c_str());
1100
1101 for (unsigned int i = 0; i < 4; i++)
1102 {
1103 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1104 }
1105
1106 RegisterInputSlots(graph, layerIndex, layer);
1107 RegisterOutputSlots(graph, layerIndex, layer);
1108}
1109
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001110void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
1111{
1112 CHECK_LAYERS(graph, 0, layerIndex);
1113 auto inputs = GetInputs(graph, layerIndex);
1114 CHECK_LOCATION();
1115 CHECK_VALID_SIZE(inputs.size(), 2);
1116
1117 auto outputs = GetOutputs(graph, layerIndex);
1118 CHECK_VALID_SIZE(outputs.size(), 1);
1119
1120 auto layerName = GetLayerName(graph, layerIndex);
1121 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1122
1123 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1124 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1125
1126 RegisterInputSlots(graph, layerIndex, layer);
1127 RegisterOutputSlots(graph, layerIndex, layer);
1128}
1129
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001130void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
1131{
1132 CHECK_LAYERS(graph, 0, layerIndex);
1133 auto inputs = GetInputs(graph, layerIndex);
1134 CHECK_LOCATION();
1135 CHECK_VALID_SIZE(inputs.size(), 2);
1136
1137 auto outputs = GetOutputs(graph, layerIndex);
1138 CHECK_VALID_SIZE(outputs.size(), 1);
1139
1140 auto layerName = GetLayerName(graph, layerIndex);
1141 IConnectableLayer* layer = m_Network->AddEqualLayer(layerName.c_str());
1142
1143 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1144 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1145
1146 RegisterInputSlots(graph, layerIndex, layer);
1147 RegisterOutputSlots(graph, layerIndex, layer);
1148}
1149
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001150void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
1151{
1152 CHECK_LAYERS(graph, 0, layerIndex);
1153 auto inputs = GetInputs(graph, layerIndex);
1154 CHECK_LOCATION();
1155 CHECK_VALID_SIZE(inputs.size(), 2);
1156
1157 auto outputs = GetOutputs(graph, layerIndex);
1158 CHECK_VALID_SIZE(outputs.size(), 1);
1159
1160 auto layerName = GetLayerName(graph, layerIndex);
1161 IConnectableLayer* layer = m_Network->AddGreaterLayer(layerName.c_str());
1162
1163 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1164 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1165
1166 RegisterInputSlots(graph, layerIndex, layer);
1167 RegisterOutputSlots(graph, layerIndex, layer);
1168}
1169
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001170void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
1171{
1172 CHECK_LAYERS(graph, 0, layerIndex);
1173
1174 auto inputs = GetInputs(graph, layerIndex);
1175 CHECK_VALID_SIZE(inputs.size(), 1);
1176
1177 auto outputs = GetOutputs(graph, layerIndex);
1178 CHECK_VALID_SIZE(outputs.size(), 1);
1179 auto outputInfo = ToTensorInfo(outputs[0]);
1180
1181 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1182 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1183
1184 auto layerName = GetLayerName(graph, layerIndex);
1185 armnn::L2NormalizationDescriptor descriptor;
1186 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1187
1188 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1189 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1190
1191 RegisterInputSlots(graph, layerIndex, layer);
1192 RegisterOutputSlots(graph, layerIndex, layer);
1193}
1194
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001195void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
1196{
1197 CHECK_LAYERS(graph, 0, layerIndex);
1198 auto inputs = GetInputs(graph, layerIndex);
1199 CHECK_LOCATION();
1200 CHECK_VALID_SIZE(inputs.size(), 2);
1201
1202 auto outputs = GetOutputs(graph, layerIndex);
1203 CHECK_VALID_SIZE(outputs.size(), 1);
1204
1205 auto layerName = GetLayerName(graph, layerIndex);
1206 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1207
1208 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1209 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1210
1211 RegisterInputSlots(graph, layerIndex, layer);
1212 RegisterOutputSlots(graph, layerIndex, layer);
1213}
1214
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001215void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
1216{
1217 CHECK_LAYERS(graph, 0, layerIndex);
1218 auto inputs = GetInputs(graph, layerIndex);
1219 CHECK_LOCATION();
1220 CHECK_VALID_SIZE(inputs.size(), 2);
1221
1222 auto outputs = GetOutputs(graph, layerIndex);
1223 CHECK_VALID_SIZE(outputs.size(), 1);
1224
1225 auto layerName = GetLayerName(graph, layerIndex);
1226 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1227
1228 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1229 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1230
1231 RegisterInputSlots(graph, layerIndex, layer);
1232 RegisterOutputSlots(graph, layerIndex, layer);
1233}
1234
Jim Flynne242f2d2019-05-22 14:24:13 +01001235const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1236 unsigned int layerIndex)
1237{
1238 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1239
1240 switch (layerType)
1241 {
1242 case Layer::Layer_ConcatLayer:
1243 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1244 case Layer::Layer_MergerLayer:
1245 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1246 default:
1247 throw armnn::Exception("unknown layer type, should be concat or merger");
1248 }
1249}
1250
Jim Flynn906f9462019-05-10 13:55:21 +01001251void Deserializer::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001252{
1253 CHECK_LAYERS(graph, 0, layerIndex);
1254 CHECK_LOCATION();
1255
1256 auto outputs = GetOutputs(graph, layerIndex);
1257 CHECK_VALID_SIZE(outputs.size(), 1);
1258
Jim Flynnac25a1b2019-02-28 10:40:49 +00001259 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001260 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1261 unsigned int numViews = originsDescriptor->numViews();
1262 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001263
1264 // can now check the number of inputs == number of views
1265 auto inputs = GetInputs(graph, layerIndex);
1266 CHECK_VALID_SIZE(inputs.size(), numViews);
1267
1268 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001269 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001270 for (unsigned int v = 0; v < numViews; ++v)
1271 {
1272 auto originPtr = originsPtr->Get(v);
1273 for (unsigned int d = 0; d < numDimensions; ++d)
1274 {
1275 uint32_t value = originPtr->data()->Get(d);
1276 descriptor.SetViewOriginCoord(v, d, value);
1277 }
1278 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001279 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001280
Jim Flynn906f9462019-05-10 13:55:21 +01001281 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001282 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1283 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1284
1285 RegisterInputSlots(graph, layerIndex, layer);
1286 RegisterOutputSlots(graph, layerIndex, layer);
1287}
1288
Derek Lamberti8ddae332019-02-21 16:29:43 +00001289void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001290{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001291 CHECK_LAYERS(graph, 0, layerIndex);
1292 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001293 CHECK_LOCATION();
1294 CHECK_VALID_SIZE(inputs.size(), 2);
1295
Derek Lamberti8ddae332019-02-21 16:29:43 +00001296 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001297 CHECK_VALID_SIZE(outputs.size(), 1);
1298
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001299 auto layerName = GetLayerName(graph, layerIndex);
1300 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001301
1302 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1303 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1304
Derek Lamberti8ddae332019-02-21 16:29:43 +00001305 RegisterInputSlots(graph, layerIndex, layer);
1306 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001307}
1308
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001309void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
1310{
1311 CHECK_LAYERS(graph, 0, layerIndex);
1312 CHECK_LOCATION();
1313
1314 auto inputs = GetInputs(graph, layerIndex);
1315 CHECK_VALID_SIZE(inputs.size(), 1);
1316
1317 auto outputs = GetOutputs(graph, layerIndex);
1318 CHECK_VALID_SIZE(outputs.size(), 1);
1319
1320 auto layerName = GetLayerName(graph, layerIndex);
1321
1322 armnn::IConnectableLayer* layer;
1323
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001324 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001325
1326 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1327 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1328
1329 RegisterInputSlots(graph, layerIndex, layer);
1330 RegisterOutputSlots(graph, layerIndex, layer);
1331}
1332
Derek Lamberti8ddae332019-02-21 16:29:43 +00001333void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001334{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001335 CHECK_LAYERS(graph, 0, layerIndex);
1336 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001337 CHECK_LOCATION();
1338 CHECK_VALID_SIZE(inputs.size(), 1);
1339
Derek Lamberti8ddae332019-02-21 16:29:43 +00001340 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001341 CHECK_VALID_SIZE(outputs.size(), 1);
1342
Derek Lamberti8ddae332019-02-21 16:29:43 +00001343 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001344 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001345 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1346
1347 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1348 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1349 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1350
1351 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1352
1353 armnn::IConnectableLayer* layer;
Matteo Martincighfc598e12019-05-14 10:36:13 +01001354 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001355 if (flatBufferDescriptor->biasEnabled())
1356 {
1357 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001358 optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001359 }
Matteo Martincighfc598e12019-05-14 10:36:13 +01001360 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1361 weightsTensor,
1362 optionalBiases,
1363 layerName.c_str());
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001364
1365 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1366 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1367
Derek Lamberti8ddae332019-02-21 16:29:43 +00001368 RegisterInputSlots(graph, layerIndex, layer);
1369 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001370}
1371
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001372void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
1373{
1374 CHECK_LAYERS(graph, 0, layerIndex);
1375
1376 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1377 CHECK_VALID_SIZE(inputs.size(), 1);
1378
1379 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1380 CHECK_VALID_SIZE(outputs.size(), 1);
1381
1382 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1383 auto flatBufferPadList = flatBufferDescriptor->padList();
1384
1385 if (flatBufferPadList->Length() % 2 != 0)
1386 {
1387 throw ParseException(boost::str(
1388 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1389 }
1390
1391 std::vector<std::pair<unsigned int, unsigned int>> padList;
1392 padList.reserve(flatBufferPadList->Length() / 2);
1393 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1394 {
1395 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1396 }
1397
1398 armnn::PadDescriptor descriptor(padList);
1399
1400 auto layerName = GetLayerName(graph, layerIndex);
1401 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1402
1403 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1404 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1405
1406 RegisterInputSlots(graph, layerIndex, layer);
1407 RegisterOutputSlots(graph, layerIndex, layer);
1408}
1409
Derek Lamberti8ddae332019-02-21 16:29:43 +00001410void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001411{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001412 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001413
1414 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001415 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001416
Derek Lamberti8ddae332019-02-21 16:29:43 +00001417 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001418 CHECK_VALID_SIZE(inputs.size(), 1);
1419
Derek Lamberti8ddae332019-02-21 16:29:43 +00001420 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001421 CHECK_VALID_SIZE(outputs.size(), 1);
1422 auto outputInfo = ToTensorInfo(outputs[0]);
1423
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001424 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001425 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1426
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001427 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001428 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1429
Derek Lamberti8ddae332019-02-21 16:29:43 +00001430 RegisterInputSlots(graph, layerIndex, layer);
1431 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001432}
1433
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001434armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001435 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001436{
1437 armnn::Pooling2dDescriptor desc;
1438
1439 switch (pooling2dDesc->poolType())
1440 {
1441 case PoolingAlgorithm_Average:
1442 {
1443 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001444 break;
1445 }
1446 case PoolingAlgorithm_Max:
1447 {
1448 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001449 break;
1450 }
1451 default:
1452 {
1453 BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
1454 }
1455 }
1456
1457 switch (pooling2dDesc->outputShapeRounding())
1458 {
1459 case OutputShapeRounding_Floor:
1460 {
1461 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1462 break;
1463 }
1464 case OutputShapeRounding_Ceiling:
1465 {
1466 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
1467 break;
1468 }
1469 default:
1470 {
1471 BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
1472 }
1473 }
1474
1475 switch (pooling2dDesc->paddingMethod())
1476 {
1477 case PaddingMethod_Exclude:
1478 {
1479 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1480 break;
1481 }
1482 case PaddingMethod_IgnoreValue:
1483 {
1484 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
1485 break;
1486 }
1487 default:
1488 {
1489 BOOST_ASSERT_MSG(false, "Unsupported padding method");
1490 }
1491 }
1492
1493 switch (pooling2dDesc->dataLayout())
1494 {
1495 case DataLayout_NCHW:
1496 {
1497 desc.m_DataLayout = armnn::DataLayout::NCHW;
1498 break;
1499 }
1500 case DataLayout_NHWC:
1501 {
1502 desc.m_DataLayout = armnn::DataLayout::NHWC;
1503 break;
1504 }
1505 default:
1506 {
1507 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1508 }
1509 }
1510
1511 desc.m_PadRight = pooling2dDesc->padRight();
1512 desc.m_PadLeft = pooling2dDesc->padLeft();
1513 desc.m_PadBottom = pooling2dDesc->padBottom();
1514 desc.m_PadTop = pooling2dDesc->padTop();
1515 desc.m_StrideX = pooling2dDesc->strideX();
1516 desc.m_StrideY = pooling2dDesc->strideY();
1517 desc.m_PoolWidth = pooling2dDesc->poolWidth();
1518 desc.m_PoolHeight = pooling2dDesc->poolHeight();
1519
1520 return desc;
1521}
1522
Derek Lamberti8ddae332019-02-21 16:29:43 +00001523void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001524{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001525 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001526
Derek Lamberti8ddae332019-02-21 16:29:43 +00001527 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001528 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001529 CHECK_VALID_SIZE(inputs.size(), 1);
1530
Derek Lamberti8ddae332019-02-21 16:29:43 +00001531 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001532 CHECK_VALID_SIZE(outputs.size(), 1);
1533 auto outputInfo = ToTensorInfo(outputs[0]);
1534
1535 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001536 auto layerName = GetLayerName(graph, layerIndex);
1537 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001538 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1539
Derek Lamberti8ddae332019-02-21 16:29:43 +00001540 RegisterInputSlots(graph, layerIndex, layer);
1541 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001542}
1543
Derek Lamberti87acb272019-03-27 16:51:31 +00001544void Deserializer::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
1545{
1546 CHECK_LAYERS(graph, 0, layerIndex);
1547
1548 auto inputs = GetInputs(graph, layerIndex);
1549 CHECK_VALID_SIZE(inputs.size(), 1);
1550
1551 auto outputs = GetOutputs(graph, layerIndex);
1552 CHECK_VALID_SIZE(outputs.size(), 1);
1553 auto outputInfo = ToTensorInfo(outputs[0]);
1554
1555 auto layerName = GetLayerName(graph, layerIndex);
1556 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
1557 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1558
1559 RegisterInputSlots(graph, layerIndex, layer);
1560 RegisterOutputSlots(graph, layerIndex, layer);
1561}
1562
// Resolves the concrete output shape for a Reshape layer. At most one entry of
// targetDimsIn may be -1 ("stretch" dimension); its size is inferred so that
// the total element count matches the input tensor. Quantization parameters of
// the input info are carried over unchanged; only the shape is replaced.
armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                     const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    // targetDimsIn holds uint32_t, so "-1" is really 0xFFFFFFFF wrapped around.
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        // A second -1 makes the shape ambiguous - reject it.
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(boost::str(
                boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
        }

        // Multiply every target dim with an initial value of -1: the single -1
        // stretch entry cancels the -1 seed, leaving the (positive) product of
        // all non-stretch dimensions.
        // NOTE(review): if any non-stretch dim is 0 this divides by zero below;
        // presumably serialized models never contain zero-sized dims - confirm.
        auto targetNumElements =
            boost::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    // Copy the input info (data type, quantization) and swap in the new shape.
    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}
1592
// Deserializes a Reshape layer. The target shape may contain a single -1
// entry, resolved against the input element count by OutputShapeOfReshape.
void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    // NOTE(review): inputs.size() is not validated here (no CHECK_VALID_SIZE),
    // unlike the other Parse functions - confirm whether that is intentional.
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    // Resolve any -1 "stretch" dimension into a concrete size.
    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    // NOTE(review): the shape check only runs when there is more than one
    // input, and the error message prints actualOutputTensorInfo while the
    // comparison uses reshapeOutputTensorShape vs expectedDims - looks
    // inconsistent; confirm intent before changing.
    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1635
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001636void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
1637{
1638 CHECK_LAYERS(graph, 0, layerIndex);
1639
1640 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1641 CHECK_VALID_SIZE(inputs.size(), 1);
1642
1643 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1644 CHECK_VALID_SIZE(outputs.size(), 1);
1645
1646 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
1647
1648 armnn::ResizeBilinearDescriptor descriptor;
1649 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
1650 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
1651 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1652
1653 auto layerName = GetLayerName(graph, layerIndex);
1654 IConnectableLayer* layer = m_Network->AddResizeBilinearLayer(descriptor, layerName.c_str());
1655
1656 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1657 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1658
1659 RegisterInputSlots(graph, layerIndex, layer);
1660 RegisterOutputSlots(graph, layerIndex, layer);
1661}
1662
Derek Lamberti8ddae332019-02-21 16:29:43 +00001663void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001664{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001665 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001666
Derek Lamberti8ddae332019-02-21 16:29:43 +00001667 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001668 CHECK_VALID_SIZE(inputs.size(), 1);
1669
Derek Lamberti8ddae332019-02-21 16:29:43 +00001670 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001671 CHECK_VALID_SIZE(outputs.size(), 1);
1672
1673 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00001674 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001675 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001676
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001677 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
1678
1679 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1680 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1681
Derek Lamberti8ddae332019-02-21 16:29:43 +00001682 RegisterInputSlots(graph, layerIndex, layer);
1683 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001684}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001685
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00001686void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
1687{
1688 CHECK_LAYERS(graph, 0, layerIndex);
1689
1690 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1691 CHECK_VALID_SIZE(inputs.size(), 1);
1692
1693 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1694 CHECK_VALID_SIZE(outputs.size(), 1);
1695
1696 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
1697 auto flatBufferPadList = flatBufferDescriptor->padList();
1698 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1699
1700 if (flatBufferPadList->Length() % 2 != 0)
1701 {
1702 throw ParseException(boost::str(
1703 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1704 }
1705
1706 std::vector<std::pair<unsigned int, unsigned int>> padList;
1707 padList.reserve(flatBufferPadList->Length() / 2);
1708 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1709 {
1710 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1711 }
1712
1713 armnn::SpaceToBatchNdDescriptor descriptor;
1714 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1715 descriptor.m_BlockShape =
1716 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1717 descriptor.m_PadList = padList;
1718
1719 auto layerName = GetLayerName(graph, layerIndex);
1720 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
1721
1722 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1723 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1724
1725 RegisterInputSlots(graph, layerIndex, layer);
1726 RegisterOutputSlots(graph, layerIndex, layer);
1727}
1728
Nina Drozd57728782019-02-27 10:53:27 +00001729armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
1730 Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
1731 unsigned int layerIndex)
1732{
1733 armnn::NormalizationDescriptor desc;
1734
1735 switch (normalizationDescriptor->normChannelType())
1736 {
1737 case NormalizationAlgorithmChannel_Across:
1738 {
1739 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
1740 break;
1741 }
1742 case NormalizationAlgorithmChannel_Within:
1743 {
1744 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
1745 break;
1746 }
1747 default:
1748 {
1749 BOOST_ASSERT_MSG(false, "Unsupported normalization channel type");
1750 }
1751 }
1752
1753 switch (normalizationDescriptor->normMethodType())
1754 {
1755 case NormalizationAlgorithmMethod_LocalBrightness:
1756 {
1757 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
1758 break;
1759 }
1760 case NormalizationAlgorithmMethod_LocalContrast:
1761 {
1762 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
1763 break;
1764 }
1765 default:
1766 {
1767 BOOST_ASSERT_MSG(false, "Unsupported normalization method type");
1768 }
1769 }
1770
1771 switch (normalizationDescriptor->dataLayout())
1772 {
1773 case DataLayout_NCHW:
1774 {
1775 desc.m_DataLayout = armnn::DataLayout::NCHW;
1776 break;
1777 }
1778 case DataLayout_NHWC:
1779 {
1780 desc.m_DataLayout = armnn::DataLayout::NHWC;
1781 break;
1782 }
1783 default:
1784 {
1785 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1786 }
1787 }
1788
1789 desc.m_Alpha = normalizationDescriptor->alpha();
1790 desc.m_Beta = normalizationDescriptor->beta();
1791 desc.m_K = normalizationDescriptor->k();
1792 desc.m_NormSize = normalizationDescriptor->normSize();
1793
1794 return desc;
1795}
1796
1797void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
1798{
1799 CHECK_LAYERS(graph, 0, layerIndex);
1800
1801 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
1802
1803 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1804 CHECK_VALID_SIZE(inputs.size(), 1);
1805
1806 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1807 CHECK_VALID_SIZE(outputs.size(), 1);
1808
1809 auto outputInfo = ToTensorInfo(outputs[0]);
1810
1811 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
1812 auto layerName = GetLayerName(graph, layerIndex);
1813
1814 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
1815 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1816
1817 RegisterInputSlots(graph, layerIndex, layer);
1818 RegisterOutputSlots(graph, layerIndex, layer);
1819}
1820
Sadik Armagan8b42a382019-03-01 14:24:49 +00001821void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
1822{
1823 CHECK_LAYERS(graph, 0, layerIndex);
1824 auto inputs = GetInputs(graph, layerIndex);
1825 CHECK_LOCATION();
1826 CHECK_VALID_SIZE(inputs.size(), 1);
1827
1828 auto outputs = GetOutputs(graph, layerIndex);
1829 CHECK_VALID_SIZE(outputs.size(), 1);
1830
1831 auto layerName = GetLayerName(graph, layerIndex);
1832 IConnectableLayer* layer = m_Network->AddRsqrtLayer(layerName.c_str());
1833
1834 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1835 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1836
1837 RegisterInputSlots(graph, layerIndex, layer);
1838 RegisterOutputSlots(graph, layerIndex, layer);
1839}
1840
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00001841void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
1842{
1843 CHECK_LAYERS(graph, 0, layerIndex);
1844
1845 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1846 CHECK_VALID_SIZE(inputs.size(), 1);
1847
1848 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1849 CHECK_VALID_SIZE(outputs.size(), 1);
1850
1851 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
1852
1853 auto flatBufferBegin = flatBufferDescriptor->begin();
1854 auto flatBufferEnd = flatBufferDescriptor->end();
1855 auto flatBufferStride = flatBufferDescriptor->stride();
1856
1857 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
1858 flatBufferBegin->Length() == flatBufferStride->Length()))
1859 {
1860 throw ParseException(boost::str(
1861 boost::format("The size of the begin, end, and stride must be equal %1%") % CHECK_LOCATION().AsString()));
1862 }
1863
1864 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
1865 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
1866 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
1867
1868 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
1869 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
1870 descriptor.m_EndMask = flatBufferDescriptor->endMask();
1871 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
1872 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
1873 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
1874 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1875
1876 auto layerName = GetLayerName(graph, layerIndex);
1877 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
1878
1879 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1880 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1881
1882 RegisterInputSlots(graph, layerIndex, layer);
1883 RegisterOutputSlots(graph, layerIndex, layer);
1884}
1885
Conor Kennedyda1f9752019-03-01 14:37:12 +00001886void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
1887{
1888 CHECK_LAYERS(graph, 0, layerIndex);
1889 auto inputs = GetInputs(graph, layerIndex);
1890 CHECK_LOCATION();
1891 CHECK_VALID_SIZE(inputs.size(), 2);
1892
1893 auto outputs = GetOutputs(graph, layerIndex);
1894 CHECK_VALID_SIZE(outputs.size(), 1);
1895
1896 auto layerName = GetLayerName(graph, layerIndex);
1897 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
1898
1899 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1900 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1901
1902 RegisterInputSlots(graph, layerIndex, layer);
1903 RegisterOutputSlots(graph, layerIndex, layer);
1904}
1905
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001906void Deserializer::ParseGather(GraphPtr graph, unsigned int layerIndex)
1907{
1908 CHECK_LAYERS(graph, 0, layerIndex);
1909
1910 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1911 CHECK_VALID_SIZE(inputs.size(), 2);
1912
1913 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1914 CHECK_VALID_SIZE(outputs.size(), 1);
1915
1916 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001917 IConnectableLayer* layer = m_Network->AddGatherLayer(layerName.c_str());
1918
1919 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001920 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1921
1922 RegisterInputSlots(graph, layerIndex, layer);
1923 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001924}
1925
Sadik Armaganac97c8c2019-03-04 17:44:21 +00001926void Deserializer::ParseMean(GraphPtr graph, unsigned int layerIndex)
1927{
1928 CHECK_LAYERS(graph, 0, layerIndex);
1929
1930 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1931 CHECK_VALID_SIZE(inputs.size(), 1);
1932
1933 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1934 CHECK_VALID_SIZE(outputs.size(), 1);
1935
1936 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
1937 auto flatBufferAxis = flatBufferDescriptor->axis();
1938 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
1939
1940 armnn::MeanDescriptor descriptor;
1941 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
1942 descriptor.m_KeepDims = flatBufferKeepDims;
1943
1944 auto layerName = GetLayerName(graph, layerIndex);
1945 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
1946
1947 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1948 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1949
1950 RegisterInputSlots(graph, layerIndex, layer);
1951 RegisterOutputSlots(graph, layerIndex, layer);
1952}
1953
// Deserializes a Splitter layer: rebuilds the per-view sizes and origin
// coordinates from the serialized ViewsDescriptor and wires one output slot
// per view.
void Deserializer::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    // NOTE(review): outputs.size() is never validated against numViews below;
    // a malformed file with fewer outputs than views would index out of range
    // in the SetTensorInfo loop - confirm and consider adding a check.
    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);

    auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
    auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
    auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
    auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
    uint32_t numViews = flatBufferOriginsDescriptor->numViews();
    uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();

    // Check numViews and numDimensions corresponds to the ones already serialized ...
    // numViews == flatBufferViewSizes.size();
    // foreach: numDimensions == flatBufferViewSizes[x].size();

    // Copy each view's size and origin coordinate, dimension by dimension.
    armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
    for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
    {
        for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
        {
            viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
            viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
        }
    }

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());

    // I could have as many outputs as views ...
    for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
    {
        armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
        layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1997
Jim Flynn11af3752019-03-19 17:22:29 +00001998armnn::LstmDescriptor Deserializer::GetLstmDescriptor(Deserializer::LstmDescriptorPtr lstmDescriptor)
1999{
2000 armnn::LstmDescriptor desc;
2001
2002 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
2003 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
2004 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
2005 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
2006 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
2007 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
2008
2009 return desc;
2010}
2011
// Deserializes an LSTM layer. Rebuilds the descriptor and all weight/bias
// ConstTensors from the flatbuffer, then adds the layer with its 4 outputs
// (scratch buffer, output state, cell state, output).
//
// NOTE: LstmInputParams stores raw pointers into the stack-local ConstTensors
// below; they must stay in scope until AddLstmLayer() has copied them, which
// is why every tensor is a named local rather than a temporary.
void Deserializer::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    // 3 inputs: input, output state (h_{t-1}), cell state (c_{t-1}).
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    // 4 outputs: scratch buffer, output state, cell state, output.
    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters — always present regardless of descriptor flags.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate parameters — only serialized when CIFG is disabled
    // (CIFG couples the input gate to the forget gate).
    // Declared outside the 'if' so they outlive the AddLstmLayer() call.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Projection parameters — only present when projection is enabled.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Peephole parameters — only present when peephole connections are enabled.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    // Attach tensor info to each of the four output slots.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2107
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002108void Deserializer::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
2109{
2110 CHECK_LAYERS(graph, 0, layerIndex);
2111
2112 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2113 CHECK_VALID_SIZE(inputs.size(), 1);
2114
2115 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2116 CHECK_VALID_SIZE(outputs.size(), 1);
2117
2118 const std::string layerName = GetLayerName(graph, layerIndex);
2119 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
2120
2121 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2122 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2123
2124 RegisterInputSlots(graph, layerIndex, layer);
2125 RegisterOutputSlots(graph, layerIndex, layer);
2126}
2127
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01002128void Deserializer::ParseMerge(GraphPtr graph, unsigned int layerIndex)
2129{
2130 CHECK_LAYERS(graph, 0, layerIndex);
2131
2132 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2133 CHECK_VALID_SIZE(inputs.size(), 2);
2134
2135 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2136 CHECK_VALID_SIZE(outputs.size(), 1);
2137
2138 const std::string layerName = GetLayerName(graph, layerIndex);
2139 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
2140
2141 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2142 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2143
2144 RegisterInputSlots(graph, layerIndex, layer);
2145 RegisterOutputSlots(graph, layerIndex, layer);
2146}
2147
Sadik Armaganeff363d2019-04-05 15:25:46 +01002148void Deserializer::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
2149{
2150 CHECK_LAYERS(graph, 0, layerIndex);
2151 auto inputs = GetInputs(graph, layerIndex);
2152 CHECK_LOCATION();
2153 CHECK_VALID_SIZE(inputs.size(), 2);
2154
2155 auto outputs = GetOutputs(graph, layerIndex);
2156 CHECK_VALID_SIZE(outputs.size(), 2);
2157
2158 auto layerName = GetLayerName(graph, layerIndex);
2159 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
2160
2161 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
2162 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
2163
2164 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
2165 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
2166
2167 RegisterInputSlots(graph, layerIndex, layer);
2168 RegisterOutputSlots(graph, layerIndex, layer);
2169}
2170
Derek Lamberti0028d1b2019-02-20 13:57:42 +00002171} // namespace armnnDeserializer