blob: 170917e5cf5c541482a1d869866749c6b44537e2 [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
8#include <armnn/ArmNN.hpp>
9#include <armnn/Exceptions.hpp>
10
11#include <ParserHelper.hpp>
12#include <Permute.hpp>
13#include <VerificationHelpers.hpp>
14
15#include <boost/filesystem.hpp>
16#include <boost/format.hpp>
17#include <boost/core/ignore_unused.hpp>
18#include <boost/assert.hpp>
19#include <boost/format.hpp>
20#include <boost/log/trivial.hpp>
Aron Virginas-Tard4f0fea2019-04-09 14:08:06 +010021#include <boost/format.hpp>
22#include <boost/numeric/conversion/cast.hpp>
Jim Flynn18ce3382019-03-08 11:08:30 +000023#include <boost/polymorphic_cast.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000024
25// The generated code based on the Serialize schema:
Matthew Bentham268509a2019-02-25 13:58:24 +000026#include <ArmnnSchema_generated.h>
Kevin May43a799c2019-02-08 16:31:42 +000027
28#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000029#include <algorithm>
30#include <limits>
31#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000032
33using armnn::ParseException;
34using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000035using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000036
Derek Lamberti0028d1b2019-02-20 13:57:42 +000037namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000038{
Kevin May43a799c2019-02-08 16:31:42 +000039
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000040namespace
41{
Kevin May43a799c2019-02-08 16:31:42 +000042
// Sentinel layer index used for the implicit input/output binding layers.
constexpr uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
44
// Validates that the graph has a layers table and that layersIndex is in range.
// Throws armnn::ParseException (with the caller's location) on failure.
void CheckGraph(const Deserializer::GraphPtr& graph,
                unsigned int layersIndex,
                const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }

    if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
}
71
// Validates graph, layersIndex and layerIndex before a layer is dereferenced.
// Throws armnn::ParseException (with the caller's location) on failure.
// VIRTUAL_LAYER_ID is accepted as a layerIndex since it denotes the implicit
// input/output binding layers.
void CheckLayers(const Deserializer::GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    // BUGFIX: the original wrote graph->layers()[layersIndex].size(), which
    // applies operator[] to the *pointer* returned by layers() — undefined
    // behaviour for any layersIndex > 0. The intent is the layer count of the
    // graph's layers vector.
    else if (layerIndex >= graph->layers()->size()
            && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layer index. "
                              "layers:%2% layer:%3% at %4%") %
                location.m_Function %
                layersIndex %
                layerIndex %
                location.FileLine()));
    }
}
111
// Throws armnn::ParseException if the given tensor pointer is null.
void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
                    const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with a null tensor pointer. "
                              "at %2%") %
                location.m_Function %
                location.FileLine()));
    }
}
126
// Throws armnn::ParseException if the given const-tensor pointer is null.
void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
                         const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}
137
// Throws armnn::ParseException if the serialized const-tensor payload does not
// contain exactly one component per tensor element.
// IMPROVED: the message now reports both sizes — the original said only
// "wrong number of components", which gave no way to diagnose the mismatch.
void CheckConstTensorSize(const unsigned int constTensorSize,
                          const unsigned int tensorSize,
                          const CheckLocation& location)
{
    if (constTensorSize != tensorSize)
    {
        throw ParseException(boost::str(
            boost::format("%1% wrong number of components %2% supplied to tensor of size %3%. at:%4%") %
            location.m_Function %
            constTensorSize %
            tensorSize %
            location.FileLine()));
    }
}
149
// Convenience wrappers that forward the caller's source location
// (CHECK_LOCATION()) to the validation helpers above.

#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())

} // anonymous namespace
165
Saoirse Stewart263829c2019-02-19 15:54:14 +0000166bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
167{
168 const unsigned int actualSize = actual.GetNumDimensions();
169 if (actualSize != expected.size())
170 {
171 return false;
172 }
173
174 for (unsigned int i = 0u; i < actualSize; i++)
175 {
176 if (actual[i] != static_cast<unsigned int>(expected[i]))
177 {
178 return false;
179 }
180 }
181
182 return true;
183}
184
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000185Deserializer::Deserializer()
Kevin May43a799c2019-02-08 16:31:42 +0000186: m_Network(nullptr, nullptr),
187//May require LayerType_Max to be included
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000188m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
Kevin May43a799c2019-02-08 16:31:42 +0000189{
190 // register supported layers
Mike Kellyaf484012019-02-20 16:53:11 +0000191 m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000192 m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000193 m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &Deserializer::ParseBatchToSpaceNd;
ruoyan018e7fa232019-02-28 15:09:07 +0000194 m_ParserFunctions[Layer_BatchNormalizationLayer] = &Deserializer::ParseBatchNormalization;
Conor Kennedy76277882019-02-26 08:29:54 +0000195 m_ParserFunctions[Layer_ConstantLayer] = &Deserializer::ParseConstant;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000196 m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
197 m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000198 m_ParserFunctions[Layer_DequantizeLayer] = &Deserializer::ParseDequantize;
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +0000199 m_ParserFunctions[Layer_DetectionPostProcessLayer] = &Deserializer::ParseDetectionPostProcess;
Éanna Ó Catháin58885892019-02-27 16:16:39 +0000200 m_ParserFunctions[Layer_DivisionLayer] = &Deserializer::ParseDivision;
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +0000201 m_ParserFunctions[Layer_EqualLayer] = &Deserializer::ParseEqual;
Sadik Armagandbb0c0c2019-02-21 09:01:41 +0000202 m_ParserFunctions[Layer_FullyConnectedLayer] = &Deserializer::ParseFullyConnected;
Finn Williamsdd2ba7e2019-03-01 11:51:52 +0000203 m_ParserFunctions[Layer_FloorLayer] = &Deserializer::ParseFloor;
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000204 m_ParserFunctions[Layer_GatherLayer] = &Deserializer::ParseGather;
Conor Kennedy79ffdf52019-03-01 14:24:54 +0000205 m_ParserFunctions[Layer_GreaterLayer] = &Deserializer::ParseGreater;
Narumol Prangnawarat495701f2019-03-07 17:31:34 +0000206 m_ParserFunctions[Layer_L2NormalizationLayer] = &Deserializer::ParseL2Normalization;
Jim Flynn11af3752019-03-19 17:22:29 +0000207 m_ParserFunctions[Layer_LstmLayer] = &Deserializer::ParseLstm;
Aron Virginas-Tar377351e2019-02-27 14:42:31 +0000208 m_ParserFunctions[Layer_MaximumLayer] = &Deserializer::ParseMaximum;
Sadik Armaganac97c8c2019-03-04 17:44:21 +0000209 m_ParserFunctions[Layer_MeanLayer] = &Deserializer::ParseMean;
210 m_ParserFunctions[Layer_MinimumLayer] = &Deserializer::ParseMinimum;
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100211 m_ParserFunctions[Layer_MergeLayer] = &Deserializer::ParseMerge;
Jim Flynn906f9462019-05-10 13:55:21 +0100212 m_ParserFunctions[Layer_MergerLayer] = &Deserializer::ParseConcat;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000213 m_ParserFunctions[Layer_MultiplicationLayer] = &Deserializer::ParseMultiplication;
Nina Drozd57728782019-02-27 10:53:27 +0000214 m_ParserFunctions[Layer_NormalizationLayer] = &Deserializer::ParseNormalization;
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +0000215 m_ParserFunctions[Layer_PadLayer] = &Deserializer::ParsePad;
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +0000216 m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000217 m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
Derek Lamberti87acb272019-03-27 16:51:31 +0000218 m_ParserFunctions[Layer_QuantizeLayer] = &Deserializer::ParseQuantize;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000219 m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +0000220 m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
Sadik Armagan8b42a382019-03-01 14:24:49 +0000221 m_ParserFunctions[Layer_RsqrtLayer] = &Deserializer::ParseRsqrt;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000222 m_ParserFunctions[Layer_SoftmaxLayer] = &Deserializer::ParseSoftmax;
Nattapat Chaimanowong45286992019-02-26 15:53:02 +0000223 m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &Deserializer::ParseSpaceToBatchNd;
Jim Flynn18ce3382019-03-08 11:08:30 +0000224 m_ParserFunctions[Layer_SplitterLayer] = &Deserializer::ParseSplitter;
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +0000225 m_ParserFunctions[Layer_StridedSliceLayer] = &Deserializer::ParseStridedSlice;
Conor Kennedyda1f9752019-03-01 14:37:12 +0000226 m_ParserFunctions[Layer_SubtractionLayer] = &Deserializer::ParseSubtraction;
Sadik Armaganeff363d2019-04-05 15:25:46 +0100227 m_ParserFunctions[Layer_SwitchLayer] = &Deserializer::ParseSwitch;
Kevin May43a799c2019-02-08 16:31:42 +0000228}
229
// Returns the LayerBase of the layer at layerIndex by dispatching on the
// flatbuffers union type. Input/Output layers nest their LayerBase one level
// deeper (behind a BindableLayerBase), hence the extra ->base().
// Throws ParseException for Layer_NONE or an unknown type.
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            // Bindable layer: LayerBase is nested one level deeper.
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            // Bindable layer: LayerBase is nested one level deeper.
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                boost::format("Layer must have a type %1%") %
                Layer::Layer_NONE));
    }
}
321
// Returns the serialized name of the layer at 'index'.
// CONSISTENCY: use BOOST_ASSERT like the rest of this file (see
// CreateNetworkFromGraph) instead of a bare assert.
std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
{
    auto layer = GetBaseLayer(graph, index);
    BOOST_ASSERT(layer);
    return layer->layerName()->str();
}
328
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000329int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000330{
331 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
332
333 if (layerType == Layer::Layer_InputLayer)
334 {
335 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
336 }
337 else if ( layerType == Layer::Layer_OutputLayer )
338 {
339 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
340 }
341 return 0;
342}
343
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000344armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000345{
346 switch (dataLayout)
347 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000348 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000349 return armnn::DataLayout::NHWC;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000350 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000351 default:
352 return armnn::DataLayout::NCHW;
353 }
354}
355
Mike Kellyaf484012019-02-20 16:53:11 +0000356armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
357{
358 switch (function)
359 {
360 case armnnSerializer::ActivationFunction_Sigmoid:
361 return armnn::ActivationFunction::Sigmoid;
362 case armnnSerializer::ActivationFunction_TanH:
363 return armnn::ActivationFunction::TanH;
364 case armnnSerializer::ActivationFunction_Linear:
365 return armnn::ActivationFunction::Linear;
366 case armnnSerializer::ActivationFunction_ReLu:
367 return armnn::ActivationFunction::ReLu;
368 case armnnSerializer::ActivationFunction_BoundedReLu:
369 return armnn::ActivationFunction::BoundedReLu;
370 case armnnSerializer::ActivationFunction_LeakyReLu:
371 return armnn::ActivationFunction::LeakyReLu;
372 case armnnSerializer::ActivationFunction_Abs:
373 return armnn::ActivationFunction::Abs;
374 case armnnSerializer::ActivationFunction_Sqrt:
375 return armnn::ActivationFunction::Sqrt;
376 case armnnSerializer::ActivationFunction_Square:
377 return armnn::ActivationFunction::Square;
378 default:
379 return armnn::ActivationFunction::Sigmoid;
380 }
381}
382
// Builds an armnn::TensorInfo from a serialized TensorInfo table, translating
// the schema data type and copying dimensions and quantization parameters.
// Throws ParseException for an unrecognised data type.
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    CHECK_TENSOR_PTR(tensorPtr);

    armnn::DataType type;
    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
            type = armnn::DataType::QuantisedAsymm8;
            break;
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QuantisedSymm16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }

    const float quantizationScale = tensorPtr->quantizationScale();
    const int32_t quantizationOffset = tensorPtr->quantizationOffset();

    auto dimensions = tensorPtr->dimensions();
    const unsigned int numDimensions = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + numDimensions);

    // Two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(numDimensions,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}
434
// Builds an armnn::ConstTensor over the serialized payload, checking that the
// payload element count matches the tensor's element count. The returned
// ConstTensor aliases the flatbuffer data (no copy is made).
// Throws ParseException for an unrecognised payload type.
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}
477
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000478Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphInputs(const GraphPtr& graphPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000479{
480
481 CHECK_GRAPH(graphPtr, 0);
482 const auto& numInputs = graphPtr->inputIds()->size();
483
484 LayerBaseRawPtrVector result(numInputs);
485
486 for (unsigned int i=0; i<numInputs; ++i)
487 {
Mike Kelly8c1701a2019-02-11 17:01:27 +0000488 uint32_t inputId = graphPtr->inputIds()->Get(i);
Kevin May43a799c2019-02-08 16:31:42 +0000489 result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(inputId));
490 }
491 return result;
492}
493
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000494Deserializer::LayerBaseRawPtrVector Deserializer::GetGraphOutputs(const GraphPtr& graphPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000495{
496 CHECK_GRAPH(graphPtr, 0);
497 const auto& numOutputs = graphPtr->outputIds()->size();
Kevin May43a799c2019-02-08 16:31:42 +0000498 LayerBaseRawPtrVector result(numOutputs);
499
500 for (unsigned int i=0; i<numOutputs; ++i)
501 {
Mike Kelly8c1701a2019-02-11 17:01:27 +0000502 uint32_t outputId = graphPtr->outputIds()->Get(i);
Saoirse Stewart263829c2019-02-19 15:54:14 +0000503
Kevin May43a799c2019-02-08 16:31:42 +0000504 result[i] = GetBaseLayer(graphPtr, static_cast<uint32_t>(outputId));
505 }
506 return result;
507}
508
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000509Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000510 unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000511{
512 CHECK_LAYERS(graphPtr, 0, layerIndex);
513 auto layer = GetBaseLayer(graphPtr, layerIndex);
514 const auto& numInputs = layer->inputSlots()->size();
515
516 TensorRawPtrVector result(numInputs);
517
518 for (unsigned int i=0; i<numInputs; ++i)
519 {
520 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
521 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
522 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
523 }
524 return result;
525}
526
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000527Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000528 unsigned int layerIndex)
529{
530 CHECK_LAYERS(graphPtr, 0, layerIndex);
531 auto layer = GetBaseLayer(graphPtr, layerIndex);
532 const auto& numOutputs = layer->outputSlots()->size();
533
534 TensorRawPtrVector result(numOutputs);
535
536 for (unsigned int i=0; i<numOutputs; ++i)
537 {
538 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
539 }
540 return result;
541}
542
// Fallback parser for layer types without a registered handler:
// always throws, reporting the offending layer's index and name.
void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(
        boost::str(
            boost::format("Layer not supported. "
                          "layerIndex: %1% "
                          "layerName: %2% / %3%") %
            layerIndex %
            layerName %
            CHECK_LOCATION().AsString()));
}
556
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000557void Deserializer::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000558{
559 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000560 m_InputBindings.clear();
561 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000562}
563
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000564IDeserializer* IDeserializer::CreateRaw()
Kevin May43a799c2019-02-08 16:31:42 +0000565{
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000566 return new Deserializer();
Kevin May43a799c2019-02-08 16:31:42 +0000567}
568
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000569IDeserializerPtr IDeserializer::Create()
Kevin May43a799c2019-02-08 16:31:42 +0000570{
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000571 return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
Kevin May43a799c2019-02-08 16:31:42 +0000572}
573
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000574void IDeserializer::Destroy(IDeserializer* parser)
Kevin May43a799c2019-02-08 16:31:42 +0000575{
576 delete parser;
577}
578
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000579INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000580{
581 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000582 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
583 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000584}
585
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000586armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000587{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000588 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000589 std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
590 GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
591 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000592}
593
// Verifies that the buffer is a well-formed SerializedGraph flatbuffer and
// returns the graph root. Throws InvalidArgumentException for a null buffer
// and ParseException for a malformed one.
Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
                                                  CHECK_LOCATION().AsString()));
    }

    flatbuffers::Verifier verifier(binaryContent, len);
    if (!verifier.VerifyBuffer<SerializedGraph>())
    {
        throw ParseException(
            boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
                                     "flatbuffers format. size:%1% %2%") %
                       len %
                       CHECK_LOCATION().AsString()));
    }

    return GetSerializedGraph(binaryContent);
}
612
// Builds the armnn::INetwork from a verified graph:
//   1. run the registered parser for every non-Input/Output layer,
//   2. set up the virtual input/output layers,
//   3. wire every recorded output slot to its pending input slots.
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);

    m_GraphConnections.emplace_back(graph->layers()->size());

    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        const auto layerType = layer->layer_type();
        if (layerType != Layer_InputLayer && layerType != Layer_OutputLayer)
        {
            // Look up and invoke the parser registered for this layer type.
            auto& parserFunction = m_ParserFunctions[layerType];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // Establish the connections from each layer's outputs to the inputs of
    // the subsequent layers.
    for (size_t connectionsIndex = 0; connectionsIndex < m_GraphConnections[0].size(); ++connectionsIndex)
    {
        SlotsMap& slotsMap = m_GraphConnections[0][connectionsIndex];
        for (unsigned int outputSlotIndex = 0; outputSlotIndex < slotsMap.outputSlots.size(); ++outputSlotIndex)
        {
            // Single lookup instead of find-then-operator[].
            auto pendingInputs = slotsMap.inputSlots.find(outputSlotIndex);
            if (pendingInputs != slotsMap.inputSlots.end())
            {
                for (armnn::IInputSlot* inputSlot : pendingInputs->second)
                {
                    slotsMap.outputSlots[outputSlotIndex]->Connect(*inputSlot);
                }
            }
        }
    }

    return std::move(m_Network);
}
652
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000653BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000654 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000655{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000656 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000657 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000658 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000659 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000660 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000661 }
662 }
663 throw ParseException(
664 boost::str(
665 boost::format("No input binding found for layer:%1% / %2%") %
666 name %
667 CHECK_LOCATION().AsString()));
668}
669
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000670BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000671 const std::string& name) const
672{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000673 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000674 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000675 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000676 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000677 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000678 }
679 }
680 throw ParseException(
681 boost::str(
682 boost::format("No output binding found for layer:%1% / %2%") %
683 name %
684 CHECK_LOCATION().AsString()));
685}
686
// Creates an armnn InputLayer for every graph input, records its output-slot
// connections, and rebuilds m_InputBindings (name -> {bindingId, tensorInfo}).
void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto inputs = GetGraphInputs(graph);
    m_InputBindings.clear();
    m_InputBindings.reserve(inputs.size());
    for (auto const& input : inputs)
    {
        // The binding id comes from the serialized graph, not the layer index.
        LayerBindingId bindingId = GetBindingLayerInfo(graph, input->index());
        IConnectableLayer* layer =
            m_Network->AddInputLayer(bindingId, input->layerName()->c_str());

        // An input layer has exactly one output slot carrying the input tensor.
        auto tensorInfo = ToTensorInfo(input->outputSlots()->Get(0)->tensorInfo());
        layer->GetOutputSlot(0).SetTensorInfo(tensorInfo);

        // Record the slot so CreateNetworkFromGraph can connect downstream layers.
        RegisterOutputSlots(graph, input->index(), layer);

        BOOST_ASSERT_MSG(input->layerName()->c_str(), "Input has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(input->layerName()->c_str(), bindingInfo));
    }
}
709
// Creates an armnn OutputLayer for every graph output, records its input-slot
// connection, and rebuilds m_OutputBindings (name -> {bindingId, tensorInfo}).
void Deserializer::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    auto outputs = GetGraphOutputs(graph);
    m_OutputBindings.clear();
    m_OutputBindings.reserve(outputs.size());
    for (auto const& output : outputs)
    {
        // The binding id comes from the serialized graph, not the layer index.
        LayerBindingId bindingId = GetBindingLayerInfo(graph, output->index());
        IConnectableLayer* layer =
            m_Network->AddOutputLayer(bindingId, output->layerName()->c_str());

        // Record the slot so CreateNetworkFromGraph can connect the producer.
        RegisterInputSlots(graph, output->index(), layer);

        // An output layer carries no tensor info of its own: fetch it from the
        // output slot of the layer feeding this output.
        auto baseLayer = GetBaseLayer(graph, output->index());
        auto sourceLayerIndex = baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex();
        auto sourceLayer = GetBaseLayer(graph, sourceLayerIndex);
        auto tensorInfo = ToTensorInfo(sourceLayer->outputSlots()->Get(0)->tensorInfo());

        BOOST_ASSERT_MSG(output->layerName()->c_str(), "Output has no name.");
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(output->layerName()->c_str(), bindingInfo));
    }
}
734
Derek Lamberti8ddae332019-02-21 16:29:43 +0000735void Deserializer::RegisterOutputSlots(GraphPtr graph,
736 uint32_t layerIndex,
737 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000738{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000739 CHECK_LAYERS(graph, 0, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000740 BOOST_ASSERT(layer != nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000741 auto parsedLayer = GetBaseLayer(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000742 if (parsedLayer->outputSlots()->size() != layer->GetNumOutputSlots())
743 {
744 throw ParseException(
745 boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
746 " for layer index: %3% %4%") %
747 parsedLayer->outputSlots()->size() %
748 layer->GetNumOutputSlots() %
749 layerIndex %
750 CHECK_LOCATION().AsString()));
751 }
752
753 for (unsigned int slotIndex = 0; slotIndex < layer->GetNumOutputSlots(); ++slotIndex)
754 {
755 armnn::IOutputSlot* slot = &(layer->GetOutputSlot(slotIndex));
756 RegisterOutputSlotOfConnection(layerIndex, slot);
757 }
758}
759
Derek Lamberti8ddae332019-02-21 16:29:43 +0000760void Deserializer::RegisterInputSlots(GraphPtr graph,
761 uint32_t layerIndex,
762 armnn::IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000763{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000764 CHECK_LAYERS(graph, 0, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000765 BOOST_ASSERT(layer != nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000766 auto parsedLayer = GetBaseLayer(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000767 if (parsedLayer->inputSlots()->size() != layer->GetNumInputSlots())
768 {
769 throw ParseException(
770 boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
771 " for layer index:%3% %4%") %
772 parsedLayer->inputSlots()->size() %
773 layer->GetNumInputSlots() %
774 layerIndex %
775 CHECK_LOCATION().AsString()));
776 }
777
778 for (unsigned int slotIndex = 0; slotIndex < layer->GetNumInputSlots(); ++slotIndex)
779 {
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000780 auto fbConnection = parsedLayer->inputSlots()->Get(slotIndex)->connection();
Kevin May43a799c2019-02-08 16:31:42 +0000781 armnn::IInputSlot* slot = &(layer->GetInputSlot(slotIndex));
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000782
783 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), slot);
Kevin May43a799c2019-02-08 16:31:42 +0000784 }
785}
786
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000787void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
788 uint32_t outputSlotIndex,
789 armnn::IInputSlot* slot)
Kevin May43a799c2019-02-08 16:31:42 +0000790{
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000791 BOOST_ASSERT(m_GraphConnections[0].size() > sourceLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000792
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000793 SlotsMap& slotsMap = m_GraphConnections[0][sourceLayerIndex];
794 if (slotsMap.inputSlots.find(outputSlotIndex) == slotsMap.inputSlots.end())
Kevin May43a799c2019-02-08 16:31:42 +0000795 {
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000796 slotsMap.inputSlots[outputSlotIndex] = {slot};
Kevin May43a799c2019-02-08 16:31:42 +0000797 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000798 else
799 {
800 slotsMap.inputSlots[outputSlotIndex].push_back(slot);
801 }
802}
Kevin May43a799c2019-02-08 16:31:42 +0000803
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000804void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
805 armnn::IOutputSlot* slot)
806{
807 BOOST_ASSERT(m_GraphConnections[0].size() > sourceLayerIndex);
808 m_GraphConnections[0][sourceLayerIndex].outputSlots.push_back(slot);
Kevin May43a799c2019-02-08 16:31:42 +0000809}
810
Derek Lamberti8ddae332019-02-21 16:29:43 +0000811void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +0000812{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000813 CHECK_LAYERS(graph, 0, layerIndex);
814 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000815 CHECK_LOCATION();
816 CHECK_VALID_SIZE(inputs.size(), 1);
817
Derek Lamberti8ddae332019-02-21 16:29:43 +0000818 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000819 CHECK_VALID_SIZE(outputs.size(), 1);
820
Derek Lamberti8ddae332019-02-21 16:29:43 +0000821 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000822 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000823 auto serializerDescriptor = serializerLayer->descriptor();
824
825 armnn::ActivationDescriptor descriptor;
826 descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
827 descriptor.m_A = serializerDescriptor->a();
828 descriptor.m_B = serializerDescriptor->b();
829
830 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
831 layerName.c_str());
832 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
833 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
834
Derek Lamberti8ddae332019-02-21 16:29:43 +0000835 RegisterInputSlots(graph, layerIndex, layer);
836 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +0000837}
838
Derek Lamberti8ddae332019-02-21 16:29:43 +0000839void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000840{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000841 CHECK_LAYERS(graph, 0, layerIndex);
842 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000843 CHECK_LOCATION();
844 CHECK_VALID_SIZE(inputs.size(), 2);
845
Derek Lamberti8ddae332019-02-21 16:29:43 +0000846 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000847 CHECK_VALID_SIZE(outputs.size(), 1);
848
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000849 auto layerName = GetLayerName(graph, layerIndex);
850 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +0000851
852 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
853 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
854
Derek Lamberti8ddae332019-02-21 16:29:43 +0000855 RegisterInputSlots(graph, layerIndex, layer);
856 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +0000857}
858
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000859void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
860{
861 CHECK_LAYERS(graph, 0, layerIndex);
862
863 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
864 CHECK_VALID_SIZE(inputs.size(), 1);
865
866 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
867 CHECK_VALID_SIZE(outputs.size(), 1);
868
869 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
870 auto flatBufferCrops = flatBufferDescriptor->crops();
871 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
872
873 if (flatBufferCrops->Length() % 2 != 0)
874 {
875 throw ParseException(boost::str(
876 boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
877 }
878
879 std::vector<std::pair<unsigned int, unsigned int>> crops;
880 crops.reserve(flatBufferCrops->Length() / 2);
881 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
882 {
883 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
884 }
885
886 armnn::BatchToSpaceNdDescriptor descriptor;
887 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
888 descriptor.m_BlockShape =
889 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
890 descriptor.m_Crops = crops;
891
892 auto layerName = GetLayerName(graph, layerIndex);
893 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
894
895 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
896 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
897
898 RegisterInputSlots(graph, layerIndex, layer);
899 RegisterOutputSlots(graph, layerIndex, layer);
900}
901
// Deserializes a BatchNormalizationLayer: one input, one output, plus four
// constant tensors (mean, variance, beta, gamma) and an eps/data-layout
// descriptor.
void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::BatchNormalizationDescriptor descriptor;
    descriptor.m_Eps = serializerDescriptor->eps();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    // The four statistics tensors are stored inline in the flatbuffer layer.
    armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
    armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
    armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
    armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());

    IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
                                                                     mean,
                                                                     variance,
                                                                     beta,
                                                                     gamma,
                                                                     layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
938
Conor Kennedy76277882019-02-26 08:29:54 +0000939void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
940{
941 CHECK_LAYERS(graph, 0, layerIndex);
942 CHECK_LOCATION();
943
944 auto outputs = GetOutputs(graph, layerIndex);
945 CHECK_VALID_SIZE(outputs.size(), 1);
946
947 auto layerName = GetLayerName(graph, layerIndex);
948
949 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
950 auto serializerInput = serializerLayer->input();
951
952 armnn::ConstTensor input = ToConstTensor(serializerInput);
953
954 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
955
956 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
957 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
958
959 RegisterOutputSlots(graph, layerIndex, layer);
960}
961
Derek Lamberti8ddae332019-02-21 16:29:43 +0000962void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +0000963{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000964 CHECK_LAYERS(graph, 0, layerIndex);
965 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +0000966 CHECK_LOCATION();
967 CHECK_VALID_SIZE(inputs.size(), 1);
968
Derek Lamberti8ddae332019-02-21 16:29:43 +0000969 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +0000970 CHECK_VALID_SIZE(outputs.size(), 1);
971
Derek Lamberti8ddae332019-02-21 16:29:43 +0000972 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000973 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +0000974 auto serializerDescriptor = serializerLayer->descriptor();
975
976 armnn::Convolution2dDescriptor descriptor;
977 descriptor.m_PadLeft = serializerDescriptor->padLeft();
978 descriptor.m_PadRight = serializerDescriptor->padRight();
979 descriptor.m_PadTop = serializerDescriptor->padTop();
980 descriptor.m_PadBottom = serializerDescriptor->padBottom();
981 descriptor.m_StrideX = serializerDescriptor->strideX();
982 descriptor.m_StrideY = serializerDescriptor->strideY();;
Matthew Benthamacad04e2019-05-13 10:02:45 +0100983 descriptor.m_DilationX = serializerDescriptor->dilationX();
984 descriptor.m_DilationY = serializerDescriptor->dilationY();;
Mike Kellya0766c32019-02-19 17:22:07 +0000985 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
986 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
987
988 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
989 armnn::ConstTensor biases;
990
Matteo Martincighfc598e12019-05-14 10:36:13 +0100991 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +0000992 if (descriptor.m_BiasEnabled)
993 {
994 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +0100995 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +0000996 }
997 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
998 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +0100999 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001000 layerName.c_str());
1001 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1002 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1003
Derek Lamberti8ddae332019-02-21 16:29:43 +00001004 RegisterInputSlots(graph, layerIndex, layer);
1005 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001006}
1007
Derek Lamberti8ddae332019-02-21 16:29:43 +00001008void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001009{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001010 CHECK_LAYERS(graph, 0, layerIndex);
1011 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001012 CHECK_LOCATION();
1013 CHECK_VALID_SIZE(inputs.size(), 1);
1014
Derek Lamberti8ddae332019-02-21 16:29:43 +00001015 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001016 CHECK_VALID_SIZE(outputs.size(), 1);
1017
Derek Lamberti8ddae332019-02-21 16:29:43 +00001018 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001019 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001020 auto serializerDescriptor = serializerLayer->descriptor();
1021
1022 armnn::DepthwiseConvolution2dDescriptor descriptor;
1023 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1024 descriptor.m_PadRight = serializerDescriptor->padRight();
1025 descriptor.m_PadTop = serializerDescriptor->padTop();
1026 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1027 descriptor.m_StrideX = serializerDescriptor->strideX();
1028 descriptor.m_StrideY = serializerDescriptor->strideY();;
1029 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1030 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1031
1032 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1033 armnn::ConstTensor biases;
1034
Matteo Martincighfc598e12019-05-14 10:36:13 +01001035 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001036 if (descriptor.m_BiasEnabled)
1037 {
1038 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001039 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001040 }
1041 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1042 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001043 optionalBiases,
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001044 layerName.c_str());
1045
1046 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1047 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1048
Derek Lamberti8ddae332019-02-21 16:29:43 +00001049 RegisterInputSlots(graph, layerIndex, layer);
1050 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001051}
1052
// Deserializes a DetectionPostProcessLayer: two inputs (box encodings and
// scores), four outputs (boxes, classes, scores, num detections), plus a
// constant anchors tensor and an 11-field NMS descriptor.
void Deserializer::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    // Copy the serialized NMS configuration field-for-field.
    armnn::DetectionPostProcessDescriptor descriptor;
    descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
    descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
    descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
    descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
    descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
    descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
    descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
    descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
    descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
    descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
    descriptor.m_ScaleH = flatBufferDescriptor->scaleH();

    armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());

    IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
                                                                       anchors,
                                                                       layerName.c_str());

    // Set tensor info on all four output slots.
    for (unsigned int i = 0; i < 4; i++)
    {
        layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1094
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001095void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
1096{
1097 CHECK_LAYERS(graph, 0, layerIndex);
1098 auto inputs = GetInputs(graph, layerIndex);
1099 CHECK_LOCATION();
1100 CHECK_VALID_SIZE(inputs.size(), 2);
1101
1102 auto outputs = GetOutputs(graph, layerIndex);
1103 CHECK_VALID_SIZE(outputs.size(), 1);
1104
1105 auto layerName = GetLayerName(graph, layerIndex);
1106 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1107
1108 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1109 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1110
1111 RegisterInputSlots(graph, layerIndex, layer);
1112 RegisterOutputSlots(graph, layerIndex, layer);
1113}
1114
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001115void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
1116{
1117 CHECK_LAYERS(graph, 0, layerIndex);
1118 auto inputs = GetInputs(graph, layerIndex);
1119 CHECK_LOCATION();
1120 CHECK_VALID_SIZE(inputs.size(), 2);
1121
1122 auto outputs = GetOutputs(graph, layerIndex);
1123 CHECK_VALID_SIZE(outputs.size(), 1);
1124
1125 auto layerName = GetLayerName(graph, layerIndex);
1126 IConnectableLayer* layer = m_Network->AddEqualLayer(layerName.c_str());
1127
1128 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1129 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1130
1131 RegisterInputSlots(graph, layerIndex, layer);
1132 RegisterOutputSlots(graph, layerIndex, layer);
1133}
1134
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001135void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
1136{
1137 CHECK_LAYERS(graph, 0, layerIndex);
1138 auto inputs = GetInputs(graph, layerIndex);
1139 CHECK_LOCATION();
1140 CHECK_VALID_SIZE(inputs.size(), 2);
1141
1142 auto outputs = GetOutputs(graph, layerIndex);
1143 CHECK_VALID_SIZE(outputs.size(), 1);
1144
1145 auto layerName = GetLayerName(graph, layerIndex);
1146 IConnectableLayer* layer = m_Network->AddGreaterLayer(layerName.c_str());
1147
1148 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1149 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1150
1151 RegisterInputSlots(graph, layerIndex, layer);
1152 RegisterOutputSlots(graph, layerIndex, layer);
1153}
1154
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001155void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
1156{
1157 CHECK_LAYERS(graph, 0, layerIndex);
1158
1159 auto inputs = GetInputs(graph, layerIndex);
1160 CHECK_VALID_SIZE(inputs.size(), 1);
1161
1162 auto outputs = GetOutputs(graph, layerIndex);
1163 CHECK_VALID_SIZE(outputs.size(), 1);
1164 auto outputInfo = ToTensorInfo(outputs[0]);
1165
1166 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1167 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1168
1169 auto layerName = GetLayerName(graph, layerIndex);
1170 armnn::L2NormalizationDescriptor descriptor;
1171 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1172
1173 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1174 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1175
1176 RegisterInputSlots(graph, layerIndex, layer);
1177 RegisterOutputSlots(graph, layerIndex, layer);
1178}
1179
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001180void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
1181{
1182 CHECK_LAYERS(graph, 0, layerIndex);
1183 auto inputs = GetInputs(graph, layerIndex);
1184 CHECK_LOCATION();
1185 CHECK_VALID_SIZE(inputs.size(), 2);
1186
1187 auto outputs = GetOutputs(graph, layerIndex);
1188 CHECK_VALID_SIZE(outputs.size(), 1);
1189
1190 auto layerName = GetLayerName(graph, layerIndex);
1191 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1192
1193 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1194 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1195
1196 RegisterInputSlots(graph, layerIndex, layer);
1197 RegisterOutputSlots(graph, layerIndex, layer);
1198}
1199
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001200void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
1201{
1202 CHECK_LAYERS(graph, 0, layerIndex);
1203 auto inputs = GetInputs(graph, layerIndex);
1204 CHECK_LOCATION();
1205 CHECK_VALID_SIZE(inputs.size(), 2);
1206
1207 auto outputs = GetOutputs(graph, layerIndex);
1208 CHECK_VALID_SIZE(outputs.size(), 1);
1209
1210 auto layerName = GetLayerName(graph, layerIndex);
1211 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1212
1213 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1214 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1215
1216 RegisterInputSlots(graph, layerIndex, layer);
1217 RegisterOutputSlots(graph, layerIndex, layer);
1218}
1219
// Deserializes a concatenation layer. Note: the flatbuffer still uses the
// legacy 'MergerLayer' table name even though the network layer added is a
// ConcatLayer. Each input view contributes an origin coordinate per dimension.
void Deserializer::ParseConcat(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto mergerLayer = graph->layers()->Get(layerIndex)->layer_as_MergerLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto mergerDescriptor = mergerLayer->descriptor();
    unsigned int numViews = mergerDescriptor->numViews();
    unsigned int numDimensions = mergerDescriptor->numDimensions();

    // can now check the number of inputs == number of views
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), numViews);

    // Rebuild the origins descriptor: one origin coordinate per (view, dim).
    armnn::OriginsDescriptor descriptor(numViews, numDimensions);
    auto originsPtr = mergerDescriptor->viewOrigins();
    for (unsigned int v = 0; v < numViews; ++v)
    {
        auto originPtr = originsPtr->Get(v);
        for (unsigned int d = 0; d < numDimensions; ++d)
        {
            uint32_t value = originPtr->data()->Get(d);
            descriptor.SetViewOriginCoord(v, d, value);
        }
    }
    descriptor.SetConcatAxis(mergerDescriptor->concatAxis());

    IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1258
Derek Lamberti8ddae332019-02-21 16:29:43 +00001259void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001260{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001261 CHECK_LAYERS(graph, 0, layerIndex);
1262 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001263 CHECK_LOCATION();
1264 CHECK_VALID_SIZE(inputs.size(), 2);
1265
Derek Lamberti8ddae332019-02-21 16:29:43 +00001266 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001267 CHECK_VALID_SIZE(outputs.size(), 1);
1268
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001269 auto layerName = GetLayerName(graph, layerIndex);
1270 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001271
1272 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1273 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1274
Derek Lamberti8ddae332019-02-21 16:29:43 +00001275 RegisterInputSlots(graph, layerIndex, layer);
1276 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001277}
1278
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001279void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
1280{
1281 CHECK_LAYERS(graph, 0, layerIndex);
1282 CHECK_LOCATION();
1283
1284 auto inputs = GetInputs(graph, layerIndex);
1285 CHECK_VALID_SIZE(inputs.size(), 1);
1286
1287 auto outputs = GetOutputs(graph, layerIndex);
1288 CHECK_VALID_SIZE(outputs.size(), 1);
1289
1290 auto layerName = GetLayerName(graph, layerIndex);
1291
1292 armnn::IConnectableLayer* layer;
1293
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001294 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001295
1296 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1297 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1298
1299 RegisterInputSlots(graph, layerIndex, layer);
1300 RegisterOutputSlots(graph, layerIndex, layer);
1301}
1302
Derek Lamberti8ddae332019-02-21 16:29:43 +00001303void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001304{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001305 CHECK_LAYERS(graph, 0, layerIndex);
1306 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001307 CHECK_LOCATION();
1308 CHECK_VALID_SIZE(inputs.size(), 1);
1309
Derek Lamberti8ddae332019-02-21 16:29:43 +00001310 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001311 CHECK_VALID_SIZE(outputs.size(), 1);
1312
Derek Lamberti8ddae332019-02-21 16:29:43 +00001313 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001314 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001315 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1316
1317 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1318 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1319 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1320
1321 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1322
1323 armnn::IConnectableLayer* layer;
Matteo Martincighfc598e12019-05-14 10:36:13 +01001324 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001325 if (flatBufferDescriptor->biasEnabled())
1326 {
1327 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001328 optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001329 }
Matteo Martincighfc598e12019-05-14 10:36:13 +01001330 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1331 weightsTensor,
1332 optionalBiases,
1333 layerName.c_str());
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001334
1335 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1336 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1337
Derek Lamberti8ddae332019-02-21 16:29:43 +00001338 RegisterInputSlots(graph, layerIndex, layer);
1339 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001340}
1341
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001342void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
1343{
1344 CHECK_LAYERS(graph, 0, layerIndex);
1345
1346 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1347 CHECK_VALID_SIZE(inputs.size(), 1);
1348
1349 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1350 CHECK_VALID_SIZE(outputs.size(), 1);
1351
1352 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1353 auto flatBufferPadList = flatBufferDescriptor->padList();
1354
1355 if (flatBufferPadList->Length() % 2 != 0)
1356 {
1357 throw ParseException(boost::str(
1358 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1359 }
1360
1361 std::vector<std::pair<unsigned int, unsigned int>> padList;
1362 padList.reserve(flatBufferPadList->Length() / 2);
1363 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1364 {
1365 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1366 }
1367
1368 armnn::PadDescriptor descriptor(padList);
1369
1370 auto layerName = GetLayerName(graph, layerIndex);
1371 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1372
1373 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1374 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1375
1376 RegisterInputSlots(graph, layerIndex, layer);
1377 RegisterOutputSlots(graph, layerIndex, layer);
1378}
1379
Derek Lamberti8ddae332019-02-21 16:29:43 +00001380void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001381{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001382 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001383
1384 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001385 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001386
Derek Lamberti8ddae332019-02-21 16:29:43 +00001387 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001388 CHECK_VALID_SIZE(inputs.size(), 1);
1389
Derek Lamberti8ddae332019-02-21 16:29:43 +00001390 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001391 CHECK_VALID_SIZE(outputs.size(), 1);
1392 auto outputInfo = ToTensorInfo(outputs[0]);
1393
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001394 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001395 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1396
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001397 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001398 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1399
Derek Lamberti8ddae332019-02-21 16:29:43 +00001400 RegisterInputSlots(graph, layerIndex, layer);
1401 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001402}
1403
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001404armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001405 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001406{
1407 armnn::Pooling2dDescriptor desc;
1408
1409 switch (pooling2dDesc->poolType())
1410 {
1411 case PoolingAlgorithm_Average:
1412 {
1413 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001414 break;
1415 }
1416 case PoolingAlgorithm_Max:
1417 {
1418 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001419 break;
1420 }
1421 default:
1422 {
1423 BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
1424 }
1425 }
1426
1427 switch (pooling2dDesc->outputShapeRounding())
1428 {
1429 case OutputShapeRounding_Floor:
1430 {
1431 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1432 break;
1433 }
1434 case OutputShapeRounding_Ceiling:
1435 {
1436 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
1437 break;
1438 }
1439 default:
1440 {
1441 BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
1442 }
1443 }
1444
1445 switch (pooling2dDesc->paddingMethod())
1446 {
1447 case PaddingMethod_Exclude:
1448 {
1449 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1450 break;
1451 }
1452 case PaddingMethod_IgnoreValue:
1453 {
1454 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
1455 break;
1456 }
1457 default:
1458 {
1459 BOOST_ASSERT_MSG(false, "Unsupported padding method");
1460 }
1461 }
1462
1463 switch (pooling2dDesc->dataLayout())
1464 {
1465 case DataLayout_NCHW:
1466 {
1467 desc.m_DataLayout = armnn::DataLayout::NCHW;
1468 break;
1469 }
1470 case DataLayout_NHWC:
1471 {
1472 desc.m_DataLayout = armnn::DataLayout::NHWC;
1473 break;
1474 }
1475 default:
1476 {
1477 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1478 }
1479 }
1480
1481 desc.m_PadRight = pooling2dDesc->padRight();
1482 desc.m_PadLeft = pooling2dDesc->padLeft();
1483 desc.m_PadBottom = pooling2dDesc->padBottom();
1484 desc.m_PadTop = pooling2dDesc->padTop();
1485 desc.m_StrideX = pooling2dDesc->strideX();
1486 desc.m_StrideY = pooling2dDesc->strideY();
1487 desc.m_PoolWidth = pooling2dDesc->poolWidth();
1488 desc.m_PoolHeight = pooling2dDesc->poolHeight();
1489
1490 return desc;
1491}
1492
Derek Lamberti8ddae332019-02-21 16:29:43 +00001493void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001494{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001495 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001496
Derek Lamberti8ddae332019-02-21 16:29:43 +00001497 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001498 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001499 CHECK_VALID_SIZE(inputs.size(), 1);
1500
Derek Lamberti8ddae332019-02-21 16:29:43 +00001501 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001502 CHECK_VALID_SIZE(outputs.size(), 1);
1503 auto outputInfo = ToTensorInfo(outputs[0]);
1504
1505 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001506 auto layerName = GetLayerName(graph, layerIndex);
1507 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001508 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1509
Derek Lamberti8ddae332019-02-21 16:29:43 +00001510 RegisterInputSlots(graph, layerIndex, layer);
1511 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001512}
1513
Derek Lamberti87acb272019-03-27 16:51:31 +00001514void Deserializer::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
1515{
1516 CHECK_LAYERS(graph, 0, layerIndex);
1517
1518 auto inputs = GetInputs(graph, layerIndex);
1519 CHECK_VALID_SIZE(inputs.size(), 1);
1520
1521 auto outputs = GetOutputs(graph, layerIndex);
1522 CHECK_VALID_SIZE(outputs.size(), 1);
1523 auto outputInfo = ToTensorInfo(outputs[0]);
1524
1525 auto layerName = GetLayerName(graph, layerIndex);
1526 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
1527 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1528
1529 RegisterInputSlots(graph, layerIndex, layer);
1530 RegisterOutputSlots(graph, layerIndex, layer);
1531}
1532
// Computes the concrete output TensorInfo of a reshape, resolving at most one
// "stretch" dimension. A value of -1 in targetDimsIn marks the dimension whose
// size should be inferred from the input's total element count.
// NOTE: targetDimsIn holds uint32_t, so the -1 sentinel is matched via the
// usual arithmetic conversion to 0xFFFFFFFF; the accumulate below likewise
// starts at -1 so the product of the remaining (real) dimensions comes out
// negated and the final cast makes it positive again.
armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                     const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        // Reject a second -1: the stretch dimension must be unique.
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(boost::str(
                boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
        }

        // Product of all target dims including the -1 sentinel; the sign flip
        // yields the product of the known dimensions only.
        auto targetNumElements =
            boost::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        // Infer the stretch dimension so total element count is preserved.
        // NOTE(review): assumes the known dims evenly divide the input's element
        // count; no remainder check is performed here - confirm upstream validation.
        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    // Preserve the input's data type / quantization info; only the shape changes.
    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}
1562
// Deserializes a Reshape layer. The output shape is recomputed from the
// serialized target shape (resolving any -1 stretch dimension) and cross-checked
// against the shape recorded for the output tensor.
void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    // Resolve a possible -1 stretch dimension into a concrete output shape.
    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    // NOTE(review): the shape consistency check only fires when more than one
    // input is present (i.e. a separate shape input exists) - confirm this gating
    // is intentional; single-input reshapes skip the check entirely.
    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    // The layer's output uses the recomputed (resolved) shape, not the serialized one.
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1605
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001606void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
1607{
1608 CHECK_LAYERS(graph, 0, layerIndex);
1609
1610 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1611 CHECK_VALID_SIZE(inputs.size(), 1);
1612
1613 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1614 CHECK_VALID_SIZE(outputs.size(), 1);
1615
1616 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
1617
1618 armnn::ResizeBilinearDescriptor descriptor;
1619 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
1620 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
1621 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1622
1623 auto layerName = GetLayerName(graph, layerIndex);
1624 IConnectableLayer* layer = m_Network->AddResizeBilinearLayer(descriptor, layerName.c_str());
1625
1626 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1627 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1628
1629 RegisterInputSlots(graph, layerIndex, layer);
1630 RegisterOutputSlots(graph, layerIndex, layer);
1631}
1632
Derek Lamberti8ddae332019-02-21 16:29:43 +00001633void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001634{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001635 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001636
Derek Lamberti8ddae332019-02-21 16:29:43 +00001637 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001638 CHECK_VALID_SIZE(inputs.size(), 1);
1639
Derek Lamberti8ddae332019-02-21 16:29:43 +00001640 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001641 CHECK_VALID_SIZE(outputs.size(), 1);
1642
1643 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00001644 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001645 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001646
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001647 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
1648
1649 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1650 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1651
Derek Lamberti8ddae332019-02-21 16:29:43 +00001652 RegisterInputSlots(graph, layerIndex, layer);
1653 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001654}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001655
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00001656void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
1657{
1658 CHECK_LAYERS(graph, 0, layerIndex);
1659
1660 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1661 CHECK_VALID_SIZE(inputs.size(), 1);
1662
1663 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1664 CHECK_VALID_SIZE(outputs.size(), 1);
1665
1666 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
1667 auto flatBufferPadList = flatBufferDescriptor->padList();
1668 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1669
1670 if (flatBufferPadList->Length() % 2 != 0)
1671 {
1672 throw ParseException(boost::str(
1673 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1674 }
1675
1676 std::vector<std::pair<unsigned int, unsigned int>> padList;
1677 padList.reserve(flatBufferPadList->Length() / 2);
1678 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1679 {
1680 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1681 }
1682
1683 armnn::SpaceToBatchNdDescriptor descriptor;
1684 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1685 descriptor.m_BlockShape =
1686 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1687 descriptor.m_PadList = padList;
1688
1689 auto layerName = GetLayerName(graph, layerIndex);
1690 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
1691
1692 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1693 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1694
1695 RegisterInputSlots(graph, layerIndex, layer);
1696 RegisterOutputSlots(graph, layerIndex, layer);
1697}
1698
Nina Drozd57728782019-02-27 10:53:27 +00001699armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
1700 Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
1701 unsigned int layerIndex)
1702{
1703 armnn::NormalizationDescriptor desc;
1704
1705 switch (normalizationDescriptor->normChannelType())
1706 {
1707 case NormalizationAlgorithmChannel_Across:
1708 {
1709 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
1710 break;
1711 }
1712 case NormalizationAlgorithmChannel_Within:
1713 {
1714 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
1715 break;
1716 }
1717 default:
1718 {
1719 BOOST_ASSERT_MSG(false, "Unsupported normalization channel type");
1720 }
1721 }
1722
1723 switch (normalizationDescriptor->normMethodType())
1724 {
1725 case NormalizationAlgorithmMethod_LocalBrightness:
1726 {
1727 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
1728 break;
1729 }
1730 case NormalizationAlgorithmMethod_LocalContrast:
1731 {
1732 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
1733 break;
1734 }
1735 default:
1736 {
1737 BOOST_ASSERT_MSG(false, "Unsupported normalization method type");
1738 }
1739 }
1740
1741 switch (normalizationDescriptor->dataLayout())
1742 {
1743 case DataLayout_NCHW:
1744 {
1745 desc.m_DataLayout = armnn::DataLayout::NCHW;
1746 break;
1747 }
1748 case DataLayout_NHWC:
1749 {
1750 desc.m_DataLayout = armnn::DataLayout::NHWC;
1751 break;
1752 }
1753 default:
1754 {
1755 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1756 }
1757 }
1758
1759 desc.m_Alpha = normalizationDescriptor->alpha();
1760 desc.m_Beta = normalizationDescriptor->beta();
1761 desc.m_K = normalizationDescriptor->k();
1762 desc.m_NormSize = normalizationDescriptor->normSize();
1763
1764 return desc;
1765}
1766
1767void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
1768{
1769 CHECK_LAYERS(graph, 0, layerIndex);
1770
1771 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
1772
1773 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1774 CHECK_VALID_SIZE(inputs.size(), 1);
1775
1776 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1777 CHECK_VALID_SIZE(outputs.size(), 1);
1778
1779 auto outputInfo = ToTensorInfo(outputs[0]);
1780
1781 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
1782 auto layerName = GetLayerName(graph, layerIndex);
1783
1784 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
1785 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1786
1787 RegisterInputSlots(graph, layerIndex, layer);
1788 RegisterOutputSlots(graph, layerIndex, layer);
1789}
1790
Sadik Armagan8b42a382019-03-01 14:24:49 +00001791void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
1792{
1793 CHECK_LAYERS(graph, 0, layerIndex);
1794 auto inputs = GetInputs(graph, layerIndex);
1795 CHECK_LOCATION();
1796 CHECK_VALID_SIZE(inputs.size(), 1);
1797
1798 auto outputs = GetOutputs(graph, layerIndex);
1799 CHECK_VALID_SIZE(outputs.size(), 1);
1800
1801 auto layerName = GetLayerName(graph, layerIndex);
1802 IConnectableLayer* layer = m_Network->AddRsqrtLayer(layerName.c_str());
1803
1804 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1805 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1806
1807 RegisterInputSlots(graph, layerIndex, layer);
1808 RegisterOutputSlots(graph, layerIndex, layer);
1809}
1810
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00001811void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
1812{
1813 CHECK_LAYERS(graph, 0, layerIndex);
1814
1815 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1816 CHECK_VALID_SIZE(inputs.size(), 1);
1817
1818 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1819 CHECK_VALID_SIZE(outputs.size(), 1);
1820
1821 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
1822
1823 auto flatBufferBegin = flatBufferDescriptor->begin();
1824 auto flatBufferEnd = flatBufferDescriptor->end();
1825 auto flatBufferStride = flatBufferDescriptor->stride();
1826
1827 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
1828 flatBufferBegin->Length() == flatBufferStride->Length()))
1829 {
1830 throw ParseException(boost::str(
1831 boost::format("The size of the begin, end, and stride must be equal %1%") % CHECK_LOCATION().AsString()));
1832 }
1833
1834 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
1835 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
1836 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
1837
1838 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
1839 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
1840 descriptor.m_EndMask = flatBufferDescriptor->endMask();
1841 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
1842 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
1843 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
1844 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1845
1846 auto layerName = GetLayerName(graph, layerIndex);
1847 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
1848
1849 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1850 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1851
1852 RegisterInputSlots(graph, layerIndex, layer);
1853 RegisterOutputSlots(graph, layerIndex, layer);
1854}
1855
Conor Kennedyda1f9752019-03-01 14:37:12 +00001856void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
1857{
1858 CHECK_LAYERS(graph, 0, layerIndex);
1859 auto inputs = GetInputs(graph, layerIndex);
1860 CHECK_LOCATION();
1861 CHECK_VALID_SIZE(inputs.size(), 2);
1862
1863 auto outputs = GetOutputs(graph, layerIndex);
1864 CHECK_VALID_SIZE(outputs.size(), 1);
1865
1866 auto layerName = GetLayerName(graph, layerIndex);
1867 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
1868
1869 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1870 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1871
1872 RegisterInputSlots(graph, layerIndex, layer);
1873 RegisterOutputSlots(graph, layerIndex, layer);
1874}
1875
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001876void Deserializer::ParseGather(GraphPtr graph, unsigned int layerIndex)
1877{
1878 CHECK_LAYERS(graph, 0, layerIndex);
1879
1880 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1881 CHECK_VALID_SIZE(inputs.size(), 2);
1882
1883 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1884 CHECK_VALID_SIZE(outputs.size(), 1);
1885
1886 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001887 IConnectableLayer* layer = m_Network->AddGatherLayer(layerName.c_str());
1888
1889 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001890 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1891
1892 RegisterInputSlots(graph, layerIndex, layer);
1893 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001894}
1895
Sadik Armaganac97c8c2019-03-04 17:44:21 +00001896void Deserializer::ParseMean(GraphPtr graph, unsigned int layerIndex)
1897{
1898 CHECK_LAYERS(graph, 0, layerIndex);
1899
1900 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1901 CHECK_VALID_SIZE(inputs.size(), 1);
1902
1903 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1904 CHECK_VALID_SIZE(outputs.size(), 1);
1905
1906 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
1907 auto flatBufferAxis = flatBufferDescriptor->axis();
1908 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
1909
1910 armnn::MeanDescriptor descriptor;
1911 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
1912 descriptor.m_KeepDims = flatBufferKeepDims;
1913
1914 auto layerName = GetLayerName(graph, layerIndex);
1915 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
1916
1917 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1918 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1919
1920 RegisterInputSlots(graph, layerIndex, layer);
1921 RegisterOutputSlots(graph, layerIndex, layer);
1922}
1923
Jim Flynn18ce3382019-03-08 11:08:30 +00001924void Deserializer::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
1925{
1926 CHECK_LAYERS(graph, 0, layerIndex);
1927
1928 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1929 CHECK_VALID_SIZE(inputs.size(), 1);
1930
1931 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1932
1933 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
1934 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
1935 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
1936 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
1937 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
1938 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
1939
1940 // Check numViews and numDimensions corresponds to the ones already serialized ...
1941 // numViews == flatBufferViewSizes.size();
1942 // foreach: numDimensions == flatBufferViewSizes[x].size();
1943
1944 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
1945 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
1946 {
1947 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
1948 {
1949 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
1950 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
1951 }
1952 }
1953
1954 auto layerName = GetLayerName(graph, layerIndex);
1955 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
1956
1957 // I could have as many outputs as views ...
1958 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
1959 {
1960 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
1961 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
1962 }
1963
1964 RegisterInputSlots(graph, layerIndex, layer);
1965 RegisterOutputSlots(graph, layerIndex, layer);
1966}
1967
Jim Flynn11af3752019-03-19 17:22:29 +00001968armnn::LstmDescriptor Deserializer::GetLstmDescriptor(Deserializer::LstmDescriptorPtr lstmDescriptor)
1969{
1970 armnn::LstmDescriptor desc;
1971
1972 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
1973 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
1974 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
1975 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
1976 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
1977 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
1978
1979 return desc;
1980}
1981
// Reconstructs an LSTM layer from the serialized graph at layerIndex:
// reads the descriptor and weight/bias tensors from the flatbuffer,
// builds the armnn::LstmInputParams, adds the layer to m_Network and
// wires up its input/output slots.
void Deserializer::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    // The serialized LSTM layer is expected to carry exactly 3 inputs...
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    // ...and exactly 4 outputs.
    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters, present regardless of the descriptor flags.
    // NOTE: LstmInputParams stores raw pointers, so every ConstTensor below
    // must remain alive (i.e. stay a local in this scope) until
    // AddLstmLayer has been called.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate parameters are only serialized when CIFG is disabled.
    // Declared outside the if so they outlive the AddLstmLayer call below.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Projection parameters, only present when projection is enabled.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Peephole parameters, only present when peephole connections are enabled.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    // Propagate the serialized tensor info to each of the four output slots.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2077
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002078void Deserializer::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
2079{
2080 CHECK_LAYERS(graph, 0, layerIndex);
2081
2082 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2083 CHECK_VALID_SIZE(inputs.size(), 1);
2084
2085 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2086 CHECK_VALID_SIZE(outputs.size(), 1);
2087
2088 const std::string layerName = GetLayerName(graph, layerIndex);
2089 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
2090
2091 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2092 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2093
2094 RegisterInputSlots(graph, layerIndex, layer);
2095 RegisterOutputSlots(graph, layerIndex, layer);
2096}
2097
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01002098void Deserializer::ParseMerge(GraphPtr graph, unsigned int layerIndex)
2099{
2100 CHECK_LAYERS(graph, 0, layerIndex);
2101
2102 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2103 CHECK_VALID_SIZE(inputs.size(), 2);
2104
2105 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2106 CHECK_VALID_SIZE(outputs.size(), 1);
2107
2108 const std::string layerName = GetLayerName(graph, layerIndex);
2109 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
2110
2111 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2112 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2113
2114 RegisterInputSlots(graph, layerIndex, layer);
2115 RegisterOutputSlots(graph, layerIndex, layer);
2116}
2117
Sadik Armaganeff363d2019-04-05 15:25:46 +01002118void Deserializer::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
2119{
2120 CHECK_LAYERS(graph, 0, layerIndex);
2121 auto inputs = GetInputs(graph, layerIndex);
2122 CHECK_LOCATION();
2123 CHECK_VALID_SIZE(inputs.size(), 2);
2124
2125 auto outputs = GetOutputs(graph, layerIndex);
2126 CHECK_VALID_SIZE(outputs.size(), 2);
2127
2128 auto layerName = GetLayerName(graph, layerIndex);
2129 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
2130
2131 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
2132 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
2133
2134 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
2135 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
2136
2137 RegisterInputSlots(graph, layerIndex, layer);
2138 RegisterOutputSlots(graph, layerIndex, layer);
2139}
2140
Derek Lamberti0028d1b2019-02-20 13:57:42 +00002141} // namespace armnnDeserializer