blob: d853a0826402f8756f040a25f436aa70d817aa0f [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
8#include <armnn/ArmNN.hpp>
9#include <armnn/Exceptions.hpp>
10
11#include <ParserHelper.hpp>
12#include <Permute.hpp>
13#include <VerificationHelpers.hpp>
14
15#include <boost/filesystem.hpp>
16#include <boost/format.hpp>
17#include <boost/core/ignore_unused.hpp>
18#include <boost/assert.hpp>
19#include <boost/format.hpp>
20#include <boost/log/trivial.hpp>
Aron Virginas-Tard4f0fea2019-04-09 14:08:06 +010021#include <boost/format.hpp>
22#include <boost/numeric/conversion/cast.hpp>
Jim Flynn18ce3382019-03-08 11:08:30 +000023#include <boost/polymorphic_cast.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000024
Kevin May43a799c2019-02-08 16:31:42 +000025#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000026#include <algorithm>
27#include <limits>
28#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000029
30using armnn::ParseException;
31using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000032using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000033
Derek Lamberti0028d1b2019-02-20 13:57:42 +000034namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000035{
Kevin May43a799c2019-02-08 16:31:42 +000036
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000037namespace
38{
Kevin May43a799c2019-02-08 16:31:42 +000039
40const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
41
Derek Lamberti0028d1b2019-02-20 13:57:42 +000042 void CheckGraph(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000043 unsigned int layersIndex,
44 const CheckLocation& location)
45{
46 if (graph->layers() == nullptr)
47 {
48 throw ParseException(
49 boost::str(
50 boost::format("%1% was called with invalid (null) graph. "
51 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
52 "layers:%2% at %3%") %
53 location.m_Function %
54 layersIndex %
55 location.FileLine()));
56 }
57 else if (layersIndex >= graph->layers()->size())
58 {
59 throw ParseException(
60 boost::str(
61 boost::format("%1% was called with an invalid layers index. "
62 "layers:%2% at %3%") %
63 location.m_Function %
64 layersIndex %
65 location.FileLine()));
66 }
67}
68
Derek Lamberti0028d1b2019-02-20 13:57:42 +000069void CheckLayers(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000070 unsigned int layersIndex,
71 unsigned int layerIndex,
72 const CheckLocation& location)
73{
74 if (graph->layers() == nullptr)
75 {
76 throw ParseException(
77 boost::str(
78 boost::format("%1% was called with invalid (null) graph. "
79 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000080 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000081 location.m_Function %
82 layersIndex %
83 location.FileLine()));
84 }
85 else if (layersIndex >= graph->layers()->size())
86 {
87 throw ParseException(
88 boost::str(
89 boost::format("%1% was called with an invalid layers index. "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000090 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000091 location.m_Function %
92 layersIndex %
93 location.FileLine()));
94 }
95 else if (layerIndex >= graph->layers()[layersIndex].size()
96 && layerIndex != VIRTUAL_LAYER_ID)
97 {
98 throw ParseException(
99 boost::str(
100 boost::format("%1% was called with an invalid layer index. "
101 "layers:%2% layer:%3% at %4%") %
102 location.m_Function %
103 layersIndex %
104 layerIndex %
105 location.FileLine()));
106 }
107}
108
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000109void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000110 const CheckLocation& location)
111{
112 if (rawPtr == nullptr)
113 {
114 throw ParseException(
115 boost::str(
116 boost::format("%1% was called with a null tensor pointer. "
117 "at %2%") %
118 location.m_Function %
119 location.FileLine()));
120
121 }
122}
123
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000124void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000125 const CheckLocation& location)
126{
127 if (rawPtr == nullptr)
128 {
129 throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
130 location.m_Function %
131 location.FileLine()));
132 }
133}
134
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000135void CheckConstTensorSize(const unsigned int constTensorSize,
136 const unsigned int tensorSize,
137 const CheckLocation& location)
138{
139 if (constTensorSize != tensorSize)
140 {
141 throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
142 location.m_Function %
143 location.FileLine()));
144 }
145}
146
Kevin May43a799c2019-02-08 16:31:42 +0000147#define CHECK_TENSOR_PTR(TENSOR_PTR) \
148 CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())
149
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000150#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
151 CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())
152
Mike Kellya0766c32019-02-19 17:22:07 +0000153#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
154 CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())
155
Kevin May43a799c2019-02-08 16:31:42 +0000156#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
157 CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())
158
159#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
160 CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
161}
162
Saoirse Stewart263829c2019-02-19 15:54:14 +0000163bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
164{
165 const unsigned int actualSize = actual.GetNumDimensions();
166 if (actualSize != expected.size())
167 {
168 return false;
169 }
170
171 for (unsigned int i = 0u; i < actualSize; i++)
172 {
173 if (actual[i] != static_cast<unsigned int>(expected[i]))
174 {
175 return false;
176 }
177 }
178
179 return true;
180}
181
// Builds the layer-type -> parser-member-function dispatch table. Every slot
// defaults to ParseUnsupportedLayer (which throws), so any layer type without
// an explicit registration below fails loudly during deserialization.
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_ActivationLayer]             = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer]               = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer]         = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer]     = &Deserializer::ParseBatchNormalization;
    m_ParserFunctions[Layer_ConcatLayer]                 = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer]               = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer]          = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer]             = &Deserializer::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer]   = &Deserializer::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer]               = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_EqualLayer]                  = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer]         = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_FloorLayer]                  = &Deserializer::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer]                 = &Deserializer::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer]                = &Deserializer::ParseGreater;
    m_ParserFunctions[Layer_L2NormalizationLayer]        = &Deserializer::ParseL2Normalization;
    m_ParserFunctions[Layer_LstmLayer]                   = &Deserializer::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer]                = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer]                   = &Deserializer::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer]                = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer]                  = &Deserializer::ParseMerge;
    // Merger is the deprecated name for Concat; both map to the same parser.
    m_ParserFunctions[Layer_MergerLayer]                 = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer]         = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer]          = &Deserializer::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer]                    = &Deserializer::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer]                = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer]              = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_PreluLayer]                  = &Deserializer::ParsePrelu;
    m_ParserFunctions[Layer_QuantizeLayer]               = &Deserializer::ParseQuantize;
    m_ParserFunctions[Layer_ReshapeLayer]                = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer]         = &Deserializer::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer]                 = &Deserializer::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer]                  = &Deserializer::ParseRsqrt;
    m_ParserFunctions[Layer_SoftmaxLayer]                = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer]         = &Deserializer::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer]           = &Deserializer::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer]               = &Deserializer::ParseSplitter;
    m_ParserFunctions[Layer_StridedSliceLayer]           = &Deserializer::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer]            = &Deserializer::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer]                 = &Deserializer::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &Deserializer::ParseTransposeConvolution2d;
}
231
// Resolves the flatbuffers union for the layer at layerIndex and returns its
// common LayerBase. Input/Output layers wrap their base in a BindableLayerBase,
// hence the extra ->base() hop for those two cases. Throws ParseException for
// Layer_NONE or an unrecognised layer type.
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            // Bindable layer: LayerBase sits one level deeper.
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            // Bindable layer: LayerBase sits one level deeper.
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                  boost::format("Layer must have a type %1%") %
                  Layer::Layer_NONE));
    }
}
333
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000334std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
335{
336 auto layer = GetBaseLayer(graph, index);
337 assert(layer);
338 return layer->layerName()->str();
339}
340
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000341int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000342{
343 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
344
345 if (layerType == Layer::Layer_InputLayer)
346 {
347 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
348 }
349 else if ( layerType == Layer::Layer_OutputLayer )
350 {
351 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
352 }
353 return 0;
354}
355
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000356armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000357{
358 switch (dataLayout)
359 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000360 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000361 return armnn::DataLayout::NHWC;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000362 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000363 default:
364 return armnn::DataLayout::NCHW;
365 }
366}
367
// Maps the serialized ActivationFunction enum onto the armnn one.
// NOTE(review): unrecognised values silently fall back to Sigmoid rather than
// failing — confirm that is the intended behaviour for forward compatibility.
armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}
394
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100395armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
396{
397 switch (method)
398 {
399 case armnnSerializer::ResizeMethod_NearestNeighbor:
400 return armnn::ResizeMethod::NearestNeighbor;
401 case armnnSerializer::ResizeMethod_Bilinear:
402 return armnn::ResizeMethod::NearestNeighbor;
403 default:
404 return armnn::ResizeMethod::NearestNeighbor;
405 }
406}
407
// Builds an armnn::TensorInfo from a serialized TensorInfo table: data type,
// dimensions and (for quantised types) scale/offset. Throws ParseException
// for a null tensor pointer or an unsupported data type.
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
            type = armnn::DataType::QuantisedAsymm8;
            break;
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QuantisedSymm16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    // Scale/offset are carried for all types; armnn ignores them for
    // non-quantised data types.
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}
459
// Builds an armnn::ConstTensor viewing the serialized constant data in place
// (no copy — the returned tensor points into the flatbuffer). The element
// count of each payload is validated against the TensorInfo; throws
// ParseException on mismatch or unsupported payload type.
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}
502
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000503Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000504 unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000505{
506 CHECK_LAYERS(graphPtr, 0, layerIndex);
507 auto layer = GetBaseLayer(graphPtr, layerIndex);
508 const auto& numInputs = layer->inputSlots()->size();
509
510 TensorRawPtrVector result(numInputs);
511
512 for (unsigned int i=0; i<numInputs; ++i)
513 {
514 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
515 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
516 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
517 }
518 return result;
519}
520
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000521Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000522 unsigned int layerIndex)
523{
524 CHECK_LAYERS(graphPtr, 0, layerIndex);
525 auto layer = GetBaseLayer(graphPtr, layerIndex);
526 const auto& numOutputs = layer->outputSlots()->size();
527
528 TensorRawPtrVector result(numOutputs);
529
530 for (unsigned int i=0; i<numOutputs; ++i)
531 {
532 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
533 }
534 return result;
535}
536
Derek Lamberti8ddae332019-02-21 16:29:43 +0000537void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000538{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000539 CHECK_LAYERS(graph, 0, layerIndex);
540 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Kevin May43a799c2019-02-08 16:31:42 +0000541 throw ParseException(
542 boost::str(
543 boost::format("Layer not supported. "
544 "layerIndex: %1% "
545 "layerName: %2% / %3%") %
546 layerIndex %
547 layerName %
548 CHECK_LOCATION().AsString()));
549}
550
// Clears all state left over from a previous CreateNetworkFromBinary call so
// the parser instance can be reused.
void Deserializer::ResetParser()
{
    m_Network = armnn::INetworkPtr(nullptr, nullptr);
    m_InputBindings.clear();
    m_OutputBindings.clear();
}
557
// Factory: returns a raw, caller-owned Deserializer.
// Callers must release it via IDeserializer::Destroy.
IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}
562
// Factory: returns a Deserializer wrapped in the owning smart pointer whose
// deleter is IDeserializer::Destroy.
IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}
567
// Deletes a parser created by CreateRaw/Create (also used as the
// IDeserializerPtr deleter).
void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}
572
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000573INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000574{
575 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000576 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
577 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000578}
579
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000580armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000581{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000582 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000583 std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
584 GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
585 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000586}
587
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000588Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000589{
590 if (binaryContent == nullptr)
591 {
592 throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
593 CHECK_LOCATION().AsString()));
594 }
595 flatbuffers::Verifier verifier(binaryContent, len);
596 if (verifier.VerifyBuffer<SerializedGraph>() == false)
597 {
598 throw ParseException(
599 boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
600 "flatbuffers format. size:%1% %2%") %
601 len %
602 CHECK_LOCATION().AsString()));
603 }
604 return GetSerializedGraph(binaryContent);
605}
606
// Translates the serialized graph into an armnn INetwork in three phases:
// 1) dispatch every non-Input/Output layer to its registered parser (the
//    parsers record slot information in m_GraphConnections),
// 2) create the Input/Output binding layers,
// 3) wire up every recorded output slot to its consumer input slots.
// Ownership of the built network is moved to the caller.
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            // An output slot may legitimately have no consumers; only connect
            // when at least one input slot was recorded for this index.
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    return std::move(m_Network);
}
647
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000648BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000649 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000650{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000651 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000652 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000653 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000654 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000655 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000656 }
657 }
658 throw ParseException(
659 boost::str(
660 boost::format("No input binding found for layer:%1% / %2%") %
661 name %
662 CHECK_LOCATION().AsString()));
663}
664
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000665BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000666 const std::string& name) const
667{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000668 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000669 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000670 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000671 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000672 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000673 }
674 }
675 throw ParseException(
676 boost::str(
677 boost::format("No output binding found for layer:%1% / %2%") %
678 name %
679 CHECK_LOCATION().AsString()));
680}
681
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100682unsigned int Deserializer::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
683{
684 for (unsigned int i = 0; i < graph->layers()->size(); i++)
685 {
686 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
687 if (layer->index() == targetIndex)
688 {
689 return i;
690 }
691 }
692 throw ParseException("Layer with given index not found");
693}
694
Derek Lamberti8ddae332019-02-21 16:29:43 +0000695void Deserializer::SetupInputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000696{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000697 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100698 const unsigned int numInputs = graph->inputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000699 m_InputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100700 m_InputBindings.reserve(numInputs);
701
702 for (unsigned int i = 0; i < numInputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000703 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100704 const unsigned int inputId = graph->inputIds()->Get(i);
705 const unsigned int inputLayerIndex = GetLayerIndexInVector(graph, inputId);
706 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000707
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100708 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
709 LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
710 BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Kevin May43a799c2019-02-08 16:31:42 +0000711
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100712 IConnectableLayer* inputLayer =
713 m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000714
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100715 const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
716 inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
717 RegisterOutputSlots(graph, inputLayerIndex, inputLayer);
718
Derek Lamberti8ddae332019-02-21 16:29:43 +0000719 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100720 m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000721 }
722}
723
Derek Lamberti8ddae332019-02-21 16:29:43 +0000724void Deserializer::SetupOutputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000725{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000726 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100727 const unsigned int numOutputs = graph->outputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000728 m_OutputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100729 m_OutputBindings.reserve(numOutputs);
730
731 for (unsigned int i = 0; i < numOutputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000732 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100733 const unsigned int outputId = graph->outputIds()->Get(i);
734 const unsigned int outputLayerIndex = GetLayerIndexInVector(graph, outputId);
735 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000736
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100737 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
738 LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
739 BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Derek Lamberti8ddae332019-02-21 16:29:43 +0000740
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100741 IConnectableLayer* outputLayer =
742 m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000743
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100744 RegisterInputSlots(graph, outputLayerIndex, outputLayer);
745
746 unsigned int sourceLayerIndex =
747 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
748 LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
749 const armnn::TensorInfo& tensorInfo = ToTensorInfo(sourceBaseLayer->outputSlots()->Get(0)->tensorInfo());
750
Derek Lamberti8ddae332019-02-21 16:29:43 +0000751 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100752 m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000753 }
754}
755
Derek Lamberti8ddae332019-02-21 16:29:43 +0000756void Deserializer::RegisterOutputSlots(GraphPtr graph,
757 uint32_t layerIndex,
758 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000759{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000760 CHECK_LAYERS(graph, 0, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000761 BOOST_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100762 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
763 if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
Kevin May43a799c2019-02-08 16:31:42 +0000764 {
765 throw ParseException(
766 boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
767 " for layer index: %3% %4%") %
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100768 baseLayer->outputSlots()->size() %
Kevin May43a799c2019-02-08 16:31:42 +0000769 layer->GetNumOutputSlots() %
770 layerIndex %
771 CHECK_LOCATION().AsString()));
772 }
773
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100774 for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +0000775 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100776 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
777 armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
778 // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
779 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
Kevin May43a799c2019-02-08 16:31:42 +0000780 }
781}
782
Derek Lamberti8ddae332019-02-21 16:29:43 +0000783void Deserializer::RegisterInputSlots(GraphPtr graph,
784 uint32_t layerIndex,
785 armnn::IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000786{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000787 CHECK_LAYERS(graph, 0, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000788 BOOST_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100789 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
790 if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
Kevin May43a799c2019-02-08 16:31:42 +0000791 {
792 throw ParseException(
793 boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
794 " for layer index:%3% %4%") %
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100795 baseLayer->inputSlots()->size() %
Kevin May43a799c2019-02-08 16:31:42 +0000796 layer->GetNumInputSlots() %
797 layerIndex %
798 CHECK_LOCATION().AsString()));
799 }
800
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100801 for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +0000802 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100803 auto fbInputSlot = baseLayer->inputSlots()->Get(i);
804 auto fbConnection = fbInputSlot->connection();
805 armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
806 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
Kevin May43a799c2019-02-08 16:31:42 +0000807 }
808}
809
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000810void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
811 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100812 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +0000813{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100814 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +0000815 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100816 m_GraphConnections[sourceLayerIndex] = Connections();
817 }
818
819 Connections& connections = m_GraphConnections[sourceLayerIndex];
820 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
821 {
822 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +0000823 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000824 else
825 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100826 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000827 }
828}
Kevin May43a799c2019-02-08 16:31:42 +0000829
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000830void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100831 uint32_t outputSlotIndex,
832 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000833{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100834 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
835 {
836 m_GraphConnections[sourceLayerIndex] = Connections();
837 }
838
839 Connections& connections = m_GraphConnections[sourceLayerIndex];
840 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
841 {
842 throw ParseException("Same output slot index processed twice");
843 }
844
845 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +0000846}
847
Derek Lamberti8ddae332019-02-21 16:29:43 +0000848void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +0000849{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000850 CHECK_LAYERS(graph, 0, layerIndex);
851 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000852 CHECK_LOCATION();
853 CHECK_VALID_SIZE(inputs.size(), 1);
854
Derek Lamberti8ddae332019-02-21 16:29:43 +0000855 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000856 CHECK_VALID_SIZE(outputs.size(), 1);
857
Derek Lamberti8ddae332019-02-21 16:29:43 +0000858 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000859 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000860 auto serializerDescriptor = serializerLayer->descriptor();
861
862 armnn::ActivationDescriptor descriptor;
863 descriptor.m_Function = ToActivationFunction(serializerDescriptor->function());
864 descriptor.m_A = serializerDescriptor->a();
865 descriptor.m_B = serializerDescriptor->b();
866
867 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
868 layerName.c_str());
869 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
870 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
871
Derek Lamberti8ddae332019-02-21 16:29:43 +0000872 RegisterInputSlots(graph, layerIndex, layer);
873 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +0000874}
875
Derek Lamberti8ddae332019-02-21 16:29:43 +0000876void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000877{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000878 CHECK_LAYERS(graph, 0, layerIndex);
879 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000880 CHECK_LOCATION();
881 CHECK_VALID_SIZE(inputs.size(), 2);
882
Derek Lamberti8ddae332019-02-21 16:29:43 +0000883 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000884 CHECK_VALID_SIZE(outputs.size(), 1);
885
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000886 auto layerName = GetLayerName(graph, layerIndex);
887 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +0000888
889 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
890 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
891
Derek Lamberti8ddae332019-02-21 16:29:43 +0000892 RegisterInputSlots(graph, layerIndex, layer);
893 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +0000894}
895
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000896void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
897{
898 CHECK_LAYERS(graph, 0, layerIndex);
899
900 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
901 CHECK_VALID_SIZE(inputs.size(), 1);
902
903 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
904 CHECK_VALID_SIZE(outputs.size(), 1);
905
906 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
907 auto flatBufferCrops = flatBufferDescriptor->crops();
908 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
909
910 if (flatBufferCrops->Length() % 2 != 0)
911 {
912 throw ParseException(boost::str(
913 boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
914 }
915
916 std::vector<std::pair<unsigned int, unsigned int>> crops;
917 crops.reserve(flatBufferCrops->Length() / 2);
918 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
919 {
920 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
921 }
922
923 armnn::BatchToSpaceNdDescriptor descriptor;
924 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
925 descriptor.m_BlockShape =
926 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
927 descriptor.m_Crops = crops;
928
929 auto layerName = GetLayerName(graph, layerIndex);
930 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
931
932 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
933 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
934
935 RegisterInputSlots(graph, layerIndex, layer);
936 RegisterOutputSlots(graph, layerIndex, layer);
937}
938
ruoyan018e7fa232019-02-28 15:09:07 +0000939void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
940{
941 CHECK_LAYERS(graph, 0, layerIndex);
942
943 auto inputs = GetInputs(graph, layerIndex);
944 CHECK_VALID_SIZE(inputs.size(), 1);
945
946 auto outputs = GetOutputs(graph, layerIndex);
947 CHECK_VALID_SIZE(outputs.size(), 1);
948 auto outputInfo = ToTensorInfo(outputs[0]);
949
ruoyan015c7ab052019-03-04 14:48:02 +0000950 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +0000951
952 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
953 auto serializerDescriptor = serializerLayer->descriptor();
954
955 armnn::BatchNormalizationDescriptor descriptor;
956 descriptor.m_Eps = serializerDescriptor->eps();
957 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
958
959 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
960 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
961 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
962 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
963
964 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
965 mean,
966 variance,
967 beta,
968 gamma,
969 layerName.c_str());
970 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
971
972 RegisterInputSlots(graph, layerIndex, layer);
973 RegisterOutputSlots(graph, layerIndex, layer);
974}
975
Conor Kennedy76277882019-02-26 08:29:54 +0000976void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
977{
978 CHECK_LAYERS(graph, 0, layerIndex);
979 CHECK_LOCATION();
980
981 auto outputs = GetOutputs(graph, layerIndex);
982 CHECK_VALID_SIZE(outputs.size(), 1);
983
984 auto layerName = GetLayerName(graph, layerIndex);
985
986 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
987 auto serializerInput = serializerLayer->input();
988
989 armnn::ConstTensor input = ToConstTensor(serializerInput);
990
991 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
992
993 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
994 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
995
996 RegisterOutputSlots(graph, layerIndex, layer);
997}
998
Derek Lamberti8ddae332019-02-21 16:29:43 +0000999void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001000{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001001 CHECK_LAYERS(graph, 0, layerIndex);
1002 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001003 CHECK_LOCATION();
1004 CHECK_VALID_SIZE(inputs.size(), 1);
1005
Derek Lamberti8ddae332019-02-21 16:29:43 +00001006 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001007 CHECK_VALID_SIZE(outputs.size(), 1);
1008
Derek Lamberti8ddae332019-02-21 16:29:43 +00001009 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001010 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001011 auto serializerDescriptor = serializerLayer->descriptor();
1012
1013 armnn::Convolution2dDescriptor descriptor;
1014 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1015 descriptor.m_PadRight = serializerDescriptor->padRight();
1016 descriptor.m_PadTop = serializerDescriptor->padTop();
1017 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1018 descriptor.m_StrideX = serializerDescriptor->strideX();
1019 descriptor.m_StrideY = serializerDescriptor->strideY();;
Matthew Benthamacad04e2019-05-13 10:02:45 +01001020 descriptor.m_DilationX = serializerDescriptor->dilationX();
1021 descriptor.m_DilationY = serializerDescriptor->dilationY();;
Mike Kellya0766c32019-02-19 17:22:07 +00001022 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1023 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1024
1025 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1026 armnn::ConstTensor biases;
1027
Matteo Martincighfc598e12019-05-14 10:36:13 +01001028 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001029 if (descriptor.m_BiasEnabled)
1030 {
1031 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001032 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001033 }
1034 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1035 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001036 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001037 layerName.c_str());
1038 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1039 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1040
Derek Lamberti8ddae332019-02-21 16:29:43 +00001041 RegisterInputSlots(graph, layerIndex, layer);
1042 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001043}
1044
Derek Lamberti8ddae332019-02-21 16:29:43 +00001045void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001046{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001047 CHECK_LAYERS(graph, 0, layerIndex);
1048 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001049 CHECK_LOCATION();
1050 CHECK_VALID_SIZE(inputs.size(), 1);
1051
Derek Lamberti8ddae332019-02-21 16:29:43 +00001052 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001053 CHECK_VALID_SIZE(outputs.size(), 1);
1054
Derek Lamberti8ddae332019-02-21 16:29:43 +00001055 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001056 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001057 auto serializerDescriptor = serializerLayer->descriptor();
1058
1059 armnn::DepthwiseConvolution2dDescriptor descriptor;
1060 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1061 descriptor.m_PadRight = serializerDescriptor->padRight();
1062 descriptor.m_PadTop = serializerDescriptor->padTop();
1063 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1064 descriptor.m_StrideX = serializerDescriptor->strideX();
Aron Virginas-Tar5e1b0cf2019-06-21 14:20:11 +01001065 descriptor.m_StrideY = serializerDescriptor->strideY();
1066 descriptor.m_DilationX = serializerDescriptor->dilationX();
1067 descriptor.m_DilationY = serializerDescriptor->dilationY();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001068 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1069 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1070
1071 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1072 armnn::ConstTensor biases;
1073
Matteo Martincighfc598e12019-05-14 10:36:13 +01001074 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001075 if (descriptor.m_BiasEnabled)
1076 {
1077 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001078 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001079 }
1080 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1081 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001082 optionalBiases,
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001083 layerName.c_str());
1084
1085 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1086 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1087
Derek Lamberti8ddae332019-02-21 16:29:43 +00001088 RegisterInputSlots(graph, layerIndex, layer);
1089 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001090}
1091
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001092void Deserializer::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
1093{
1094 CHECK_LAYERS(graph, 0, layerIndex);
1095 auto inputs = GetInputs(graph, layerIndex);
1096 CHECK_LOCATION();
1097 CHECK_VALID_SIZE(inputs.size(), 2);
1098
1099 auto outputs = GetOutputs(graph, layerIndex);
1100 CHECK_VALID_SIZE(outputs.size(), 4);
1101
1102 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1103 auto layerName = GetLayerName(graph, layerIndex);
1104 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1105
1106 armnn::DetectionPostProcessDescriptor descriptor;
1107 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1108 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1109 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1110 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1111 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1112 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1113 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1114 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1115 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1116 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1117 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1118
1119 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1120
1121 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1122 anchors,
1123 layerName.c_str());
1124
1125 for (unsigned int i = 0; i < 4; i++)
1126 {
1127 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1128 }
1129
1130 RegisterInputSlots(graph, layerIndex, layer);
1131 RegisterOutputSlots(graph, layerIndex, layer);
1132}
1133
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001134void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
1135{
1136 CHECK_LAYERS(graph, 0, layerIndex);
1137 auto inputs = GetInputs(graph, layerIndex);
1138 CHECK_LOCATION();
1139 CHECK_VALID_SIZE(inputs.size(), 2);
1140
1141 auto outputs = GetOutputs(graph, layerIndex);
1142 CHECK_VALID_SIZE(outputs.size(), 1);
1143
1144 auto layerName = GetLayerName(graph, layerIndex);
1145 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1146
1147 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1148 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1149
1150 RegisterInputSlots(graph, layerIndex, layer);
1151 RegisterOutputSlots(graph, layerIndex, layer);
1152}
1153
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001154void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
1155{
1156 CHECK_LAYERS(graph, 0, layerIndex);
1157 auto inputs = GetInputs(graph, layerIndex);
1158 CHECK_LOCATION();
1159 CHECK_VALID_SIZE(inputs.size(), 2);
1160
1161 auto outputs = GetOutputs(graph, layerIndex);
1162 CHECK_VALID_SIZE(outputs.size(), 1);
1163
1164 auto layerName = GetLayerName(graph, layerIndex);
1165 IConnectableLayer* layer = m_Network->AddEqualLayer(layerName.c_str());
1166
1167 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1168 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1169
1170 RegisterInputSlots(graph, layerIndex, layer);
1171 RegisterOutputSlots(graph, layerIndex, layer);
1172}
1173
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001174void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
1175{
1176 CHECK_LAYERS(graph, 0, layerIndex);
1177 auto inputs = GetInputs(graph, layerIndex);
1178 CHECK_LOCATION();
1179 CHECK_VALID_SIZE(inputs.size(), 2);
1180
1181 auto outputs = GetOutputs(graph, layerIndex);
1182 CHECK_VALID_SIZE(outputs.size(), 1);
1183
1184 auto layerName = GetLayerName(graph, layerIndex);
1185 IConnectableLayer* layer = m_Network->AddGreaterLayer(layerName.c_str());
1186
1187 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1188 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1189
1190 RegisterInputSlots(graph, layerIndex, layer);
1191 RegisterOutputSlots(graph, layerIndex, layer);
1192}
1193
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001194void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
1195{
1196 CHECK_LAYERS(graph, 0, layerIndex);
1197
1198 auto inputs = GetInputs(graph, layerIndex);
1199 CHECK_VALID_SIZE(inputs.size(), 1);
1200
1201 auto outputs = GetOutputs(graph, layerIndex);
1202 CHECK_VALID_SIZE(outputs.size(), 1);
1203 auto outputInfo = ToTensorInfo(outputs[0]);
1204
1205 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1206 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1207
1208 auto layerName = GetLayerName(graph, layerIndex);
1209 armnn::L2NormalizationDescriptor descriptor;
1210 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001211 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001212
1213 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1214 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1215
1216 RegisterInputSlots(graph, layerIndex, layer);
1217 RegisterOutputSlots(graph, layerIndex, layer);
1218}
1219
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001220void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
1221{
1222 CHECK_LAYERS(graph, 0, layerIndex);
1223 auto inputs = GetInputs(graph, layerIndex);
1224 CHECK_LOCATION();
1225 CHECK_VALID_SIZE(inputs.size(), 2);
1226
1227 auto outputs = GetOutputs(graph, layerIndex);
1228 CHECK_VALID_SIZE(outputs.size(), 1);
1229
1230 auto layerName = GetLayerName(graph, layerIndex);
1231 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1232
1233 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1234 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1235
1236 RegisterInputSlots(graph, layerIndex, layer);
1237 RegisterOutputSlots(graph, layerIndex, layer);
1238}
1239
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001240void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
1241{
1242 CHECK_LAYERS(graph, 0, layerIndex);
1243 auto inputs = GetInputs(graph, layerIndex);
1244 CHECK_LOCATION();
1245 CHECK_VALID_SIZE(inputs.size(), 2);
1246
1247 auto outputs = GetOutputs(graph, layerIndex);
1248 CHECK_VALID_SIZE(outputs.size(), 1);
1249
1250 auto layerName = GetLayerName(graph, layerIndex);
1251 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1252
1253 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1254 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1255
1256 RegisterInputSlots(graph, layerIndex, layer);
1257 RegisterOutputSlots(graph, layerIndex, layer);
1258}
1259
Jim Flynne242f2d2019-05-22 14:24:13 +01001260const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1261 unsigned int layerIndex)
1262{
1263 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1264
1265 switch (layerType)
1266 {
1267 case Layer::Layer_ConcatLayer:
1268 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1269 case Layer::Layer_MergerLayer:
1270 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1271 default:
1272 throw armnn::Exception("unknown layer type, should be concat or merger");
1273 }
1274}
1275
Jim Flynn906f9462019-05-10 13:55:21 +01001276void Deserializer::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001277{
1278 CHECK_LAYERS(graph, 0, layerIndex);
1279 CHECK_LOCATION();
1280
1281 auto outputs = GetOutputs(graph, layerIndex);
1282 CHECK_VALID_SIZE(outputs.size(), 1);
1283
Jim Flynnac25a1b2019-02-28 10:40:49 +00001284 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001285 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1286 unsigned int numViews = originsDescriptor->numViews();
1287 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001288
1289 // can now check the number of inputs == number of views
1290 auto inputs = GetInputs(graph, layerIndex);
1291 CHECK_VALID_SIZE(inputs.size(), numViews);
1292
1293 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001294 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001295 for (unsigned int v = 0; v < numViews; ++v)
1296 {
1297 auto originPtr = originsPtr->Get(v);
1298 for (unsigned int d = 0; d < numDimensions; ++d)
1299 {
1300 uint32_t value = originPtr->data()->Get(d);
1301 descriptor.SetViewOriginCoord(v, d, value);
1302 }
1303 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001304 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001305
Jim Flynn906f9462019-05-10 13:55:21 +01001306 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001307 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1308 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1309
1310 RegisterInputSlots(graph, layerIndex, layer);
1311 RegisterOutputSlots(graph, layerIndex, layer);
1312}
1313
Derek Lamberti8ddae332019-02-21 16:29:43 +00001314void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001315{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001316 CHECK_LAYERS(graph, 0, layerIndex);
1317 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001318 CHECK_LOCATION();
1319 CHECK_VALID_SIZE(inputs.size(), 2);
1320
Derek Lamberti8ddae332019-02-21 16:29:43 +00001321 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001322 CHECK_VALID_SIZE(outputs.size(), 1);
1323
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001324 auto layerName = GetLayerName(graph, layerIndex);
1325 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001326
1327 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1328 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1329
Derek Lamberti8ddae332019-02-21 16:29:43 +00001330 RegisterInputSlots(graph, layerIndex, layer);
1331 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001332}
1333
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001334void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
1335{
1336 CHECK_LAYERS(graph, 0, layerIndex);
1337 CHECK_LOCATION();
1338
1339 auto inputs = GetInputs(graph, layerIndex);
1340 CHECK_VALID_SIZE(inputs.size(), 1);
1341
1342 auto outputs = GetOutputs(graph, layerIndex);
1343 CHECK_VALID_SIZE(outputs.size(), 1);
1344
1345 auto layerName = GetLayerName(graph, layerIndex);
1346
1347 armnn::IConnectableLayer* layer;
1348
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001349 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001350
1351 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1352 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1353
1354 RegisterInputSlots(graph, layerIndex, layer);
1355 RegisterOutputSlots(graph, layerIndex, layer);
1356}
1357
Derek Lamberti8ddae332019-02-21 16:29:43 +00001358void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001359{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001360 CHECK_LAYERS(graph, 0, layerIndex);
1361 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001362 CHECK_LOCATION();
1363 CHECK_VALID_SIZE(inputs.size(), 1);
1364
Derek Lamberti8ddae332019-02-21 16:29:43 +00001365 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001366 CHECK_VALID_SIZE(outputs.size(), 1);
1367
Derek Lamberti8ddae332019-02-21 16:29:43 +00001368 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001369 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001370 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1371
1372 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1373 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1374 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1375
1376 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1377
1378 armnn::IConnectableLayer* layer;
Matteo Martincighfc598e12019-05-14 10:36:13 +01001379 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001380 if (flatBufferDescriptor->biasEnabled())
1381 {
1382 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001383 optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001384 }
Matteo Martincighfc598e12019-05-14 10:36:13 +01001385 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1386 weightsTensor,
1387 optionalBiases,
1388 layerName.c_str());
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001389
1390 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1391 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1392
Derek Lamberti8ddae332019-02-21 16:29:43 +00001393 RegisterInputSlots(graph, layerIndex, layer);
1394 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001395}
1396
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001397void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
1398{
1399 CHECK_LAYERS(graph, 0, layerIndex);
1400
1401 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1402 CHECK_VALID_SIZE(inputs.size(), 1);
1403
1404 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1405 CHECK_VALID_SIZE(outputs.size(), 1);
1406
1407 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1408 auto flatBufferPadList = flatBufferDescriptor->padList();
David Monahan34757812019-06-19 11:47:21 +01001409 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001410
1411 if (flatBufferPadList->Length() % 2 != 0)
1412 {
1413 throw ParseException(boost::str(
1414 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1415 }
1416
1417 std::vector<std::pair<unsigned int, unsigned int>> padList;
1418 padList.reserve(flatBufferPadList->Length() / 2);
1419 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1420 {
1421 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1422 }
1423
David Monahan34757812019-06-19 11:47:21 +01001424 armnn::PadDescriptor descriptor(padList, padValue);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001425
1426 auto layerName = GetLayerName(graph, layerIndex);
1427 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1428
1429 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1430 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1431
1432 RegisterInputSlots(graph, layerIndex, layer);
1433 RegisterOutputSlots(graph, layerIndex, layer);
1434}
1435
Derek Lamberti8ddae332019-02-21 16:29:43 +00001436void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001437{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001438 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001439
1440 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001441 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001442
Derek Lamberti8ddae332019-02-21 16:29:43 +00001443 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001444 CHECK_VALID_SIZE(inputs.size(), 1);
1445
Derek Lamberti8ddae332019-02-21 16:29:43 +00001446 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001447 CHECK_VALID_SIZE(outputs.size(), 1);
1448 auto outputInfo = ToTensorInfo(outputs[0]);
1449
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001450 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001451 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1452
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001453 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001454 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1455
Derek Lamberti8ddae332019-02-21 16:29:43 +00001456 RegisterInputSlots(graph, layerIndex, layer);
1457 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001458}
1459
// Translates a serialized (flatbuffer) pooling descriptor into an
// armnn::Pooling2dDescriptor.
//
// Note: layerIndex is accepted but never read by this function.
// Each switch below asserts via BOOST_ASSERT_MSG on an unrecognised enum
// value and otherwise leaves the corresponding field at the
// armnn::Pooling2dDescriptor default — no exception is thrown here.
armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
                                                              unsigned int layerIndex)
{
    armnn::Pooling2dDescriptor desc;

    // Pooling algorithm: average or max.
    switch (pooling2dDesc->poolType())
    {
        case PoolingAlgorithm_Average:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Average;
            break;
        }
        case PoolingAlgorithm_Max:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Max;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
        }
    }

    // How fractional output extents are rounded.
    switch (pooling2dDesc->outputShapeRounding())
    {
        case OutputShapeRounding_Floor:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
            break;
        }
        case OutputShapeRounding_Ceiling:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
        }
    }

    // Whether padded elements are excluded from or included in the pool.
    switch (pooling2dDesc->paddingMethod())
    {
        case PaddingMethod_Exclude:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
            break;
        }
        case PaddingMethod_IgnoreValue:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported padding method");
        }
    }

    // Tensor data layout (channels-first vs channels-last).
    switch (pooling2dDesc->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    // Scalar fields copy across one-to-one.
    desc.m_PadRight = pooling2dDesc->padRight();
    desc.m_PadLeft = pooling2dDesc->padLeft();
    desc.m_PadBottom = pooling2dDesc->padBottom();
    desc.m_PadTop = pooling2dDesc->padTop();
    desc.m_StrideX = pooling2dDesc->strideX();
    desc.m_StrideY = pooling2dDesc->strideY();
    desc.m_PoolWidth = pooling2dDesc->poolWidth();
    desc.m_PoolHeight = pooling2dDesc->poolHeight();

    return desc;
}
1548
Derek Lamberti8ddae332019-02-21 16:29:43 +00001549void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001550{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001551 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001552
Derek Lamberti8ddae332019-02-21 16:29:43 +00001553 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001554 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001555 CHECK_VALID_SIZE(inputs.size(), 1);
1556
Derek Lamberti8ddae332019-02-21 16:29:43 +00001557 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001558 CHECK_VALID_SIZE(outputs.size(), 1);
1559 auto outputInfo = ToTensorInfo(outputs[0]);
1560
1561 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001562 auto layerName = GetLayerName(graph, layerIndex);
1563 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001564 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1565
Derek Lamberti8ddae332019-02-21 16:29:43 +00001566 RegisterInputSlots(graph, layerIndex, layer);
1567 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001568}
1569
Derek Lamberti87acb272019-03-27 16:51:31 +00001570void Deserializer::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
1571{
1572 CHECK_LAYERS(graph, 0, layerIndex);
1573
1574 auto inputs = GetInputs(graph, layerIndex);
1575 CHECK_VALID_SIZE(inputs.size(), 1);
1576
1577 auto outputs = GetOutputs(graph, layerIndex);
1578 CHECK_VALID_SIZE(outputs.size(), 1);
1579 auto outputInfo = ToTensorInfo(outputs[0]);
1580
1581 auto layerName = GetLayerName(graph, layerIndex);
1582 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
1583 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1584
1585 RegisterInputSlots(graph, layerIndex, layer);
1586 RegisterOutputSlots(graph, layerIndex, layer);
1587}
1588
// Computes the concrete output TensorInfo for a Reshape whose target shape
// may contain a single wildcard dimension.
//
// A wildcard is written as -1; since targetDimsIn holds uint32_t values the
// comparison against -1 matches the value 0xFFFFFFFF after the usual
// integral conversion. At most one wildcard is allowed.
armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                     const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        // A second -1 beyond the first one is invalid.
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(boost::str(
                boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
        }

        // Product over all dims with an initial value of -1: the -1 initial
        // value and the -1 wildcard element cancel, leaving the product of
        // the explicitly-specified dimensions.
        auto targetNumElements =
            boost::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        // The wildcard dimension absorbs whatever is left of the element count.
        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    // Keep the input's data type / quantization info; only the shape changes.
    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}
1618
// Deserializes a Reshape layer, resolving any -1 wildcard in the serialized
// target shape via OutputShapeOfReshape.
// NOTE(review): there is no CHECK_VALID_SIZE on inputs here, unlike the
// other parsers — confirm whether that is intentional.
void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    // Resolve a possible -1 wildcard against the input's element count.
    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    // NOTE(review): this mismatch check only fires when there is more than
    // one input. A Reshape here appears to use a single input, so the guard
    // may never trigger — confirm the intent of the size() > 1 condition.
    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1661
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01001662void Deserializer::ParseResize(GraphPtr graph, unsigned int layerIndex)
1663{
1664 CHECK_LAYERS(graph, 0, layerIndex);
1665
1666 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1667 CHECK_VALID_SIZE(inputs.size(), 1);
1668
1669 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1670 CHECK_VALID_SIZE(outputs.size(), 1);
1671
1672 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
1673
1674 armnn::ResizeDescriptor descriptor;
1675 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
1676 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
1677 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
1678 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1679
1680 auto layerName = GetLayerName(graph, layerIndex);
1681 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
1682
1683 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1684 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1685
1686 RegisterInputSlots(graph, layerIndex, layer);
1687 RegisterOutputSlots(graph, layerIndex, layer);
1688}
1689
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001690void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
1691{
1692 CHECK_LAYERS(graph, 0, layerIndex);
1693
1694 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1695 CHECK_VALID_SIZE(inputs.size(), 1);
1696
1697 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1698 CHECK_VALID_SIZE(outputs.size(), 1);
1699
1700 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
1701
1702 armnn::ResizeBilinearDescriptor descriptor;
1703 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
1704 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
1705 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1706
1707 auto layerName = GetLayerName(graph, layerIndex);
1708 IConnectableLayer* layer = m_Network->AddResizeBilinearLayer(descriptor, layerName.c_str());
1709
1710 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1711 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1712
1713 RegisterInputSlots(graph, layerIndex, layer);
1714 RegisterOutputSlots(graph, layerIndex, layer);
1715}
1716
Derek Lamberti8ddae332019-02-21 16:29:43 +00001717void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001718{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001719 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001720
Derek Lamberti8ddae332019-02-21 16:29:43 +00001721 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001722 CHECK_VALID_SIZE(inputs.size(), 1);
1723
Derek Lamberti8ddae332019-02-21 16:29:43 +00001724 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001725 CHECK_VALID_SIZE(outputs.size(), 1);
1726
1727 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00001728 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001729 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001730
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001731 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
1732
1733 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1734 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1735
Derek Lamberti8ddae332019-02-21 16:29:43 +00001736 RegisterInputSlots(graph, layerIndex, layer);
1737 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001738}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001739
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00001740void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
1741{
1742 CHECK_LAYERS(graph, 0, layerIndex);
1743
1744 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1745 CHECK_VALID_SIZE(inputs.size(), 1);
1746
1747 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1748 CHECK_VALID_SIZE(outputs.size(), 1);
1749
1750 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
1751 auto flatBufferPadList = flatBufferDescriptor->padList();
1752 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1753
1754 if (flatBufferPadList->Length() % 2 != 0)
1755 {
1756 throw ParseException(boost::str(
1757 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1758 }
1759
1760 std::vector<std::pair<unsigned int, unsigned int>> padList;
1761 padList.reserve(flatBufferPadList->Length() / 2);
1762 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1763 {
1764 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1765 }
1766
1767 armnn::SpaceToBatchNdDescriptor descriptor;
1768 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1769 descriptor.m_BlockShape =
1770 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1771 descriptor.m_PadList = padList;
1772
1773 auto layerName = GetLayerName(graph, layerIndex);
1774 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
1775
1776 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1777 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1778
1779 RegisterInputSlots(graph, layerIndex, layer);
1780 RegisterOutputSlots(graph, layerIndex, layer);
1781}
1782
Aron Virginas-Taraa067142019-06-11 16:01:44 +01001783void Deserializer::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
1784{
1785 CHECK_LAYERS(graph, 0, layerIndex);
1786
1787 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1788 CHECK_VALID_SIZE(inputs.size(), 1);
1789
1790 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1791 CHECK_VALID_SIZE(outputs.size(), 1);
1792
1793 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
1794
1795 armnn::SpaceToDepthDescriptor descriptor;
1796 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
1797 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1798
1799 auto layerName = GetLayerName(graph, layerIndex);
1800 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
1801
1802 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1803 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1804
1805 RegisterInputSlots(graph, layerIndex, layer);
1806 RegisterOutputSlots(graph, layerIndex, layer);
1807}
1808
// Translates a serialized (flatbuffer) normalization descriptor into an
// armnn::NormalizationDescriptor.
//
// Note: layerIndex is accepted but never read by this function.
// Each switch below asserts via BOOST_ASSERT_MSG on an unrecognised enum
// value and otherwise leaves the corresponding field at the
// armnn::NormalizationDescriptor default — no exception is thrown here.
armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
    Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
    unsigned int layerIndex)
{
    armnn::NormalizationDescriptor desc;

    // Normalization channel scope: across channels or within a channel.
    switch (normalizationDescriptor->normChannelType())
    {
        case NormalizationAlgorithmChannel_Across:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
            break;
        }
        case NormalizationAlgorithmChannel_Within:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported normalization channel type");
        }
    }

    // Normalization method.
    switch (normalizationDescriptor->normMethodType())
    {
        case NormalizationAlgorithmMethod_LocalBrightness:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
            break;
        }
        case NormalizationAlgorithmMethod_LocalContrast:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported normalization method type");
        }
    }

    // Tensor data layout (channels-first vs channels-last).
    switch (normalizationDescriptor->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    // Scalar fields copy across one-to-one.
    desc.m_Alpha = normalizationDescriptor->alpha();
    desc.m_Beta = normalizationDescriptor->beta();
    desc.m_K = normalizationDescriptor->k();
    desc.m_NormSize = normalizationDescriptor->normSize();

    return desc;
}
1876
1877void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
1878{
1879 CHECK_LAYERS(graph, 0, layerIndex);
1880
1881 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
1882
1883 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1884 CHECK_VALID_SIZE(inputs.size(), 1);
1885
1886 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1887 CHECK_VALID_SIZE(outputs.size(), 1);
1888
1889 auto outputInfo = ToTensorInfo(outputs[0]);
1890
1891 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
1892 auto layerName = GetLayerName(graph, layerIndex);
1893
1894 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
1895 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1896
1897 RegisterInputSlots(graph, layerIndex, layer);
1898 RegisterOutputSlots(graph, layerIndex, layer);
1899}
1900
Sadik Armagan8b42a382019-03-01 14:24:49 +00001901void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
1902{
1903 CHECK_LAYERS(graph, 0, layerIndex);
1904 auto inputs = GetInputs(graph, layerIndex);
1905 CHECK_LOCATION();
1906 CHECK_VALID_SIZE(inputs.size(), 1);
1907
1908 auto outputs = GetOutputs(graph, layerIndex);
1909 CHECK_VALID_SIZE(outputs.size(), 1);
1910
1911 auto layerName = GetLayerName(graph, layerIndex);
1912 IConnectableLayer* layer = m_Network->AddRsqrtLayer(layerName.c_str());
1913
1914 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1915 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1916
1917 RegisterInputSlots(graph, layerIndex, layer);
1918 RegisterOutputSlots(graph, layerIndex, layer);
1919}
1920
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00001921void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
1922{
1923 CHECK_LAYERS(graph, 0, layerIndex);
1924
1925 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1926 CHECK_VALID_SIZE(inputs.size(), 1);
1927
1928 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1929 CHECK_VALID_SIZE(outputs.size(), 1);
1930
1931 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
1932
1933 auto flatBufferBegin = flatBufferDescriptor->begin();
1934 auto flatBufferEnd = flatBufferDescriptor->end();
1935 auto flatBufferStride = flatBufferDescriptor->stride();
1936
1937 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
1938 flatBufferBegin->Length() == flatBufferStride->Length()))
1939 {
1940 throw ParseException(boost::str(
1941 boost::format("The size of the begin, end, and stride must be equal %1%") % CHECK_LOCATION().AsString()));
1942 }
1943
1944 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
1945 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
1946 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
1947
1948 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
1949 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
1950 descriptor.m_EndMask = flatBufferDescriptor->endMask();
1951 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
1952 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
1953 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
1954 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1955
1956 auto layerName = GetLayerName(graph, layerIndex);
1957 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
1958
1959 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1960 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1961
1962 RegisterInputSlots(graph, layerIndex, layer);
1963 RegisterOutputSlots(graph, layerIndex, layer);
1964}
1965
Conor Kennedyda1f9752019-03-01 14:37:12 +00001966void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
1967{
1968 CHECK_LAYERS(graph, 0, layerIndex);
1969 auto inputs = GetInputs(graph, layerIndex);
1970 CHECK_LOCATION();
1971 CHECK_VALID_SIZE(inputs.size(), 2);
1972
1973 auto outputs = GetOutputs(graph, layerIndex);
1974 CHECK_VALID_SIZE(outputs.size(), 1);
1975
1976 auto layerName = GetLayerName(graph, layerIndex);
1977 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
1978
1979 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1980 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1981
1982 RegisterInputSlots(graph, layerIndex, layer);
1983 RegisterOutputSlots(graph, layerIndex, layer);
1984}
1985
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001986void Deserializer::ParseGather(GraphPtr graph, unsigned int layerIndex)
1987{
1988 CHECK_LAYERS(graph, 0, layerIndex);
1989
1990 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1991 CHECK_VALID_SIZE(inputs.size(), 2);
1992
1993 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1994 CHECK_VALID_SIZE(outputs.size(), 1);
1995
1996 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00001997 IConnectableLayer* layer = m_Network->AddGatherLayer(layerName.c_str());
1998
1999 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002000 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2001
2002 RegisterInputSlots(graph, layerIndex, layer);
2003 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002004}
2005
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002006void Deserializer::ParseMean(GraphPtr graph, unsigned int layerIndex)
2007{
2008 CHECK_LAYERS(graph, 0, layerIndex);
2009
2010 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2011 CHECK_VALID_SIZE(inputs.size(), 1);
2012
2013 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2014 CHECK_VALID_SIZE(outputs.size(), 1);
2015
2016 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
2017 auto flatBufferAxis = flatBufferDescriptor->axis();
2018 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
2019
2020 armnn::MeanDescriptor descriptor;
2021 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2022 descriptor.m_KeepDims = flatBufferKeepDims;
2023
2024 auto layerName = GetLayerName(graph, layerIndex);
2025 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
2026
2027 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2028 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2029
2030 RegisterInputSlots(graph, layerIndex, layer);
2031 RegisterOutputSlots(graph, layerIndex, layer);
2032}
2033
// Rebuilds a Splitter layer: one input tensor split into numViews output
// views, each described by a per-dimension size and origin coordinate.
void Deserializer::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    // NOTE(review): outputs.size() is not validated here; the loops below
    // index outputs[vIdx] up to numViews - confirm the serializer guarantees
    // one output per view.
    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);

    auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
    auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
    auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
    auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
    uint32_t numViews = flatBufferOriginsDescriptor->numViews();
    uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();

    // Check numViews and numDimensions corresponds to the ones already serialized ...
    // numViews == flatBufferViewSizes.size();
    // foreach: numDimensions == flatBufferViewSizes[x].size();
    // TODO(review): the validation described above is not implemented; a
    // malformed file with short viewSizes/viewOrigins vectors would be read
    // out of bounds in the loop below.

    armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
    // Copy every view's size and origin, dimension by dimension.
    for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
    {
        for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
        {
            viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
            viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
        }
    }

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());

    // I could have as many outputs as views ...
    for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
    {
        armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
        layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2077
Jim Flynn11af3752019-03-19 17:22:29 +00002078armnn::LstmDescriptor Deserializer::GetLstmDescriptor(Deserializer::LstmDescriptorPtr lstmDescriptor)
2079{
2080 armnn::LstmDescriptor desc;
2081
2082 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
2083 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
2084 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
2085 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
2086 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
2087 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
2088
2089 return desc;
2090}
2091
// Rebuilds an LSTM layer: 3 inputs (input, output state, cell state) and
// 4 outputs. Mandatory weights/biases are always read; CIFG, projection and
// peephole parameter sets are read only when the corresponding descriptor
// flag enables them.
void Deserializer::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    // lstmInputParams holds raw pointers into the local ConstTensors below;
    // they stay valid until AddLstmLayer is called at the end of this scope.
    // NOTE(review): this presumes AddLstmLayer copies the tensor data it
    // needs - confirm against the INetwork contract.
    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters (present regardless of descriptor flags).
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate parameters exist only when CIFG (coupled input-forget gate)
    // is disabled. Declared at function scope so they outlive the branch.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Projection parameters: only read when projection is enabled.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Peephole parameters: only read when peephole connections are enabled.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    // Restore tensor info on all four output slots.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2187
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002188void Deserializer::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
2189{
2190 CHECK_LAYERS(graph, 0, layerIndex);
2191
2192 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2193 CHECK_VALID_SIZE(inputs.size(), 1);
2194
2195 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2196 CHECK_VALID_SIZE(outputs.size(), 1);
2197
2198 const std::string layerName = GetLayerName(graph, layerIndex);
2199 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
2200
2201 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2202 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2203
2204 RegisterInputSlots(graph, layerIndex, layer);
2205 RegisterOutputSlots(graph, layerIndex, layer);
2206}
2207
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01002208void Deserializer::ParseMerge(GraphPtr graph, unsigned int layerIndex)
2209{
2210 CHECK_LAYERS(graph, 0, layerIndex);
2211
2212 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2213 CHECK_VALID_SIZE(inputs.size(), 2);
2214
2215 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2216 CHECK_VALID_SIZE(outputs.size(), 1);
2217
2218 const std::string layerName = GetLayerName(graph, layerIndex);
2219 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
2220
2221 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2222 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2223
2224 RegisterInputSlots(graph, layerIndex, layer);
2225 RegisterOutputSlots(graph, layerIndex, layer);
2226}
2227
Sadik Armaganeff363d2019-04-05 15:25:46 +01002228void Deserializer::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
2229{
2230 CHECK_LAYERS(graph, 0, layerIndex);
2231 auto inputs = GetInputs(graph, layerIndex);
2232 CHECK_LOCATION();
2233 CHECK_VALID_SIZE(inputs.size(), 2);
2234
2235 auto outputs = GetOutputs(graph, layerIndex);
2236 CHECK_VALID_SIZE(outputs.size(), 2);
2237
2238 auto layerName = GetLayerName(graph, layerIndex);
2239 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
2240
2241 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
2242 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
2243
2244 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
2245 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
2246
2247 RegisterInputSlots(graph, layerIndex, layer);
2248 RegisterOutputSlots(graph, layerIndex, layer);
2249}
2250
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01002251void Deserializer::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
2252{
2253 CHECK_LAYERS(graph, 0, layerIndex);
2254 auto inputs = GetInputs(graph, layerIndex);
2255 CHECK_LOCATION();
2256 CHECK_VALID_SIZE(inputs.size(), 2);
2257
2258 auto outputs = GetOutputs(graph, layerIndex);
2259 CHECK_VALID_SIZE(outputs.size(), 1);
2260
2261 auto layerName = GetLayerName(graph, layerIndex);
2262 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
2263
2264 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2265 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2266
2267 RegisterInputSlots(graph, layerIndex, layer);
2268 RegisterOutputSlots(graph, layerIndex, layer);
2269}
2270
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01002271void Deserializer::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
2272{
2273 CHECK_LAYERS(graph, 0, layerIndex);
2274
2275 auto inputs = GetInputs(graph, layerIndex);
2276 CHECK_VALID_SIZE(inputs.size(), 1);
2277
2278 auto outputs = GetOutputs(graph, layerIndex);
2279 CHECK_VALID_SIZE(outputs.size(), 1);
2280
2281 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
2282 auto layerName = GetLayerName(graph, layerIndex);
2283 auto serializerDescriptor = serializerLayer->descriptor();
2284
2285 armnn::TransposeConvolution2dDescriptor descriptor;
2286 descriptor.m_PadLeft = serializerDescriptor->padLeft();
2287 descriptor.m_PadRight = serializerDescriptor->padRight();
2288 descriptor.m_PadTop = serializerDescriptor->padTop();
2289 descriptor.m_PadBottom = serializerDescriptor->padBottom();
2290 descriptor.m_StrideX = serializerDescriptor->strideX();
2291 descriptor.m_StrideY = serializerDescriptor->strideY();;
2292 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
2293 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
2294
2295 // weights & biases
2296 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
2297 armnn::Optional<armnn::ConstTensor> optionalBiases;
2298 if (descriptor.m_BiasEnabled)
2299 {
2300 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
2301 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
2302 }
2303
2304 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
2305 weights,
2306 optionalBiases,
2307 layerName.c_str());
2308
2309 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2310 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2311
2312 RegisterInputSlots(graph, layerIndex, layer);
2313 RegisterOutputSlots(graph, layerIndex, layer);
2314}
2315
Derek Lamberti0028d1b2019-02-20 13:57:42 +00002316} // namespace armnnDeserializer