blob: c85282f4180188a71c26ca7d691906b5c9089fb0 [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
8#include <armnn/ArmNN.hpp>
9#include <armnn/Exceptions.hpp>
10
11#include <ParserHelper.hpp>
12#include <Permute.hpp>
13#include <VerificationHelpers.hpp>
14
15#include <boost/filesystem.hpp>
16#include <boost/format.hpp>
17#include <boost/core/ignore_unused.hpp>
18#include <boost/assert.hpp>
19#include <boost/format.hpp>
20#include <boost/log/trivial.hpp>
Aron Virginas-Tard4f0fea2019-04-09 14:08:06 +010021#include <boost/format.hpp>
22#include <boost/numeric/conversion/cast.hpp>
Jim Flynn18ce3382019-03-08 11:08:30 +000023#include <boost/polymorphic_cast.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000024
Kevin May43a799c2019-02-08 16:31:42 +000025#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000026#include <algorithm>
27#include <limits>
28#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000029
30using armnn::ParseException;
31using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000032using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000033
Derek Lamberti0028d1b2019-02-20 13:57:42 +000034namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000035{
Kevin May43a799c2019-02-08 16:31:42 +000036
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000037namespace
38{
Kevin May43a799c2019-02-08 16:31:42 +000039
// Sentinel layer index for synthetic layers; exempted from the layer-index
// bounds check in CheckLayers below.
const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
41
Derek Lamberti0028d1b2019-02-20 13:57:42 +000042 void CheckGraph(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000043 unsigned int layersIndex,
44 const CheckLocation& location)
45{
46 if (graph->layers() == nullptr)
47 {
48 throw ParseException(
49 boost::str(
50 boost::format("%1% was called with invalid (null) graph. "
51 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
52 "layers:%2% at %3%") %
53 location.m_Function %
54 layersIndex %
55 location.FileLine()));
56 }
57 else if (layersIndex >= graph->layers()->size())
58 {
59 throw ParseException(
60 boost::str(
61 boost::format("%1% was called with an invalid layers index. "
62 "layers:%2% at %3%") %
63 location.m_Function %
64 layersIndex %
65 location.FileLine()));
66 }
67}
68
Derek Lamberti0028d1b2019-02-20 13:57:42 +000069void CheckLayers(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000070 unsigned int layersIndex,
71 unsigned int layerIndex,
72 const CheckLocation& location)
73{
74 if (graph->layers() == nullptr)
75 {
76 throw ParseException(
77 boost::str(
78 boost::format("%1% was called with invalid (null) graph. "
79 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000080 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000081 location.m_Function %
82 layersIndex %
83 location.FileLine()));
84 }
85 else if (layersIndex >= graph->layers()->size())
86 {
87 throw ParseException(
88 boost::str(
89 boost::format("%1% was called with an invalid layers index. "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000090 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000091 location.m_Function %
92 layersIndex %
93 location.FileLine()));
94 }
95 else if (layerIndex >= graph->layers()[layersIndex].size()
96 && layerIndex != VIRTUAL_LAYER_ID)
97 {
98 throw ParseException(
99 boost::str(
100 boost::format("%1% was called with an invalid layer index. "
101 "layers:%2% layer:%3% at %4%") %
102 location.m_Function %
103 layersIndex %
104 layerIndex %
105 location.FileLine()));
106 }
107}
108
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000109void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000110 const CheckLocation& location)
111{
112 if (rawPtr == nullptr)
113 {
114 throw ParseException(
115 boost::str(
116 boost::format("%1% was called with a null tensor pointer. "
117 "at %2%") %
118 location.m_Function %
119 location.FileLine()));
120
121 }
122}
123
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000124void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000125 const CheckLocation& location)
126{
127 if (rawPtr == nullptr)
128 {
129 throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
130 location.m_Function %
131 location.FileLine()));
132 }
133}
134
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000135void CheckConstTensorSize(const unsigned int constTensorSize,
136 const unsigned int tensorSize,
137 const CheckLocation& location)
138{
139 if (constTensorSize != tensorSize)
140 {
141 throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
142 location.m_Function %
143 location.FileLine()));
144 }
145}
146
// Convenience wrappers that stamp the caller's file/line (CHECK_LOCATION)
// into each validation call so exception messages point at the call site.
#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
} // anonymous namespace
162
Saoirse Stewart263829c2019-02-19 15:54:14 +0000163bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
164{
165 const unsigned int actualSize = actual.GetNumDimensions();
166 if (actualSize != expected.size())
167 {
168 return false;
169 }
170
171 for (unsigned int i = 0u; i < actualSize; i++)
172 {
173 if (actual[i] != static_cast<unsigned int>(expected[i]))
174 {
175 return false;
176 }
177 }
178
179 return true;
180}
181
// Constructs a Deserializer with an empty network and a dispatch table that
// maps each serialized layer tag to its Parse* member function.
// The table is sized Layer_MAX+1 and pre-filled with ParseUnsupportedLayer,
// so any tag without an explicit entry below throws when parsed.
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer]                    = &Deserializer::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer]             = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer]               = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer]              = &Deserializer::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer]         = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer]     = &Deserializer::ParseBatchNormalization;
    m_ParserFunctions[Layer_ConcatLayer]                 = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer]               = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer]          = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthToSpaceLayer]           = &Deserializer::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer]             = &Deserializer::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer]   = &Deserializer::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer]               = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_EqualLayer]                  = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer]         = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_FloorLayer]                  = &Deserializer::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer]                 = &Deserializer::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer]                = &Deserializer::ParseGreater;
    m_ParserFunctions[Layer_L2NormalizationLayer]        = &Deserializer::ParseL2Normalization;
    m_ParserFunctions[Layer_LstmLayer]                   = &Deserializer::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer]                = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer]                   = &Deserializer::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer]                = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer]                  = &Deserializer::ParseMerge;
    // MergerLayer is the deprecated name for Concat; both map to ParseConcat.
    m_ParserFunctions[Layer_MergerLayer]                 = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer]         = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer]          = &Deserializer::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer]                    = &Deserializer::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer]                = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer]              = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_PreluLayer]                  = &Deserializer::ParsePrelu;
    m_ParserFunctions[Layer_QuantizeLayer]               = &Deserializer::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer]          = &Deserializer::ParseQuantizedLstm;
    m_ParserFunctions[Layer_ReshapeLayer]                = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer]         = &Deserializer::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer]                 = &Deserializer::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer]                  = &Deserializer::ParseRsqrt;
    m_ParserFunctions[Layer_SliceLayer]                  = &Deserializer::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer]                = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer]         = &Deserializer::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer]           = &Deserializer::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer]               = &Deserializer::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer]                  = &Deserializer::ParseStack;
    m_ParserFunctions[Layer_StridedSliceLayer]           = &Deserializer::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer]            = &Deserializer::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer]                 = &Deserializer::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &Deserializer::ParseTransposeConvolution2d;
}
237
// Returns the common LayerBase sub-table of the layer at layerIndex by
// dispatching on the flatbuffer union tag (layer_type).
// Input/Output layers wrap their LayerBase in a bindable base table, hence
// the extra ->base() hop in those two cases.
// Throws ParseException for Layer_NONE or any unrecognized tag.
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            // Bindable layer: LayerBase is one level deeper.
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            // Bindable layer: LayerBase is one level deeper.
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                  boost::format("Layer type %1% not recognized") %
                  layerType));
    }
}
351
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000352std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
353{
354 auto layer = GetBaseLayer(graph, index);
355 assert(layer);
356 return layer->layerName()->str();
357}
358
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000359int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000360{
361 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
362
363 if (layerType == Layer::Layer_InputLayer)
364 {
365 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
366 }
367 else if ( layerType == Layer::Layer_OutputLayer )
368 {
369 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
370 }
371 return 0;
372}
373
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000374armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000375{
376 switch (dataLayout)
377 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000378 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000379 return armnn::DataLayout::NHWC;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000380 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000381 default:
382 return armnn::DataLayout::NCHW;
383 }
384}
385
// Maps the serialized ActivationFunction enum onto armnn::ActivationFunction.
// Unrecognised values fall back to Sigmoid.
armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}
412
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100413armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
414{
415 switch (function)
416 {
417 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
418 return armnn::ArgMinMaxFunction::Max;
419 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
420 default:
421 return armnn::ArgMinMaxFunction::Min;
422 }
423}
424
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100425armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
426{
427 switch (method)
428 {
429 case armnnSerializer::ResizeMethod_NearestNeighbor:
430 return armnn::ResizeMethod::NearestNeighbor;
431 case armnnSerializer::ResizeMethod_Bilinear:
432 return armnn::ResizeMethod::NearestNeighbor;
433 default:
434 return armnn::ResizeMethod::NearestNeighbor;
435 }
436}
437
// Converts a serialized TensorInfo table into an armnn::TensorInfo,
// validating the pointer and mapping the serialized data type.
// Throws ParseException for data types the deserializer does not support.
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
            type = armnn::DataType::QuantisedAsymm8;
            break;
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QuantisedSymm16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    // Quantization parameters are passed through even for non-quantized
    // types; the TensorInfo constructor accepts them unconditionally.
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    // Copy the flatbuffer dimensions into a contiguous local buffer.
    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}
489
// Converts a serialized ConstTensor into an armnn::ConstTensor, validating
// that the stored element count matches the TensorInfo's element count.
// NOTE(review): the returned ConstTensor appears to reference the flatbuffer
// data in place (no copy), so the serialized buffer must outlive it — confirm
// against armnn::ConstTensor's ownership semantics.
// Throws ParseException for unsupported ConstTensorData payload types.
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}
532
// Collects, for each input slot of the layer at layerIndex, the TensorInfo
// of the upstream layer's output that feeds it.
// NOTE(review): this always reads output slot 0 of the source layer and
// ignores the connection's own output-slot index — fine for single-output
// producers, but verify for multi-output layers (e.g. Splitter).
Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
                                                         unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        // Guard against a corrupt/negative source layer index.
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
            (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}
550
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000551Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000552 unsigned int layerIndex)
553{
554 CHECK_LAYERS(graphPtr, 0, layerIndex);
555 auto layer = GetBaseLayer(graphPtr, layerIndex);
556 const auto& numOutputs = layer->outputSlots()->size();
557
558 TensorRawPtrVector result(numOutputs);
559
560 for (unsigned int i=0; i<numOutputs; ++i)
561 {
562 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
563 }
564 return result;
565}
566
// Default entry in the parser dispatch table: reached for any layer tag
// without a registered Parse* handler. Always throws, naming the offending
// layer's index and serialized name.
void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(
        boost::str(
            boost::format("Layer not supported. "
                          "layerIndex: %1% "
                          "layerName: %2% / %3%") %
            layerIndex %
            layerName %
            CHECK_LOCATION().AsString()));
}
580
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000581void Deserializer::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000582{
583 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000584 m_InputBindings.clear();
585 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000586}
587
// Factory: returns a heap-allocated Deserializer. The caller owns the result
// and must release it via IDeserializer::Destroy (see Create for the
// smart-pointer variant).
IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}
592
// Factory: returns a Deserializer wrapped in an IDeserializerPtr whose
// deleter is IDeserializer::Destroy, so cleanup is automatic.
IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}
597
// Destroys an instance obtained from CreateRaw. Safe to call with nullptr
// (delete on a null pointer is a no-op).
void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}
602
// Builds an INetwork from an in-memory serialized graph. Resets any state
// from a previous parse, verifies the buffer, then walks the graph.
INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph(graph);
}
609
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000610armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000611{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000612 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000613 std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
614 GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
615 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000616}
617
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000618Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000619{
620 if (binaryContent == nullptr)
621 {
622 throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
623 CHECK_LOCATION().AsString()));
624 }
625 flatbuffers::Verifier verifier(binaryContent, len);
626 if (verifier.VerifyBuffer<SerializedGraph>() == false)
627 {
628 throw ParseException(
629 boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
630 "flatbuffers format. size:%1% %2%") %
631 len %
632 CHECK_LOCATION().AsString()));
633 }
634 return GetSerializedGraph(binaryContent);
635}
636
// Build an armnn::INetwork from a verified SerializedGraph.
// Two passes: (1) create every non-IO layer via the per-type parser function,
// plus the bound input/output layers; (2) resolve the connections that the
// Register*SlotOfConnection calls queued up in m_GraphConnections.
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    // Input/Output layers are skipped here; SetupInputLayers/SetupOutputLayers
    // create them so that binding information can be recorded at the same time.
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function (pointer-to-member dispatch
            // keyed on the flatbuffer layer_type enum)
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            // An output slot may feed several input slots (fan-out).
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    // Transfer ownership of the finished network to the caller; std::move is
    // required because m_Network is a member, not a local.
    return std::move(m_Network);
}
677
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000678BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000679 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000680{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000681 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000682 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000683 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000684 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000685 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000686 }
687 }
688 throw ParseException(
689 boost::str(
690 boost::format("No input binding found for layer:%1% / %2%") %
691 name %
692 CHECK_LOCATION().AsString()));
693}
694
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000695BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000696 const std::string& name) const
697{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000698 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000699 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000700 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000701 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000702 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000703 }
704 }
705 throw ParseException(
706 boost::str(
707 boost::format("No output binding found for layer:%1% / %2%") %
708 name %
709 CHECK_LOCATION().AsString()));
710}
711
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100712unsigned int Deserializer::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
713{
714 for (unsigned int i = 0; i < graph->layers()->size(); i++)
715 {
716 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
717 if (layer->index() == targetIndex)
718 {
719 return i;
720 }
721 }
722 throw ParseException("Layer with given index not found");
723}
724
Derek Lamberti8ddae332019-02-21 16:29:43 +0000725void Deserializer::SetupInputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000726{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000727 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100728 const unsigned int numInputs = graph->inputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000729 m_InputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100730 m_InputBindings.reserve(numInputs);
731
732 for (unsigned int i = 0; i < numInputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000733 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100734 const unsigned int inputId = graph->inputIds()->Get(i);
735 const unsigned int inputLayerIndex = GetLayerIndexInVector(graph, inputId);
736 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000737
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100738 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
739 LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
740 BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Kevin May43a799c2019-02-08 16:31:42 +0000741
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100742 IConnectableLayer* inputLayer =
743 m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000744
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100745 const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
746 inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
747 RegisterOutputSlots(graph, inputLayerIndex, inputLayer);
748
Derek Lamberti8ddae332019-02-21 16:29:43 +0000749 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100750 m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000751 }
752}
753
Derek Lamberti8ddae332019-02-21 16:29:43 +0000754void Deserializer::SetupOutputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000755{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000756 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100757 const unsigned int numOutputs = graph->outputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000758 m_OutputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100759 m_OutputBindings.reserve(numOutputs);
760
761 for (unsigned int i = 0; i < numOutputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000762 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100763 const unsigned int outputId = graph->outputIds()->Get(i);
764 const unsigned int outputLayerIndex = GetLayerIndexInVector(graph, outputId);
765 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000766
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100767 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
768 LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
769 BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Derek Lamberti8ddae332019-02-21 16:29:43 +0000770
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100771 IConnectableLayer* outputLayer =
772 m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000773
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100774 RegisterInputSlots(graph, outputLayerIndex, outputLayer);
775
776 unsigned int sourceLayerIndex =
777 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
778 LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
779 const armnn::TensorInfo& tensorInfo = ToTensorInfo(sourceBaseLayer->outputSlots()->Get(0)->tensorInfo());
780
Derek Lamberti8ddae332019-02-21 16:29:43 +0000781 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100782 m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000783 }
784}
785
Derek Lamberti8ddae332019-02-21 16:29:43 +0000786void Deserializer::RegisterOutputSlots(GraphPtr graph,
787 uint32_t layerIndex,
788 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000789{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000790 CHECK_LAYERS(graph, 0, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000791 BOOST_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100792 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
793 if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
Kevin May43a799c2019-02-08 16:31:42 +0000794 {
795 throw ParseException(
796 boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
797 " for layer index: %3% %4%") %
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100798 baseLayer->outputSlots()->size() %
Kevin May43a799c2019-02-08 16:31:42 +0000799 layer->GetNumOutputSlots() %
800 layerIndex %
801 CHECK_LOCATION().AsString()));
802 }
803
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100804 for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +0000805 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100806 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
807 armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
808 // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
809 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
Kevin May43a799c2019-02-08 16:31:42 +0000810 }
811}
812
Derek Lamberti8ddae332019-02-21 16:29:43 +0000813void Deserializer::RegisterInputSlots(GraphPtr graph,
814 uint32_t layerIndex,
815 armnn::IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000816{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000817 CHECK_LAYERS(graph, 0, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000818 BOOST_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100819 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
820 if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
Kevin May43a799c2019-02-08 16:31:42 +0000821 {
822 throw ParseException(
823 boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
824 " for layer index:%3% %4%") %
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100825 baseLayer->inputSlots()->size() %
Kevin May43a799c2019-02-08 16:31:42 +0000826 layer->GetNumInputSlots() %
827 layerIndex %
828 CHECK_LOCATION().AsString()));
829 }
830
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100831 for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +0000832 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100833 auto fbInputSlot = baseLayer->inputSlots()->Get(i);
834 auto fbConnection = fbInputSlot->connection();
835 armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
836 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
Kevin May43a799c2019-02-08 16:31:42 +0000837 }
838}
839
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000840void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
841 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100842 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +0000843{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100844 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +0000845 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100846 m_GraphConnections[sourceLayerIndex] = Connections();
847 }
848
849 Connections& connections = m_GraphConnections[sourceLayerIndex];
850 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
851 {
852 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +0000853 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000854 else
855 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100856 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000857 }
858}
Kevin May43a799c2019-02-08 16:31:42 +0000859
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000860void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100861 uint32_t outputSlotIndex,
862 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000863{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100864 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
865 {
866 m_GraphConnections[sourceLayerIndex] = Connections();
867 }
868
869 Connections& connections = m_GraphConnections[sourceLayerIndex];
870 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
871 {
872 throw ParseException("Same output slot index processed twice");
873 }
874
875 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +0000876}
877
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +0100878void Deserializer::ParseAbs(armnnDeserializer::Deserializer::GraphPtr graph, unsigned int layerIndex)
879{
880 CHECK_LAYERS(graph, 0, layerIndex);
881 auto inputs = GetInputs(graph, layerIndex);
882 CHECK_LOCATION();
883 CHECK_VALID_SIZE(inputs.size(), 1);
884
885 auto outputs = GetOutputs(graph, layerIndex);
886 CHECK_VALID_SIZE(outputs.size(), 1);
887
888 auto layerName = GetLayerName(graph, layerIndex);
889
890 IConnectableLayer* layer = m_Network->AddAbsLayer(layerName.c_str());
891 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
892 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
893
894 RegisterInputSlots(graph, layerIndex, layer);
895 RegisterOutputSlots(graph, layerIndex, layer);
896}
897
Derek Lamberti8ddae332019-02-21 16:29:43 +0000898void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +0000899{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000900 CHECK_LAYERS(graph, 0, layerIndex);
901 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000902 CHECK_LOCATION();
903 CHECK_VALID_SIZE(inputs.size(), 1);
904
Derek Lamberti8ddae332019-02-21 16:29:43 +0000905 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000906 CHECK_VALID_SIZE(outputs.size(), 1);
907
Derek Lamberti8ddae332019-02-21 16:29:43 +0000908 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000909 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +0000910 auto serializerDescriptor = serializerLayer->descriptor();
911
912 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +0900913 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +0000914 descriptor.m_A = serializerDescriptor->a();
915 descriptor.m_B = serializerDescriptor->b();
916
917 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
918 layerName.c_str());
919 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
920 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
921
Derek Lamberti8ddae332019-02-21 16:29:43 +0000922 RegisterInputSlots(graph, layerIndex, layer);
923 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +0000924}
925
Derek Lamberti8ddae332019-02-21 16:29:43 +0000926void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000927{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000928 CHECK_LAYERS(graph, 0, layerIndex);
929 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000930 CHECK_LOCATION();
931 CHECK_VALID_SIZE(inputs.size(), 2);
932
Derek Lamberti8ddae332019-02-21 16:29:43 +0000933 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000934 CHECK_VALID_SIZE(outputs.size(), 1);
935
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000936 auto layerName = GetLayerName(graph, layerIndex);
937 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +0000938
939 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
940 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
941
Derek Lamberti8ddae332019-02-21 16:29:43 +0000942 RegisterInputSlots(graph, layerIndex, layer);
943 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +0000944}
945
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100946void Deserializer::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
947{
948 CHECK_LAYERS(graph, 0, layerIndex);
949 auto inputs = GetInputs(graph, layerIndex);
950 CHECK_LOCATION();
951 CHECK_VALID_SIZE(inputs.size(), 1);
952
953 auto outputs = GetOutputs(graph, layerIndex);
954 CHECK_VALID_SIZE(outputs.size(), 1);
955
956 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
957 auto serializerDescriptor = serializerLayer->descriptor();
958
959 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +0900960 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100961 descriptor.m_Axis = serializerDescriptor->axis();
962 auto layerName = GetLayerName(graph, layerIndex);
963 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
964
965 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
966 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
967
968 RegisterInputSlots(graph, layerIndex, layer);
969 RegisterOutputSlots(graph, layerIndex, layer);
970}
971
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000972void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
973{
974 CHECK_LAYERS(graph, 0, layerIndex);
975
976 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
977 CHECK_VALID_SIZE(inputs.size(), 1);
978
979 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
980 CHECK_VALID_SIZE(outputs.size(), 1);
981
982 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
983 auto flatBufferCrops = flatBufferDescriptor->crops();
984 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
985
986 if (flatBufferCrops->Length() % 2 != 0)
987 {
988 throw ParseException(boost::str(
989 boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
990 }
991
992 std::vector<std::pair<unsigned int, unsigned int>> crops;
993 crops.reserve(flatBufferCrops->Length() / 2);
994 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
995 {
996 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
997 }
998
999 armnn::BatchToSpaceNdDescriptor descriptor;
1000 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1001 descriptor.m_BlockShape =
1002 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1003 descriptor.m_Crops = crops;
1004
1005 auto layerName = GetLayerName(graph, layerIndex);
1006 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1007
1008 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1009 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1010
1011 RegisterInputSlots(graph, layerIndex, layer);
1012 RegisterOutputSlots(graph, layerIndex, layer);
1013}
1014
ruoyan018e7fa232019-02-28 15:09:07 +00001015void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
1016{
1017 CHECK_LAYERS(graph, 0, layerIndex);
1018
1019 auto inputs = GetInputs(graph, layerIndex);
1020 CHECK_VALID_SIZE(inputs.size(), 1);
1021
1022 auto outputs = GetOutputs(graph, layerIndex);
1023 CHECK_VALID_SIZE(outputs.size(), 1);
1024 auto outputInfo = ToTensorInfo(outputs[0]);
1025
ruoyan015c7ab052019-03-04 14:48:02 +00001026 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001027
1028 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1029 auto serializerDescriptor = serializerLayer->descriptor();
1030
1031 armnn::BatchNormalizationDescriptor descriptor;
1032 descriptor.m_Eps = serializerDescriptor->eps();
1033 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1034
1035 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1036 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1037 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1038 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1039
1040 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1041 mean,
1042 variance,
1043 beta,
1044 gamma,
1045 layerName.c_str());
1046 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1047
1048 RegisterInputSlots(graph, layerIndex, layer);
1049 RegisterOutputSlots(graph, layerIndex, layer);
1050}
1051
Conor Kennedy76277882019-02-26 08:29:54 +00001052void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
1053{
1054 CHECK_LAYERS(graph, 0, layerIndex);
1055 CHECK_LOCATION();
1056
1057 auto outputs = GetOutputs(graph, layerIndex);
1058 CHECK_VALID_SIZE(outputs.size(), 1);
1059
1060 auto layerName = GetLayerName(graph, layerIndex);
1061
1062 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
1063 auto serializerInput = serializerLayer->input();
1064
1065 armnn::ConstTensor input = ToConstTensor(serializerInput);
1066
1067 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
1068
1069 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1070 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1071
1072 RegisterOutputSlots(graph, layerIndex, layer);
1073}
1074
Derek Lamberti8ddae332019-02-21 16:29:43 +00001075void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001076{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001077 CHECK_LAYERS(graph, 0, layerIndex);
1078 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001079 CHECK_LOCATION();
1080 CHECK_VALID_SIZE(inputs.size(), 1);
1081
Derek Lamberti8ddae332019-02-21 16:29:43 +00001082 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001083 CHECK_VALID_SIZE(outputs.size(), 1);
1084
Derek Lamberti8ddae332019-02-21 16:29:43 +00001085 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001086 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001087 auto serializerDescriptor = serializerLayer->descriptor();
1088
1089 armnn::Convolution2dDescriptor descriptor;
1090 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1091 descriptor.m_PadRight = serializerDescriptor->padRight();
1092 descriptor.m_PadTop = serializerDescriptor->padTop();
1093 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1094 descriptor.m_StrideX = serializerDescriptor->strideX();
1095 descriptor.m_StrideY = serializerDescriptor->strideY();;
Matthew Benthamacad04e2019-05-13 10:02:45 +01001096 descriptor.m_DilationX = serializerDescriptor->dilationX();
1097 descriptor.m_DilationY = serializerDescriptor->dilationY();;
Mike Kellya0766c32019-02-19 17:22:07 +00001098 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1099 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1100
1101 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1102 armnn::ConstTensor biases;
1103
Matteo Martincighfc598e12019-05-14 10:36:13 +01001104 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001105 if (descriptor.m_BiasEnabled)
1106 {
1107 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001108 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001109 }
1110 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1111 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001112 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001113 layerName.c_str());
1114 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1115 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1116
Derek Lamberti8ddae332019-02-21 16:29:43 +00001117 RegisterInputSlots(graph, layerIndex, layer);
1118 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001119}
1120
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001121void Deserializer::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
1122{
1123 CHECK_LAYERS(graph, 0, layerIndex);
1124
1125 auto inputs = GetInputs(graph, layerIndex);
1126 CHECK_VALID_SIZE(inputs.size(), 1);
1127
1128 auto outputs = GetOutputs(graph, layerIndex);
1129 CHECK_VALID_SIZE(outputs.size(), 1);
1130
1131 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1132
1133 armnn::DepthToSpaceDescriptor descriptor;
1134 descriptor.m_BlockSize = fbDescriptor->blockSize();
1135 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1136
1137 auto layerName = GetLayerName(graph, layerIndex);
1138 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1139
1140 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1141 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1142
1143 RegisterInputSlots(graph, layerIndex, layer);
1144 RegisterOutputSlots(graph, layerIndex, layer);
1145}
1146
Derek Lamberti8ddae332019-02-21 16:29:43 +00001147void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001148{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001149 CHECK_LAYERS(graph, 0, layerIndex);
1150 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001151 CHECK_LOCATION();
1152 CHECK_VALID_SIZE(inputs.size(), 1);
1153
Derek Lamberti8ddae332019-02-21 16:29:43 +00001154 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001155 CHECK_VALID_SIZE(outputs.size(), 1);
1156
Derek Lamberti8ddae332019-02-21 16:29:43 +00001157 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001158 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001159 auto serializerDescriptor = serializerLayer->descriptor();
1160
1161 armnn::DepthwiseConvolution2dDescriptor descriptor;
1162 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1163 descriptor.m_PadRight = serializerDescriptor->padRight();
1164 descriptor.m_PadTop = serializerDescriptor->padTop();
1165 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1166 descriptor.m_StrideX = serializerDescriptor->strideX();
Aron Virginas-Tar5e1b0cf2019-06-21 14:20:11 +01001167 descriptor.m_StrideY = serializerDescriptor->strideY();
1168 descriptor.m_DilationX = serializerDescriptor->dilationX();
1169 descriptor.m_DilationY = serializerDescriptor->dilationY();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001170 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1171 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1172
1173 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1174 armnn::ConstTensor biases;
1175
Matteo Martincighfc598e12019-05-14 10:36:13 +01001176 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001177 if (descriptor.m_BiasEnabled)
1178 {
1179 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001180 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001181 }
1182 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1183 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001184 optionalBiases,
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001185 layerName.c_str());
1186
1187 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1188 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1189
Derek Lamberti8ddae332019-02-21 16:29:43 +00001190 RegisterInputSlots(graph, layerIndex, layer);
1191 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001192}
1193
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001194void Deserializer::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
1195{
1196 CHECK_LAYERS(graph, 0, layerIndex);
1197 auto inputs = GetInputs(graph, layerIndex);
1198 CHECK_LOCATION();
1199 CHECK_VALID_SIZE(inputs.size(), 2);
1200
1201 auto outputs = GetOutputs(graph, layerIndex);
1202 CHECK_VALID_SIZE(outputs.size(), 4);
1203
1204 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1205 auto layerName = GetLayerName(graph, layerIndex);
1206 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1207
1208 armnn::DetectionPostProcessDescriptor descriptor;
1209 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1210 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1211 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1212 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1213 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1214 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1215 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1216 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1217 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1218 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1219 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1220
1221 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1222
1223 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1224 anchors,
1225 layerName.c_str());
1226
1227 for (unsigned int i = 0; i < 4; i++)
1228 {
1229 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1230 }
1231
1232 RegisterInputSlots(graph, layerIndex, layer);
1233 RegisterOutputSlots(graph, layerIndex, layer);
1234}
1235
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001236void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
1237{
1238 CHECK_LAYERS(graph, 0, layerIndex);
1239 auto inputs = GetInputs(graph, layerIndex);
1240 CHECK_LOCATION();
1241 CHECK_VALID_SIZE(inputs.size(), 2);
1242
1243 auto outputs = GetOutputs(graph, layerIndex);
1244 CHECK_VALID_SIZE(outputs.size(), 1);
1245
1246 auto layerName = GetLayerName(graph, layerIndex);
1247 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1248
1249 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1250 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1251
1252 RegisterInputSlots(graph, layerIndex, layer);
1253 RegisterOutputSlots(graph, layerIndex, layer);
1254}
1255
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001256void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
1257{
1258 CHECK_LAYERS(graph, 0, layerIndex);
1259 auto inputs = GetInputs(graph, layerIndex);
1260 CHECK_LOCATION();
1261 CHECK_VALID_SIZE(inputs.size(), 2);
1262
1263 auto outputs = GetOutputs(graph, layerIndex);
1264 CHECK_VALID_SIZE(outputs.size(), 1);
1265
1266 auto layerName = GetLayerName(graph, layerIndex);
1267 IConnectableLayer* layer = m_Network->AddEqualLayer(layerName.c_str());
1268
1269 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1270 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1271
1272 RegisterInputSlots(graph, layerIndex, layer);
1273 RegisterOutputSlots(graph, layerIndex, layer);
1274}
1275
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001276void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
1277{
1278 CHECK_LAYERS(graph, 0, layerIndex);
1279 auto inputs = GetInputs(graph, layerIndex);
1280 CHECK_LOCATION();
1281 CHECK_VALID_SIZE(inputs.size(), 2);
1282
1283 auto outputs = GetOutputs(graph, layerIndex);
1284 CHECK_VALID_SIZE(outputs.size(), 1);
1285
1286 auto layerName = GetLayerName(graph, layerIndex);
1287 IConnectableLayer* layer = m_Network->AddGreaterLayer(layerName.c_str());
1288
1289 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1290 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1291
1292 RegisterInputSlots(graph, layerIndex, layer);
1293 RegisterOutputSlots(graph, layerIndex, layer);
1294}
1295
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001296void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
1297{
1298 CHECK_LAYERS(graph, 0, layerIndex);
1299
1300 auto inputs = GetInputs(graph, layerIndex);
1301 CHECK_VALID_SIZE(inputs.size(), 1);
1302
1303 auto outputs = GetOutputs(graph, layerIndex);
1304 CHECK_VALID_SIZE(outputs.size(), 1);
1305 auto outputInfo = ToTensorInfo(outputs[0]);
1306
1307 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1308 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1309
1310 auto layerName = GetLayerName(graph, layerIndex);
1311 armnn::L2NormalizationDescriptor descriptor;
1312 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001313 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001314
1315 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1316 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1317
1318 RegisterInputSlots(graph, layerIndex, layer);
1319 RegisterOutputSlots(graph, layerIndex, layer);
1320}
1321
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001322void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
1323{
1324 CHECK_LAYERS(graph, 0, layerIndex);
1325 auto inputs = GetInputs(graph, layerIndex);
1326 CHECK_LOCATION();
1327 CHECK_VALID_SIZE(inputs.size(), 2);
1328
1329 auto outputs = GetOutputs(graph, layerIndex);
1330 CHECK_VALID_SIZE(outputs.size(), 1);
1331
1332 auto layerName = GetLayerName(graph, layerIndex);
1333 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1334
1335 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1336 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1337
1338 RegisterInputSlots(graph, layerIndex, layer);
1339 RegisterOutputSlots(graph, layerIndex, layer);
1340}
1341
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001342void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
1343{
1344 CHECK_LAYERS(graph, 0, layerIndex);
1345 auto inputs = GetInputs(graph, layerIndex);
1346 CHECK_LOCATION();
1347 CHECK_VALID_SIZE(inputs.size(), 2);
1348
1349 auto outputs = GetOutputs(graph, layerIndex);
1350 CHECK_VALID_SIZE(outputs.size(), 1);
1351
1352 auto layerName = GetLayerName(graph, layerIndex);
1353 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1354
1355 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1356 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1357
1358 RegisterInputSlots(graph, layerIndex, layer);
1359 RegisterOutputSlots(graph, layerIndex, layer);
1360}
1361
Jim Flynne242f2d2019-05-22 14:24:13 +01001362const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1363 unsigned int layerIndex)
1364{
1365 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1366
1367 switch (layerType)
1368 {
1369 case Layer::Layer_ConcatLayer:
1370 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1371 case Layer::Layer_MergerLayer:
1372 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1373 default:
1374 throw armnn::Exception("unknown layer type, should be concat or merger");
1375 }
1376}
1377
Jim Flynn906f9462019-05-10 13:55:21 +01001378void Deserializer::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001379{
1380 CHECK_LAYERS(graph, 0, layerIndex);
1381 CHECK_LOCATION();
1382
1383 auto outputs = GetOutputs(graph, layerIndex);
1384 CHECK_VALID_SIZE(outputs.size(), 1);
1385
Jim Flynnac25a1b2019-02-28 10:40:49 +00001386 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001387 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1388 unsigned int numViews = originsDescriptor->numViews();
1389 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001390
1391 // can now check the number of inputs == number of views
1392 auto inputs = GetInputs(graph, layerIndex);
1393 CHECK_VALID_SIZE(inputs.size(), numViews);
1394
1395 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001396 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001397 for (unsigned int v = 0; v < numViews; ++v)
1398 {
1399 auto originPtr = originsPtr->Get(v);
1400 for (unsigned int d = 0; d < numDimensions; ++d)
1401 {
1402 uint32_t value = originPtr->data()->Get(d);
1403 descriptor.SetViewOriginCoord(v, d, value);
1404 }
1405 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001406 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001407
Jim Flynn906f9462019-05-10 13:55:21 +01001408 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001409 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1410 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1411
1412 RegisterInputSlots(graph, layerIndex, layer);
1413 RegisterOutputSlots(graph, layerIndex, layer);
1414}
1415
Derek Lamberti8ddae332019-02-21 16:29:43 +00001416void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001417{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001418 CHECK_LAYERS(graph, 0, layerIndex);
1419 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001420 CHECK_LOCATION();
1421 CHECK_VALID_SIZE(inputs.size(), 2);
1422
Derek Lamberti8ddae332019-02-21 16:29:43 +00001423 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001424 CHECK_VALID_SIZE(outputs.size(), 1);
1425
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001426 auto layerName = GetLayerName(graph, layerIndex);
1427 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001428
1429 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1430 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1431
Derek Lamberti8ddae332019-02-21 16:29:43 +00001432 RegisterInputSlots(graph, layerIndex, layer);
1433 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001434}
1435
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001436void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
1437{
1438 CHECK_LAYERS(graph, 0, layerIndex);
1439 CHECK_LOCATION();
1440
1441 auto inputs = GetInputs(graph, layerIndex);
1442 CHECK_VALID_SIZE(inputs.size(), 1);
1443
1444 auto outputs = GetOutputs(graph, layerIndex);
1445 CHECK_VALID_SIZE(outputs.size(), 1);
1446
1447 auto layerName = GetLayerName(graph, layerIndex);
1448
1449 armnn::IConnectableLayer* layer;
1450
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001451 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001452
1453 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1454 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1455
1456 RegisterInputSlots(graph, layerIndex, layer);
1457 RegisterOutputSlots(graph, layerIndex, layer);
1458}
1459
Derek Lamberti8ddae332019-02-21 16:29:43 +00001460void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001461{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001462 CHECK_LAYERS(graph, 0, layerIndex);
1463 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001464 CHECK_LOCATION();
1465 CHECK_VALID_SIZE(inputs.size(), 1);
1466
Derek Lamberti8ddae332019-02-21 16:29:43 +00001467 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001468 CHECK_VALID_SIZE(outputs.size(), 1);
1469
Derek Lamberti8ddae332019-02-21 16:29:43 +00001470 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001471 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001472 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1473
1474 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1475 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1476 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1477
1478 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1479
1480 armnn::IConnectableLayer* layer;
Matteo Martincighfc598e12019-05-14 10:36:13 +01001481 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001482 if (flatBufferDescriptor->biasEnabled())
1483 {
1484 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001485 optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001486 }
Matteo Martincighfc598e12019-05-14 10:36:13 +01001487 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1488 weightsTensor,
1489 optionalBiases,
1490 layerName.c_str());
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001491
1492 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1493 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1494
Derek Lamberti8ddae332019-02-21 16:29:43 +00001495 RegisterInputSlots(graph, layerIndex, layer);
1496 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001497}
1498
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001499void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
1500{
1501 CHECK_LAYERS(graph, 0, layerIndex);
1502
1503 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1504 CHECK_VALID_SIZE(inputs.size(), 1);
1505
1506 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1507 CHECK_VALID_SIZE(outputs.size(), 1);
1508
1509 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1510 auto flatBufferPadList = flatBufferDescriptor->padList();
David Monahan34757812019-06-19 11:47:21 +01001511 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001512
1513 if (flatBufferPadList->Length() % 2 != 0)
1514 {
1515 throw ParseException(boost::str(
1516 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1517 }
1518
1519 std::vector<std::pair<unsigned int, unsigned int>> padList;
1520 padList.reserve(flatBufferPadList->Length() / 2);
1521 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1522 {
1523 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1524 }
1525
David Monahan34757812019-06-19 11:47:21 +01001526 armnn::PadDescriptor descriptor(padList, padValue);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001527
1528 auto layerName = GetLayerName(graph, layerIndex);
1529 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1530
1531 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1532 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1533
1534 RegisterInputSlots(graph, layerIndex, layer);
1535 RegisterOutputSlots(graph, layerIndex, layer);
1536}
1537
Derek Lamberti8ddae332019-02-21 16:29:43 +00001538void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001539{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001540 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001541
1542 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001543 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001544
Derek Lamberti8ddae332019-02-21 16:29:43 +00001545 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001546 CHECK_VALID_SIZE(inputs.size(), 1);
1547
Derek Lamberti8ddae332019-02-21 16:29:43 +00001548 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001549 CHECK_VALID_SIZE(outputs.size(), 1);
1550 auto outputInfo = ToTensorInfo(outputs[0]);
1551
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001552 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001553 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1554
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001555 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001556 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1557
Derek Lamberti8ddae332019-02-21 16:29:43 +00001558 RegisterInputSlots(graph, layerIndex, layer);
1559 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001560}
1561
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001562armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001563 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001564{
1565 armnn::Pooling2dDescriptor desc;
1566
1567 switch (pooling2dDesc->poolType())
1568 {
1569 case PoolingAlgorithm_Average:
1570 {
1571 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001572 break;
1573 }
1574 case PoolingAlgorithm_Max:
1575 {
1576 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001577 break;
1578 }
1579 default:
1580 {
1581 BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
1582 }
1583 }
1584
1585 switch (pooling2dDesc->outputShapeRounding())
1586 {
1587 case OutputShapeRounding_Floor:
1588 {
1589 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1590 break;
1591 }
1592 case OutputShapeRounding_Ceiling:
1593 {
1594 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
1595 break;
1596 }
1597 default:
1598 {
1599 BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
1600 }
1601 }
1602
1603 switch (pooling2dDesc->paddingMethod())
1604 {
1605 case PaddingMethod_Exclude:
1606 {
1607 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1608 break;
1609 }
1610 case PaddingMethod_IgnoreValue:
1611 {
1612 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
1613 break;
1614 }
1615 default:
1616 {
1617 BOOST_ASSERT_MSG(false, "Unsupported padding method");
1618 }
1619 }
1620
1621 switch (pooling2dDesc->dataLayout())
1622 {
1623 case DataLayout_NCHW:
1624 {
1625 desc.m_DataLayout = armnn::DataLayout::NCHW;
1626 break;
1627 }
1628 case DataLayout_NHWC:
1629 {
1630 desc.m_DataLayout = armnn::DataLayout::NHWC;
1631 break;
1632 }
1633 default:
1634 {
1635 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1636 }
1637 }
1638
1639 desc.m_PadRight = pooling2dDesc->padRight();
1640 desc.m_PadLeft = pooling2dDesc->padLeft();
1641 desc.m_PadBottom = pooling2dDesc->padBottom();
1642 desc.m_PadTop = pooling2dDesc->padTop();
1643 desc.m_StrideX = pooling2dDesc->strideX();
1644 desc.m_StrideY = pooling2dDesc->strideY();
1645 desc.m_PoolWidth = pooling2dDesc->poolWidth();
1646 desc.m_PoolHeight = pooling2dDesc->poolHeight();
1647
1648 return desc;
1649}
1650
Derek Lamberti8ddae332019-02-21 16:29:43 +00001651void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001652{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001653 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001654
Derek Lamberti8ddae332019-02-21 16:29:43 +00001655 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001656 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001657 CHECK_VALID_SIZE(inputs.size(), 1);
1658
Derek Lamberti8ddae332019-02-21 16:29:43 +00001659 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001660 CHECK_VALID_SIZE(outputs.size(), 1);
1661 auto outputInfo = ToTensorInfo(outputs[0]);
1662
1663 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001664 auto layerName = GetLayerName(graph, layerIndex);
1665 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001666 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1667
Derek Lamberti8ddae332019-02-21 16:29:43 +00001668 RegisterInputSlots(graph, layerIndex, layer);
1669 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001670}
1671
Derek Lamberti87acb272019-03-27 16:51:31 +00001672void Deserializer::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
1673{
1674 CHECK_LAYERS(graph, 0, layerIndex);
1675
1676 auto inputs = GetInputs(graph, layerIndex);
1677 CHECK_VALID_SIZE(inputs.size(), 1);
1678
1679 auto outputs = GetOutputs(graph, layerIndex);
1680 CHECK_VALID_SIZE(outputs.size(), 1);
1681 auto outputInfo = ToTensorInfo(outputs[0]);
1682
1683 auto layerName = GetLayerName(graph, layerIndex);
1684 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
1685 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1686
1687 RegisterInputSlots(graph, layerIndex, layer);
1688 RegisterOutputSlots(graph, layerIndex, layer);
1689}
1690
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001691armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00001692 const std::vector<uint32_t>& targetDimsIn)
1693{
1694 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
1695 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
1696
1697 if (stretchDim != targetDimsIn.end())
1698 {
1699 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
1700 {
1701 throw ParseException(boost::str(
1702 boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
1703 }
1704
1705 auto targetNumElements =
1706 boost::numeric_cast<unsigned int>(
1707 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
1708
1709 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
1710 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
1711 }
1712
1713 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
1714
1715 armnn::TensorInfo reshapeInfo = inputTensorInfo;
1716 reshapeInfo.SetShape(outputShape);
1717
1718 return reshapeInfo;
1719}
1720
// Deserializes a Reshape layer: resolves the serialized target shape (which
// may contain a single -1 "stretch" dimension) via OutputShapeOfReshape and
// validates it against the serialized output tensor's dimensions.
void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    // Target shape as serialized in the ReshapeLayer descriptor; any -1 entry
    // is resolved against the input's element count by OutputShapeOfReshape.
    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    // Dimensions recorded for the serialized output tensor, used as the
    // expected shape in the consistency check below.
    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    // NOTE(review): the shape consistency check only fires when more than one
    // input is present — presumably a second input carries an explicit shape.
    // TODO confirm this guard is intentional for the single-input case.
    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    // The layer's output info uses the resolved shape, not the serialized one.
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1763
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01001764void Deserializer::ParseResize(GraphPtr graph, unsigned int layerIndex)
1765{
1766 CHECK_LAYERS(graph, 0, layerIndex);
1767
1768 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1769 CHECK_VALID_SIZE(inputs.size(), 1);
1770
1771 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1772 CHECK_VALID_SIZE(outputs.size(), 1);
1773
1774 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
1775
1776 armnn::ResizeDescriptor descriptor;
1777 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
1778 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
1779 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
1780 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1781
1782 auto layerName = GetLayerName(graph, layerIndex);
1783 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
1784
1785 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1786 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1787
1788 RegisterInputSlots(graph, layerIndex, layer);
1789 RegisterOutputSlots(graph, layerIndex, layer);
1790}
1791
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001792void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
1793{
1794 CHECK_LAYERS(graph, 0, layerIndex);
1795
1796 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1797 CHECK_VALID_SIZE(inputs.size(), 1);
1798
1799 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1800 CHECK_VALID_SIZE(outputs.size(), 1);
1801
1802 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
1803
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01001804 armnn::ResizeDescriptor descriptor;
1805 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001806 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01001807 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
1808 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001809
1810 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01001811 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00001812
1813 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1814 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1815
1816 RegisterInputSlots(graph, layerIndex, layer);
1817 RegisterOutputSlots(graph, layerIndex, layer);
1818}
1819
Derek Lamberti8ddae332019-02-21 16:29:43 +00001820void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001821{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001822 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001823
Derek Lamberti8ddae332019-02-21 16:29:43 +00001824 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001825 CHECK_VALID_SIZE(inputs.size(), 1);
1826
Derek Lamberti8ddae332019-02-21 16:29:43 +00001827 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001828 CHECK_VALID_SIZE(outputs.size(), 1);
1829
1830 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00001831 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001832 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001833
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001834 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
1835
1836 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1837 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1838
Derek Lamberti8ddae332019-02-21 16:29:43 +00001839 RegisterInputSlots(graph, layerIndex, layer);
1840 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001841}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00001842
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00001843void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
1844{
1845 CHECK_LAYERS(graph, 0, layerIndex);
1846
1847 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1848 CHECK_VALID_SIZE(inputs.size(), 1);
1849
1850 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1851 CHECK_VALID_SIZE(outputs.size(), 1);
1852
1853 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
1854 auto flatBufferPadList = flatBufferDescriptor->padList();
1855 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1856
1857 if (flatBufferPadList->Length() % 2 != 0)
1858 {
1859 throw ParseException(boost::str(
1860 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1861 }
1862
1863 std::vector<std::pair<unsigned int, unsigned int>> padList;
1864 padList.reserve(flatBufferPadList->Length() / 2);
1865 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1866 {
1867 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1868 }
1869
1870 armnn::SpaceToBatchNdDescriptor descriptor;
1871 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1872 descriptor.m_BlockShape =
1873 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1874 descriptor.m_PadList = padList;
1875
1876 auto layerName = GetLayerName(graph, layerIndex);
1877 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
1878
1879 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1880 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1881
1882 RegisterInputSlots(graph, layerIndex, layer);
1883 RegisterOutputSlots(graph, layerIndex, layer);
1884}
1885
Aron Virginas-Taraa067142019-06-11 16:01:44 +01001886void Deserializer::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
1887{
1888 CHECK_LAYERS(graph, 0, layerIndex);
1889
1890 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1891 CHECK_VALID_SIZE(inputs.size(), 1);
1892
1893 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1894 CHECK_VALID_SIZE(outputs.size(), 1);
1895
1896 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
1897
1898 armnn::SpaceToDepthDescriptor descriptor;
1899 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
1900 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1901
1902 auto layerName = GetLayerName(graph, layerIndex);
1903 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
1904
1905 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1906 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1907
1908 RegisterInputSlots(graph, layerIndex, layer);
1909 RegisterOutputSlots(graph, layerIndex, layer);
1910}
1911
Nina Drozd57728782019-02-27 10:53:27 +00001912armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
1913 Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
1914 unsigned int layerIndex)
1915{
1916 armnn::NormalizationDescriptor desc;
1917
1918 switch (normalizationDescriptor->normChannelType())
1919 {
1920 case NormalizationAlgorithmChannel_Across:
1921 {
1922 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
1923 break;
1924 }
1925 case NormalizationAlgorithmChannel_Within:
1926 {
1927 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
1928 break;
1929 }
1930 default:
1931 {
1932 BOOST_ASSERT_MSG(false, "Unsupported normalization channel type");
1933 }
1934 }
1935
1936 switch (normalizationDescriptor->normMethodType())
1937 {
1938 case NormalizationAlgorithmMethod_LocalBrightness:
1939 {
1940 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
1941 break;
1942 }
1943 case NormalizationAlgorithmMethod_LocalContrast:
1944 {
1945 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
1946 break;
1947 }
1948 default:
1949 {
1950 BOOST_ASSERT_MSG(false, "Unsupported normalization method type");
1951 }
1952 }
1953
1954 switch (normalizationDescriptor->dataLayout())
1955 {
1956 case DataLayout_NCHW:
1957 {
1958 desc.m_DataLayout = armnn::DataLayout::NCHW;
1959 break;
1960 }
1961 case DataLayout_NHWC:
1962 {
1963 desc.m_DataLayout = armnn::DataLayout::NHWC;
1964 break;
1965 }
1966 default:
1967 {
1968 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1969 }
1970 }
1971
1972 desc.m_Alpha = normalizationDescriptor->alpha();
1973 desc.m_Beta = normalizationDescriptor->beta();
1974 desc.m_K = normalizationDescriptor->k();
1975 desc.m_NormSize = normalizationDescriptor->normSize();
1976
1977 return desc;
1978}
1979
1980void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
1981{
1982 CHECK_LAYERS(graph, 0, layerIndex);
1983
1984 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
1985
1986 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1987 CHECK_VALID_SIZE(inputs.size(), 1);
1988
1989 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1990 CHECK_VALID_SIZE(outputs.size(), 1);
1991
1992 auto outputInfo = ToTensorInfo(outputs[0]);
1993
1994 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
1995 auto layerName = GetLayerName(graph, layerIndex);
1996
1997 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
1998 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1999
2000 RegisterInputSlots(graph, layerIndex, layer);
2001 RegisterOutputSlots(graph, layerIndex, layer);
2002}
2003
Sadik Armagan8b42a382019-03-01 14:24:49 +00002004void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
2005{
2006 CHECK_LAYERS(graph, 0, layerIndex);
2007 auto inputs = GetInputs(graph, layerIndex);
2008 CHECK_LOCATION();
2009 CHECK_VALID_SIZE(inputs.size(), 1);
2010
2011 auto outputs = GetOutputs(graph, layerIndex);
2012 CHECK_VALID_SIZE(outputs.size(), 1);
2013
2014 auto layerName = GetLayerName(graph, layerIndex);
2015 IConnectableLayer* layer = m_Network->AddRsqrtLayer(layerName.c_str());
2016
2017 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2018 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2019
2020 RegisterInputSlots(graph, layerIndex, layer);
2021 RegisterOutputSlots(graph, layerIndex, layer);
2022}
2023
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002024void Deserializer::ParseSlice(GraphPtr graph, unsigned int layerIndex)
2025{
2026 CHECK_LAYERS(graph, 0, layerIndex);
2027
2028 auto inputs = GetInputs(graph, layerIndex);
2029 CHECK_VALID_SIZE(inputs.size(), 1);
2030
2031 auto outputs = GetOutputs(graph, layerIndex);
2032 CHECK_VALID_SIZE(outputs.size(), 1);
2033
2034 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2035
2036 auto fbBegin = fbDescriptor->begin();
2037 auto fbSize = fbDescriptor->size();
2038
2039 if (fbBegin->Length() != fbSize->Length())
2040 {
2041 throw ParseException(boost::str(
2042 boost::format("Begin and size descriptors must have the same length %1%") % CHECK_LOCATION().AsString()));
2043 }
2044
2045 armnn::SliceDescriptor descriptor;
2046 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
2047 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
2048
2049 auto layerName = GetLayerName(graph, layerIndex);
2050 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2051
2052 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2053 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2054
2055 RegisterInputSlots(graph, layerIndex, layer);
2056 RegisterOutputSlots(graph, layerIndex, layer);
2057}
2058
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002059void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
2060{
2061 CHECK_LAYERS(graph, 0, layerIndex);
2062
2063 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2064 CHECK_VALID_SIZE(inputs.size(), 1);
2065
2066 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2067 CHECK_VALID_SIZE(outputs.size(), 1);
2068
2069 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2070
2071 auto flatBufferBegin = flatBufferDescriptor->begin();
2072 auto flatBufferEnd = flatBufferDescriptor->end();
2073 auto flatBufferStride = flatBufferDescriptor->stride();
2074
2075 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
2076 flatBufferBegin->Length() == flatBufferStride->Length()))
2077 {
2078 throw ParseException(boost::str(
2079 boost::format("The size of the begin, end, and stride must be equal %1%") % CHECK_LOCATION().AsString()));
2080 }
2081
2082 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2083 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2084 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
2085
2086 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
2087 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2088 descriptor.m_EndMask = flatBufferDescriptor->endMask();
2089 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
2090 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
2091 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
2092 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2093
2094 auto layerName = GetLayerName(graph, layerIndex);
2095 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
2096
2097 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2098 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2099
2100 RegisterInputSlots(graph, layerIndex, layer);
2101 RegisterOutputSlots(graph, layerIndex, layer);
2102}
2103
Conor Kennedyda1f9752019-03-01 14:37:12 +00002104void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
2105{
2106 CHECK_LAYERS(graph, 0, layerIndex);
2107 auto inputs = GetInputs(graph, layerIndex);
2108 CHECK_LOCATION();
2109 CHECK_VALID_SIZE(inputs.size(), 2);
2110
2111 auto outputs = GetOutputs(graph, layerIndex);
2112 CHECK_VALID_SIZE(outputs.size(), 1);
2113
2114 auto layerName = GetLayerName(graph, layerIndex);
2115 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
2116
2117 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2118 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2119
2120 RegisterInputSlots(graph, layerIndex, layer);
2121 RegisterOutputSlots(graph, layerIndex, layer);
2122}
2123
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002124void Deserializer::ParseGather(GraphPtr graph, unsigned int layerIndex)
2125{
2126 CHECK_LAYERS(graph, 0, layerIndex);
2127
2128 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2129 CHECK_VALID_SIZE(inputs.size(), 2);
2130
2131 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2132 CHECK_VALID_SIZE(outputs.size(), 1);
2133
2134 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002135 IConnectableLayer* layer = m_Network->AddGatherLayer(layerName.c_str());
2136
2137 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002138 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2139
2140 RegisterInputSlots(graph, layerIndex, layer);
2141 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002142}
2143
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002144void Deserializer::ParseMean(GraphPtr graph, unsigned int layerIndex)
2145{
2146 CHECK_LAYERS(graph, 0, layerIndex);
2147
2148 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2149 CHECK_VALID_SIZE(inputs.size(), 1);
2150
2151 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2152 CHECK_VALID_SIZE(outputs.size(), 1);
2153
2154 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
2155 auto flatBufferAxis = flatBufferDescriptor->axis();
2156 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
2157
2158 armnn::MeanDescriptor descriptor;
2159 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2160 descriptor.m_KeepDims = flatBufferKeepDims;
2161
2162 auto layerName = GetLayerName(graph, layerIndex);
2163 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
2164
2165 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2166 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2167
2168 RegisterInputSlots(graph, layerIndex, layer);
2169 RegisterOutputSlots(graph, layerIndex, layer);
2170}
2171
Jim Flynn18ce3382019-03-08 11:08:30 +00002172void Deserializer::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
2173{
2174 CHECK_LAYERS(graph, 0, layerIndex);
2175
2176 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2177 CHECK_VALID_SIZE(inputs.size(), 1);
2178
2179 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2180
2181 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
2182 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
2183 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
2184 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
2185 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
2186 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
2187
2188 // Check numViews and numDimensions corresponds to the ones already serialized ...
2189 // numViews == flatBufferViewSizes.size();
2190 // foreach: numDimensions == flatBufferViewSizes[x].size();
2191
2192 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
2193 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2194 {
2195 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
2196 {
2197 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
2198 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
2199 }
2200 }
2201
2202 auto layerName = GetLayerName(graph, layerIndex);
2203 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
2204
2205 // I could have as many outputs as views ...
2206 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2207 {
2208 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
2209 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
2210 }
2211
2212 RegisterInputSlots(graph, layerIndex, layer);
2213 RegisterOutputSlots(graph, layerIndex, layer);
2214}
2215
Jim Flynn11af3752019-03-19 17:22:29 +00002216armnn::LstmDescriptor Deserializer::GetLstmDescriptor(Deserializer::LstmDescriptorPtr lstmDescriptor)
2217{
2218 armnn::LstmDescriptor desc;
2219
2220 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
2221 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
2222 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
2223 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
2224 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
2225 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01002226 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00002227
2228 return desc;
2229}
2230
// Deserializes an LSTM layer. Reads the descriptor and all serialized weight
// tensors into LstmInputParams, honouring the CIFG / projection / peephole /
// layer-norm flags, then adds the layer and sets the four output infos.
//
// NOTE: LstmInputParams stores raw pointers; the ConstTensor locals below must
// stay alive until AddLstmLayer is called, which is why they are all declared
// at function scope before the call.
void Deserializer::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters — always present regardless of feature flags.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate parameters — only serialized when CIFG is disabled.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Projection parameters — only present when projection is enabled.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Peephole parameters — only present when peephole connections are enabled.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Layer-normalization weights — only present when layer norm is enabled;
    // the input-gate norm weights are additionally gated on CIFG being off.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (lstmDescriptor.m_LayerNormEnabled)
    {
        if (!lstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    // The LSTM layer exposes four outputs; set each TensorInfo in slot order.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2346
// Deserializes a QuantizedLstm layer. Unlike the float LSTM, all twelve
// weight/bias tensors are mandatory (no CIFG/projection/peephole variants),
// and the layer has two outputs instead of four.
//
// NOTE: QuantizedLstmInputParams stores raw pointers; the ConstTensor locals
// below must outlive the AddQuantizedLstmLayer call.
void Deserializer::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    armnn::QuantizedLstmInputParams lstmInputParams;

    // All parameters are mandatory for the quantized variant.
    armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_InputGateBias = &inputGateBias;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2400
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002401void Deserializer::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
2402{
2403 CHECK_LAYERS(graph, 0, layerIndex);
2404
2405 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2406 CHECK_VALID_SIZE(inputs.size(), 1);
2407
2408 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2409 CHECK_VALID_SIZE(outputs.size(), 1);
2410
2411 const std::string layerName = GetLayerName(graph, layerIndex);
2412 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
2413
2414 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2415 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2416
2417 RegisterInputSlots(graph, layerIndex, layer);
2418 RegisterOutputSlots(graph, layerIndex, layer);
2419}
2420
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01002421void Deserializer::ParseMerge(GraphPtr graph, unsigned int layerIndex)
2422{
2423 CHECK_LAYERS(graph, 0, layerIndex);
2424
2425 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2426 CHECK_VALID_SIZE(inputs.size(), 2);
2427
2428 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2429 CHECK_VALID_SIZE(outputs.size(), 1);
2430
2431 const std::string layerName = GetLayerName(graph, layerIndex);
2432 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
2433
2434 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2435 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2436
2437 RegisterInputSlots(graph, layerIndex, layer);
2438 RegisterOutputSlots(graph, layerIndex, layer);
2439}
2440
Sadik Armaganeff363d2019-04-05 15:25:46 +01002441void Deserializer::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
2442{
2443 CHECK_LAYERS(graph, 0, layerIndex);
2444 auto inputs = GetInputs(graph, layerIndex);
2445 CHECK_LOCATION();
2446 CHECK_VALID_SIZE(inputs.size(), 2);
2447
2448 auto outputs = GetOutputs(graph, layerIndex);
2449 CHECK_VALID_SIZE(outputs.size(), 2);
2450
2451 auto layerName = GetLayerName(graph, layerIndex);
2452 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
2453
2454 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
2455 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
2456
2457 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
2458 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
2459
2460 RegisterInputSlots(graph, layerIndex, layer);
2461 RegisterOutputSlots(graph, layerIndex, layer);
2462}
2463
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01002464void Deserializer::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
2465{
2466 CHECK_LAYERS(graph, 0, layerIndex);
2467 auto inputs = GetInputs(graph, layerIndex);
2468 CHECK_LOCATION();
2469 CHECK_VALID_SIZE(inputs.size(), 2);
2470
2471 auto outputs = GetOutputs(graph, layerIndex);
2472 CHECK_VALID_SIZE(outputs.size(), 1);
2473
2474 auto layerName = GetLayerName(graph, layerIndex);
2475 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
2476
2477 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2478 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2479
2480 RegisterInputSlots(graph, layerIndex, layer);
2481 RegisterOutputSlots(graph, layerIndex, layer);
2482}
2483
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01002484void Deserializer::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
2485{
2486 CHECK_LAYERS(graph, 0, layerIndex);
2487
2488 auto inputs = GetInputs(graph, layerIndex);
2489 CHECK_VALID_SIZE(inputs.size(), 1);
2490
2491 auto outputs = GetOutputs(graph, layerIndex);
2492 CHECK_VALID_SIZE(outputs.size(), 1);
2493
2494 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
2495 auto layerName = GetLayerName(graph, layerIndex);
2496 auto serializerDescriptor = serializerLayer->descriptor();
2497
2498 armnn::TransposeConvolution2dDescriptor descriptor;
2499 descriptor.m_PadLeft = serializerDescriptor->padLeft();
2500 descriptor.m_PadRight = serializerDescriptor->padRight();
2501 descriptor.m_PadTop = serializerDescriptor->padTop();
2502 descriptor.m_PadBottom = serializerDescriptor->padBottom();
2503 descriptor.m_StrideX = serializerDescriptor->strideX();
2504 descriptor.m_StrideY = serializerDescriptor->strideY();;
2505 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
2506 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
2507
2508 // weights & biases
2509 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
2510 armnn::Optional<armnn::ConstTensor> optionalBiases;
2511 if (descriptor.m_BiasEnabled)
2512 {
2513 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
2514 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
2515 }
2516
2517 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
2518 weights,
2519 optionalBiases,
2520 layerName.c_str());
2521
2522 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2523 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2524
2525 RegisterInputSlots(graph, layerIndex, layer);
2526 RegisterOutputSlots(graph, layerIndex, layer);
2527}
2528
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01002529void Deserializer::ParseStack(GraphPtr graph, unsigned int layerIndex)
2530{
2531 CHECK_LAYERS(graph, 0, layerIndex);
2532 auto inputs = GetInputs(graph, layerIndex);
2533
2534 auto outputs = GetOutputs(graph, layerIndex);
2535 CHECK_VALID_SIZE(outputs.size(), 1);
2536
2537 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
2538 unsigned int axis = flatBufferDescriptor->axis();
2539 unsigned int numInputs = flatBufferDescriptor->numInputs();
2540 CHECK_VALID_SIZE(inputs.size(), numInputs);
2541
2542 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
2543 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
2544 flatBufferInputShape->begin() + flatBufferInputShape->size());
2545
2546 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
2547 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
2548
2549 for (unsigned int i=0; i<inputs.size(); ++i)
2550 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01002551 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01002552 if (descriptor.m_InputShape != inputShape)
2553 {
2554 std::stringstream ss;
2555 ss << "Shape of input "
2556 << i
2557 << " "
2558 << inputShape
2559 << " does not equal defined input shape "
2560 << descriptor.m_InputShape
2561 << ": "
2562 << CHECK_LOCATION().AsString();
2563 throw ParseException(ss.str());
2564 }
2565 }
2566
2567 auto layerName = GetLayerName(graph, layerIndex);
2568 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
2569
2570 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2571 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2572
2573 RegisterInputSlots(graph, layerIndex, layer);
2574 RegisterOutputSlots(graph, layerIndex, layer);
2575}
2576
Derek Lamberti0028d1b2019-02-20 13:57:42 +00002577} // namespace armnnDeserializer