blob: 4c2f2f1397fe80997ff498e5913e82bd8dd0836e [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
Finn Williams2605b232020-06-10 15:53:46 +01002// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
Kevin May43a799c2019-02-08 16:31:42 +00003// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
Matthew Benthamff130e22020-01-17 11:47:42 +00008#include <armnn/Descriptors.hpp>
Kevin May43a799c2019-02-08 16:31:42 +00009#include <armnn/Exceptions.hpp>
Matthew Benthamff130e22020-01-17 11:47:42 +000010#include <armnn/TypesUtils.hpp>
11#include <armnn/LstmParams.hpp>
12#include <armnn/QuantizedLstmParams.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000013
Matteo Martincighe011d202019-11-28 11:35:47 +000014#include <armnnUtils/Permute.hpp>
Mike Kellyc9ea45a2020-02-28 18:11:58 +000015#include <armnnUtils/Transpose.hpp>
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +010016#include <armnn/utility/Assert.hpp>
Jan Eilers8eb25602020-03-09 12:13:48 +000017#include <armnn/utility/IgnoreUnused.hpp>
Matthew Sloyan0663d662020-09-14 11:47:26 +010018#include <armnn/utility/NumericCast.hpp>
Matteo Martincighe011d202019-11-28 11:35:47 +000019
Kevin May43a799c2019-02-08 16:31:42 +000020#include <ParserHelper.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000021#include <VerificationHelpers.hpp>
22
Kevin May43a799c2019-02-08 16:31:42 +000023#include <boost/format.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000024
Kevin May43a799c2019-02-08 16:31:42 +000025#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000026#include <algorithm>
27#include <limits>
28#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000029
30using armnn::ParseException;
31using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000032using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000033
Derek Lamberti0028d1b2019-02-20 13:57:42 +000034namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000035{
Kevin May43a799c2019-02-08 16:31:42 +000036
namespace
{

// Sentinel layer index used for the virtual input/output binding layers;
// these do not correspond to a real entry in the serialized layer list.
const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();

Derek Lamberti0028d1b2019-02-20 13:57:42 +000042 void CheckGraph(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000043 unsigned int layersIndex,
44 const CheckLocation& location)
45{
46 if (graph->layers() == nullptr)
47 {
48 throw ParseException(
49 boost::str(
50 boost::format("%1% was called with invalid (null) graph. "
51 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
52 "layers:%2% at %3%") %
53 location.m_Function %
54 layersIndex %
55 location.FileLine()));
56 }
57 else if (layersIndex >= graph->layers()->size())
58 {
59 throw ParseException(
60 boost::str(
61 boost::format("%1% was called with an invalid layers index. "
62 "layers:%2% at %3%") %
63 location.m_Function %
64 layersIndex %
65 location.FileLine()));
66 }
67}
68
Derek Lamberti0028d1b2019-02-20 13:57:42 +000069void CheckLayers(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000070 unsigned int layersIndex,
71 unsigned int layerIndex,
72 const CheckLocation& location)
73{
74 if (graph->layers() == nullptr)
75 {
76 throw ParseException(
77 boost::str(
78 boost::format("%1% was called with invalid (null) graph. "
79 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000080 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000081 location.m_Function %
82 layersIndex %
83 location.FileLine()));
84 }
85 else if (layersIndex >= graph->layers()->size())
86 {
87 throw ParseException(
88 boost::str(
89 boost::format("%1% was called with an invalid layers index. "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000090 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000091 location.m_Function %
92 layersIndex %
93 location.FileLine()));
94 }
95 else if (layerIndex >= graph->layers()[layersIndex].size()
96 && layerIndex != VIRTUAL_LAYER_ID)
97 {
98 throw ParseException(
99 boost::str(
100 boost::format("%1% was called with an invalid layer index. "
101 "layers:%2% layer:%3% at %4%") %
102 location.m_Function %
103 layersIndex %
104 layerIndex %
105 location.FileLine()));
106 }
107}
108
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000109void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000110 const CheckLocation& location)
111{
112 if (rawPtr == nullptr)
113 {
114 throw ParseException(
115 boost::str(
116 boost::format("%1% was called with a null tensor pointer. "
117 "at %2%") %
118 location.m_Function %
119 location.FileLine()));
120
121 }
122}
123
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000124void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000125 const CheckLocation& location)
126{
127 if (rawPtr == nullptr)
128 {
129 throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
130 location.m_Function %
131 location.FileLine()));
132 }
133}
134
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000135void CheckConstTensorSize(const unsigned int constTensorSize,
136 const unsigned int tensorSize,
137 const CheckLocation& location)
138{
139 if (constTensorSize != tensorSize)
140 {
141 throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
142 location.m_Function %
143 location.FileLine()));
144 }
145}
146
// Convenience wrappers around the Check* helpers above; each captures the
// call site via CHECK_LOCATION() so ParseException messages can report where
// the validation was triggered.
#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
} // anonymous namespace
162
Saoirse Stewart263829c2019-02-19 15:54:14 +0000163bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
164{
165 const unsigned int actualSize = actual.GetNumDimensions();
166 if (actualSize != expected.size())
167 {
168 return false;
169 }
170
171 for (unsigned int i = 0u; i < actualSize; i++)
172 {
173 if (actual[i] != static_cast<unsigned int>(expected[i]))
174 {
175 return false;
176 }
177 }
178
179 return true;
180}
181
// Builds the layer-type -> parser-member-function dispatch table. Every slot
// defaults to ParseUnsupportedLayer so an unregistered layer type produces a
// clear error rather than a crash; supported types are then overwritten below.
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer]                    = &Deserializer::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer]             = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer]               = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer]              = &Deserializer::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer]         = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer]     = &Deserializer::ParseBatchNormalization;
    m_ParserFunctions[Layer_ComparisonLayer]             = &Deserializer::ParseComparison;
    m_ParserFunctions[Layer_ConcatLayer]                 = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer]               = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer]          = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthToSpaceLayer]           = &Deserializer::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer]             = &Deserializer::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer]   = &Deserializer::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer]               = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_ElementwiseUnaryLayer]       = &Deserializer::ParseElementwiseUnary;
    m_ParserFunctions[Layer_EqualLayer]                  = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer]         = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_FillLayer]                   = &Deserializer::ParseFill;
    m_ParserFunctions[Layer_FloorLayer]                  = &Deserializer::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer]                 = &Deserializer::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer]                = &Deserializer::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer]  = &Deserializer::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer]        = &Deserializer::ParseL2Normalization;
    m_ParserFunctions[Layer_LogSoftmaxLayer]             = &Deserializer::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer]                   = &Deserializer::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer]                = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer]                   = &Deserializer::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer]                = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer]                  = &Deserializer::ParseMerge;
    // MergerLayer is the deprecated name for ConcatLayer; both map to ParseConcat.
    m_ParserFunctions[Layer_MergerLayer]                 = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer]         = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer]          = &Deserializer::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer]                    = &Deserializer::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer]                = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer]              = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_PreluLayer]                  = &Deserializer::ParsePrelu;
    m_ParserFunctions[Layer_QLstmLayer]                  = &Deserializer::ParseQLstm;
    m_ParserFunctions[Layer_QuantizeLayer]               = &Deserializer::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer]          = &Deserializer::ParseQuantizedLstm;
    m_ParserFunctions[Layer_RankLayer]                   = &Deserializer::ParseRank;
    m_ParserFunctions[Layer_ReshapeLayer]                = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer]         = &Deserializer::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer]                 = &Deserializer::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer]                  = &Deserializer::ParseRsqrt;
    m_ParserFunctions[Layer_SliceLayer]                  = &Deserializer::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer]                = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer]         = &Deserializer::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer]           = &Deserializer::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer]               = &Deserializer::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer]                  = &Deserializer::ParseStack;
    m_ParserFunctions[Layer_StandInLayer]                = &Deserializer::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer]           = &Deserializer::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer]            = &Deserializer::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer]                 = &Deserializer::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &Deserializer::ParseTransposeConvolution2d;
    m_ParserFunctions[Layer_TransposeLayer]              = &Deserializer::ParseTranspose;
}
246
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000247Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000248{
249 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
250
251 switch(layerType)
252 {
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +0100253 case Layer::Layer_AbsLayer:
254 return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
Mike Kellyaf484012019-02-20 16:53:11 +0000255 case Layer::Layer_ActivationLayer:
256 return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000257 case Layer::Layer_AdditionLayer:
258 return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100259 case Layer::Layer_ArgMinMaxLayer:
260 return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000261 case Layer::Layer_BatchToSpaceNdLayer:
262 return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
ruoyan018e7fa232019-02-28 15:09:07 +0000263 case Layer::Layer_BatchNormalizationLayer:
264 return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +0100265 case Layer::Layer_ComparisonLayer:
266 return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
Jim Flynne242f2d2019-05-22 14:24:13 +0100267 case Layer::Layer_ConcatLayer:
268 return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
Conor Kennedy76277882019-02-26 08:29:54 +0000269 case Layer::Layer_ConstantLayer:
270 return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
Mike Kellya0766c32019-02-19 17:22:07 +0000271 case Layer::Layer_Convolution2dLayer:
272 return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +0100273 case Layer::Layer_DepthToSpaceLayer:
274 return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000275 case Layer::Layer_DepthwiseConvolution2dLayer:
276 return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000277 case Layer::Layer_DequantizeLayer:
278 return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +0000279 case Layer::Layer_DetectionPostProcessLayer:
280 return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
Éanna Ó Catháin58885892019-02-27 16:16:39 +0000281 case Layer::Layer_DivisionLayer:
282 return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +0000283 case Layer::Layer_EqualLayer:
284 return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +0000285 case Layer::Layer_FullyConnectedLayer:
286 return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
Keith Davis300ad562020-06-04 16:34:23 +0100287 case Layer::Layer_FillLayer:
288 return graphPtr->layers()->Get(layerIndex)->layer_as_FillLayer()->base();
Finn Williamsdd2ba7e2019-03-01 11:51:52 +0000289 case Layer::Layer_FloorLayer:
290 return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000291 case Layer::Layer_GatherLayer:
292 return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
Conor Kennedy79ffdf52019-03-01 14:24:54 +0000293 case Layer::Layer_GreaterLayer:
294 return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000295 case Layer::Layer_InputLayer:
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +0000296 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
Aron Virginas-Tar781ced92019-10-03 11:15:39 +0100297 case Layer::Layer_InstanceNormalizationLayer:
298 return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +0000299 case Layer::Layer_L2NormalizationLayer:
300 return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
Sadik Armagan26257852019-10-14 13:00:47 +0100301 case Layer::Layer_LogSoftmaxLayer:
302 return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
Jim Flynn11af3752019-03-19 17:22:29 +0000303 case Layer::Layer_LstmLayer:
304 return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
Sadik Armaganac97c8c2019-03-04 17:44:21 +0000305 case Layer::Layer_MeanLayer:
306 return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +0000307 case Layer::Layer_MinimumLayer:
308 return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
Aron Virginas-Tar377351e2019-02-27 14:42:31 +0000309 case Layer::Layer_MaximumLayer:
310 return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100311 case Layer::Layer_MergeLayer:
312 return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
Jim Flynnac25a1b2019-02-28 10:40:49 +0000313 case Layer::Layer_MergerLayer:
314 return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
Sadik Armagan5f450272019-02-12 14:31:45 +0000315 case Layer::Layer_MultiplicationLayer:
316 return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
Nina Drozd57728782019-02-27 10:53:27 +0000317 case Layer::Layer_NormalizationLayer:
318 return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000319 case Layer::Layer_OutputLayer:
320 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +0000321 case Layer::Layer_PadLayer:
322 return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +0000323 case Layer::Layer_PermuteLayer:
324 return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
Saoirse Stewart3166c3e2019-02-18 15:24:53 +0000325 case Layer::Layer_Pooling2dLayer:
326 return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
Ellen Norris-Thompson51982472019-06-19 11:46:21 +0100327 case Layer::Layer_PreluLayer:
328 return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
James Conroy8d333182020-05-13 10:27:58 +0100329 case Layer::Layer_QLstmLayer:
330 return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
Derek Lamberti87acb272019-03-27 16:51:31 +0000331 case Layer::Layer_QuantizeLayer:
332 return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
Jan Eilers5b01a892019-07-23 09:47:43 +0100333 case Layer::Layer_QuantizedLstmLayer:
334 return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
Finn Williams2605b232020-06-10 15:53:46 +0100335 case Layer::Layer_RankLayer:
336 return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
Saoirse Stewart263829c2019-02-19 15:54:14 +0000337 case Layer::Layer_ReshapeLayer:
338 return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +0000339 case Layer::Layer_ResizeBilinearLayer:
340 return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100341 case Layer::Layer_ResizeLayer:
342 return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
Sadik Armagan8b42a382019-03-01 14:24:49 +0000343 case Layer::Layer_RsqrtLayer:
344 return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +0100345 case Layer::Layer_SliceLayer:
346 return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +0000347 case Layer::Layer_SoftmaxLayer:
348 return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
Nattapat Chaimanowong45286992019-02-26 15:53:02 +0000349 case Layer::Layer_SpaceToBatchNdLayer:
350 return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
Aron Virginas-Taraa067142019-06-11 16:01:44 +0100351 case Layer::Layer_SpaceToDepthLayer:
352 return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
Jim Flynn18ce3382019-03-08 11:08:30 +0000353 case Layer::Layer_SplitterLayer:
354 return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +0100355 case Layer::Layer_StackLayer:
356 return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
Aron Virginas-Tar85121a22019-10-23 10:41:35 +0100357 case Layer::Layer_StandInLayer:
358 return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +0000359 case Layer::Layer_StridedSliceLayer:
360 return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
Conor Kennedyda1f9752019-03-01 14:37:12 +0000361 case Layer::Layer_SubtractionLayer:
362 return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
Sadik Armaganeff363d2019-04-05 15:25:46 +0100363 case Layer::Layer_SwitchLayer:
364 return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
Aron Virginas-Tarcb549302019-06-21 13:53:38 +0100365 case Layer::Layer_TransposeConvolution2dLayer:
366 return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
Mike Kellyc9ea45a2020-02-28 18:11:58 +0000367 case Layer::Layer_TransposeLayer:
368 return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000369 case Layer::Layer_NONE:
370 default:
371 throw ParseException(boost::str(
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +0100372 boost::format("Layer type %1% not recognized") %
373 layerType));
Kevin May43a799c2019-02-08 16:31:42 +0000374 }
375}
376
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000377std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
378{
379 auto layer = GetBaseLayer(graph, index);
380 assert(layer);
381 return layer->layerName()->str();
382}
383
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000384int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000385{
386 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
387
388 if (layerType == Layer::Layer_InputLayer)
389 {
390 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
391 }
392 else if ( layerType == Layer::Layer_OutputLayer )
393 {
394 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
395 }
396 return 0;
397}
398
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000399armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000400{
401 switch (dataLayout)
402 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000403 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000404 return armnn::DataLayout::NHWC;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000405 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000406 default:
407 return armnn::DataLayout::NCHW;
408 }
409}
410
Mike Kellyaf484012019-02-20 16:53:11 +0000411armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
412{
413 switch (function)
414 {
415 case armnnSerializer::ActivationFunction_Sigmoid:
416 return armnn::ActivationFunction::Sigmoid;
417 case armnnSerializer::ActivationFunction_TanH:
418 return armnn::ActivationFunction::TanH;
419 case armnnSerializer::ActivationFunction_Linear:
420 return armnn::ActivationFunction::Linear;
421 case armnnSerializer::ActivationFunction_ReLu:
422 return armnn::ActivationFunction::ReLu;
423 case armnnSerializer::ActivationFunction_BoundedReLu:
424 return armnn::ActivationFunction::BoundedReLu;
425 case armnnSerializer::ActivationFunction_LeakyReLu:
426 return armnn::ActivationFunction::LeakyReLu;
427 case armnnSerializer::ActivationFunction_Abs:
428 return armnn::ActivationFunction::Abs;
429 case armnnSerializer::ActivationFunction_Sqrt:
430 return armnn::ActivationFunction::Sqrt;
431 case armnnSerializer::ActivationFunction_Square:
432 return armnn::ActivationFunction::Square;
David Monahan3b3c3812020-02-25 09:03:29 +0000433 case armnnSerializer::ActivationFunction_Elu:
434 return armnn::ActivationFunction::Elu;
Colm Donelan03fbeaf2020-02-26 15:39:23 +0000435 case armnnSerializer::ActivationFunction_HardSwish:
436 return armnn::ActivationFunction::HardSwish;
Mike Kellyaf484012019-02-20 16:53:11 +0000437 default:
438 return armnn::ActivationFunction::Sigmoid;
439 }
440}
441
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100442armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
443{
444 switch (function)
445 {
446 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
447 return armnn::ArgMinMaxFunction::Max;
448 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
449 default:
450 return armnn::ArgMinMaxFunction::Min;
451 }
452}
453
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +0100454armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
455{
456 switch (operation)
457 {
458 case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
459 return armnn::ComparisonOperation::Equal;
460 case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
461 return armnn::ComparisonOperation::Greater;
462 case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
463 return armnn::ComparisonOperation::GreaterOrEqual;
464 case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
465 return armnn::ComparisonOperation::Less;
466 case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
467 return armnn::ComparisonOperation::LessOrEqual;
468 case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
469 default:
470 return armnn::ComparisonOperation::NotEqual;
471 }
472}
473
josh minor4a3c6102020-01-06 16:40:46 -0600474armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
475{
476 switch (operation)
477 {
478 case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
479 return armnn::UnaryOperation::Abs;
480 case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
481 return armnn::UnaryOperation::Rsqrt;
482 case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
483 return armnn::UnaryOperation::Sqrt;
484 case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
485 return armnn::UnaryOperation::Exp;
486 case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
487 return armnn::UnaryOperation::Neg;
488 default:
489 throw armnn::InvalidArgumentException("Unary operation unknown");
490 }
491}
492
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100493armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
494{
495 switch (method)
496 {
497 case armnnSerializer::ResizeMethod_NearestNeighbor:
498 return armnn::ResizeMethod::NearestNeighbor;
499 case armnnSerializer::ResizeMethod_Bilinear:
Aron Virginas-Tar3c9b2702019-10-31 13:45:16 +0000500 return armnn::ResizeMethod::Bilinear;
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100501 default:
502 return armnn::ResizeMethod::NearestNeighbor;
503 }
504}
505
// Builds an armnn::TensorInfo from a serialized TensorInfo table:
// maps the data-type enum (including deprecated aliases), handles scalar
// tensors, and picks per-axis quantization when quantizationScales is present,
// otherwise per-tensor scale/offset. Throws ParseException for unsupported
// data types.
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QAsymmS8:
            type = armnn::DataType::QAsymmS8;
            break;
        case DataType_QSymmS8:
            type = armnn::DataType::QSymmS8;
            break;
        // QuantisedAsymm8 is the deprecated alias for QAsymmU8.
        case DataType_QuantisedAsymm8:
        case DataType_QAsymmU8:
            type = armnn::DataType::QAsymmU8;
            break;
        // QuantisedSymm16 is the deprecated alias for QSymmS16.
        case DataType_QSymmS16:
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QSymmS16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }

    // Scalar tensors have no dimensions vector; return early with a scalar
    // shape and the per-tensor quantization parameters.
    if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::Scalar))
    {
        float quantizationScale = tensorPtr->quantizationScale();
        int32_t quantizationOffset = tensorPtr->quantizationOffset();

        return armnn::TensorInfo(armnn::TensorShape{armnn::Dimensionality::Scalar},
                                 type,
                                 quantizationScale,
                                 quantizationOffset);
    }

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // A present quantizationScales vector selects per-axis quantization and
    // takes precedence over the single scale/offset pair below.
    auto quantizationScales = tensorPtr->quantizationScales();

    if (quantizationScales)
    {
        unsigned int quantizationScalesSize = quantizationScales->size();
        std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
        unsigned int quantizationDim = tensorPtr->quantizationDim();
        armnn::TensorInfo result(size,
                                 outputDims.data(),
                                 type,
                                 scales,
                                 quantizationDim);
        return result;
    }

    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}
592
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000593armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
Mike Kellya0766c32019-02-19 17:22:07 +0000594{
595 CHECK_CONST_TENSOR_PTR(constTensorPtr);
596 armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());
597
598 switch (constTensorPtr->data_type())
599 {
600 case ConstTensorData_ByteData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000601 {
602 auto byteData = constTensorPtr->data_as_ByteData()->data();
603 CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
604 return armnn::ConstTensor(tensorInfo, byteData->data());
605 }
Mike Kellya0766c32019-02-19 17:22:07 +0000606 case ConstTensorData_ShortData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000607 {
608 auto shortData = constTensorPtr->data_as_ShortData()->data();
609 CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
610 return armnn::ConstTensor(tensorInfo, shortData->data());
611 }
Mike Kellya0766c32019-02-19 17:22:07 +0000612 case ConstTensorData_IntData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000613 {
614 auto intData = constTensorPtr->data_as_IntData()->data();
615 CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
616 return armnn::ConstTensor(tensorInfo, intData->data());
617 }
Mike Kellya0766c32019-02-19 17:22:07 +0000618 case ConstTensorData_LongData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000619 {
620 auto longData = constTensorPtr->data_as_LongData()->data();
621 CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
622 return armnn::ConstTensor(tensorInfo, longData->data());
623 }
Mike Kellya0766c32019-02-19 17:22:07 +0000624 default:
625 {
626 CheckLocation location = CHECK_LOCATION();
627 throw ParseException(
628 boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
629 constTensorPtr->data_type() %
630 EnumNameConstTensorData(constTensorPtr->data_type()) %
631 location.AsString()));
632 }
633 }
634}
635
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000636Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000637 unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000638{
639 CHECK_LAYERS(graphPtr, 0, layerIndex);
640 auto layer = GetBaseLayer(graphPtr, layerIndex);
641 const auto& numInputs = layer->inputSlots()->size();
642
643 TensorRawPtrVector result(numInputs);
644
645 for (unsigned int i=0; i<numInputs; ++i)
646 {
647 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
648 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
649 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
650 }
651 return result;
652}
653
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000654Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000655 unsigned int layerIndex)
656{
657 CHECK_LAYERS(graphPtr, 0, layerIndex);
658 auto layer = GetBaseLayer(graphPtr, layerIndex);
659 const auto& numOutputs = layer->outputSlots()->size();
660
661 TensorRawPtrVector result(numOutputs);
662
663 for (unsigned int i=0; i<numOutputs; ++i)
664 {
665 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
666 }
667 return result;
668}
669
Derek Lamberti8ddae332019-02-21 16:29:43 +0000670void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000671{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000672 CHECK_LAYERS(graph, 0, layerIndex);
673 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Kevin May43a799c2019-02-08 16:31:42 +0000674 throw ParseException(
675 boost::str(
676 boost::format("Layer not supported. "
677 "layerIndex: %1% "
678 "layerName: %2% / %3%") %
679 layerIndex %
680 layerName %
681 CHECK_LOCATION().AsString()));
682}
683
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000684void Deserializer::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000685{
686 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000687 m_InputBindings.clear();
688 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000689}
690
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000691IDeserializer* IDeserializer::CreateRaw()
Kevin May43a799c2019-02-08 16:31:42 +0000692{
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000693 return new Deserializer();
Kevin May43a799c2019-02-08 16:31:42 +0000694}
695
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000696IDeserializerPtr IDeserializer::Create()
Kevin May43a799c2019-02-08 16:31:42 +0000697{
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000698 return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
Kevin May43a799c2019-02-08 16:31:42 +0000699}
700
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000701void IDeserializer::Destroy(IDeserializer* parser)
Kevin May43a799c2019-02-08 16:31:42 +0000702{
703 delete parser;
704}
705
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000706INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000707{
708 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000709 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
710 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000711}
712
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000713armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000714{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000715 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000716 std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
717 GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
718 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000719}
720
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000721Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000722{
723 if (binaryContent == nullptr)
724 {
725 throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
726 CHECK_LOCATION().AsString()));
727 }
728 flatbuffers::Verifier verifier(binaryContent, len);
729 if (verifier.VerifyBuffer<SerializedGraph>() == false)
730 {
731 throw ParseException(
732 boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
733 "flatbuffers format. size:%1% %2%") %
734 len %
735 CHECK_LOCATION().AsString()));
736 }
737 return GetSerializedGraph(binaryContent);
738}
739
Derek Lamberti8ddae332019-02-21 16:29:43 +0000740INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000741{
742 m_Network = INetwork::Create();
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100743 ARMNN_ASSERT(graph != nullptr);
Kevin May43a799c2019-02-08 16:31:42 +0000744 unsigned int layerIndex = 0;
Derek Lamberti8ddae332019-02-21 16:29:43 +0000745 for (AnyLayer const* layer : *graph->layers())
Kevin May43a799c2019-02-08 16:31:42 +0000746 {
747 if (layer->layer_type() != Layer_InputLayer &&
748 layer->layer_type() != Layer_OutputLayer)
749 {
750 // lookup and call the parser function
751 auto& parserFunction = m_ParserFunctions[layer->layer_type()];
Derek Lamberti8ddae332019-02-21 16:29:43 +0000752 (this->*parserFunction)(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000753 }
754 ++layerIndex;
755 }
756
Derek Lamberti8ddae332019-02-21 16:29:43 +0000757 SetupInputLayers(graph);
758 SetupOutputLayers(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000759
760 // establish the connections from the layer outputs to the inputs of the subsequent layers
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100761 for (auto&& graphIt : m_GraphConnections)
Kevin May43a799c2019-02-08 16:31:42 +0000762 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100763 Connections& connections = graphIt.second;
764 for (auto&& outputIt : connections.outputSlots)
Kevin May43a799c2019-02-08 16:31:42 +0000765 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100766 const unsigned int outputSlotIndex = outputIt.first;
767 IOutputSlot* outputSlot = outputIt.second;
768 if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
Kevin May43a799c2019-02-08 16:31:42 +0000769 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100770 for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000771 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100772 outputSlot->Connect(*inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000773 }
Kevin May43a799c2019-02-08 16:31:42 +0000774 }
775 }
776 }
777
778 return std::move(m_Network);
779}
780
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000781BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000782 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000783{
Jan Eilers8eb25602020-03-09 12:13:48 +0000784 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000785 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000786 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000787 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000788 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000789 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000790 }
791 }
792 throw ParseException(
793 boost::str(
794 boost::format("No input binding found for layer:%1% / %2%") %
795 name %
796 CHECK_LOCATION().AsString()));
797}
798
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000799BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000800 const std::string& name) const
801{
Jan Eilers8eb25602020-03-09 12:13:48 +0000802 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000803 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000804 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000805 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000806 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000807 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000808 }
809 }
810 throw ParseException(
811 boost::str(
812 boost::format("No output binding found for layer:%1% / %2%") %
813 name %
814 CHECK_LOCATION().AsString()));
815}
816
Tee Jungaa920c52019-11-05 10:48:25 +0000817unsigned int Deserializer::GetInputLayerInVector(GraphPtr graph, int targetId)
818{
819 for (unsigned int i = 0; i < graph->layers()->size(); i++)
820 {
821 auto layer = graph->layers()->Get(i);
822 if (layer->layer_type() == Layer::Layer_InputLayer)
823 {
824 auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
825 if (layerBindingId == targetId)
826 {
827 return i;
828 }
829 }
830 }
831 throw ParseException("Input layer with given layerBindingId not found");
832}
833
834unsigned int Deserializer::GetOutputLayerInVector(GraphPtr graph, int targetId)
835{
836 for (unsigned int i = 0; i < graph->layers()->size(); i++)
837 {
838 auto layer = graph->layers()->Get(i);
839 if (layer->layer_type() == Layer::Layer_OutputLayer)
840 {
841 auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
842 if (layerBindingId == targetId)
843 {
844 return i;
845 }
846 }
847 }
848 throw ParseException("Output layer with given layerBindingId not found");
849}
850
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100851unsigned int Deserializer::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
852{
853 for (unsigned int i = 0; i < graph->layers()->size(); i++)
854 {
855 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
856 if (layer->index() == targetIndex)
857 {
858 return i;
859 }
860 }
861 throw ParseException("Layer with given index not found");
862}
863
Tee Jungaa920c52019-11-05 10:48:25 +0000864Deserializer::FeatureVersions Deserializer::GetFeatureVersions(GraphPtr graph)
865{
866 Deserializer::FeatureVersions versions;
867
868 if (graph->featureVersions())
869 {
870 versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
871 }
872
873 return versions;
874}
875
Derek Lamberti8ddae332019-02-21 16:29:43 +0000876void Deserializer::SetupInputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000877{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000878 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100879 const unsigned int numInputs = graph->inputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000880 m_InputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100881 m_InputBindings.reserve(numInputs);
882
883 for (unsigned int i = 0; i < numInputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000884 {
Tee Jungaa920c52019-11-05 10:48:25 +0000885 unsigned int inputLayerIndex = 0xFFFFFFFF;
886 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
887 {
Matthew Sloyan0663d662020-09-14 11:47:26 +0100888 const unsigned int inputId = armnn::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
Tee Jungaa920c52019-11-05 10:48:25 +0000889 inputLayerIndex = GetLayerIndexInVector(graph, inputId);
890 }
891 else
892 {
893 const int inputId = graph->inputIds()->Get(i);
894 inputLayerIndex = GetInputLayerInVector(graph, inputId);
895 }
896
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100897 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000898
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100899 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
900 LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100901 ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Kevin May43a799c2019-02-08 16:31:42 +0000902
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100903 IConnectableLayer* inputLayer =
904 m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000905
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100906 const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
907 inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
908 RegisterOutputSlots(graph, inputLayerIndex, inputLayer);
909
Derek Lamberti8ddae332019-02-21 16:29:43 +0000910 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100911 m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000912 }
913}
914
Derek Lamberti8ddae332019-02-21 16:29:43 +0000915void Deserializer::SetupOutputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000916{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000917 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100918 const unsigned int numOutputs = graph->outputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000919 m_OutputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100920 m_OutputBindings.reserve(numOutputs);
921
922 for (unsigned int i = 0; i < numOutputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000923 {
Tee Jungaa920c52019-11-05 10:48:25 +0000924 unsigned int outputLayerIndex = 0xFFFFFFFF;
925 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
926 {
Matthew Sloyan0663d662020-09-14 11:47:26 +0100927 const unsigned int outputId = armnn::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
Tee Jungaa920c52019-11-05 10:48:25 +0000928 outputLayerIndex = GetLayerIndexInVector(graph, outputId);
929 }
930 else
931 {
932 const int outputId = graph->outputIds()->Get(i);
933 outputLayerIndex = GetOutputLayerInVector(graph, outputId);
934 }
935
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100936 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000937
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100938 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
939 LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
Josh Minor4c10dfc2020-06-17 13:56:20 -0500940 ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Output has no name.");
Derek Lamberti8ddae332019-02-21 16:29:43 +0000941
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100942 IConnectableLayer* outputLayer =
943 m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000944
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100945 RegisterInputSlots(graph, outputLayerIndex, outputLayer);
946
947 unsigned int sourceLayerIndex =
948 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
949 LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
Josh Minor4c10dfc2020-06-17 13:56:20 -0500950 const armnn::TensorInfo& tensorInfo = ToTensorInfo(sourceBaseLayer->outputSlots()->Get(i)->tensorInfo());
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100951
Derek Lamberti8ddae332019-02-21 16:29:43 +0000952 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100953 m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000954 }
955}
956
Derek Lamberti8ddae332019-02-21 16:29:43 +0000957void Deserializer::RegisterOutputSlots(GraphPtr graph,
958 uint32_t layerIndex,
959 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000960{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000961 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100962 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100963 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
964 if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
Kevin May43a799c2019-02-08 16:31:42 +0000965 {
966 throw ParseException(
967 boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
968 " for layer index: %3% %4%") %
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100969 baseLayer->outputSlots()->size() %
Kevin May43a799c2019-02-08 16:31:42 +0000970 layer->GetNumOutputSlots() %
971 layerIndex %
972 CHECK_LOCATION().AsString()));
973 }
974
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100975 for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +0000976 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100977 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
978 armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
979 // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
980 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
Kevin May43a799c2019-02-08 16:31:42 +0000981 }
982}
983
Derek Lamberti8ddae332019-02-21 16:29:43 +0000984void Deserializer::RegisterInputSlots(GraphPtr graph,
985 uint32_t layerIndex,
986 armnn::IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000987{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000988 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100989 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100990 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
991 if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
Kevin May43a799c2019-02-08 16:31:42 +0000992 {
993 throw ParseException(
994 boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
995 " for layer index:%3% %4%") %
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100996 baseLayer->inputSlots()->size() %
Kevin May43a799c2019-02-08 16:31:42 +0000997 layer->GetNumInputSlots() %
998 layerIndex %
999 CHECK_LOCATION().AsString()));
1000 }
1001
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001002 for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +00001003 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001004 auto fbInputSlot = baseLayer->inputSlots()->Get(i);
1005 auto fbConnection = fbInputSlot->connection();
1006 armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
1007 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
Kevin May43a799c2019-02-08 16:31:42 +00001008 }
1009}
1010
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001011void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
1012 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001013 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +00001014{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001015 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +00001016 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001017 m_GraphConnections[sourceLayerIndex] = Connections();
1018 }
1019
1020 Connections& connections = m_GraphConnections[sourceLayerIndex];
1021 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
1022 {
1023 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +00001024 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001025 else
1026 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001027 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001028 }
1029}
Kevin May43a799c2019-02-08 16:31:42 +00001030
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001031void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001032 uint32_t outputSlotIndex,
1033 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001034{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001035 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1036 {
1037 m_GraphConnections[sourceLayerIndex] = Connections();
1038 }
1039
1040 Connections& connections = m_GraphConnections[sourceLayerIndex];
1041 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1042 {
1043 throw ParseException("Same output slot index processed twice");
1044 }
1045
1046 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +00001047}
1048
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001049void Deserializer::ParseAbs(armnnDeserializer::Deserializer::GraphPtr graph, unsigned int layerIndex)
1050{
1051 CHECK_LAYERS(graph, 0, layerIndex);
1052 auto inputs = GetInputs(graph, layerIndex);
1053 CHECK_LOCATION();
1054 CHECK_VALID_SIZE(inputs.size(), 1);
1055
1056 auto outputs = GetOutputs(graph, layerIndex);
1057 CHECK_VALID_SIZE(outputs.size(), 1);
1058
1059 auto layerName = GetLayerName(graph, layerIndex);
1060
josh minor4a3c6102020-01-06 16:40:46 -06001061 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
1062 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001063 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1064 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1065
1066 RegisterInputSlots(graph, layerIndex, layer);
1067 RegisterOutputSlots(graph, layerIndex, layer);
1068}
1069
Derek Lamberti8ddae332019-02-21 16:29:43 +00001070void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +00001071{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001072 CHECK_LAYERS(graph, 0, layerIndex);
1073 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001074 CHECK_LOCATION();
1075 CHECK_VALID_SIZE(inputs.size(), 1);
1076
Derek Lamberti8ddae332019-02-21 16:29:43 +00001077 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001078 CHECK_VALID_SIZE(outputs.size(), 1);
1079
Derek Lamberti8ddae332019-02-21 16:29:43 +00001080 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001081 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001082 auto serializerDescriptor = serializerLayer->descriptor();
1083
1084 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001085 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +00001086 descriptor.m_A = serializerDescriptor->a();
1087 descriptor.m_B = serializerDescriptor->b();
1088
1089 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
1090 layerName.c_str());
1091 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1092 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1093
Derek Lamberti8ddae332019-02-21 16:29:43 +00001094 RegisterInputSlots(graph, layerIndex, layer);
1095 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +00001096}
1097
Derek Lamberti8ddae332019-02-21 16:29:43 +00001098void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001099{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001100 CHECK_LAYERS(graph, 0, layerIndex);
1101 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001102 CHECK_LOCATION();
1103 CHECK_VALID_SIZE(inputs.size(), 2);
1104
Derek Lamberti8ddae332019-02-21 16:29:43 +00001105 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001106 CHECK_VALID_SIZE(outputs.size(), 1);
1107
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001108 auto layerName = GetLayerName(graph, layerIndex);
1109 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001110
1111 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1112 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1113
Derek Lamberti8ddae332019-02-21 16:29:43 +00001114 RegisterInputSlots(graph, layerIndex, layer);
1115 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001116}
1117
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001118void Deserializer::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
1119{
1120 CHECK_LAYERS(graph, 0, layerIndex);
1121 auto inputs = GetInputs(graph, layerIndex);
1122 CHECK_LOCATION();
1123 CHECK_VALID_SIZE(inputs.size(), 1);
1124
1125 auto outputs = GetOutputs(graph, layerIndex);
1126 CHECK_VALID_SIZE(outputs.size(), 1);
1127
1128 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1129 auto serializerDescriptor = serializerLayer->descriptor();
1130
1131 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001132 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001133 descriptor.m_Axis = serializerDescriptor->axis();
1134 auto layerName = GetLayerName(graph, layerIndex);
1135 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1136
1137 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1138 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1139
1140 RegisterInputSlots(graph, layerIndex, layer);
1141 RegisterOutputSlots(graph, layerIndex, layer);
1142}
1143
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001144void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
1145{
1146 CHECK_LAYERS(graph, 0, layerIndex);
1147
1148 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1149 CHECK_VALID_SIZE(inputs.size(), 1);
1150
1151 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1152 CHECK_VALID_SIZE(outputs.size(), 1);
1153
1154 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1155 auto flatBufferCrops = flatBufferDescriptor->crops();
1156 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1157
1158 if (flatBufferCrops->Length() % 2 != 0)
1159 {
1160 throw ParseException(boost::str(
1161 boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1162 }
1163
1164 std::vector<std::pair<unsigned int, unsigned int>> crops;
1165 crops.reserve(flatBufferCrops->Length() / 2);
1166 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
1167 {
1168 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1169 }
1170
1171 armnn::BatchToSpaceNdDescriptor descriptor;
1172 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1173 descriptor.m_BlockShape =
1174 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1175 descriptor.m_Crops = crops;
1176
1177 auto layerName = GetLayerName(graph, layerIndex);
1178 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1179
1180 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1181 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1182
1183 RegisterInputSlots(graph, layerIndex, layer);
1184 RegisterOutputSlots(graph, layerIndex, layer);
1185}
1186
ruoyan018e7fa232019-02-28 15:09:07 +00001187void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
1188{
1189 CHECK_LAYERS(graph, 0, layerIndex);
1190
1191 auto inputs = GetInputs(graph, layerIndex);
1192 CHECK_VALID_SIZE(inputs.size(), 1);
1193
1194 auto outputs = GetOutputs(graph, layerIndex);
1195 CHECK_VALID_SIZE(outputs.size(), 1);
1196 auto outputInfo = ToTensorInfo(outputs[0]);
1197
ruoyan015c7ab052019-03-04 14:48:02 +00001198 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001199
1200 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1201 auto serializerDescriptor = serializerLayer->descriptor();
1202
1203 armnn::BatchNormalizationDescriptor descriptor;
1204 descriptor.m_Eps = serializerDescriptor->eps();
1205 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1206
1207 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1208 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1209 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1210 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1211
1212 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1213 mean,
1214 variance,
1215 beta,
1216 gamma,
1217 layerName.c_str());
1218 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1219
1220 RegisterInputSlots(graph, layerIndex, layer);
1221 RegisterOutputSlots(graph, layerIndex, layer);
1222}
1223
Conor Kennedy76277882019-02-26 08:29:54 +00001224void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
1225{
1226 CHECK_LAYERS(graph, 0, layerIndex);
1227 CHECK_LOCATION();
1228
1229 auto outputs = GetOutputs(graph, layerIndex);
1230 CHECK_VALID_SIZE(outputs.size(), 1);
1231
1232 auto layerName = GetLayerName(graph, layerIndex);
1233
1234 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
1235 auto serializerInput = serializerLayer->input();
1236
1237 armnn::ConstTensor input = ToConstTensor(serializerInput);
1238
1239 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
1240
1241 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1242 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1243
1244 RegisterOutputSlots(graph, layerIndex, layer);
1245}
1246
Derek Lamberti8ddae332019-02-21 16:29:43 +00001247void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001248{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001249 CHECK_LAYERS(graph, 0, layerIndex);
1250 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001251 CHECK_LOCATION();
1252 CHECK_VALID_SIZE(inputs.size(), 1);
1253
Derek Lamberti8ddae332019-02-21 16:29:43 +00001254 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001255 CHECK_VALID_SIZE(outputs.size(), 1);
1256
Derek Lamberti8ddae332019-02-21 16:29:43 +00001257 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001258 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001259 auto serializerDescriptor = serializerLayer->descriptor();
1260
1261 armnn::Convolution2dDescriptor descriptor;
1262 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1263 descriptor.m_PadRight = serializerDescriptor->padRight();
1264 descriptor.m_PadTop = serializerDescriptor->padTop();
1265 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1266 descriptor.m_StrideX = serializerDescriptor->strideX();
1267 descriptor.m_StrideY = serializerDescriptor->strideY();;
Matthew Benthamacad04e2019-05-13 10:02:45 +01001268 descriptor.m_DilationX = serializerDescriptor->dilationX();
1269 descriptor.m_DilationY = serializerDescriptor->dilationY();;
Mike Kellya0766c32019-02-19 17:22:07 +00001270 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1271 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1272
1273 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1274 armnn::ConstTensor biases;
1275
Matteo Martincighfc598e12019-05-14 10:36:13 +01001276 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001277 if (descriptor.m_BiasEnabled)
1278 {
1279 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001280 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001281 }
1282 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1283 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001284 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001285 layerName.c_str());
1286 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1287 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1288
Derek Lamberti8ddae332019-02-21 16:29:43 +00001289 RegisterInputSlots(graph, layerIndex, layer);
1290 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001291}
1292
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001293void Deserializer::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
1294{
1295 CHECK_LAYERS(graph, 0, layerIndex);
1296
1297 auto inputs = GetInputs(graph, layerIndex);
1298 CHECK_VALID_SIZE(inputs.size(), 1);
1299
1300 auto outputs = GetOutputs(graph, layerIndex);
1301 CHECK_VALID_SIZE(outputs.size(), 1);
1302
1303 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1304
1305 armnn::DepthToSpaceDescriptor descriptor;
1306 descriptor.m_BlockSize = fbDescriptor->blockSize();
1307 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1308
1309 auto layerName = GetLayerName(graph, layerIndex);
1310 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1311
1312 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1313 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1314
1315 RegisterInputSlots(graph, layerIndex, layer);
1316 RegisterOutputSlots(graph, layerIndex, layer);
1317}
1318
Derek Lamberti8ddae332019-02-21 16:29:43 +00001319void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001320{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001321 CHECK_LAYERS(graph, 0, layerIndex);
1322 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001323 CHECK_LOCATION();
1324 CHECK_VALID_SIZE(inputs.size(), 1);
1325
Derek Lamberti8ddae332019-02-21 16:29:43 +00001326 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001327 CHECK_VALID_SIZE(outputs.size(), 1);
1328
Derek Lamberti8ddae332019-02-21 16:29:43 +00001329 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001330 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001331 auto serializerDescriptor = serializerLayer->descriptor();
1332
1333 armnn::DepthwiseConvolution2dDescriptor descriptor;
1334 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1335 descriptor.m_PadRight = serializerDescriptor->padRight();
1336 descriptor.m_PadTop = serializerDescriptor->padTop();
1337 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1338 descriptor.m_StrideX = serializerDescriptor->strideX();
Aron Virginas-Tar5e1b0cf2019-06-21 14:20:11 +01001339 descriptor.m_StrideY = serializerDescriptor->strideY();
1340 descriptor.m_DilationX = serializerDescriptor->dilationX();
1341 descriptor.m_DilationY = serializerDescriptor->dilationY();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001342 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1343 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1344
1345 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1346 armnn::ConstTensor biases;
1347
Matteo Martincighfc598e12019-05-14 10:36:13 +01001348 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001349 if (descriptor.m_BiasEnabled)
1350 {
1351 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001352 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001353 }
1354 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1355 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001356 optionalBiases,
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001357 layerName.c_str());
1358
1359 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1360 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1361
Derek Lamberti8ddae332019-02-21 16:29:43 +00001362 RegisterInputSlots(graph, layerIndex, layer);
1363 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001364}
1365
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001366void Deserializer::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
1367{
1368 CHECK_LAYERS(graph, 0, layerIndex);
1369 auto inputs = GetInputs(graph, layerIndex);
1370 CHECK_LOCATION();
1371 CHECK_VALID_SIZE(inputs.size(), 2);
1372
1373 auto outputs = GetOutputs(graph, layerIndex);
1374 CHECK_VALID_SIZE(outputs.size(), 4);
1375
1376 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1377 auto layerName = GetLayerName(graph, layerIndex);
1378 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1379
1380 armnn::DetectionPostProcessDescriptor descriptor;
1381 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1382 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1383 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1384 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1385 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1386 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1387 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1388 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1389 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1390 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1391 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1392
1393 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1394
1395 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1396 anchors,
1397 layerName.c_str());
1398
1399 for (unsigned int i = 0; i < 4; i++)
1400 {
1401 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1402 }
1403
1404 RegisterInputSlots(graph, layerIndex, layer);
1405 RegisterOutputSlots(graph, layerIndex, layer);
1406}
1407
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001408void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
1409{
1410 CHECK_LAYERS(graph, 0, layerIndex);
1411 auto inputs = GetInputs(graph, layerIndex);
1412 CHECK_LOCATION();
1413 CHECK_VALID_SIZE(inputs.size(), 2);
1414
1415 auto outputs = GetOutputs(graph, layerIndex);
1416 CHECK_VALID_SIZE(outputs.size(), 1);
1417
1418 auto layerName = GetLayerName(graph, layerIndex);
1419 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1420
1421 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1422 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1423
1424 RegisterInputSlots(graph, layerIndex, layer);
1425 RegisterOutputSlots(graph, layerIndex, layer);
1426}
1427
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001428void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
1429{
1430 CHECK_LAYERS(graph, 0, layerIndex);
1431 auto inputs = GetInputs(graph, layerIndex);
1432 CHECK_LOCATION();
1433 CHECK_VALID_SIZE(inputs.size(), 2);
1434
1435 auto outputs = GetOutputs(graph, layerIndex);
1436 CHECK_VALID_SIZE(outputs.size(), 1);
1437
1438 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001439 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1440 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001441
1442 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1443 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1444
1445 RegisterInputSlots(graph, layerIndex, layer);
1446 RegisterOutputSlots(graph, layerIndex, layer);
1447}
1448
Keith Davis300ad562020-06-04 16:34:23 +01001449void Deserializer::ParseFill(GraphPtr graph, unsigned int layerIndex)
1450{
1451 CHECK_LAYERS(graph, 0, layerIndex);
1452 auto inputs = GetInputs(graph, layerIndex);
1453 CHECK_LOCATION();
1454 CHECK_VALID_SIZE(inputs.size(), 1);
1455
1456 auto outputs = GetOutputs(graph, layerIndex);
1457 CHECK_VALID_SIZE(outputs.size(), 1);
1458
1459 auto layerName = GetLayerName(graph, layerIndex);
1460 armnn::FillDescriptor descriptor(1.0f);
1461 IConnectableLayer* layer = m_Network->AddFillLayer(descriptor, layerName.c_str());
1462
1463 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1464 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1465
1466 RegisterInputSlots(graph, layerIndex, layer);
1467 RegisterOutputSlots(graph, layerIndex, layer);
1468}
1469
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001470void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
1471{
1472 CHECK_LAYERS(graph, 0, layerIndex);
1473 auto inputs = GetInputs(graph, layerIndex);
1474 CHECK_LOCATION();
1475 CHECK_VALID_SIZE(inputs.size(), 2);
1476
1477 auto outputs = GetOutputs(graph, layerIndex);
1478 CHECK_VALID_SIZE(outputs.size(), 1);
1479
1480 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001481 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1482 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001483
1484 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1485 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1486
1487 RegisterInputSlots(graph, layerIndex, layer);
1488 RegisterOutputSlots(graph, layerIndex, layer);
1489}
1490
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001491void Deserializer::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
1492{
1493 CHECK_LAYERS(graph, 0, layerIndex);
1494
1495 auto inputs = GetInputs(graph, layerIndex);
1496 CHECK_VALID_SIZE(inputs.size(), 1);
1497
1498 auto outputs = GetOutputs(graph, layerIndex);
1499 CHECK_VALID_SIZE(outputs.size(), 1);
1500
1501 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1502 auto fbDescriptor = fbLayer->descriptor();
1503
1504 armnn::InstanceNormalizationDescriptor descriptor;
1505 descriptor.m_Gamma = fbDescriptor->gamma();
1506 descriptor.m_Beta = fbDescriptor->beta();
1507 descriptor.m_Eps = fbDescriptor->eps();
1508 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1509
1510 const std::string layerName = GetLayerName(graph, layerIndex);
1511 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1512
1513 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1514 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1515
1516 RegisterInputSlots(graph, layerIndex, layer);
1517 RegisterOutputSlots(graph, layerIndex, layer);
1518}
1519
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001520void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
1521{
1522 CHECK_LAYERS(graph, 0, layerIndex);
1523
1524 auto inputs = GetInputs(graph, layerIndex);
1525 CHECK_VALID_SIZE(inputs.size(), 1);
1526
1527 auto outputs = GetOutputs(graph, layerIndex);
1528 CHECK_VALID_SIZE(outputs.size(), 1);
1529 auto outputInfo = ToTensorInfo(outputs[0]);
1530
1531 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1532 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1533
1534 auto layerName = GetLayerName(graph, layerIndex);
1535 armnn::L2NormalizationDescriptor descriptor;
1536 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001537 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001538
1539 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1540 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1541
1542 RegisterInputSlots(graph, layerIndex, layer);
1543 RegisterOutputSlots(graph, layerIndex, layer);
1544}
1545
Sadik Armagan26257852019-10-14 13:00:47 +01001546void Deserializer::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
1547{
1548 CHECK_LAYERS(graph, 0, layerIndex);
1549
1550 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1551 CHECK_VALID_SIZE(inputs.size(), 1);
1552
1553 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1554 CHECK_VALID_SIZE(outputs.size(), 1);
1555
1556 armnn::LogSoftmaxDescriptor descriptor;
1557 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1558 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1559 auto layerName = GetLayerName(graph, layerIndex);
1560
1561 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1562
1563 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1564 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1565
1566 RegisterInputSlots(graph, layerIndex, layer);
1567 RegisterOutputSlots(graph, layerIndex, layer);
1568}
1569
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001570void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
1571{
1572 CHECK_LAYERS(graph, 0, layerIndex);
1573 auto inputs = GetInputs(graph, layerIndex);
1574 CHECK_LOCATION();
1575 CHECK_VALID_SIZE(inputs.size(), 2);
1576
1577 auto outputs = GetOutputs(graph, layerIndex);
1578 CHECK_VALID_SIZE(outputs.size(), 1);
1579
1580 auto layerName = GetLayerName(graph, layerIndex);
1581 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1582
1583 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1584 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1585
1586 RegisterInputSlots(graph, layerIndex, layer);
1587 RegisterOutputSlots(graph, layerIndex, layer);
1588}
1589
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001590void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
1591{
1592 CHECK_LAYERS(graph, 0, layerIndex);
1593 auto inputs = GetInputs(graph, layerIndex);
1594 CHECK_LOCATION();
1595 CHECK_VALID_SIZE(inputs.size(), 2);
1596
1597 auto outputs = GetOutputs(graph, layerIndex);
1598 CHECK_VALID_SIZE(outputs.size(), 1);
1599
1600 auto layerName = GetLayerName(graph, layerIndex);
1601 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1602
1603 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1604 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1605
1606 RegisterInputSlots(graph, layerIndex, layer);
1607 RegisterOutputSlots(graph, layerIndex, layer);
1608}
1609
Jim Flynne242f2d2019-05-22 14:24:13 +01001610const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1611 unsigned int layerIndex)
1612{
1613 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1614
1615 switch (layerType)
1616 {
1617 case Layer::Layer_ConcatLayer:
1618 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1619 case Layer::Layer_MergerLayer:
1620 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1621 default:
1622 throw armnn::Exception("unknown layer type, should be concat or merger");
1623 }
1624}
1625
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01001626void Deserializer::ParseComparison(GraphPtr graph, unsigned int layerIndex)
1627{
1628 CHECK_LAYERS(graph, 0, layerIndex);
1629 CHECK_LOCATION();
1630
1631 auto inputs = GetInputs(graph, layerIndex);
1632 CHECK_VALID_SIZE(inputs.size(), 2);
1633
1634 auto outputs = GetOutputs(graph, layerIndex);
1635 CHECK_VALID_SIZE(outputs.size(), 1);
1636
1637 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
1638 auto fbDescriptor = fbLayer->descriptor();
1639
1640 armnn::ComparisonDescriptor descriptor;
1641 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
1642
1643 const std::string& layerName = GetLayerName(graph, layerIndex);
1644 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1645
1646 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1647 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1648
1649 RegisterInputSlots(graph, layerIndex, layer);
1650 RegisterOutputSlots(graph, layerIndex, layer);
1651}
1652
josh minor4a3c6102020-01-06 16:40:46 -06001653void Deserializer::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
1654{
1655 CHECK_LAYERS(graph, 0, layerIndex);
1656 CHECK_LOCATION();
1657
1658 auto inputs = GetInputs(graph, layerIndex);
1659 CHECK_VALID_SIZE(inputs.size(), 1);
1660
1661 auto outputs = GetOutputs(graph, layerIndex);
1662 CHECK_VALID_SIZE(outputs.size(), 1);
1663
1664 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
1665 auto fbDescriptor = fbLayer->descriptor();
1666
1667 armnn::ElementwiseUnaryDescriptor descriptor;
1668 descriptor.m_Operation = ToUnaryOperation(fbDescriptor->operation());
1669
1670 const std::string& layerName = GetLayerName(graph, layerIndex);
1671 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
1672
1673 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1674 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1675
1676 RegisterInputSlots(graph, layerIndex, layer);
1677 RegisterOutputSlots(graph, layerIndex, layer);
1678}
1679
Jim Flynn906f9462019-05-10 13:55:21 +01001680void Deserializer::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001681{
1682 CHECK_LAYERS(graph, 0, layerIndex);
1683 CHECK_LOCATION();
1684
1685 auto outputs = GetOutputs(graph, layerIndex);
1686 CHECK_VALID_SIZE(outputs.size(), 1);
1687
Jim Flynnac25a1b2019-02-28 10:40:49 +00001688 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001689 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1690 unsigned int numViews = originsDescriptor->numViews();
1691 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001692
1693 // can now check the number of inputs == number of views
1694 auto inputs = GetInputs(graph, layerIndex);
1695 CHECK_VALID_SIZE(inputs.size(), numViews);
1696
1697 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001698 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001699 for (unsigned int v = 0; v < numViews; ++v)
1700 {
1701 auto originPtr = originsPtr->Get(v);
1702 for (unsigned int d = 0; d < numDimensions; ++d)
1703 {
1704 uint32_t value = originPtr->data()->Get(d);
1705 descriptor.SetViewOriginCoord(v, d, value);
1706 }
1707 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001708 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001709
Jim Flynn906f9462019-05-10 13:55:21 +01001710 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001711 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1712 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1713
1714 RegisterInputSlots(graph, layerIndex, layer);
1715 RegisterOutputSlots(graph, layerIndex, layer);
1716}
1717
Derek Lamberti8ddae332019-02-21 16:29:43 +00001718void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001719{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001720 CHECK_LAYERS(graph, 0, layerIndex);
1721 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001722 CHECK_LOCATION();
1723 CHECK_VALID_SIZE(inputs.size(), 2);
1724
Derek Lamberti8ddae332019-02-21 16:29:43 +00001725 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001726 CHECK_VALID_SIZE(outputs.size(), 1);
1727
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001728 auto layerName = GetLayerName(graph, layerIndex);
1729 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001730
1731 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1732 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1733
Derek Lamberti8ddae332019-02-21 16:29:43 +00001734 RegisterInputSlots(graph, layerIndex, layer);
1735 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001736}
1737
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001738void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
1739{
1740 CHECK_LAYERS(graph, 0, layerIndex);
1741 CHECK_LOCATION();
1742
1743 auto inputs = GetInputs(graph, layerIndex);
1744 CHECK_VALID_SIZE(inputs.size(), 1);
1745
1746 auto outputs = GetOutputs(graph, layerIndex);
1747 CHECK_VALID_SIZE(outputs.size(), 1);
1748
1749 auto layerName = GetLayerName(graph, layerIndex);
1750
1751 armnn::IConnectableLayer* layer;
1752
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001753 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001754
1755 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1756 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1757
1758 RegisterInputSlots(graph, layerIndex, layer);
1759 RegisterOutputSlots(graph, layerIndex, layer);
1760}
1761
Derek Lamberti8ddae332019-02-21 16:29:43 +00001762void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001763{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001764 CHECK_LAYERS(graph, 0, layerIndex);
1765 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001766 CHECK_LOCATION();
1767 CHECK_VALID_SIZE(inputs.size(), 1);
1768
Derek Lamberti8ddae332019-02-21 16:29:43 +00001769 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001770 CHECK_VALID_SIZE(outputs.size(), 1);
1771
Derek Lamberti8ddae332019-02-21 16:29:43 +00001772 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001773 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001774 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1775
1776 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1777 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1778 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1779
1780 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1781
1782 armnn::IConnectableLayer* layer;
Matteo Martincighfc598e12019-05-14 10:36:13 +01001783 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001784 if (flatBufferDescriptor->biasEnabled())
1785 {
1786 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001787 optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001788 }
Matteo Martincighfc598e12019-05-14 10:36:13 +01001789 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1790 weightsTensor,
1791 optionalBiases,
1792 layerName.c_str());
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001793
1794 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1795 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1796
Derek Lamberti8ddae332019-02-21 16:29:43 +00001797 RegisterInputSlots(graph, layerIndex, layer);
1798 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001799}
1800
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001801void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
1802{
1803 CHECK_LAYERS(graph, 0, layerIndex);
1804
1805 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1806 CHECK_VALID_SIZE(inputs.size(), 1);
1807
1808 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1809 CHECK_VALID_SIZE(outputs.size(), 1);
1810
1811 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1812 auto flatBufferPadList = flatBufferDescriptor->padList();
David Monahan34757812019-06-19 11:47:21 +01001813 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001814
1815 if (flatBufferPadList->Length() % 2 != 0)
1816 {
1817 throw ParseException(boost::str(
1818 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1819 }
1820
1821 std::vector<std::pair<unsigned int, unsigned int>> padList;
1822 padList.reserve(flatBufferPadList->Length() / 2);
1823 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1824 {
1825 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1826 }
1827
David Monahan34757812019-06-19 11:47:21 +01001828 armnn::PadDescriptor descriptor(padList, padValue);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001829
1830 auto layerName = GetLayerName(graph, layerIndex);
1831 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1832
1833 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1834 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1835
1836 RegisterInputSlots(graph, layerIndex, layer);
1837 RegisterOutputSlots(graph, layerIndex, layer);
1838}
1839
Derek Lamberti8ddae332019-02-21 16:29:43 +00001840void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001841{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001842 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001843
1844 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001845 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001846
Derek Lamberti8ddae332019-02-21 16:29:43 +00001847 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001848 CHECK_VALID_SIZE(inputs.size(), 1);
1849
Derek Lamberti8ddae332019-02-21 16:29:43 +00001850 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001851 CHECK_VALID_SIZE(outputs.size(), 1);
1852 auto outputInfo = ToTensorInfo(outputs[0]);
1853
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001854 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001855 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1856
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001857 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001858 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1859
Derek Lamberti8ddae332019-02-21 16:29:43 +00001860 RegisterInputSlots(graph, layerIndex, layer);
1861 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001862}
1863
// Translates a serialized (flatbuffer) pooling descriptor into the equivalent
// armnn::Pooling2dDescriptor. An unsupported enum value trips an assertion
// (presumably a no-op in release builds) and leaves the corresponding field at
// its default-constructed value.
armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
                                                              unsigned int layerIndex)
{
    // layerIndex is kept in the signature for interface symmetry with the other
    // descriptor getters but is not needed here.
    IgnoreUnused(layerIndex);
    armnn::Pooling2dDescriptor desc;

    // Pooling algorithm: average or max.
    switch (pooling2dDesc->poolType())
    {
        case PoolingAlgorithm_Average:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Average;
            break;
        }
        case PoolingAlgorithm_Max:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Max;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
        }
    }

    // How a non-integral output extent is rounded.
    switch (pooling2dDesc->outputShapeRounding())
    {
        case OutputShapeRounding_Floor:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
            break;
        }
        case OutputShapeRounding_Ceiling:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
        }
    }

    // Whether padded elements are excluded from or included in the pool.
    switch (pooling2dDesc->paddingMethod())
    {
        case PaddingMethod_Exclude:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
            break;
        }
        case PaddingMethod_IgnoreValue:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported padding method");
        }
    }

    // Tensor data layout the pooling operates on.
    switch (pooling2dDesc->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    // Scalar fields copy across directly.
    desc.m_PadRight   = pooling2dDesc->padRight();
    desc.m_PadLeft    = pooling2dDesc->padLeft();
    desc.m_PadBottom  = pooling2dDesc->padBottom();
    desc.m_PadTop     = pooling2dDesc->padTop();
    desc.m_StrideX    = pooling2dDesc->strideX();
    desc.m_StrideY    = pooling2dDesc->strideY();
    desc.m_PoolWidth  = pooling2dDesc->poolWidth();
    desc.m_PoolHeight = pooling2dDesc->poolHeight();

    return desc;
}
1953
Derek Lamberti8ddae332019-02-21 16:29:43 +00001954void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001955{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001956 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001957
Derek Lamberti8ddae332019-02-21 16:29:43 +00001958 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001959 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001960 CHECK_VALID_SIZE(inputs.size(), 1);
1961
Derek Lamberti8ddae332019-02-21 16:29:43 +00001962 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001963 CHECK_VALID_SIZE(outputs.size(), 1);
1964 auto outputInfo = ToTensorInfo(outputs[0]);
1965
1966 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001967 auto layerName = GetLayerName(graph, layerIndex);
1968 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001969 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1970
Derek Lamberti8ddae332019-02-21 16:29:43 +00001971 RegisterInputSlots(graph, layerIndex, layer);
1972 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001973}
1974
Derek Lamberti87acb272019-03-27 16:51:31 +00001975void Deserializer::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
1976{
1977 CHECK_LAYERS(graph, 0, layerIndex);
1978
1979 auto inputs = GetInputs(graph, layerIndex);
1980 CHECK_VALID_SIZE(inputs.size(), 1);
1981
1982 auto outputs = GetOutputs(graph, layerIndex);
1983 CHECK_VALID_SIZE(outputs.size(), 1);
1984 auto outputInfo = ToTensorInfo(outputs[0]);
1985
1986 auto layerName = GetLayerName(graph, layerIndex);
1987 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
1988 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1989
1990 RegisterInputSlots(graph, layerIndex, layer);
1991 RegisterOutputSlots(graph, layerIndex, layer);
1992}
1993
// Resolves a reshape target shape against the input tensor. At most one target
// dimension may be -1 ("stretch"); it is replaced by whatever extent makes the
// element counts match. Throws ParseException if more than one -1 is present.
armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                     const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        // A second -1 after the first makes the shape ambiguous.
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(boost::str(
                boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
        }

        // Product of all target dims: the initial value of -1 cancels the single
        // -1 stretch entry, leaving the (positive) product of the fixed dims.
        auto targetNumElements =
            armnn::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        // The stretch dim absorbs whatever factor is left of the input's element count.
        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    // Preserve data type and quantization info from the input; only the shape changes.
    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}
2023
Finn Williams2605b232020-06-10 15:53:46 +01002024void Deserializer::ParseRank(GraphPtr graph, unsigned int layerIndex)
2025{
2026 CHECK_LAYERS(graph, 0, layerIndex);
2027
2028 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2029 CHECK_VALID_SIZE(inputs.size(), 1);
2030
2031 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2032 CHECK_VALID_SIZE(outputs.size(), 1);
2033
2034 auto layerName = GetLayerName(graph, layerIndex);
2035 IConnectableLayer* layer = m_Network->AddRankLayer( layerName.c_str());
2036
2037 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2038 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2039
2040 RegisterInputSlots(graph, layerIndex, layer);
2041 RegisterOutputSlots(graph, layerIndex, layer);
2042}
2043
// Deserializes a reshape layer. The target shape comes from the serialized
// descriptor; a -1 entry is resolved by OutputShapeOfReshape.
void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    // Resolve any -1 stretch dimension against the input's element count.
    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    // NOTE(review): this consistency check only fires when the layer has more
    // than one input, so for the common single-input reshape the computed shape
    // is never validated against the serialized output shape — confirm whether
    // the `inputs.size() > 1` guard is intentional.
    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2086
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002087void Deserializer::ParseResize(GraphPtr graph, unsigned int layerIndex)
2088{
2089 CHECK_LAYERS(graph, 0, layerIndex);
2090
2091 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2092 CHECK_VALID_SIZE(inputs.size(), 1);
2093
2094 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2095 CHECK_VALID_SIZE(outputs.size(), 1);
2096
2097 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2098
2099 armnn::ResizeDescriptor descriptor;
2100 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
2101 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
2102 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
2103 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002104 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2105 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002106
2107 auto layerName = GetLayerName(graph, layerIndex);
2108 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2109
2110 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2111 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2112
2113 RegisterInputSlots(graph, layerIndex, layer);
2114 RegisterOutputSlots(graph, layerIndex, layer);
2115}
2116
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002117void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
2118{
2119 CHECK_LAYERS(graph, 0, layerIndex);
2120
2121 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2122 CHECK_VALID_SIZE(inputs.size(), 1);
2123
2124 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2125 CHECK_VALID_SIZE(outputs.size(), 1);
2126
2127 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2128
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002129 armnn::ResizeDescriptor descriptor;
2130 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002131 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002132 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
2133 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002134 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2135 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002136
2137 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002138 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002139
2140 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2141 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2142
2143 RegisterInputSlots(graph, layerIndex, layer);
2144 RegisterOutputSlots(graph, layerIndex, layer);
2145}
2146
Derek Lamberti8ddae332019-02-21 16:29:43 +00002147void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002148{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002149 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002150
Derek Lamberti8ddae332019-02-21 16:29:43 +00002151 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002152 CHECK_VALID_SIZE(inputs.size(), 1);
2153
Derek Lamberti8ddae332019-02-21 16:29:43 +00002154 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002155 CHECK_VALID_SIZE(outputs.size(), 1);
2156
2157 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00002158 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002159 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002160
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002161 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2162
2163 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2164 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2165
Derek Lamberti8ddae332019-02-21 16:29:43 +00002166 RegisterInputSlots(graph, layerIndex, layer);
2167 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00002168}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002169
// Deserializes a SpaceToBatchNd layer. The pad list is stored flattened as
// [before_0, after_0, before_1, after_1, ...] and is rebuilt here into
// (before, after) pairs.
void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
    auto flatBufferPadList = flatBufferDescriptor->padList();
    auto flatBufferBlockShape = flatBufferDescriptor->blockShape();

    // An odd element count cannot be paired up; reject the file.
    if (flatBufferPadList->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    // Rebuild (before, after) padding pairs from the flattened list.
    std::vector<std::pair<unsigned int, unsigned int>> padList;
    padList.reserve(flatBufferPadList->Length() / 2);
    for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
    {
        padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
    }

    armnn::SpaceToBatchNdDescriptor descriptor;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
    descriptor.m_BlockShape =
        std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
    descriptor.m_PadList = padList;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2212
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002213void Deserializer::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
2214{
2215 CHECK_LAYERS(graph, 0, layerIndex);
2216
2217 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2218 CHECK_VALID_SIZE(inputs.size(), 1);
2219
2220 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2221 CHECK_VALID_SIZE(outputs.size(), 1);
2222
2223 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2224
2225 armnn::SpaceToDepthDescriptor descriptor;
2226 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2227 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2228
2229 auto layerName = GetLayerName(graph, layerIndex);
2230 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2231
2232 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2233 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2234
2235 RegisterInputSlots(graph, layerIndex, layer);
2236 RegisterOutputSlots(graph, layerIndex, layer);
2237}
2238
// Translates a serialized (flatbuffer) normalization descriptor into the
// equivalent armnn::NormalizationDescriptor. An unsupported enum value trips an
// assertion (presumably a no-op in release builds) and leaves the corresponding
// field at its default-constructed value.
armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
    Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
    unsigned int layerIndex)
{
    // layerIndex is kept for interface symmetry with the other descriptor
    // getters but is not needed here.
    IgnoreUnused(layerIndex);
    armnn::NormalizationDescriptor desc;

    // Normalization channel scope: across channels or within a channel.
    switch (normalizationDescriptor->normChannelType())
    {
        case NormalizationAlgorithmChannel_Across:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
            break;
        }
        case NormalizationAlgorithmChannel_Within:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported normalization channel type");
        }
    }

    // Normalization method.
    switch (normalizationDescriptor->normMethodType())
    {
        case NormalizationAlgorithmMethod_LocalBrightness:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
            break;
        }
        case NormalizationAlgorithmMethod_LocalContrast:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported normalization method type");
        }
    }

    // Tensor data layout.
    switch (normalizationDescriptor->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    // Scalar parameters copy across directly.
    desc.m_Alpha = normalizationDescriptor->alpha();
    desc.m_Beta = normalizationDescriptor->beta();
    desc.m_K = normalizationDescriptor->k();
    desc.m_NormSize = normalizationDescriptor->normSize();

    return desc;
}
2307
2308void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
2309{
2310 CHECK_LAYERS(graph, 0, layerIndex);
2311
2312 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2313
2314 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2315 CHECK_VALID_SIZE(inputs.size(), 1);
2316
2317 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2318 CHECK_VALID_SIZE(outputs.size(), 1);
2319
2320 auto outputInfo = ToTensorInfo(outputs[0]);
2321
2322 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2323 auto layerName = GetLayerName(graph, layerIndex);
2324
2325 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2326 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2327
2328 RegisterInputSlots(graph, layerIndex, layer);
2329 RegisterOutputSlots(graph, layerIndex, layer);
2330}
2331
Sadik Armagan8b42a382019-03-01 14:24:49 +00002332void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
2333{
2334 CHECK_LAYERS(graph, 0, layerIndex);
2335 auto inputs = GetInputs(graph, layerIndex);
2336 CHECK_LOCATION();
2337 CHECK_VALID_SIZE(inputs.size(), 1);
2338
2339 auto outputs = GetOutputs(graph, layerIndex);
2340 CHECK_VALID_SIZE(outputs.size(), 1);
2341
2342 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagan8b42a382019-03-01 14:24:49 +00002343
josh minor4a3c6102020-01-06 16:40:46 -06002344 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
2345 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
Sadik Armagan8b42a382019-03-01 14:24:49 +00002346 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2347 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2348
2349 RegisterInputSlots(graph, layerIndex, layer);
2350 RegisterOutputSlots(graph, layerIndex, layer);
2351}
2352
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002353void Deserializer::ParseSlice(GraphPtr graph, unsigned int layerIndex)
2354{
2355 CHECK_LAYERS(graph, 0, layerIndex);
2356
2357 auto inputs = GetInputs(graph, layerIndex);
2358 CHECK_VALID_SIZE(inputs.size(), 1);
2359
2360 auto outputs = GetOutputs(graph, layerIndex);
2361 CHECK_VALID_SIZE(outputs.size(), 1);
2362
2363 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2364
2365 auto fbBegin = fbDescriptor->begin();
2366 auto fbSize = fbDescriptor->size();
2367
2368 if (fbBegin->Length() != fbSize->Length())
2369 {
2370 throw ParseException(boost::str(
2371 boost::format("Begin and size descriptors must have the same length %1%") % CHECK_LOCATION().AsString()));
2372 }
2373
2374 armnn::SliceDescriptor descriptor;
2375 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
2376 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
2377
2378 auto layerName = GetLayerName(graph, layerIndex);
2379 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2380
2381 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2382 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2383
2384 RegisterInputSlots(graph, layerIndex, layer);
2385 RegisterOutputSlots(graph, layerIndex, layer);
2386}
2387
// Deserializes a StridedSlice layer: per-dimension begin/end/stride vectors
// plus the TensorFlow-style bit masks that modify how each dimension is sliced.
void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();

    auto flatBufferBegin = flatBufferDescriptor->begin();
    auto flatBufferEnd = flatBufferDescriptor->end();
    auto flatBufferStride = flatBufferDescriptor->stride();

    // All three vectors describe the same set of dimensions, so their lengths
    // must agree; otherwise the file is malformed.
    if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
          flatBufferBegin->Length() == flatBufferStride->Length()))
    {
        throw ParseException(boost::str(
            boost::format("The size of the begin, end, and stride must be equal %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
    std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
    std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());

    // The remaining descriptor fields are bit masks copied through verbatim.
    armnn::StridedSliceDescriptor descriptor(begin, end, stride);
    descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
    descriptor.m_EndMask = flatBufferDescriptor->endMask();
    descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
    descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
    descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2432
Conor Kennedyda1f9752019-03-01 14:37:12 +00002433void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
2434{
2435 CHECK_LAYERS(graph, 0, layerIndex);
2436 auto inputs = GetInputs(graph, layerIndex);
2437 CHECK_LOCATION();
2438 CHECK_VALID_SIZE(inputs.size(), 2);
2439
2440 auto outputs = GetOutputs(graph, layerIndex);
2441 CHECK_VALID_SIZE(outputs.size(), 1);
2442
2443 auto layerName = GetLayerName(graph, layerIndex);
2444 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
2445
2446 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2447 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2448
2449 RegisterInputSlots(graph, layerIndex, layer);
2450 RegisterOutputSlots(graph, layerIndex, layer);
2451}
2452
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002453void Deserializer::ParseGather(GraphPtr graph, unsigned int layerIndex)
2454{
2455 CHECK_LAYERS(graph, 0, layerIndex);
2456
2457 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2458 CHECK_VALID_SIZE(inputs.size(), 2);
2459
2460 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2461 CHECK_VALID_SIZE(outputs.size(), 1);
2462
Teresa Charlin52664732020-06-29 16:27:03 +01002463 armnn::GatherDescriptor descriptor;
2464 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_GatherLayer()->descriptor()->axis();
2465
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002466 auto layerName = GetLayerName(graph, layerIndex);
Teresa Charlin52664732020-06-29 16:27:03 +01002467 IConnectableLayer* layer = m_Network->AddGatherLayer(descriptor, layerName.c_str());
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002468
2469 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002470 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2471
2472 RegisterInputSlots(graph, layerIndex, layer);
2473 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002474}
2475
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002476void Deserializer::ParseMean(GraphPtr graph, unsigned int layerIndex)
2477{
2478 CHECK_LAYERS(graph, 0, layerIndex);
2479
2480 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2481 CHECK_VALID_SIZE(inputs.size(), 1);
2482
2483 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2484 CHECK_VALID_SIZE(outputs.size(), 1);
2485
2486 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
2487 auto flatBufferAxis = flatBufferDescriptor->axis();
2488 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
2489
2490 armnn::MeanDescriptor descriptor;
2491 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2492 descriptor.m_KeepDims = flatBufferKeepDims;
2493
2494 auto layerName = GetLayerName(graph, layerIndex);
2495 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
2496
2497 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2498 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2499
2500 RegisterInputSlots(graph, layerIndex, layer);
2501 RegisterOutputSlots(graph, layerIndex, layer);
2502}
2503
Jim Flynn18ce3382019-03-08 11:08:30 +00002504void Deserializer::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
2505{
2506 CHECK_LAYERS(graph, 0, layerIndex);
2507
2508 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2509 CHECK_VALID_SIZE(inputs.size(), 1);
2510
2511 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2512
2513 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
2514 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
2515 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
2516 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
2517 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
2518 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
2519
2520 // Check numViews and numDimensions corresponds to the ones already serialized ...
2521 // numViews == flatBufferViewSizes.size();
2522 // foreach: numDimensions == flatBufferViewSizes[x].size();
2523
2524 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
2525 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2526 {
2527 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
2528 {
2529 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
2530 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
2531 }
2532 }
2533
2534 auto layerName = GetLayerName(graph, layerIndex);
2535 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
2536
2537 // I could have as many outputs as views ...
2538 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2539 {
2540 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
2541 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
2542 }
2543
2544 RegisterInputSlots(graph, layerIndex, layer);
2545 RegisterOutputSlots(graph, layerIndex, layer);
2546}
2547
Jim Flynn11af3752019-03-19 17:22:29 +00002548armnn::LstmDescriptor Deserializer::GetLstmDescriptor(Deserializer::LstmDescriptorPtr lstmDescriptor)
2549{
2550 armnn::LstmDescriptor desc;
2551
2552 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
2553 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
2554 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
2555 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
2556 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
2557 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01002558 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00002559
2560 return desc;
2561}
2562
// Deserializes an LSTM layer from the flatbuffer graph and adds it to the network.
// Rebuilds the LstmDescriptor, then the mandatory weight/bias tensors, plus each
// optional parameter set (CIFG, projection, peephole, layer normalisation) whose
// presence is implied by the corresponding descriptor flag.
void Deserializer::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    // LSTM layers carry exactly 3 inputs and 4 outputs.
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    // NOTE: lstmInputParams stores raw pointers to the function-scope ConstTensors
    // declared below; every tensor must therefore live at this scope (not inside the
    // conditional blocks) so the pointers remain valid for the AddLstmLayer call.
    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters — always present in the serialized layer.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate parameters — only serialized when CIFG is disabled.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Projection parameters — only when projection is enabled.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Peephole parameters — only when peephole connections are enabled.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Layer-normalisation parameters — only when layer norm is enabled; the input
    // layer-norm weights are additionally skipped when CIFG is enabled (no input gate).
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (lstmDescriptor.m_LayerNormEnabled)
    {
        if (!lstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    // Propagate the serialized tensor infos to each of the four output slots.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2678
James Conroy8d333182020-05-13 10:27:58 +01002679armnn::QLstmDescriptor Deserializer::GetQLstmDescriptor(Deserializer::QLstmDescriptorPtr qLstmDescriptor)
2680{
2681 armnn::QLstmDescriptor desc;
2682
2683 desc.m_CifgEnabled = qLstmDescriptor->cifgEnabled();
2684 desc.m_PeepholeEnabled = qLstmDescriptor->peepholeEnabled();
2685 desc.m_ProjectionEnabled = qLstmDescriptor->projectionEnabled();
2686 desc.m_LayerNormEnabled = qLstmDescriptor->layerNormEnabled();
2687
2688 desc.m_CellClip = qLstmDescriptor->cellClip();
2689 desc.m_ProjectionClip = qLstmDescriptor->projectionClip();
2690
2691 desc.m_InputIntermediateScale = qLstmDescriptor->inputIntermediateScale();
2692 desc.m_ForgetIntermediateScale = qLstmDescriptor->forgetIntermediateScale();
2693 desc.m_CellIntermediateScale = qLstmDescriptor->cellIntermediateScale();
2694 desc.m_OutputIntermediateScale = qLstmDescriptor->outputIntermediateScale();
2695
2696 desc.m_HiddenStateScale = qLstmDescriptor->hiddenStateScale();
2697 desc.m_HiddenStateZeroPoint = qLstmDescriptor->hiddenStateZeroPoint();
2698
2699 return desc;
2700}
2701
// Deserializes a QLstm (quantized LSTM with configurable features) layer and adds it
// to the network. Mirrors ParseLstm: mandatory tensors first, then each optional set
// (CIFG, projection, peephole, layer norm) gated by the descriptor flags.
void Deserializer::ParseQLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    // QLstm layers carry exactly 3 inputs and 3 outputs.
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 3);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto qLstmDescriptor = GetQLstmDescriptor(flatBufferDescriptor);
    // NOTE: qLstmInputParams stores raw pointers to the function-scope ConstTensors
    // below; they must stay at this scope so the pointers are valid for AddQLstmLayer.
    armnn::LstmInputParams qLstmInputParams;

    // Mandatory params
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    qLstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    qLstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    qLstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    qLstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    qLstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    qLstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    qLstmInputParams.m_ForgetGateBias = &forgetGateBias;
    qLstmInputParams.m_CellBias = &cellBias;
    qLstmInputParams.m_OutputGateBias = &outputGateBias;

    // Optional CIFG params — input-gate tensors exist only when CIFG is disabled.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor inputGateBias;

    if (!qLstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        qLstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        qLstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        qLstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Optional projection params
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;

    if (qLstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        qLstmInputParams.m_ProjectionWeights = &projectionWeights;
        qLstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Optional peephole params — cellToInput additionally requires CIFG disabled,
    // since there is no input gate to peek into otherwise.
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;

    if (qLstmDescriptor.m_PeepholeEnabled)
    {
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
            qLstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        }

        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        qLstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        qLstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Optional layer norm params — input layer-norm weights likewise skipped with CIFG.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;

    if (qLstmDescriptor.m_LayerNormEnabled)
    {
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            qLstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }

        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        qLstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        qLstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        qLstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());

    // Outputs: output state, cell state, and the layer output.
    armnn::TensorInfo outputStateOutInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputStateOutInfo);

    armnn::TensorInfo cellStateOutInfo = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(cellStateOutInfo);

    armnn::TensorInfo outputInfo = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2827
// Deserializes a QuantizedLstm layer and adds it to the network. Unlike Lstm/QLstm,
// this layer has no descriptor and no optional features: all twelve weight/bias
// tensors are always present in the serialized input params.
void Deserializer::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    // QuantizedLstm layers carry exactly 3 inputs and 2 outputs.
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    // NOTE: lstmInputParams stores raw pointers to the function-scope ConstTensors
    // below, which remain valid for the AddQuantizedLstmLayer call.
    armnn::QuantizedLstmInputParams lstmInputParams;

    armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_InputGateBias = &inputGateBias;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2881
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002882void Deserializer::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
2883{
2884 CHECK_LAYERS(graph, 0, layerIndex);
2885
2886 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2887 CHECK_VALID_SIZE(inputs.size(), 1);
2888
2889 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2890 CHECK_VALID_SIZE(outputs.size(), 1);
2891
2892 const std::string layerName = GetLayerName(graph, layerIndex);
2893 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
2894
2895 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2896 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2897
2898 RegisterInputSlots(graph, layerIndex, layer);
2899 RegisterOutputSlots(graph, layerIndex, layer);
2900}
2901
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01002902void Deserializer::ParseMerge(GraphPtr graph, unsigned int layerIndex)
2903{
2904 CHECK_LAYERS(graph, 0, layerIndex);
2905
2906 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2907 CHECK_VALID_SIZE(inputs.size(), 2);
2908
2909 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2910 CHECK_VALID_SIZE(outputs.size(), 1);
2911
2912 const std::string layerName = GetLayerName(graph, layerIndex);
2913 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
2914
2915 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2916 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2917
2918 RegisterInputSlots(graph, layerIndex, layer);
2919 RegisterOutputSlots(graph, layerIndex, layer);
2920}
2921
Sadik Armaganeff363d2019-04-05 15:25:46 +01002922void Deserializer::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
2923{
2924 CHECK_LAYERS(graph, 0, layerIndex);
2925 auto inputs = GetInputs(graph, layerIndex);
2926 CHECK_LOCATION();
2927 CHECK_VALID_SIZE(inputs.size(), 2);
2928
2929 auto outputs = GetOutputs(graph, layerIndex);
2930 CHECK_VALID_SIZE(outputs.size(), 2);
2931
2932 auto layerName = GetLayerName(graph, layerIndex);
2933 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
2934
2935 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
2936 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
2937
2938 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
2939 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
2940
2941 RegisterInputSlots(graph, layerIndex, layer);
2942 RegisterOutputSlots(graph, layerIndex, layer);
2943}
2944
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01002945void Deserializer::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
2946{
2947 CHECK_LAYERS(graph, 0, layerIndex);
2948 auto inputs = GetInputs(graph, layerIndex);
2949 CHECK_LOCATION();
2950 CHECK_VALID_SIZE(inputs.size(), 2);
2951
2952 auto outputs = GetOutputs(graph, layerIndex);
2953 CHECK_VALID_SIZE(outputs.size(), 1);
2954
2955 auto layerName = GetLayerName(graph, layerIndex);
2956 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
2957
2958 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2959 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2960
2961 RegisterInputSlots(graph, layerIndex, layer);
2962 RegisterOutputSlots(graph, layerIndex, layer);
2963}
2964
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002965void Deserializer::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
2966{
2967 CHECK_LAYERS(graph, 0, layerIndex);
2968
2969 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
2970
2971 auto inputs = GetInputs(graph, layerIndex);
2972 CHECK_VALID_SIZE(inputs.size(), 1);
2973
2974 auto outputs = GetOutputs(graph, layerIndex);
2975 CHECK_VALID_SIZE(outputs.size(), 1);
2976 auto outputInfo = ToTensorInfo(outputs[0]);
2977
2978 auto layerName = GetLayerName(graph, layerIndex);
2979 const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
2980
2981 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
2982 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2983
2984 RegisterInputSlots(graph, layerIndex, layer);
2985 RegisterOutputSlots(graph, layerIndex, layer);
2986}
2987
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01002988void Deserializer::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
2989{
2990 CHECK_LAYERS(graph, 0, layerIndex);
2991
2992 auto inputs = GetInputs(graph, layerIndex);
2993 CHECK_VALID_SIZE(inputs.size(), 1);
2994
2995 auto outputs = GetOutputs(graph, layerIndex);
2996 CHECK_VALID_SIZE(outputs.size(), 1);
2997
2998 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
2999 auto layerName = GetLayerName(graph, layerIndex);
3000 auto serializerDescriptor = serializerLayer->descriptor();
3001
3002 armnn::TransposeConvolution2dDescriptor descriptor;
3003 descriptor.m_PadLeft = serializerDescriptor->padLeft();
3004 descriptor.m_PadRight = serializerDescriptor->padRight();
3005 descriptor.m_PadTop = serializerDescriptor->padTop();
3006 descriptor.m_PadBottom = serializerDescriptor->padBottom();
3007 descriptor.m_StrideX = serializerDescriptor->strideX();
3008 descriptor.m_StrideY = serializerDescriptor->strideY();;
3009 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
3010 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
3011
3012 // weights & biases
3013 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
3014 armnn::Optional<armnn::ConstTensor> optionalBiases;
3015 if (descriptor.m_BiasEnabled)
3016 {
3017 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
3018 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
3019 }
3020
3021 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
3022 weights,
3023 optionalBiases,
3024 layerName.c_str());
3025
3026 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3027 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3028
3029 RegisterInputSlots(graph, layerIndex, layer);
3030 RegisterOutputSlots(graph, layerIndex, layer);
3031}
3032
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003033void Deserializer::ParseStack(GraphPtr graph, unsigned int layerIndex)
3034{
3035 CHECK_LAYERS(graph, 0, layerIndex);
3036 auto inputs = GetInputs(graph, layerIndex);
3037
3038 auto outputs = GetOutputs(graph, layerIndex);
3039 CHECK_VALID_SIZE(outputs.size(), 1);
3040
3041 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
3042 unsigned int axis = flatBufferDescriptor->axis();
3043 unsigned int numInputs = flatBufferDescriptor->numInputs();
3044 CHECK_VALID_SIZE(inputs.size(), numInputs);
3045
3046 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
3047 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
3048 flatBufferInputShape->begin() + flatBufferInputShape->size());
3049
3050 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
3051 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
3052
3053 for (unsigned int i=0; i<inputs.size(); ++i)
3054 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01003055 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003056 if (descriptor.m_InputShape != inputShape)
3057 {
3058 std::stringstream ss;
3059 ss << "Shape of input "
3060 << i
3061 << " "
3062 << inputShape
3063 << " does not equal defined input shape "
3064 << descriptor.m_InputShape
3065 << ": "
3066 << CHECK_LOCATION().AsString();
3067 throw ParseException(ss.str());
3068 }
3069 }
3070
3071 auto layerName = GetLayerName(graph, layerIndex);
3072 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
3073
3074 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3075 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3076
3077 RegisterInputSlots(graph, layerIndex, layer);
3078 RegisterOutputSlots(graph, layerIndex, layer);
3079}
3080
Aron Virginas-Tar85121a22019-10-23 10:41:35 +01003081void Deserializer::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
3082{
3083 CHECK_LAYERS(graph, 0, layerIndex);
3084
3085 auto inputs = GetInputs(graph, layerIndex);
3086 auto outputs = GetOutputs(graph, layerIndex);
3087
3088 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
3089 auto fbDescriptor = fbLayer->descriptor();
3090
3091 armnn::StandInDescriptor descriptor;
3092 descriptor.m_NumInputs = fbDescriptor->numInputs();
3093 descriptor.m_NumOutputs = fbDescriptor->numOutputs();
3094
3095 CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
3096 CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);
3097
3098 const std::string layerName = GetLayerName(graph, layerIndex);
3099 armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());
3100
3101 for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
3102 {
3103 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
3104 layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
3105 }
3106
3107 RegisterInputSlots(graph, layerIndex, layer);
3108 RegisterOutputSlots(graph, layerIndex, layer);
3109}
3110
Derek Lamberti0028d1b2019-02-20 13:57:42 +00003111} // namespace armnnDeserializer