//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "Deserializer.hpp"

#include <armnn/Descriptors.hpp>
#include <armnn/Exceptions.hpp>
#include <armnn/TypesUtils.hpp>
#include <armnn/LstmParams.hpp>
#include <armnn/QuantizedLstmParams.hpp>

#include <armnnUtils/Permute.hpp>
#include <armnnUtils/Transpose.hpp>
#include <armnn/utility/Assert.hpp>
#include <armnn/utility/IgnoreUnused.hpp>

#include <ParserHelper.hpp>
#include <VerificationHelpers.hpp>

#include <boost/filesystem.hpp>
#include <boost/format.hpp>
#include <boost/numeric/conversion/cast.hpp>

#include <fstream>
#include <algorithm>
#include <limits>
#include <numeric>

using armnn::ParseException;
using namespace armnn;
using namespace armnnSerializer;

namespace armnnDeserializer
{

namespace
{

const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();

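// Validation helpers used throughout the parser; each throws a ParseException carrying
// the calling function and source location when the graph, layer index or tensor pointer is invalid.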
void CheckGraph(const Deserializer::GraphPtr& graph,
                unsigned int layersIndex,
                const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
}

void CheckLayers(const Deserializer::GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layerIndex >= graph->layers()[layersIndex].size()
             && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layer index. "
                              "layers:%2% layer:%3% at %4%") %
                location.m_Function %
                layersIndex %
                layerIndex %
                location.FileLine()));
    }
}

void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
                    const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with a null tensor pointer. "
                              "at %2%") %
                location.m_Function %
                location.FileLine()));
    }
}

void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
                         const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

void CheckConstTensorSize(const unsigned int constTensorSize,
                          const unsigned int tensorSize,
                          const CheckLocation& location)
{
    if (constTensorSize != tensorSize)
    {
        throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
}

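// Compares an armnn::TensorShape against the dimensions stored in the serialized graph.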
bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
{
    const unsigned int actualSize = actual.GetNumDimensions();
    if (actualSize != expected.size())
    {
        return false;
    }

    for (unsigned int i = 0u; i < actualSize; i++)
    {
        if (actual[i] != static_cast<unsigned int>(expected[i]))
        {
            return false;
        }
    }

    return true;
}

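// The constructor pre-fills m_ParserFunctions with ParseUnsupportedLayer and then
// registers a dedicated handler for every supported layer type.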
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer] = &Deserializer::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer] = &Deserializer::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &Deserializer::ParseBatchNormalization;
    m_ParserFunctions[Layer_ComparisonLayer] = &Deserializer::ParseComparison;
    m_ParserFunctions[Layer_ConcatLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer] = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthToSpaceLayer] = &Deserializer::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &Deserializer::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &Deserializer::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_ElementwiseUnaryLayer] = &Deserializer::ParseElementwiseUnary;
    m_ParserFunctions[Layer_EqualLayer] = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_FloorLayer] = &Deserializer::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &Deserializer::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer] = &Deserializer::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer] = &Deserializer::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &Deserializer::ParseL2Normalization;
    m_ParserFunctions[Layer_LogSoftmaxLayer] = &Deserializer::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer] = &Deserializer::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &Deserializer::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer] = &Deserializer::ParseMerge;
    m_ParserFunctions[Layer_MergerLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer] = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &Deserializer::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &Deserializer::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_PreluLayer] = &Deserializer::ParsePrelu;
    m_ParserFunctions[Layer_QuantizeLayer] = &Deserializer::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer] = &Deserializer::ParseQuantizedLstm;
    m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer] = &Deserializer::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer] = &Deserializer::ParseRsqrt;
    m_ParserFunctions[Layer_SliceLayer] = &Deserializer::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer] = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &Deserializer::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer] = &Deserializer::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer] = &Deserializer::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer] = &Deserializer::ParseStack;
    m_ParserFunctions[Layer_StandInLayer] = &Deserializer::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer] = &Deserializer::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &Deserializer::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer] = &Deserializer::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &Deserializer::ParseTransposeConvolution2d;
    m_ParserFunctions[Layer_TransposeLayer] = &Deserializer::ParseTranspose;
}

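// GetBaseLayer resolves the flatbuffer layer union for the given index and returns
// the LayerBase table common to all layer types.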
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_ComparisonLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_InstanceNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LogSoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StandInLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_TransposeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                boost::format("Layer type %1% not recognized") %
                layerType));
    }
}

std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
{
    auto layer = GetBaseLayer(graph, index);
    assert(layer);
    return layer->layerName()->str();
}

int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    if (layerType == Layer::Layer_InputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
    }
    else if ( layerType == Layer::Layer_OutputLayer )
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
    }
    return 0;
}

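// Helpers translating armnnSerializer enums into their armnn counterparts.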
armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
{
    switch (dataLayout)
    {
        case armnnSerializer::DataLayout::DataLayout_NHWC:
            return armnn::DataLayout::NHWC;
        case armnnSerializer::DataLayout::DataLayout_NCHW:
        default:
            return armnn::DataLayout::NCHW;
    }
}

armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        case armnnSerializer::ActivationFunction_Elu:
            return armnn::ActivationFunction::Elu;
        case armnnSerializer::ActivationFunction_HardSwish:
            return armnn::ActivationFunction::HardSwish;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}

armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
{
    switch (function)
    {
        case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
            return armnn::ArgMinMaxFunction::Max;
        case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
        default:
            return armnn::ArgMinMaxFunction::Min;
    }
}

armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
            return armnn::ComparisonOperation::Equal;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
            return armnn::ComparisonOperation::Greater;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
            return armnn::ComparisonOperation::GreaterOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
            return armnn::ComparisonOperation::Less;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
            return armnn::ComparisonOperation::LessOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
        default:
            return armnn::ComparisonOperation::NotEqual;
    }
}

armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
            return armnn::UnaryOperation::Abs;
        case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
            return armnn::UnaryOperation::Rsqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
            return armnn::UnaryOperation::Sqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
            return armnn::UnaryOperation::Exp;
        case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
            return armnn::UnaryOperation::Neg;
        default:
            throw armnn::InvalidArgumentException("Unary operation unknown");
    }
}

armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
{
    switch (method)
    {
        case armnnSerializer::ResizeMethod_NearestNeighbor:
            return armnn::ResizeMethod::NearestNeighbor;
        case armnnSerializer::ResizeMethod_Bilinear:
            return armnn::ResizeMethod::Bilinear;
        default:
            return armnn::ResizeMethod::NearestNeighbor;
    }
}

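// ToTensorInfo converts a serialized TensorInfo into an armnn::TensorInfo, handling
// both per-tensor and per-axis quantization parameters.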
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QAsymmS8:
            type = armnn::DataType::QAsymmS8;
            break;
        case DataType_QSymmS8:
            type = armnn::DataType::QSymmS8;
            break;
        case DataType_QuantisedAsymm8:
        case DataType_QAsymmU8:
            type = armnn::DataType::QAsymmU8;
            break;
        case DataType_QSymmS16:
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QSymmS16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    auto quantizationScales = tensorPtr->quantizationScales();

    if (quantizationScales)
    {
        unsigned int quantizationScalesSize = quantizationScales->size();
        std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
        unsigned int quantizationDim = tensorPtr->quantizationDim();
        armnn::TensorInfo result(size,
                                 outputDims.data(),
                                 type,
                                 scales,
                                 quantizationDim);
        return result;
    }

    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}

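// ToConstTensor wraps the serialized constant data as an armnn::ConstTensor, checking
// that the element count matches the deserialized TensorInfo.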
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}

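// GetInputs resolves each input slot of a layer to the TensorInfo published by the
// layer feeding it; GetOutputs returns the TensorInfo of each of the layer's own output slots.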
Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
                                                         unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
            (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
                                                          unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numOutputs = layer->outputSlots()->size();

    TensorRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        result[i] = layer->outputSlots()->Get(i)->tensorInfo();
    }
    return result;
}

void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(
        boost::str(
            boost::format("Layer not supported. "
                          "layerIndex: %1% "
                          "layerName: %2% / %3%") %
            layerIndex %
            layerName %
            CHECK_LOCATION().AsString()));
}

void Deserializer::ResetParser()
{
    m_Network = armnn::INetworkPtr(nullptr, nullptr);
    m_InputBindings.clear();
    m_OutputBindings.clear();
}

IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}

IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}

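// A minimal usage sketch (not part of this file; the file name is illustrative):
// read a serialized model into memory and hand it to the deserializer.
//
//     std::ifstream file("model.armnn", std::ios::binary);
//     std::vector<uint8_t> content((std::istreambuf_iterator<char>(file)),
//                                  std::istreambuf_iterator<char>());
//     auto parser = armnnDeserializer::IDeserializer::Create();
//     armnn::INetworkPtr network = parser->CreateNetworkFromBinary(content);
//
// Both overloads below reset the parser state before loading the graph.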
INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph(graph);
}

armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
    return CreateNetworkFromGraph(graph);
}

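// LoadGraphFromBinary verifies that the buffer holds a valid SerializedGraph flatbuffer
// before returning its root.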
Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
                                                  CHECK_LOCATION().AsString()));
    }
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(
            boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
                                     "flatbuffers format. size:%1% %2%") %
                       len %
                       CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}

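// CreateNetworkFromGraph drives the whole deserialization: each non input/output layer is
// dispatched to its registered parse handler, the bound input and output layers are created,
// and the recorded slots are finally connected.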
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    ARMNN_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    return std::move(m_Network);
}

BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
                                                          const std::string& name) const
{
    IgnoreUnused(layerIndex);
    for (auto inputBinding : m_InputBindings)
    {
        if (inputBinding.first == name)
        {
            return inputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No input binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
                                                           const std::string& name) const
{
    IgnoreUnused(layerIndex);
    for (auto outputBinding : m_OutputBindings)
    {
        if (outputBinding.first == name)
        {
            return outputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No output binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

unsigned int Deserializer::GetInputLayerInVector(GraphPtr graph, int targetId)
{
    for (unsigned int i = 0; i < graph->layers()->size(); i++)
    {
        auto layer = graph->layers()->Get(i);
        if (layer->layer_type() == Layer::Layer_InputLayer)
        {
            auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
            if (layerBindingId == targetId)
            {
                return i;
            }
        }
    }
    throw ParseException("Input layer with given layerBindingId not found");
}

unsigned int Deserializer::GetOutputLayerInVector(GraphPtr graph, int targetId)
{
    for (unsigned int i = 0; i < graph->layers()->size(); i++)
    {
        auto layer = graph->layers()->Get(i);
        if (layer->layer_type() == Layer::Layer_OutputLayer)
        {
            auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
            if (layerBindingId == targetId)
            {
                return i;
            }
        }
    }
    throw ParseException("Output layer with given layerBindingId not found");
}

unsigned int Deserializer::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
{
    for (unsigned int i = 0; i < graph->layers()->size(); i++)
    {
        LayerBaseRawPtr layer = GetBaseLayer(graph, i);
        if (layer->index() == targetIndex)
        {
            return i;
        }
    }
    throw ParseException("Layer with given index not found");
}

Deserializer::FeatureVersions Deserializer::GetFeatureVersions(GraphPtr graph)
{
    Deserializer::FeatureVersions versions;

    if (graph->featureVersions())
    {
        versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
    }

    return versions;
}

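// SetupInputLayers adds an armnn input layer for each serialized input id, selecting the
// lookup strategy according to the binding-id scheme, and records its binding information
// in m_InputBindings. SetupOutputLayers mirrors this for the serialized output ids.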
void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numInputs = graph->inputIds()->size();
    m_InputBindings.clear();
    m_InputBindings.reserve(numInputs);

    for (unsigned int i = 0; i < numInputs; i++)
    {
        unsigned int inputLayerIndex = 0xFFFFFFFF;
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            const unsigned int inputId = boost::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
            inputLayerIndex = GetLayerIndexInVector(graph, inputId);
        }
        else
        {
            const int inputId = graph->inputIds()->Get(i);
            inputLayerIndex = GetInputLayerInVector(graph, inputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");

        IConnectableLayer* inputLayer =
            m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());

        const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
        inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
        RegisterOutputSlots(graph, inputLayerIndex, inputLayer);

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}

void Deserializer::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numOutputs = graph->outputIds()->size();
    m_OutputBindings.clear();
    m_OutputBindings.reserve(numOutputs);

    for (unsigned int i = 0; i < numOutputs; i++)
    {
        unsigned int outputLayerIndex = 0xFFFFFFFF;
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            const unsigned int outputId = boost::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
            outputLayerIndex = GetLayerIndexInVector(graph, outputId);
        }
        else
        {
            const int outputId = graph->outputIds()->Get(i);
            outputLayerIndex = GetOutputLayerInVector(graph, outputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Output has no name.");

        IConnectableLayer* outputLayer =
            m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());

        RegisterInputSlots(graph, outputLayerIndex, outputLayer);

        unsigned int sourceLayerIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
        LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(sourceBaseLayer->outputSlots()->Get(0)->tensorInfo());

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}

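// RegisterOutputSlots/RegisterInputSlots check that the armnn layer exposes the same number
// of slots as the serialized layer and record each slot in m_GraphConnections for the
// connection pass in CreateNetworkFromGraph.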
void Deserializer::RegisterOutputSlots(GraphPtr graph,
                                       uint32_t layerIndex,
                                       IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    ARMNN_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
    if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index: %3% %4%") %
                       baseLayer->outputSlots()->size() %
                       layer->GetNumOutputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
    {
        const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
        armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
        // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
        RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
    }
}

void Deserializer::RegisterInputSlots(GraphPtr graph,
                                      uint32_t layerIndex,
                                      armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    ARMNN_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
    if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index:%3% %4%") %
                       baseLayer->inputSlots()->size() %
                       layer->GetNumInputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
    {
        auto fbInputSlot = baseLayer->inputSlots()->Get(i);
        auto fbConnection = fbInputSlot->connection();
        armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
        RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
    }
}

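// The two helpers below populate m_GraphConnections: input slots are grouped by source layer
// and output slot index, while registering the same output slot index twice is an error.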
void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
                                                 uint32_t outputSlotIndex,
                                                 armnn::IInputSlot* inputSlot)
{
    if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
    {
        m_GraphConnections[sourceLayerIndex] = Connections();
    }

    Connections& connections = m_GraphConnections[sourceLayerIndex];
    if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
    {
        connections.inputSlots[outputSlotIndex] = {inputSlot};
    }
    else
    {
        connections.inputSlots[outputSlotIndex].push_back(inputSlot);
    }
}

void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
                                                  uint32_t outputSlotIndex,
                                                  armnn::IOutputSlot* outputSlot)
{
    if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
    {
        m_GraphConnections[sourceLayerIndex] = Connections();
    }

    Connections& connections = m_GraphConnections[sourceLayerIndex];
    if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
    {
        throw ParseException("Same output slot index processed twice");
    }

    connections.outputSlots[outputSlotIndex] = outputSlot;
}

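// Per-layer parse handlers. Each one validates the slot counts, rebuilds the layer's
// descriptor from the flatbuffer where one exists, adds the layer to m_Network and
// registers its input and output slots.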
void Deserializer::ParseAbs(armnnDeserializer::Deserializer::GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
    IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::ActivationDescriptor descriptor;
    descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
    descriptor.m_A = serializerDescriptor->a();
    descriptor.m_B = serializerDescriptor->b();

    IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
                                                             layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

Derek Lamberti8ddae332019-02-21 16:29:43 +00001080void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001081{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001082 CHECK_LAYERS(graph, 0, layerIndex);
1083 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001084 CHECK_LOCATION();
1085 CHECK_VALID_SIZE(inputs.size(), 2);
1086
Derek Lamberti8ddae332019-02-21 16:29:43 +00001087 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001088 CHECK_VALID_SIZE(outputs.size(), 1);
1089
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001090 auto layerName = GetLayerName(graph, layerIndex);
1091 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001092
1093 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1094 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1095
Derek Lamberti8ddae332019-02-21 16:29:43 +00001096 RegisterInputSlots(graph, layerIndex, layer);
1097 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001098}
1099
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001100void Deserializer::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
1101{
1102 CHECK_LAYERS(graph, 0, layerIndex);
1103 auto inputs = GetInputs(graph, layerIndex);
1104 CHECK_LOCATION();
1105 CHECK_VALID_SIZE(inputs.size(), 1);
1106
1107 auto outputs = GetOutputs(graph, layerIndex);
1108 CHECK_VALID_SIZE(outputs.size(), 1);
1109
1110 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1111 auto serializerDescriptor = serializerLayer->descriptor();
1112
1113 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001114 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001115 descriptor.m_Axis = serializerDescriptor->axis();
1116 auto layerName = GetLayerName(graph, layerIndex);
1117 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1118
1119 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1120 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1121
1122 RegisterInputSlots(graph, layerIndex, layer);
1123 RegisterOutputSlots(graph, layerIndex, layer);
1124}
1125
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001126void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
1127{
1128 CHECK_LAYERS(graph, 0, layerIndex);
1129
1130 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1131 CHECK_VALID_SIZE(inputs.size(), 1);
1132
1133 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1134 CHECK_VALID_SIZE(outputs.size(), 1);
1135
1136 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1137 auto flatBufferCrops = flatBufferDescriptor->crops();
1138 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1139
1140 if (flatBufferCrops->Length() % 2 != 0)
1141 {
1142 throw ParseException(boost::str(
1143 boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1144 }
1145
1146 std::vector<std::pair<unsigned int, unsigned int>> crops;
1147 crops.reserve(flatBufferCrops->Length() / 2);
1148 for (unsigned int i = 0; i + 1 < flatBufferCrops->Length(); i += 2)
1149 {
1150 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1151 }
1152
1153 armnn::BatchToSpaceNdDescriptor descriptor;
1154 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1155 descriptor.m_BlockShape =
1156 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1157 descriptor.m_Crops = crops;
1158
1159 auto layerName = GetLayerName(graph, layerIndex);
1160 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1161
1162 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1163 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1164
1165 RegisterInputSlots(graph, layerIndex, layer);
1166 RegisterOutputSlots(graph, layerIndex, layer);
1167}
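
// Example of the crops layout read above: a flattened crops vector such as {0, 0, 2, 2}
// is consumed pairwise into {{0, 0}, {2, 2}}, i.e. one (begin, end) crop per spatial
// dimension, which is the form descriptor.m_Crops expects.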
1168
ruoyan018e7fa232019-02-28 15:09:07 +00001169void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
1170{
1171 CHECK_LAYERS(graph, 0, layerIndex);
1172
1173 auto inputs = GetInputs(graph, layerIndex);
1174 CHECK_VALID_SIZE(inputs.size(), 1);
1175
1176 auto outputs = GetOutputs(graph, layerIndex);
1177 CHECK_VALID_SIZE(outputs.size(), 1);
1178 auto outputInfo = ToTensorInfo(outputs[0]);
1179
ruoyan015c7ab052019-03-04 14:48:02 +00001180 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001181
1182 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1183 auto serializerDescriptor = serializerLayer->descriptor();
1184
1185 armnn::BatchNormalizationDescriptor descriptor;
1186 descriptor.m_Eps = serializerDescriptor->eps();
1187 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1188
1189 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1190 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1191 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1192 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1193
1194 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1195 mean,
1196 variance,
1197 beta,
1198 gamma,
1199 layerName.c_str());
1200 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1201
1202 RegisterInputSlots(graph, layerIndex, layer);
1203 RegisterOutputSlots(graph, layerIndex, layer);
1204}
1205
Conor Kennedy76277882019-02-26 08:29:54 +00001206void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
1207{
1208 CHECK_LAYERS(graph, 0, layerIndex);
1209 CHECK_LOCATION();
1210
1211 auto outputs = GetOutputs(graph, layerIndex);
1212 CHECK_VALID_SIZE(outputs.size(), 1);
1213
1214 auto layerName = GetLayerName(graph, layerIndex);
1215
1216 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
1217 auto serializerInput = serializerLayer->input();
1218
1219 armnn::ConstTensor input = ToConstTensor(serializerInput);
1220
1221 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
1222
1223 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1224 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1225
1226 RegisterOutputSlots(graph, layerIndex, layer);
1227}
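
// A constant layer has no inputs, so only its output slots are registered here; the
// tensor data itself travels inside the ConstTensor produced by ToConstTensor.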
1228
Derek Lamberti8ddae332019-02-21 16:29:43 +00001229void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001230{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001231 CHECK_LAYERS(graph, 0, layerIndex);
1232 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001233 CHECK_LOCATION();
1234 CHECK_VALID_SIZE(inputs.size(), 1);
1235
Derek Lamberti8ddae332019-02-21 16:29:43 +00001236 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001237 CHECK_VALID_SIZE(outputs.size(), 1);
1238
Derek Lamberti8ddae332019-02-21 16:29:43 +00001239 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001240 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001241 auto serializerDescriptor = serializerLayer->descriptor();
1242
1243 armnn::Convolution2dDescriptor descriptor;
1244 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1245 descriptor.m_PadRight = serializerDescriptor->padRight();
1246 descriptor.m_PadTop = serializerDescriptor->padTop();
1247 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1248 descriptor.m_StrideX = serializerDescriptor->strideX();
1249 descriptor.m_StrideY = serializerDescriptor->strideY();
Matthew Benthamacad04e2019-05-13 10:02:45 +01001250 descriptor.m_DilationX = serializerDescriptor->dilationX();
1251 descriptor.m_DilationY = serializerDescriptor->dilationY();
Mike Kellya0766c32019-02-19 17:22:07 +00001252 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
1253 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1254
1255 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1256 armnn::ConstTensor biases;
1257
Matteo Martincighfc598e12019-05-14 10:36:13 +01001258 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001259 if (descriptor.m_BiasEnabled)
1260 {
1261 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001262 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001263 }
1264 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1265 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001266 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001267 layerName.c_str());
1268 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1269 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1270
Derek Lamberti8ddae332019-02-21 16:29:43 +00001271 RegisterInputSlots(graph, layerIndex, layer);
1272 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001273}
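
// The optional-bias pattern above (armnn::EmptyOptional unless m_BiasEnabled is set) is
// the same one used by ParseDepthwiseConvolution2d and ParseFullyConnected below, where
// the bias is passed to the Add*Layer call as an armnn::Optional<armnn::ConstTensor>.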
1274
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001275void Deserializer::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
1276{
1277 CHECK_LAYERS(graph, 0, layerIndex);
1278
1279 auto inputs = GetInputs(graph, layerIndex);
1280 CHECK_VALID_SIZE(inputs.size(), 1);
1281
1282 auto outputs = GetOutputs(graph, layerIndex);
1283 CHECK_VALID_SIZE(outputs.size(), 1);
1284
1285 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1286
1287 armnn::DepthToSpaceDescriptor descriptor;
1288 descriptor.m_BlockSize = fbDescriptor->blockSize();
1289 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1290
1291 auto layerName = GetLayerName(graph, layerIndex);
1292 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1293
1294 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1295 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1296
1297 RegisterInputSlots(graph, layerIndex, layer);
1298 RegisterOutputSlots(graph, layerIndex, layer);
1299}
1300
Derek Lamberti8ddae332019-02-21 16:29:43 +00001301void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001302{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001303 CHECK_LAYERS(graph, 0, layerIndex);
1304 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001305 CHECK_LOCATION();
1306 CHECK_VALID_SIZE(inputs.size(), 1);
1307
Derek Lamberti8ddae332019-02-21 16:29:43 +00001308 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001309 CHECK_VALID_SIZE(outputs.size(), 1);
1310
Derek Lamberti8ddae332019-02-21 16:29:43 +00001311 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001312 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001313 auto serializerDescriptor = serializerLayer->descriptor();
1314
1315 armnn::DepthwiseConvolution2dDescriptor descriptor;
1316 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1317 descriptor.m_PadRight = serializerDescriptor->padRight();
1318 descriptor.m_PadTop = serializerDescriptor->padTop();
1319 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1320 descriptor.m_StrideX = serializerDescriptor->strideX();
Aron Virginas-Tar5e1b0cf2019-06-21 14:20:11 +01001321 descriptor.m_StrideY = serializerDescriptor->strideY();
1322 descriptor.m_DilationX = serializerDescriptor->dilationX();
1323 descriptor.m_DilationY = serializerDescriptor->dilationY();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001324 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
1325 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1326
1327 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1328 armnn::ConstTensor biases;
1329
Matteo Martincighfc598e12019-05-14 10:36:13 +01001330 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001331 if (descriptor.m_BiasEnabled)
1332 {
1333 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001334 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001335 }
1336 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1337 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001338 optionalBiases,
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001339 layerName.c_str());
1340
1341 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1342 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1343
Derek Lamberti8ddae332019-02-21 16:29:43 +00001344 RegisterInputSlots(graph, layerIndex, layer);
1345 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001346}
1347
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001348void Deserializer::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
1349{
1350 CHECK_LAYERS(graph, 0, layerIndex);
1351 auto inputs = GetInputs(graph, layerIndex);
1352 CHECK_LOCATION();
1353 CHECK_VALID_SIZE(inputs.size(), 2);
1354
1355 auto outputs = GetOutputs(graph, layerIndex);
1356 CHECK_VALID_SIZE(outputs.size(), 4);
1357
1358 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1359 auto layerName = GetLayerName(graph, layerIndex);
1360 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1361
1362 armnn::DetectionPostProcessDescriptor descriptor;
1363 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1364 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1365 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1366 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1367 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1368 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1369 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1370 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1371 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1372 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1373 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1374
1375 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1376
1377 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1378 anchors,
1379 layerName.c_str());
1380
1381 for (unsigned int i = 0; i < 4; i++)
1382 {
1383 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1384 }
1385
1386 RegisterInputSlots(graph, layerIndex, layer);
1387 RegisterOutputSlots(graph, layerIndex, layer);
1388}
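
// The four outputs registered above are, in the usual TfLite-style DetectionPostProcess
// convention, the detection boxes, classes, scores and the number of valid detections;
// the serialized graph is assumed to follow that ordering.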
1389
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001390void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
1391{
1392 CHECK_LAYERS(graph, 0, layerIndex);
1393 auto inputs = GetInputs(graph, layerIndex);
1394 CHECK_LOCATION();
1395 CHECK_VALID_SIZE(inputs.size(), 2);
1396
1397 auto outputs = GetOutputs(graph, layerIndex);
1398 CHECK_VALID_SIZE(outputs.size(), 1);
1399
1400 auto layerName = GetLayerName(graph, layerIndex);
1401 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1402
1403 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1404 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1405
1406 RegisterInputSlots(graph, layerIndex, layer);
1407 RegisterOutputSlots(graph, layerIndex, layer);
1408}
1409
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001410void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
1411{
1412 CHECK_LAYERS(graph, 0, layerIndex);
1413 auto inputs = GetInputs(graph, layerIndex);
1414 CHECK_LOCATION();
1415 CHECK_VALID_SIZE(inputs.size(), 2);
1416
1417 auto outputs = GetOutputs(graph, layerIndex);
1418 CHECK_VALID_SIZE(outputs.size(), 1);
1419
1420 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001421 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1422 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001423
1424 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1425 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1426
1427 RegisterInputSlots(graph, layerIndex, layer);
1428 RegisterOutputSlots(graph, layerIndex, layer);
1429}
1430
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001431void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
1432{
1433 CHECK_LAYERS(graph, 0, layerIndex);
1434 auto inputs = GetInputs(graph, layerIndex);
1435 CHECK_LOCATION();
1436 CHECK_VALID_SIZE(inputs.size(), 2);
1437
1438 auto outputs = GetOutputs(graph, layerIndex);
1439 CHECK_VALID_SIZE(outputs.size(), 1);
1440
1441 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001442 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1443 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001444
1445 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1446 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1447
1448 RegisterInputSlots(graph, layerIndex, layer);
1449 RegisterOutputSlots(graph, layerIndex, layer);
1450}
1451
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001452void Deserializer::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
1453{
1454 CHECK_LAYERS(graph, 0, layerIndex);
1455
1456 auto inputs = GetInputs(graph, layerIndex);
1457 CHECK_VALID_SIZE(inputs.size(), 1);
1458
1459 auto outputs = GetOutputs(graph, layerIndex);
1460 CHECK_VALID_SIZE(outputs.size(), 1);
1461
1462 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1463 auto fbDescriptor = fbLayer->descriptor();
1464
1465 armnn::InstanceNormalizationDescriptor descriptor;
1466 descriptor.m_Gamma = fbDescriptor->gamma();
1467 descriptor.m_Beta = fbDescriptor->beta();
1468 descriptor.m_Eps = fbDescriptor->eps();
1469 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1470
1471 const std::string layerName = GetLayerName(graph, layerIndex);
1472 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1473
1474 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1475 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1476
1477 RegisterInputSlots(graph, layerIndex, layer);
1478 RegisterOutputSlots(graph, layerIndex, layer);
1479}
1480
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001481void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
1482{
1483 CHECK_LAYERS(graph, 0, layerIndex);
1484
1485 auto inputs = GetInputs(graph, layerIndex);
1486 CHECK_VALID_SIZE(inputs.size(), 1);
1487
1488 auto outputs = GetOutputs(graph, layerIndex);
1489 CHECK_VALID_SIZE(outputs.size(), 1);
1490 auto outputInfo = ToTensorInfo(outputs[0]);
1491
1492 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1493 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1494
1495 auto layerName = GetLayerName(graph, layerIndex);
1496 armnn::L2NormalizationDescriptor descriptor;
1497 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001498 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001499
1500 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1501 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1502
1503 RegisterInputSlots(graph, layerIndex, layer);
1504 RegisterOutputSlots(graph, layerIndex, layer);
1505}
1506
Sadik Armagan26257852019-10-14 13:00:47 +01001507void Deserializer::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
1508{
1509 CHECK_LAYERS(graph, 0, layerIndex);
1510
1511 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1512 CHECK_VALID_SIZE(inputs.size(), 1);
1513
1514 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1515 CHECK_VALID_SIZE(outputs.size(), 1);
1516
1517 armnn::LogSoftmaxDescriptor descriptor;
1518 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1519 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1520 auto layerName = GetLayerName(graph, layerIndex);
1521
1522 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1523
1524 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1525 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1526
1527 RegisterInputSlots(graph, layerIndex, layer);
1528 RegisterOutputSlots(graph, layerIndex, layer);
1529}
1530
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001531void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
1532{
1533 CHECK_LAYERS(graph, 0, layerIndex);
1534 auto inputs = GetInputs(graph, layerIndex);
1535 CHECK_LOCATION();
1536 CHECK_VALID_SIZE(inputs.size(), 2);
1537
1538 auto outputs = GetOutputs(graph, layerIndex);
1539 CHECK_VALID_SIZE(outputs.size(), 1);
1540
1541 auto layerName = GetLayerName(graph, layerIndex);
1542 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1543
1544 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1545 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1546
1547 RegisterInputSlots(graph, layerIndex, layer);
1548 RegisterOutputSlots(graph, layerIndex, layer);
1549}
1550
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001551void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
1552{
1553 CHECK_LAYERS(graph, 0, layerIndex);
1554 auto inputs = GetInputs(graph, layerIndex);
1555 CHECK_LOCATION();
1556 CHECK_VALID_SIZE(inputs.size(), 2);
1557
1558 auto outputs = GetOutputs(graph, layerIndex);
1559 CHECK_VALID_SIZE(outputs.size(), 1);
1560
1561 auto layerName = GetLayerName(graph, layerIndex);
1562 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1563
1564 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1565 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1566
1567 RegisterInputSlots(graph, layerIndex, layer);
1568 RegisterOutputSlots(graph, layerIndex, layer);
1569}
1570
Jim Flynne242f2d2019-05-22 14:24:13 +01001571const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1572 unsigned int layerIndex)
1573{
1574 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1575
1576 switch (layerType)
1577 {
1578 case Layer::Layer_ConcatLayer:
1579 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1580 case Layer::Layer_MergerLayer:
1581 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1582 default:
1583 throw armnn::Exception("unknown layer type, should be concat or merger");
1584 }
1585}
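
// MergerLayer is accepted here alongside ConcatLayer because older serialized graphs
// used the (since deprecated) Merger name for the same operation; both carry an
// OriginsDescriptor, so deserialization is otherwise identical.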
1586
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01001587void Deserializer::ParseComparison(GraphPtr graph, unsigned int layerIndex)
1588{
1589 CHECK_LAYERS(graph, 0, layerIndex);
1590 CHECK_LOCATION();
1591
1592 auto inputs = GetInputs(graph, layerIndex);
1593 CHECK_VALID_SIZE(inputs.size(), 2);
1594
1595 auto outputs = GetOutputs(graph, layerIndex);
1596 CHECK_VALID_SIZE(outputs.size(), 1);
1597
1598 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
1599 auto fbDescriptor = fbLayer->descriptor();
1600
1601 armnn::ComparisonDescriptor descriptor;
1602 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
1603
1604 const std::string& layerName = GetLayerName(graph, layerIndex);
1605 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1606
1607 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1608 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1609
1610 RegisterInputSlots(graph, layerIndex, layer);
1611 RegisterOutputSlots(graph, layerIndex, layer);
1612}
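
// ParseEqual and ParseGreater above hard-code the ComparisonOperation, whereas this
// handler reads it from the serialized descriptor; keeping all three presumably
// preserves compatibility with graphs serialized before the unified Comparison layer
// existed.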
1613
josh minor4a3c6102020-01-06 16:40:46 -06001614void Deserializer::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
1615{
1616 CHECK_LAYERS(graph, 0, layerIndex);
1617 CHECK_LOCATION();
1618
1619 auto inputs = GetInputs(graph, layerIndex);
1620 CHECK_VALID_SIZE(inputs.size(), 1);
1621
1622 auto outputs = GetOutputs(graph, layerIndex);
1623 CHECK_VALID_SIZE(outputs.size(), 1);
1624
1625 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
1626 auto fbDescriptor = fbLayer->descriptor();
1627
1628 armnn::ElementwiseUnaryDescriptor descriptor;
1629 descriptor.m_Operation = ToUnaryOperation(fbDescriptor->operation());
1630
1631 const std::string& layerName = GetLayerName(graph, layerIndex);
1632 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
1633
1634 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1635 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1636
1637 RegisterInputSlots(graph, layerIndex, layer);
1638 RegisterOutputSlots(graph, layerIndex, layer);
1639}
1640
Jim Flynn906f9462019-05-10 13:55:21 +01001641void Deserializer::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001642{
1643 CHECK_LAYERS(graph, 0, layerIndex);
1644 CHECK_LOCATION();
1645
1646 auto outputs = GetOutputs(graph, layerIndex);
1647 CHECK_VALID_SIZE(outputs.size(), 1);
1648
Jim Flynnac25a1b2019-02-28 10:40:49 +00001649 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001650 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1651 unsigned int numViews = originsDescriptor->numViews();
1652 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001653
1654 // Now check that the number of inputs matches the number of views
1655 auto inputs = GetInputs(graph, layerIndex);
1656 CHECK_VALID_SIZE(inputs.size(), numViews);
1657
1658 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001659 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001660 for (unsigned int v = 0; v < numViews; ++v)
1661 {
1662 auto originPtr = originsPtr->Get(v);
1663 for (unsigned int d = 0; d < numDimensions; ++d)
1664 {
1665 uint32_t value = originPtr->data()->Get(d);
1666 descriptor.SetViewOriginCoord(v, d, value);
1667 }
1668 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001669 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001670
Jim Flynn906f9462019-05-10 13:55:21 +01001671 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001672 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1673 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1674
1675 RegisterInputSlots(graph, layerIndex, layer);
1676 RegisterOutputSlots(graph, layerIndex, layer);
1677}
1678
Derek Lamberti8ddae332019-02-21 16:29:43 +00001679void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001680{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001681 CHECK_LAYERS(graph, 0, layerIndex);
1682 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001683 CHECK_LOCATION();
1684 CHECK_VALID_SIZE(inputs.size(), 2);
1685
Derek Lamberti8ddae332019-02-21 16:29:43 +00001686 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001687 CHECK_VALID_SIZE(outputs.size(), 1);
1688
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001689 auto layerName = GetLayerName(graph, layerIndex);
1690 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001691
1692 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1693 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1694
Derek Lamberti8ddae332019-02-21 16:29:43 +00001695 RegisterInputSlots(graph, layerIndex, layer);
1696 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001697}
1698
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001699void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
1700{
1701 CHECK_LAYERS(graph, 0, layerIndex);
1702 CHECK_LOCATION();
1703
1704 auto inputs = GetInputs(graph, layerIndex);
1705 CHECK_VALID_SIZE(inputs.size(), 1);
1706
1707 auto outputs = GetOutputs(graph, layerIndex);
1708 CHECK_VALID_SIZE(outputs.size(), 1);
1709
1710 auto layerName = GetLayerName(graph, layerIndex);
1711
1712 armnn::IConnectableLayer* layer;
1713
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001714 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001715
1716 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1717 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1718
1719 RegisterInputSlots(graph, layerIndex, layer);
1720 RegisterOutputSlots(graph, layerIndex, layer);
1721}
1722
Derek Lamberti8ddae332019-02-21 16:29:43 +00001723void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001724{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001725 CHECK_LAYERS(graph, 0, layerIndex);
1726 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001727 CHECK_LOCATION();
1728 CHECK_VALID_SIZE(inputs.size(), 1);
1729
Derek Lamberti8ddae332019-02-21 16:29:43 +00001730 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001731 CHECK_VALID_SIZE(outputs.size(), 1);
1732
Derek Lamberti8ddae332019-02-21 16:29:43 +00001733 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001734 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001735 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1736
1737 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1738 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1739 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1740
1741 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1742
1743 armnn::IConnectableLayer* layer;
Matteo Martincighfc598e12019-05-14 10:36:13 +01001744 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001745 if (flatBufferDescriptor->biasEnabled())
1746 {
1747 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001748 optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001749 }
Matteo Martincighfc598e12019-05-14 10:36:13 +01001750 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1751 weightsTensor,
1752 optionalBiases,
1753 layerName.c_str());
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001754
1755 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1756 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1757
Derek Lamberti8ddae332019-02-21 16:29:43 +00001758 RegisterInputSlots(graph, layerIndex, layer);
1759 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001760}
1761
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001762void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
1763{
1764 CHECK_LAYERS(graph, 0, layerIndex);
1765
1766 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1767 CHECK_VALID_SIZE(inputs.size(), 1);
1768
1769 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1770 CHECK_VALID_SIZE(outputs.size(), 1);
1771
1772 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1773 auto flatBufferPadList = flatBufferDescriptor->padList();
David Monahan34757812019-06-19 11:47:21 +01001774 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001775
1776 if (flatBufferPadList->Length() % 2 != 0)
1777 {
1778 throw ParseException(boost::str(
1779 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1780 }
1781
1782 std::vector<std::pair<unsigned int, unsigned int>> padList;
1783 padList.reserve(flatBufferPadList->Length() / 2);
1784 for (unsigned int i = 0; i + 1 < flatBufferPadList->Length(); i += 2)
1785 {
1786 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1787 }
1788
David Monahan34757812019-06-19 11:47:21 +01001789 armnn::PadDescriptor descriptor(padList, padValue);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001790
1791 auto layerName = GetLayerName(graph, layerIndex);
1792 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1793
1794 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1795 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1796
1797 RegisterInputSlots(graph, layerIndex, layer);
1798 RegisterOutputSlots(graph, layerIndex, layer);
1799}
1800
Derek Lamberti8ddae332019-02-21 16:29:43 +00001801void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001802{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001803 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001804
1805 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001806 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001807
Derek Lamberti8ddae332019-02-21 16:29:43 +00001808 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001809 CHECK_VALID_SIZE(inputs.size(), 1);
1810
Derek Lamberti8ddae332019-02-21 16:29:43 +00001811 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001812 CHECK_VALID_SIZE(outputs.size(), 1);
1813 auto outputInfo = ToTensorInfo(outputs[0]);
1814
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001815 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001816 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1817
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001818 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001819 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1820
Derek Lamberti8ddae332019-02-21 16:29:43 +00001821 RegisterInputSlots(graph, layerIndex, layer);
1822 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001823}
1824
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001825armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001826 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001827{
Jan Eilers8eb25602020-03-09 12:13:48 +00001828 IgnoreUnused(layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001829 armnn::Pooling2dDescriptor desc;
1830
1831 switch (pooling2dDesc->poolType())
1832 {
1833 case PoolingAlgorithm_Average:
1834 {
1835 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001836 break;
1837 }
1838 case PoolingAlgorithm_Max:
1839 {
1840 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001841 break;
1842 }
1843 default:
1844 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001845 ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001846 }
1847 }
1848
1849 switch (pooling2dDesc->outputShapeRounding())
1850 {
1851 case OutputShapeRounding_Floor:
1852 {
1853 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1854 break;
1855 }
1856 case OutputShapeRounding_Ceiling:
1857 {
1858 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
1859 break;
1860 }
1861 default:
1862 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001863 ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001864 }
1865 }
1866
1867 switch (pooling2dDesc->paddingMethod())
1868 {
1869 case PaddingMethod_Exclude:
1870 {
1871 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1872 break;
1873 }
1874 case PaddingMethod_IgnoreValue:
1875 {
1876 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
1877 break;
1878 }
1879 default:
1880 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001881 ARMNN_ASSERT_MSG(false, "Unsupported padding method");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001882 }
1883 }
1884
1885 switch (pooling2dDesc->dataLayout())
1886 {
1887 case DataLayout_NCHW:
1888 {
1889 desc.m_DataLayout = armnn::DataLayout::NCHW;
1890 break;
1891 }
1892 case DataLayout_NHWC:
1893 {
1894 desc.m_DataLayout = armnn::DataLayout::NHWC;
1895 break;
1896 }
1897 default:
1898 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001899 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001900 }
1901 }
1902
1903 desc.m_PadRight = pooling2dDesc->padRight();
1904 desc.m_PadLeft = pooling2dDesc->padLeft();
1905 desc.m_PadBottom = pooling2dDesc->padBottom();
1906 desc.m_PadTop = pooling2dDesc->padTop();
1907 desc.m_StrideX = pooling2dDesc->strideX();
1908 desc.m_StrideY = pooling2dDesc->strideY();
1909 desc.m_PoolWidth = pooling2dDesc->poolWidth();
1910 desc.m_PoolHeight = pooling2dDesc->poolHeight();
1911
1912 return desc;
1913}
1914
Derek Lamberti8ddae332019-02-21 16:29:43 +00001915void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001916{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001917 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001918
Derek Lamberti8ddae332019-02-21 16:29:43 +00001919 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001920 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001921 CHECK_VALID_SIZE(inputs.size(), 1);
1922
Derek Lamberti8ddae332019-02-21 16:29:43 +00001923 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001924 CHECK_VALID_SIZE(outputs.size(), 1);
1925 auto outputInfo = ToTensorInfo(outputs[0]);
1926
1927 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001928 auto layerName = GetLayerName(graph, layerIndex);
1929 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001930 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1931
Derek Lamberti8ddae332019-02-21 16:29:43 +00001932 RegisterInputSlots(graph, layerIndex, layer);
1933 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001934}
1935
Derek Lamberti87acb272019-03-27 16:51:31 +00001936void Deserializer::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
1937{
1938 CHECK_LAYERS(graph, 0, layerIndex);
1939
1940 auto inputs = GetInputs(graph, layerIndex);
1941 CHECK_VALID_SIZE(inputs.size(), 1);
1942
1943 auto outputs = GetOutputs(graph, layerIndex);
1944 CHECK_VALID_SIZE(outputs.size(), 1);
1945 auto outputInfo = ToTensorInfo(outputs[0]);
1946
1947 auto layerName = GetLayerName(graph, layerIndex);
1948 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
1949 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1950
1951 RegisterInputSlots(graph, layerIndex, layer);
1952 RegisterOutputSlots(graph, layerIndex, layer);
1953}
1954
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001955armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00001956 const std::vector<uint32_t>& targetDimsIn)
1957{
1958 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
1959 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
1960
1961 if (stretchDim != targetDimsIn.end())
1962 {
1963 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
1964 {
1965 throw ParseException(boost::str(
1966 boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
1967 }
1968
1969 auto targetNumElements =
1970 boost::numeric_cast<unsigned int>(
1971 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
1972
1973 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
1974 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
1975 }
1976
1977 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
1978
1979 armnn::TensorInfo reshapeInfo = inputTensorInfo;
1980 reshapeInfo.SetShape(outputShape);
1981
1982 return reshapeInfo;
1983}
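
// Worked example of the -1 (stretch) handling above: for an input of shape [2, 3, 4]
// (24 elements) and targetDimsIn = {4, -1}, the accumulate starts at -1, so
// targetNumElements = (-1) * 4 * (-1) = 4 and the stretched dimension becomes
// 24 / 4 = 6, giving an output shape of [4, 6].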
1984
Derek Lamberti8ddae332019-02-21 16:29:43 +00001985void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart263829c2019-02-19 15:54:14 +00001986{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001987 CHECK_LAYERS(graph, 0, layerIndex);
1988 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001989
Derek Lamberti8ddae332019-02-21 16:29:43 +00001990 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00001991 CHECK_VALID_SIZE(outputs.size(), 1);
1992
1993 armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
1994 armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);
1995
Derek Lamberti8ddae332019-02-21 16:29:43 +00001996 const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
Saoirse Stewart263829c2019-02-19 15:54:14 +00001997 std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());
1998
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001999 armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002000 const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
2001
2002 const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
2003 outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());
2004
2005 if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
2006 {
2007 std::stringstream ss;
2008 ss << "New shape defined in reshape parameters "
2009 << reshapeOutputTensorShape
2010 << " does not equal output shape "
2011 << actualOutputTensorInfo.GetShape()
2012 << ": "
2013 << CHECK_LOCATION().AsString();
2014 throw ParseException(ss.str());
2015 }
2016
2017 armnn::ReshapeDescriptor reshapeDesc;
2018 reshapeDesc.m_TargetShape = reshapeOutputTensorShape;
2019
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002020 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002021 IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
2022 layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);
2023
Derek Lamberti8ddae332019-02-21 16:29:43 +00002024 RegisterInputSlots(graph, layerIndex, layer);
2025 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002026}
2027
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002028void Deserializer::ParseResize(GraphPtr graph, unsigned int layerIndex)
2029{
2030 CHECK_LAYERS(graph, 0, layerIndex);
2031
2032 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2033 CHECK_VALID_SIZE(inputs.size(), 1);
2034
2035 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2036 CHECK_VALID_SIZE(outputs.size(), 1);
2037
2038 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2039
2040 armnn::ResizeDescriptor descriptor;
2041 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
2042 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
2043 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
2044 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2045
2046 auto layerName = GetLayerName(graph, layerIndex);
2047 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2048
2049 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2050 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2051
2052 RegisterInputSlots(graph, layerIndex, layer);
2053 RegisterOutputSlots(graph, layerIndex, layer);
2054}
2055
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002056void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
2057{
2058 CHECK_LAYERS(graph, 0, layerIndex);
2059
2060 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2061 CHECK_VALID_SIZE(inputs.size(), 1);
2062
2063 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2064 CHECK_VALID_SIZE(outputs.size(), 1);
2065
2066 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2067
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002068 armnn::ResizeDescriptor descriptor;
2069 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002070 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002071 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
2072 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002073
2074 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002075 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002076
2077 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2078 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2079
2080 RegisterInputSlots(graph, layerIndex, layer);
2081 RegisterOutputSlots(graph, layerIndex, layer);
2082}
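
// ResizeBilinear is deserialized onto the generic Resize layer with
// ResizeMethod::Bilinear, mirroring ParseResize above; only the method field differs.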
2083
Derek Lamberti8ddae332019-02-21 16:29:43 +00002084void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002085{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002086 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002087
Derek Lamberti8ddae332019-02-21 16:29:43 +00002088 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002089 CHECK_VALID_SIZE(inputs.size(), 1);
2090
Derek Lamberti8ddae332019-02-21 16:29:43 +00002091 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002092 CHECK_VALID_SIZE(outputs.size(), 1);
2093
2094 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00002095 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002096 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002097
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002098 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2099
2100 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2101 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2102
Derek Lamberti8ddae332019-02-21 16:29:43 +00002103 RegisterInputSlots(graph, layerIndex, layer);
2104 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00002105}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002106
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002107void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
2108{
2109 CHECK_LAYERS(graph, 0, layerIndex);
2110
2111 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2112 CHECK_VALID_SIZE(inputs.size(), 1);
2113
2114 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2115 CHECK_VALID_SIZE(outputs.size(), 1);
2116
2117 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2118 auto flatBufferPadList = flatBufferDescriptor->padList();
2119 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
2120
2121 if (flatBufferPadList->Length() % 2 != 0)
2122 {
2123 throw ParseException(boost::str(
2124 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
2125 }
2126
2127 std::vector<std::pair<unsigned int, unsigned int>> padList;
2128 padList.reserve(flatBufferPadList->Length() / 2);
2129 for (unsigned int i = 0; i + 1 < flatBufferPadList->Length(); i += 2)
2130 {
2131 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2132 }
2133
2134 armnn::SpaceToBatchNdDescriptor descriptor;
2135 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2136 descriptor.m_BlockShape =
2137 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2138 descriptor.m_PadList = padList;
2139
2140 auto layerName = GetLayerName(graph, layerIndex);
2141 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2142
2143 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2144 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2145
2146 RegisterInputSlots(graph, layerIndex, layer);
2147 RegisterOutputSlots(graph, layerIndex, layer);
2148}
2149
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002150void Deserializer::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
2151{
2152 CHECK_LAYERS(graph, 0, layerIndex);
2153
2154 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2155 CHECK_VALID_SIZE(inputs.size(), 1);
2156
2157 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2158 CHECK_VALID_SIZE(outputs.size(), 1);
2159
2160 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2161
2162 armnn::SpaceToDepthDescriptor descriptor;
2163 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2164 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2165
2166 auto layerName = GetLayerName(graph, layerIndex);
2167 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2168
2169 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2170 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2171
2172 RegisterInputSlots(graph, layerIndex, layer);
2173 RegisterOutputSlots(graph, layerIndex, layer);
2174}
2175
Nina Drozd57728782019-02-27 10:53:27 +00002176armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
2177 Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
2178 unsigned int layerIndex)
2179{
Jan Eilers8eb25602020-03-09 12:13:48 +00002180 IgnoreUnused(layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002181 armnn::NormalizationDescriptor desc;
2182
2183 switch (normalizationDescriptor->normChannelType())
2184 {
2185 case NormalizationAlgorithmChannel_Across:
2186 {
2187 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
2188 break;
2189 }
2190 case NormalizationAlgorithmChannel_Within:
2191 {
2192 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
2193 break;
2194 }
2195 default:
2196 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002197 ARMNN_ASSERT_MSG(false, "Unsupported normalization channel type");
Nina Drozd57728782019-02-27 10:53:27 +00002198 }
2199 }
2200
2201 switch (normalizationDescriptor->normMethodType())
2202 {
2203 case NormalizationAlgorithmMethod_LocalBrightness:
2204 {
2205 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
2206 break;
2207 }
2208 case NormalizationAlgorithmMethod_LocalContrast:
2209 {
2210 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
2211 break;
2212 }
2213 default:
2214 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002215 ARMNN_ASSERT_MSG(false, "Unsupported normalization method type");
Nina Drozd57728782019-02-27 10:53:27 +00002216 }
2217 }
2218
2219 switch (normalizationDescriptor->dataLayout())
2220 {
2221 case DataLayout_NCHW:
2222 {
2223 desc.m_DataLayout = armnn::DataLayout::NCHW;
2224 break;
2225 }
2226 case DataLayout_NHWC:
2227 {
2228 desc.m_DataLayout = armnn::DataLayout::NHWC;
2229 break;
2230 }
2231 default:
2232 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002233 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Nina Drozd57728782019-02-27 10:53:27 +00002234 }
2235 }
2236
2237 desc.m_Alpha = normalizationDescriptor->alpha();
2238 desc.m_Beta = normalizationDescriptor->beta();
2239 desc.m_K = normalizationDescriptor->k();
2240 desc.m_NormSize = normalizationDescriptor->normSize();
2241
2242 return desc;
2243}
2244
2245void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
2246{
2247 CHECK_LAYERS(graph, 0, layerIndex);
2248
2249 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2250
2251 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2252 CHECK_VALID_SIZE(inputs.size(), 1);
2253
2254 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2255 CHECK_VALID_SIZE(outputs.size(), 1);
2256
2257 auto outputInfo = ToTensorInfo(outputs[0]);
2258
2259 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2260 auto layerName = GetLayerName(graph, layerIndex);
2261
2262 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2263 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2264
2265 RegisterInputSlots(graph, layerIndex, layer);
2266 RegisterOutputSlots(graph, layerIndex, layer);
2267}
2268
void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
    IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

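// Deserializes a SliceLayer. The begin and size vectors read from the flatbuffer descriptor must have
// the same length, otherwise a ParseException is thrown.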
void Deserializer::ParseSlice(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();

    auto fbBegin = fbDescriptor->begin();
    auto fbSize = fbDescriptor->size();

    if (fbBegin->Length() != fbSize->Length())
    {
        throw ParseException(boost::str(
            boost::format("Begin and size descriptors must have the same length %1%") % CHECK_LOCATION().AsString()));
    }

    armnn::SliceDescriptor descriptor;
    descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
    descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

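// Deserializes a StridedSliceLayer. The begin, end and stride vectors must all have the same length;
// the masks and data layout are copied straight from the flatbuffer descriptor.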
void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();

    auto flatBufferBegin = flatBufferDescriptor->begin();
    auto flatBufferEnd = flatBufferDescriptor->end();
    auto flatBufferStride = flatBufferDescriptor->stride();

    if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
          flatBufferBegin->Length() == flatBufferStride->Length()))
    {
        throw ParseException(boost::str(
            boost::format("The begin, end and stride vectors must all have the same length %1%")
            % CHECK_LOCATION().AsString()));
    }

    std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
    std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
    std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());

    armnn::StridedSliceDescriptor descriptor(begin, end, stride);
    descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
    descriptor.m_EndMask = flatBufferDescriptor->endMask();
    descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
    descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
    descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseGather(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 2);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddGatherLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMean(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
    auto flatBufferAxis = flatBufferDescriptor->axis();
    auto flatBufferKeepDims = flatBufferDescriptor->keepDims();

    armnn::MeanDescriptor descriptor;
    descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
    descriptor.m_KeepDims = flatBufferKeepDims;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

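// Deserializes a SplitterLayer. The ViewsDescriptor is rebuilt view by view from the serialized view
// sizes and origins, and each view drives one output slot of the layer.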
void Deserializer::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);

    auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
    auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
    auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
    auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
    uint32_t numViews = flatBufferOriginsDescriptor->numViews();
    uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();

    // Check that numViews and numDimensions correspond to the serialized values:
    //   numViews == flatBufferViewSizes.size();
    //   for each view: numDimensions == flatBufferViewSizes[x].size();

    armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
    for (unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
    {
        for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
        {
            viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
            viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
        }
    }

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());

    // The splitter has as many outputs as it has views.
    for (unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
    {
        armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
        layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

armnn::LstmDescriptor Deserializer::GetLstmDescriptor(Deserializer::LstmDescriptorPtr lstmDescriptor)
{
    armnn::LstmDescriptor desc;

    desc.m_ActivationFunc = lstmDescriptor->activationFunc();
    desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
    desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
    desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
    desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
    desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
    desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();

    return desc;
}

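// Deserializes an LstmLayer. The mandatory weights and biases are read first; the CIFG, projection,
// peephole and layer-normalization tensors are only read when the corresponding descriptor flag is set.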
void Deserializer::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (lstmDescriptor.m_LayerNormEnabled)
    {
        if (!lstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

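// Deserializes a QuantizedLstmLayer. All twelve weight and bias tensors are read unconditionally from
// the flatbuffer input params.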
void Deserializer::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    armnn::QuantizedLstmInputParams lstmInputParams;

    armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_InputGateBias = &inputGateBias;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    const std::string layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMerge(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 2);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    const std::string layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());

    armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);

    armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

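// Deserializes a TransposeLayer. The serialized dimension mappings are wrapped in a PermutationVector
// and passed to the TransposeDescriptor.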
void Deserializer::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);
    const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));

    IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

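// Deserializes a TransposeConvolution2dLayer. Weights are always present; biases are only read when
// the descriptor reports that bias is enabled.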
void Deserializer::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::TransposeConvolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    // Weights and (optional) biases
    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::Optional<armnn::ConstTensor> optionalBiases;
    if (descriptor.m_BiasEnabled)
    {
        armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
        optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
    }

    IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
                                                                         weights,
                                                                         optionalBiases,
                                                                         layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

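// Deserializes a StackLayer. The shape of every input must match the input shape recorded in the
// descriptor; a mismatch raises a ParseException.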
void Deserializer::ParseStack(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
    unsigned int axis = flatBufferDescriptor->axis();
    unsigned int numInputs = flatBufferDescriptor->numInputs();
    CHECK_VALID_SIZE(inputs.size(), numInputs);

    auto flatBufferInputShape = flatBufferDescriptor->inputShape();
    std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
                                           flatBufferInputShape->begin() + flatBufferInputShape->size());

    TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
    armnn::StackDescriptor descriptor(axis, numInputs, inputShape);

    for (unsigned int i = 0; i < inputs.size(); ++i)
    {
        // Use a distinct name for the per-input shape to avoid shadowing the descriptor's input shape.
        armnn::TensorShape shape = ToTensorInfo(inputs[i]).GetShape();
        if (descriptor.m_InputShape != shape)
        {
            std::stringstream ss;
            ss << "Shape of input "
               << i
               << " "
               << shape
               << " does not equal defined input shape "
               << descriptor.m_InputShape
               << ": "
               << CHECK_LOCATION().AsString();
            throw ParseException(ss.str());
        }
    }

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

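// Deserializes a StandInLayer (used as a stand-in for operations ArmNN cannot represent directly);
// the descriptor only records the expected numbers of inputs and outputs.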
void Deserializer::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    auto outputs = GetOutputs(graph, layerIndex);

    auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
    auto fbDescriptor = fbLayer->descriptor();

    armnn::StandInDescriptor descriptor;
    descriptor.m_NumInputs = fbDescriptor->numInputs();
    descriptor.m_NumOutputs = fbDescriptor->numOutputs();

    CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
    CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);

    const std::string layerName = GetLayerName(graph, layerIndex);
    armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());

    for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
    {
        armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
        layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

} // namespace armnnDeserializer