//
// Copyright © 2017 Arm Ltd. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "Deserializer.hpp"

#include <armnn/Descriptors.hpp>
#include <armnn/Exceptions.hpp>
#include <armnn/TypesUtils.hpp>
#include <armnn/LstmParams.hpp>
#include <armnn/QuantizedLstmParams.hpp>

#include <armnnUtils/Permute.hpp>

#include <ParserHelper.hpp>
#include <VerificationHelpers.hpp>

#include <boost/filesystem.hpp>
#include <boost/format.hpp>
#include <boost/core/ignore_unused.hpp>
#include <boost/assert.hpp>
#include <boost/numeric/conversion/cast.hpp>
#include <boost/polymorphic_cast.hpp>

#include <fstream>
#include <algorithm>
#include <limits>
#include <numeric>

using armnn::ParseException;
using namespace armnn;
using namespace armnnSerializer;

namespace armnnDeserializer
{

namespace
{

const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();

void CheckGraph(const Deserializer::GraphPtr& graph,
                unsigned int layersIndex,
                const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
}

void CheckLayers(const Deserializer::GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with invalid (null) graph. "
                              "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layers index. "
                              "layers:%2% at %3%") %
                location.m_Function %
                layersIndex %
                location.FileLine()));
    }
    else if (layerIndex >= graph->layers()[layersIndex].size()
             && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with an invalid layer index. "
                              "layers:%2% layer:%3% at %4%") %
                location.m_Function %
                layersIndex %
                layerIndex %
                location.FileLine()));
    }
}

void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
                    const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(
            boost::str(
                boost::format("%1% was called with a null tensor pointer. "
                              "at %2%") %
                location.m_Function %
                location.FileLine()));
    }
}

void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
                         const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

void CheckConstTensorSize(const unsigned int constTensorSize,
                          const unsigned int tensorSize,
                          const CheckLocation& location)
{
    if (constTensorSize != tensorSize)
    {
        throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
                                        location.m_Function %
                                        location.FileLine()));
    }
}

#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
}

bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
{
    const unsigned int actualSize = actual.GetNumDimensions();
    if (actualSize != expected.size())
    {
        return false;
    }

    for (unsigned int i = 0u; i < actualSize; i++)
    {
        if (actual[i] != static_cast<unsigned int>(expected[i]))
        {
            return false;
        }
    }

    return true;
}

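// The constructor below builds a dispatch table indexed by the armnnSerializer
// Layer enum. Every entry defaults to ParseUnsupportedLayer and is then
// overwritten for each layer type this deserializer knows how to handle.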
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
// May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer] = &Deserializer::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer] = &Deserializer::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &Deserializer::ParseBatchNormalization;
    m_ParserFunctions[Layer_ComparisonLayer] = &Deserializer::ParseComparison;
    m_ParserFunctions[Layer_ConcatLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer] = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthToSpaceLayer] = &Deserializer::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &Deserializer::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &Deserializer::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_ElementwiseUnaryLayer] = &Deserializer::ParseElementwiseUnary;
    m_ParserFunctions[Layer_EqualLayer] = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_FloorLayer] = &Deserializer::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &Deserializer::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer] = &Deserializer::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer] = &Deserializer::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &Deserializer::ParseL2Normalization;
    m_ParserFunctions[Layer_LogSoftmaxLayer] = &Deserializer::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer] = &Deserializer::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &Deserializer::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer] = &Deserializer::ParseMerge;
    m_ParserFunctions[Layer_MergerLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer] = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &Deserializer::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &Deserializer::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_PreluLayer] = &Deserializer::ParsePrelu;
    m_ParserFunctions[Layer_QuantizeLayer] = &Deserializer::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer] = &Deserializer::ParseQuantizedLstm;
    m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer] = &Deserializer::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer] = &Deserializer::ParseRsqrt;
    m_ParserFunctions[Layer_SliceLayer] = &Deserializer::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer] = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &Deserializer::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer] = &Deserializer::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer] = &Deserializer::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer] = &Deserializer::ParseStack;
    m_ParserFunctions[Layer_StandInLayer] = &Deserializer::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer] = &Deserializer::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &Deserializer::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer] = &Deserializer::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &Deserializer::ParseTransposeConvolution2d;
}

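// Resolves the LayerBase table of the layer at layerIndex by switching on the
// flatbuffers union type. Input and output layers store their LayerBase one
// level deeper (base()->base()), hence the special cases below.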
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_ComparisonLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_ElementwiseUnaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_InstanceNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LogSoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StandInLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                boost::format("Layer type %1% not recognized") %
                layerType));
    }
}

std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
{
    auto layer = GetBaseLayer(graph, index);
    assert(layer);
    return layer->layerName()->str();
}

int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    if (layerType == Layer::Layer_InputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
    }
    else if (layerType == Layer::Layer_OutputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
    }
    return 0;
}

armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
{
    switch (dataLayout)
    {
        case armnnSerializer::DataLayout::DataLayout_NHWC:
            return armnn::DataLayout::NHWC;
        case armnnSerializer::DataLayout::DataLayout_NCHW:
        default:
            return armnn::DataLayout::NCHW;
    }
}

armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}

armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
{
    switch (function)
    {
        case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
            return armnn::ArgMinMaxFunction::Max;
        case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
        default:
            return armnn::ArgMinMaxFunction::Min;
    }
}

armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
            return armnn::ComparisonOperation::Equal;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
            return armnn::ComparisonOperation::Greater;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
            return armnn::ComparisonOperation::GreaterOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
            return armnn::ComparisonOperation::Less;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
            return armnn::ComparisonOperation::LessOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
        default:
            return armnn::ComparisonOperation::NotEqual;
    }
}

armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
            return armnn::UnaryOperation::Abs;
        case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
            return armnn::UnaryOperation::Rsqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
            return armnn::UnaryOperation::Sqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
            return armnn::UnaryOperation::Exp;
        case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
            return armnn::UnaryOperation::Neg;
        default:
            throw armnn::InvalidArgumentException("Unary operation unknown");
    }
}

armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
{
    switch (method)
    {
        case armnnSerializer::ResizeMethod_NearestNeighbor:
            return armnn::ResizeMethod::NearestNeighbor;
        case armnnSerializer::ResizeMethod_Bilinear:
            return armnn::ResizeMethod::Bilinear;
        default:
            return armnn::ResizeMethod::NearestNeighbor;
    }
}

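// Converts a serialized TensorInfo table into an armnn::TensorInfo, accepting both
// the current and the deprecated (Quantised*) data type enum values and carrying
// the per-tensor quantization scale and offset across.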
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QuantisedAsymm8:
        case DataType_QAsymmU8:
            type = armnn::DataType::QAsymmU8;
            break;
        case DataType_QSymmS16:
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QSymmS16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}

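// Wraps the raw payload of a serialized ConstTensor in an armnn::ConstTensor,
// after checking that the element count of the payload matches the TensorInfo.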
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}

Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
                                                         unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
            (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
                                                          unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numOutputs = layer->outputSlots()->size();

    TensorRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        result[i] = layer->outputSlots()->Get(i)->tensorInfo();
    }
    return result;
}

void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(
        boost::str(
            boost::format("Layer not supported. "
                          "layerIndex: %1% "
                          "layerName: %2% / %3%") %
            layerIndex %
            layerName %
            CHECK_LOCATION().AsString()));
}

void Deserializer::ResetParser()
{
    m_Network = armnn::INetworkPtr(nullptr, nullptr);
    m_InputBindings.clear();
    m_OutputBindings.clear();
}

IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}

IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}

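// Typical client usage, shown here as a minimal sketch (the model path and the
// binding name below are hypothetical, not taken from this file):
//
//     armnnDeserializer::IDeserializerPtr parser = armnnDeserializer::IDeserializer::Create();
//     std::ifstream file("model.armnn", std::ios::binary);
//     armnn::INetworkPtr network = parser->CreateNetworkFromBinary(file);
//     armnnDeserializer::BindingPointInfo inputInfo =
//         parser->GetNetworkInputBindingInfo(0, "input");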
INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph(graph);
}

armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
    return CreateNetworkFromGraph(graph);
}

Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
                                                  CHECK_LOCATION().AsString()));
    }
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(
            boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
                                     "flatbuffers format. size:%1% %2%") %
                       len %
                       CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}

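// Builds the armnn::INetwork from a verified SerializedGraph: every layer other than
// the input and output layers is handed to its registered parse function, the bound
// input and output layers are then created, and finally the recorded output-to-input
// slot connections are wired up.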
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    BOOST_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    return std::move(m_Network);
}

BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
                                                          const std::string& name) const
{
    boost::ignore_unused(layerIndex);
    for (auto inputBinding : m_InputBindings)
    {
        if (inputBinding.first == name)
        {
            return inputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No input binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
                                                           const std::string& name) const
{
    boost::ignore_unused(layerIndex);
    for (auto outputBinding : m_OutputBindings)
    {
        if (outputBinding.first == name)
        {
            return outputBinding.second;
        }
    }
    throw ParseException(
        boost::str(
            boost::format("No output binding found for layer:%1% / %2%") %
            name %
            CHECK_LOCATION().AsString()));
}

unsigned int Deserializer::GetInputLayerInVector(GraphPtr graph, int targetId)
{
    for (unsigned int i = 0; i < graph->layers()->size(); i++)
    {
        auto layer = graph->layers()->Get(i);
        if (layer->layer_type() == Layer::Layer_InputLayer)
        {
            auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
            if (layerBindingId == targetId)
            {
                return i;
            }
        }
    }
    throw ParseException("Input layer with given layerBindingId not found");
}

unsigned int Deserializer::GetOutputLayerInVector(GraphPtr graph, int targetId)
{
    for (unsigned int i = 0; i < graph->layers()->size(); i++)
    {
        auto layer = graph->layers()->Get(i);
        if (layer->layer_type() == Layer::Layer_OutputLayer)
        {
            auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
            if (layerBindingId == targetId)
            {
                return i;
            }
        }
    }
    throw ParseException("Output layer with given layerBindingId not found");
}

unsigned int Deserializer::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
{
    for (unsigned int i = 0; i < graph->layers()->size(); i++)
    {
        LayerBaseRawPtr layer = GetBaseLayer(graph, i);
        if (layer->index() == targetIndex)
        {
            return i;
        }
    }
    throw ParseException("Layer with given index not found");
}

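// The bindingIdsScheme feature version decides how inputIds/outputIds are interpreted
// below: with scheme 0 they are layer indices, with a newer scheme they are layer
// binding ids that have to be looked up via Get{Input,Output}LayerInVector.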
Deserializer::FeatureVersions Deserializer::GetFeatureVersions(GraphPtr graph)
{
    Deserializer::FeatureVersions versions;

    if (graph->featureVersions())
    {
        versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
    }

    return versions;
}

void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numInputs = graph->inputIds()->size();
    m_InputBindings.clear();
    m_InputBindings.reserve(numInputs);

    for (unsigned int i = 0; i < numInputs; i++)
    {
        unsigned int inputLayerIndex = 0xFFFFFFFF;
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            const unsigned int inputId = boost::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
            inputLayerIndex = GetLayerIndexInVector(graph, inputId);
        }
        else
        {
            const int inputId = graph->inputIds()->Get(i);
            inputLayerIndex = GetInputLayerInVector(graph, inputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);

        // GetBindingLayerInfo expects the index into the layer vector, not the index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
        BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");

        IConnectableLayer* inputLayer =
            m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());

        const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
        inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
        RegisterOutputSlots(graph, inputLayerIndex, inputLayer);

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}

void Deserializer::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numOutputs = graph->outputIds()->size();
    m_OutputBindings.clear();
    m_OutputBindings.reserve(numOutputs);

    for (unsigned int i = 0; i < numOutputs; i++)
    {
        unsigned int outputLayerIndex = 0xFFFFFFFF;
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            const unsigned int outputId = boost::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
            outputLayerIndex = GetLayerIndexInVector(graph, outputId);
        }
        else
        {
            const int outputId = graph->outputIds()->Get(i);
            outputLayerIndex = GetOutputLayerInVector(graph, outputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);

        // GetBindingLayerInfo expects the index into the layer vector, not the index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
        BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Output has no name.");

        IConnectableLayer* outputLayer =
            m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());

        RegisterInputSlots(graph, outputLayerIndex, outputLayer);

        unsigned int sourceLayerIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
        LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(sourceBaseLayer->outputSlots()->Get(0)->tensorInfo());

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}

void Deserializer::RegisterOutputSlots(GraphPtr graph,
                                       uint32_t layerIndex,
                                       IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
    if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index: %3% %4%") %
                       baseLayer->outputSlots()->size() %
                       layer->GetNumOutputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
    {
        const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
        armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
        // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
        RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
    }
}

void Deserializer::RegisterInputSlots(GraphPtr graph,
                                      uint32_t layerIndex,
                                      armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    BOOST_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
    if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(
            boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
                                     " for layer index:%3% %4%") %
                       baseLayer->inputSlots()->size() %
                       layer->GetNumInputSlots() %
                       layerIndex %
                       CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
    {
        auto fbInputSlot = baseLayer->inputSlots()->Get(i);
        auto fbConnection = fbInputSlot->connection();
        armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
        RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
    }
}

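// The two helpers below record pending connections in m_GraphConnections, keyed by the
// source layer index and output slot index; the actual IOutputSlot::Connect calls are
// made once all layers exist, at the end of CreateNetworkFromGraph.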
void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
                                                 uint32_t outputSlotIndex,
                                                 armnn::IInputSlot* inputSlot)
{
    if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
    {
        m_GraphConnections[sourceLayerIndex] = Connections();
    }

    Connections& connections = m_GraphConnections[sourceLayerIndex];
    if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
    {
        connections.inputSlots[outputSlotIndex] = {inputSlot};
    }
    else
    {
        connections.inputSlots[outputSlotIndex].push_back(inputSlot);
    }
}

void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
                                                  uint32_t outputSlotIndex,
                                                  armnn::IOutputSlot* outputSlot)
{
    if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
    {
        m_GraphConnections[sourceLayerIndex] = Connections();
    }

    Connections& connections = m_GraphConnections[sourceLayerIndex];
    if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
    {
        throw ParseException("Same output slot index processed twice");
    }

    connections.outputSlots[outputSlotIndex] = outputSlot;
}

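// Abs has no dedicated front-end layer here: it is deserialized as an
// ElementwiseUnary layer configured with UnaryOperation::Abs.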
void Deserializer::ParseAbs(armnnDeserializer::Deserializer::GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
    IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

Derek Lamberti8ddae332019-02-21 16:29:43 +00001024void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +00001025{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001026 CHECK_LAYERS(graph, 0, layerIndex);
1027 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001028 CHECK_LOCATION();
1029 CHECK_VALID_SIZE(inputs.size(), 1);
1030
Derek Lamberti8ddae332019-02-21 16:29:43 +00001031 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001032 CHECK_VALID_SIZE(outputs.size(), 1);
1033
Derek Lamberti8ddae332019-02-21 16:29:43 +00001034 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001035 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001036 auto serializerDescriptor = serializerLayer->descriptor();
1037
1038 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001039 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +00001040 descriptor.m_A = serializerDescriptor->a();
1041 descriptor.m_B = serializerDescriptor->b();
1042
1043 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
1044 layerName.c_str());
1045 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1046 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1047
Derek Lamberti8ddae332019-02-21 16:29:43 +00001048 RegisterInputSlots(graph, layerIndex, layer);
1049 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +00001050}
1051
void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::ArgMinMaxDescriptor descriptor;
    descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
    descriptor.m_Axis = serializerDescriptor->axis();
    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

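// Note (reader aid): the crops field is serialized as a flat list of 2*N values, hence the
// divisibility-by-2 check and the pairwise loop below. A sketch of the expected layout, using
// hypothetical values rather than data from any particular model:
//     flatBufferCrops = [0, 0, 2, 2]   ->   m_Crops = { {0, 0}, {2, 2} }
// i.e. one (begin, end) crop pair per spatial dimension.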
void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
    auto flatBufferCrops = flatBufferDescriptor->crops();
    auto flatBufferBlockShape = flatBufferDescriptor->blockShape();

    if (flatBufferCrops->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> crops;
    crops.reserve(flatBufferCrops->Length() / 2);
    for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
    {
        crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
    }

    armnn::BatchToSpaceNdDescriptor descriptor;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
    descriptor.m_BlockShape =
        std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
    descriptor.m_Crops = crops;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::BatchNormalizationDescriptor descriptor;
    descriptor.m_Eps = serializerDescriptor->eps();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
    armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
    armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
    armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());

    IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
                                                                     mean,
                                                                     variance,
                                                                     beta,
                                                                     gamma,
                                                                     layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
    auto serializerInput = serializerLayer->input();

    armnn::ConstTensor input = ToConstTensor(serializerInput);

    IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterOutputSlots(graph, layerIndex, layer);
}

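// Note (reader aid): convolution weights are always serialized, while biases are present only
// when biasEnabled() is true. The armnn::Optional<armnn::ConstTensor> below therefore starts as
// EmptyOptional and is populated only in that case, matching the AddConvolution2dLayer overload
// that takes optional biases; the same pattern recurs for depthwise convolution and fully
// connected layers later in this file.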
void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::Convolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_DilationX = serializerDescriptor->dilationX();
    descriptor.m_DilationY = serializerDescriptor->dilationY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
        optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
    }
    IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
                                                                weights,
                                                                optionalBiases,
                                                                layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();

    armnn::DepthToSpaceDescriptor descriptor;
    descriptor.m_BlockSize = fbDescriptor->blockSize();
    descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::DepthwiseConvolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_DilationX = serializerDescriptor->dilationX();
    descriptor.m_DilationY = serializerDescriptor->dilationY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::ConstTensor biases;

    armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
    if (descriptor.m_BiasEnabled)
    {
        biases = ToConstTensor(serializerLayer->biases());
        optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
    }
    IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                                         weights,
                                                                         optionalBiases,
                                                                         layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

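// Note (reader aid): DetectionPostProcess is one of the few layers handled here with more than
// one output; it takes two inputs and exposes four output slots, so the output tensor infos are
// set in a loop rather than on slot 0 only. The anchors tensor is serialized as a constant
// alongside the descriptor. The conventional output ordering (boxes, classes, scores, number of
// detections) is assumed from the TfLite-style semantics of this operator rather than stated in
// this file.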
void Deserializer::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    armnn::DetectionPostProcessDescriptor descriptor;
    descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
    descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
    descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
    descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
    descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
    descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
    descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
    descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
    descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
    descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
    descriptor.m_ScaleH = flatBufferDescriptor->scaleH();

    armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());

    IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
                                                                       anchors,
                                                                       layerName.c_str());

    for (unsigned int i = 0; i < 4; i++)
    {
        layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
    IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
    IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
    auto fbDescriptor = fbLayer->descriptor();

    armnn::InstanceNormalizationDescriptor descriptor;
    descriptor.m_Gamma = fbDescriptor->gamma();
    descriptor.m_Beta = fbDescriptor->beta();
    descriptor.m_Eps = fbDescriptor->eps();
    descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());

    const std::string layerName = GetLayerName(graph, layerIndex);
    const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);

    IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    auto layerName = GetLayerName(graph, layerIndex);
    armnn::L2NormalizationDescriptor descriptor;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
    descriptor.m_Eps = flatBufferDescriptor->eps();

    IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::LogSoftmaxDescriptor descriptor;
    descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
    descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
    auto layerName = GetLayerName(graph, layerIndex);

    IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

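// Note (reader aid): MergerLayer is the older serialized name for what is now ConcatLayer; both
// record types carry an OriginsDescriptor, so this free helper lets ParseConcat below handle
// either form of a serialized graph.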
const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
                                                               unsigned int layerIndex)
{
    auto layerType = graph->layers()->Get(layerIndex)->layer_type();

    switch (layerType)
    {
        case Layer::Layer_ConcatLayer:
            return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
        case Layer::Layer_MergerLayer:
            return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
        default:
            throw armnn::Exception("unknown layer type, should be concat or merger");
    }
}

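// Note (reader aid): ParseComparison is the generic path for serialized ComparisonLayer records;
// the dedicated ParseEqual and ParseGreater functions above exist for older streams and build the
// same armnn comparison layer with a fixed ComparisonOperation.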
void Deserializer::ParseComparison(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
    auto fbDescriptor = fbLayer->descriptor();

    armnn::ComparisonDescriptor descriptor;
    descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());

    const std::string& layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

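// Note (reader aid): ParseElementwiseUnary is the generic counterpart of ParseAbs and ParseRsqrt;
// it reads the UnaryOperation out of the serialized descriptor instead of hard-coding it, and all
// three paths end up calling AddElementwiseUnaryLayer.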
void Deserializer::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
    auto fbDescriptor = fbLayer->descriptor();

    armnn::ElementwiseUnaryDescriptor descriptor;
    descriptor.m_Operation = ToUnaryOperation(fbDescriptor->operation());

    const std::string& layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseConcat(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
    unsigned int numViews = originsDescriptor->numViews();
    unsigned int numDimensions = originsDescriptor->numDimensions();

    // can now check the number of inputs == number of views
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), numViews);

    armnn::OriginsDescriptor descriptor(numViews, numDimensions);
    auto originsPtr = originsDescriptor->viewOrigins();
    for (unsigned int v = 0; v < numViews; ++v)
    {
        auto originPtr = originsPtr->Get(v);
        for (unsigned int d = 0; d < numDimensions; ++d)
        {
            uint32_t value = originPtr->data()->Get(d);
            descriptor.SetViewOriginCoord(v, d, value);
        }
    }
    descriptor.SetConcatAxis(originsDescriptor->concatAxis());

    IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    armnn::IConnectableLayer* layer = m_Network->AddFloorLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
    fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
    fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();

    armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());

    armnn::IConnectableLayer* layer;
    armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
    if (flatBufferDescriptor->biasEnabled())
    {
        armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
        optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
    }
    layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                              weightsTensor,
                                              optionalBiases,
                                              layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

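// Note (reader aid): as with crops in BatchToSpaceNd, the pad list is serialized as a flat
// sequence of (before, after) pairs, one pair per dimension, plus a scalar padValue used to fill
// the padded region; hence the divisibility-by-2 check and the pairwise loop below.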
void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
    auto flatBufferPadList = flatBufferDescriptor->padList();
    float padValue = flatBufferDescriptor->padValue();

    if (flatBufferPadList->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> padList;
    padList.reserve(flatBufferPadList->Length() / 2);
    for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
    {
        padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
    }

    armnn::PadDescriptor descriptor(padList, padValue);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto dimsMapping =
        graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);
    const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));

    IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

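// Note (reader aid): GetPoolingDescriptor only translates the flatbuffer enums into their armnn
// equivalents (pool type, output shape rounding, padding method, data layout) and copies the
// numeric fields through. Unknown enum values are treated as programming errors via
// BOOST_ASSERT_MSG rather than ParseException, so they abort in debug builds and are typically
// compiled out of release builds, leaving the descriptor's default value in place.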
armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
                                                              unsigned int layerIndex)
{
    boost::ignore_unused(layerIndex);
    armnn::Pooling2dDescriptor desc;

    switch (pooling2dDesc->poolType())
    {
        case PoolingAlgorithm_Average:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Average;
            break;
        }
        case PoolingAlgorithm_Max:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Max;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
        }
    }

    switch (pooling2dDesc->outputShapeRounding())
    {
        case OutputShapeRounding_Floor:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
            break;
        }
        case OutputShapeRounding_Ceiling:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
        }
    }

    switch (pooling2dDesc->paddingMethod())
    {
        case PaddingMethod_Exclude:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
            break;
        }
        case PaddingMethod_IgnoreValue:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported padding method");
        }
    }

    switch (pooling2dDesc->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    desc.m_PadRight = pooling2dDesc->padRight();
    desc.m_PadLeft = pooling2dDesc->padLeft();
    desc.m_PadBottom = pooling2dDesc->padBottom();
    desc.m_PadTop = pooling2dDesc->padTop();
    desc.m_StrideX = pooling2dDesc->strideX();
    desc.m_StrideY = pooling2dDesc->strideY();
    desc.m_PoolWidth = pooling2dDesc->poolWidth();
    desc.m_PoolHeight = pooling2dDesc->poolHeight();

    return desc;
}

void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

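// Note (reader aid): OutputShapeOfReshape resolves an optional -1 "stretch" dimension in the
// target shape. A worked example with hypothetical shapes, following the arithmetic below:
//     input 1x2x3x4 (24 elements), targetDimsIn = { 2, -1 }
//     targetNumElements = (-1) * 2 * (-1) = 2   (the accumulate starts at -1, cancelling the -1 entry)
//     outputDims[1] = 24 / 2 = 12               ->  output shape 2x12
// At most one -1 is permitted; a second occurrence triggers the ParseException.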
armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                     const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(boost::str(
                boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
        }

        auto targetNumElements =
            boost::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}

void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseResize(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();

    armnn::ResizeDescriptor descriptor;
    descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
    descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
    descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

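// Note (reader aid): ResizeBilinearLayer is the older serialized form of resize; it is mapped
// onto the unified Resize layer with ResizeMethod::Bilinear, so both record types produce the
// same armnn layer as ParseResize above.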
void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();

    armnn::ResizeDescriptor descriptor;
    descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
    descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
    descriptor.m_Method = armnn::ResizeMethod::Bilinear;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::SoftmaxDescriptor descriptor;
    descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
    auto layerName = GetLayerName(graph, layerIndex);

    IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
    auto flatBufferPadList = flatBufferDescriptor->padList();
    auto flatBufferBlockShape = flatBufferDescriptor->blockShape();

    if (flatBufferPadList->Length() % 2 != 0)
    {
        throw ParseException(boost::str(
            boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> padList;
    padList.reserve(flatBufferPadList->Length() / 2);
    for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
    {
        padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
    }

    armnn::SpaceToBatchNdDescriptor descriptor;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
    descriptor.m_BlockShape =
        std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
    descriptor.m_PadList = padList;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();

    armnn::SpaceToDepthDescriptor descriptor;
    descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

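// Note (reader aid): like GetPoolingDescriptor above, the next helper is a pure enum and field
// translation from the flatbuffer NormalizationDescriptor to armnn::NormalizationDescriptor
// (channel type, method, data layout, then alpha, beta, k and normSize).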
armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
    Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
    unsigned int layerIndex)
{
    boost::ignore_unused(layerIndex);
    armnn::NormalizationDescriptor desc;

    switch (normalizationDescriptor->normChannelType())
    {
        case NormalizationAlgorithmChannel_Across:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
            break;
        }
        case NormalizationAlgorithmChannel_Within:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported normalization channel type");
        }
    }

    switch (normalizationDescriptor->normMethodType())
    {
        case NormalizationAlgorithmMethod_LocalBrightness:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
            break;
        }
        case NormalizationAlgorithmMethod_LocalContrast:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported normalization method type");
        }
    }

    switch (normalizationDescriptor->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    desc.m_Alpha = normalizationDescriptor->alpha();
    desc.m_Beta = normalizationDescriptor->beta();
    desc.m_K = normalizationDescriptor->k();
    desc.m_NormSize = normalizationDescriptor->normSize();

    return desc;
}

void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto outputInfo = ToTensorInfo(outputs[0]);

    auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
    auto layerName = GetLayerName(graph, layerIndex);

    IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
    IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseSlice(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();

    auto fbBegin = fbDescriptor->begin();
    auto fbSize = fbDescriptor->size();

    if (fbBegin->Length() != fbSize->Length())
    {
        throw ParseException(boost::str(
            boost::format("Begin and size descriptors must have the same length %1%") % CHECK_LOCATION().AsString()));
    }

    armnn::SliceDescriptor descriptor;
    descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
    descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

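// Note (reader aid): StridedSlice stores begin, end and stride as three parallel vectors that
// must have the same length (one entry per dimension), hence the check below. The mask fields
// (beginMask, endMask, shrinkAxisMask, ellipsisMask, newAxisMask) are copied through unchanged
// and are assumed to follow the usual TensorFlow strided_slice semantics.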
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002297void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
2298{
2299 CHECK_LAYERS(graph, 0, layerIndex);
2300
2301 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2302 CHECK_VALID_SIZE(inputs.size(), 1);
2303
2304 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2305 CHECK_VALID_SIZE(outputs.size(), 1);
2306
2307 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2308
2309 auto flatBufferBegin = flatBufferDescriptor->begin();
2310 auto flatBufferEnd = flatBufferDescriptor->end();
2311 auto flatBufferStride = flatBufferDescriptor->stride();
2312
2313 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
2314 flatBufferBegin->Length() == flatBufferStride->Length()))
2315 {
2316 throw ParseException(boost::str(
2317 boost::format("The size of the begin, end, and stride must be equal %1%") % CHECK_LOCATION().AsString()));
2318 }
2319
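    // Convert the flatbuffer vectors and copy the mask fields into the StridedSliceDescriptor.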
    std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
    std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
    std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());

    armnn::StridedSliceDescriptor descriptor(begin, end, stride);
    descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
    descriptor.m_EndMask = flatBufferDescriptor->endMask();
    descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
    descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
    descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseGather(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 2);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
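    // Gather takes two inputs (the data tensor and the indices); no descriptor is serialized for it here.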
    IConnectableLayer* layer = m_Network->AddGatherLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMean(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
    auto flatBufferAxis = flatBufferDescriptor->axis();
    auto flatBufferKeepDims = flatBufferDescriptor->keepDims();

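    // Build the MeanDescriptor from the serialized reduction axes and the keepDims flag.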
    armnn::MeanDescriptor descriptor;
    descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
    descriptor.m_KeepDims = flatBufferKeepDims;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);

    auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
    auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
    auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
    auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
    uint32_t numViews = flatBufferOriginsDescriptor->numViews();
    uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();

    // Check that numViews and numDimensions correspond to the ones already serialized ...
    // numViews == flatBufferViewSizes.size();
    // foreach: numDimensions == flatBufferViewSizes[x].size();

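    // Copy the view sizes and origin coordinates of every view into the ViewsDescriptor.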
    armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
    for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
    {
        for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
        {
            viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
            viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
        }
    }

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());

    // The splitter has one output per view; set the tensor info on each output slot.
    for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
    {
        armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
        layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

armnn::LstmDescriptor Deserializer::GetLstmDescriptor(Deserializer::LstmDescriptorPtr lstmDescriptor)
{
    armnn::LstmDescriptor desc;

    desc.m_ActivationFunc = lstmDescriptor->activationFunc();
    desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
    desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
    desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
    desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
    desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
    desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();

    return desc;
}

void Deserializer::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

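    // Input gate weights and bias are only serialized when CIFG (coupled input and forget gate) is disabled.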
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

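    // Projection weights and bias are only serialized when the projection layer is enabled.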
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

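    // Cell-to-forget and cell-to-output weights are only serialized when peephole connections are enabled.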
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

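    // Layer normalization weights are only serialized when layer normalization is enabled;
    // the input layer norm weights are additionally omitted when CIFG is enabled.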
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (lstmDescriptor.m_LayerNormEnabled)
    {
        if (!lstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    armnn::QuantizedLstmInputParams lstmInputParams;

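    // The quantized LSTM has no optional parameters; every weight and bias tensor is read unconditionally.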
    armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_InputGateBias = &inputGateBias;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    const std::string layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseMerge(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 2);

    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    const std::string layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());

    armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);

    armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::TransposeConvolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    // weights & biases
    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::Optional<armnn::ConstTensor> optionalBiases;
    if (descriptor.m_BiasEnabled)
    {
        armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
        optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
    }

    IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
                                                                         weights,
                                                                         optionalBiases,
                                                                         layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseStack(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
    unsigned int axis = flatBufferDescriptor->axis();
    unsigned int numInputs = flatBufferDescriptor->numInputs();
    CHECK_VALID_SIZE(inputs.size(), numInputs);

    auto flatBufferInputShape = flatBufferDescriptor->inputShape();
    std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
                                           flatBufferInputShape->begin() + flatBufferInputShape->size());

    TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
    armnn::StackDescriptor descriptor(axis, numInputs, inputShape);

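    // Every input to the stack must match the input shape recorded in the descriptor.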
    for (unsigned int i=0; i<inputs.size(); ++i)
    {
        armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
        if (descriptor.m_InputShape != inputShape)
        {
            std::stringstream ss;
            ss << "Shape of input "
               << i
               << " "
               << inputShape
               << " does not equal defined input shape "
               << descriptor.m_InputShape
               << ": "
               << CHECK_LOCATION().AsString();
            throw ParseException(ss.str());
        }
    }

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void Deserializer::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    auto outputs = GetOutputs(graph, layerIndex);

    auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
    auto fbDescriptor = fbLayer->descriptor();

    armnn::StandInDescriptor descriptor;
    descriptor.m_NumInputs = fbDescriptor->numInputs();
    descriptor.m_NumOutputs = fbDescriptor->numOutputs();

    CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
    CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);

    const std::string layerName = GetLayerName(graph, layerIndex);
    armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());

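    // A stand-in layer acts as a placeholder for an operation ArmNN cannot represent natively;
    // it only declares its inputs and outputs, so set tensor info on each declared output slot.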
    for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
    {
        armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
        layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

} // namespace armnnDeserializer