blob: bc6fbf0194151161c7c7400c3500ccdb4a6da157 [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
2// Copyright © 2017 Arm Ltd. All rights reserved.
3// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
Matthew Benthamff130e22020-01-17 11:47:42 +00008#include <armnn/Descriptors.hpp>
Kevin May43a799c2019-02-08 16:31:42 +00009#include <armnn/Exceptions.hpp>
Matthew Benthamff130e22020-01-17 11:47:42 +000010#include <armnn/TypesUtils.hpp>
11#include <armnn/LstmParams.hpp>
12#include <armnn/QuantizedLstmParams.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000013
Matteo Martincighe011d202019-11-28 11:35:47 +000014#include <armnnUtils/Permute.hpp>
Mike Kellyc9ea45a2020-02-28 18:11:58 +000015#include <armnnUtils/Transpose.hpp>
Jan Eilers8eb25602020-03-09 12:13:48 +000016#include <armnn/utility/IgnoreUnused.hpp>
Matteo Martincighe011d202019-11-28 11:35:47 +000017
Kevin May43a799c2019-02-08 16:31:42 +000018#include <ParserHelper.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000019#include <VerificationHelpers.hpp>
20
21#include <boost/filesystem.hpp>
22#include <boost/format.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000023#include <boost/assert.hpp>
24#include <boost/format.hpp>
Aron Virginas-Tard4f0fea2019-04-09 14:08:06 +010025#include <boost/format.hpp>
26#include <boost/numeric/conversion/cast.hpp>
Jim Flynn18ce3382019-03-08 11:08:30 +000027#include <boost/polymorphic_cast.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000028
Kevin May43a799c2019-02-08 16:31:42 +000029#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000030#include <algorithm>
31#include <limits>
32#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000033
34using armnn::ParseException;
35using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000036using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000037
Derek Lamberti0028d1b2019-02-20 13:57:42 +000038namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000039{
Kevin May43a799c2019-02-08 16:31:42 +000040
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000041namespace
42{
Kevin May43a799c2019-02-08 16:31:42 +000043
44const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
45
Derek Lamberti0028d1b2019-02-20 13:57:42 +000046 void CheckGraph(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000047 unsigned int layersIndex,
48 const CheckLocation& location)
49{
50 if (graph->layers() == nullptr)
51 {
52 throw ParseException(
53 boost::str(
54 boost::format("%1% was called with invalid (null) graph. "
55 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
56 "layers:%2% at %3%") %
57 location.m_Function %
58 layersIndex %
59 location.FileLine()));
60 }
61 else if (layersIndex >= graph->layers()->size())
62 {
63 throw ParseException(
64 boost::str(
65 boost::format("%1% was called with an invalid layers index. "
66 "layers:%2% at %3%") %
67 location.m_Function %
68 layersIndex %
69 location.FileLine()));
70 }
71}
72
Derek Lamberti0028d1b2019-02-20 13:57:42 +000073void CheckLayers(const Deserializer::GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000074 unsigned int layersIndex,
75 unsigned int layerIndex,
76 const CheckLocation& location)
77{
78 if (graph->layers() == nullptr)
79 {
80 throw ParseException(
81 boost::str(
82 boost::format("%1% was called with invalid (null) graph. "
83 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000084 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000085 location.m_Function %
86 layersIndex %
87 location.FileLine()));
88 }
89 else if (layersIndex >= graph->layers()->size())
90 {
91 throw ParseException(
92 boost::str(
93 boost::format("%1% was called with an invalid layers index. "
Nattapat Chaimanowong43e78642019-02-13 15:56:24 +000094 "layers:%2% at %3%") %
Kevin May43a799c2019-02-08 16:31:42 +000095 location.m_Function %
96 layersIndex %
97 location.FileLine()));
98 }
99 else if (layerIndex >= graph->layers()[layersIndex].size()
100 && layerIndex != VIRTUAL_LAYER_ID)
101 {
102 throw ParseException(
103 boost::str(
104 boost::format("%1% was called with an invalid layer index. "
105 "layers:%2% layer:%3% at %4%") %
106 location.m_Function %
107 layersIndex %
108 layerIndex %
109 location.FileLine()));
110 }
111}
112
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000113void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000114 const CheckLocation& location)
115{
116 if (rawPtr == nullptr)
117 {
118 throw ParseException(
119 boost::str(
120 boost::format("%1% was called with a null tensor pointer. "
121 "at %2%") %
122 location.m_Function %
123 location.FileLine()));
124
125 }
126}
127
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000128void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000129 const CheckLocation& location)
130{
131 if (rawPtr == nullptr)
132 {
133 throw ParseException(boost::str(boost::format("%1% was called with a null const tensor pointer. at %2%") %
134 location.m_Function %
135 location.FileLine()));
136 }
137}
138
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000139void CheckConstTensorSize(const unsigned int constTensorSize,
140 const unsigned int tensorSize,
141 const CheckLocation& location)
142{
143 if (constTensorSize != tensorSize)
144 {
145 throw ParseException(boost::str(boost::format("%1% wrong number of components supplied to tensor. at:%2%") %
146 location.m_Function %
147 location.FileLine()));
148 }
149}
150
Kevin May43a799c2019-02-08 16:31:42 +0000151#define CHECK_TENSOR_PTR(TENSOR_PTR) \
152 CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())
153
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000154#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
155 CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())
156
Mike Kellya0766c32019-02-19 17:22:07 +0000157#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
158 CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())
159
Kevin May43a799c2019-02-08 16:31:42 +0000160#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
161 CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())
162
163#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
164 CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
165}
166
Saoirse Stewart263829c2019-02-19 15:54:14 +0000167bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
168{
169 const unsigned int actualSize = actual.GetNumDimensions();
170 if (actualSize != expected.size())
171 {
172 return false;
173 }
174
175 for (unsigned int i = 0u; i < actualSize; i++)
176 {
177 if (actual[i] != static_cast<unsigned int>(expected[i]))
178 {
179 return false;
180 }
181 }
182
183 return true;
184}
185
// Constructs a deserializer with no network attached yet. Every slot of the
// dispatch table is first initialized to ParseUnsupportedLayer, then the
// supported layer types are registered over the top — so any layer type not
// listed below is reported as unsupported at parse time rather than crashing.
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer] = &Deserializer::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer] = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer] = &Deserializer::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &Deserializer::ParseBatchNormalization;
    m_ParserFunctions[Layer_ComparisonLayer] = &Deserializer::ParseComparison;
    m_ParserFunctions[Layer_ConcatLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer] = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthToSpaceLayer] = &Deserializer::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &Deserializer::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &Deserializer::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_ElementwiseUnaryLayer] = &Deserializer::ParseElementwiseUnary;
    m_ParserFunctions[Layer_EqualLayer] = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_FloorLayer] = &Deserializer::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &Deserializer::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer] = &Deserializer::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer] = &Deserializer::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &Deserializer::ParseL2Normalization;
    m_ParserFunctions[Layer_LogSoftmaxLayer] = &Deserializer::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer] = &Deserializer::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &Deserializer::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer] = &Deserializer::ParseMerge;
    // MergerLayer is the legacy name — it reuses the Concat parser.
    m_ParserFunctions[Layer_MergerLayer] = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer] = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &Deserializer::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &Deserializer::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_PreluLayer] = &Deserializer::ParsePrelu;
    m_ParserFunctions[Layer_QuantizeLayer] = &Deserializer::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer] = &Deserializer::ParseQuantizedLstm;
    m_ParserFunctions[Layer_ReshapeLayer] = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &Deserializer::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer] = &Deserializer::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer] = &Deserializer::ParseRsqrt;
    m_ParserFunctions[Layer_SliceLayer] = &Deserializer::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer] = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &Deserializer::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer] = &Deserializer::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer] = &Deserializer::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer] = &Deserializer::ParseStack;
    m_ParserFunctions[Layer_StandInLayer] = &Deserializer::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer] = &Deserializer::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &Deserializer::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer] = &Deserializer::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &Deserializer::ParseTransposeConvolution2d;
    m_ParserFunctions[Layer_TransposeLayer] = &Deserializer::ParseTranspose;
}
247
// Returns the LayerBase sub-table of the layer at layerIndex, dispatching on
// the flatbuffer union tag to pick the right layer_as_*() accessor.
// Input/Output layers wrap a bindable base, hence the extra ->base() hop.
// Throws ParseException for Layer_NONE or any unrecognized union tag.
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_ComparisonLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            // Bindable layer: unwrap the bindable base to reach the LayerBase.
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_InstanceNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LogSoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            // Bindable layer: unwrap the bindable base to reach the LayerBase.
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StandInLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_TransposeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(boost::str(
                  boost::format("Layer type %1% not recognized") %
                  layerType));
    }
}
371
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000372std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
373{
374 auto layer = GetBaseLayer(graph, index);
375 assert(layer);
376 return layer->layerName()->str();
377}
378
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000379int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000380{
381 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
382
383 if (layerType == Layer::Layer_InputLayer)
384 {
385 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
386 }
387 else if ( layerType == Layer::Layer_OutputLayer )
388 {
389 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
390 }
391 return 0;
392}
393
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000394armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000395{
396 switch (dataLayout)
397 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000398 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000399 return armnn::DataLayout::NHWC;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000400 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000401 default:
402 return armnn::DataLayout::NCHW;
403 }
404}
405
// Maps the serialized activation function enum onto the armnn equivalent.
// Note: unrecognized values silently fall back to Sigmoid (the default
// branch) rather than throwing.
armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        case armnnSerializer::ActivationFunction_Elu:
            return armnn::ActivationFunction::Elu;
        case armnnSerializer::ActivationFunction_HardSwish:
            return armnn::ActivationFunction::HardSwish;
        default:
            // Fallback for unknown serialized values.
            return armnn::ActivationFunction::Sigmoid;
    }
}
436
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100437armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
438{
439 switch (function)
440 {
441 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
442 return armnn::ArgMinMaxFunction::Max;
443 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
444 default:
445 return armnn::ArgMinMaxFunction::Min;
446 }
447}
448
// Maps the serialized comparison operation enum onto the armnn equivalent.
// Note: unrecognized values silently fall back to NotEqual (the default
// branch shared with the NotEqual case) rather than throwing.
armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
            return armnn::ComparisonOperation::Equal;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
            return armnn::ComparisonOperation::Greater;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
            return armnn::ComparisonOperation::GreaterOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
            return armnn::ComparisonOperation::Less;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
            return armnn::ComparisonOperation::LessOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
        default:
            return armnn::ComparisonOperation::NotEqual;
    }
}
468
// Maps the serialized elementwise-unary operation enum onto the armnn
// equivalent. Unlike the other To* converters in this file, an unknown value
// throws InvalidArgumentException instead of falling back to a default.
armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
            return armnn::UnaryOperation::Abs;
        case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
            return armnn::UnaryOperation::Rsqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
            return armnn::UnaryOperation::Sqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
            return armnn::UnaryOperation::Exp;
        case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
            return armnn::UnaryOperation::Neg;
        default:
            throw armnn::InvalidArgumentException("Unary operation unknown");
    }
}
487
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100488armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
489{
490 switch (method)
491 {
492 case armnnSerializer::ResizeMethod_NearestNeighbor:
493 return armnn::ResizeMethod::NearestNeighbor;
494 case armnnSerializer::ResizeMethod_Bilinear:
Aron Virginas-Tar3c9b2702019-10-31 13:45:16 +0000495 return armnn::ResizeMethod::Bilinear;
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100496 default:
497 return armnn::ResizeMethod::NearestNeighbor;
498 }
499}
500
// Builds an armnn::TensorInfo (shape, data type, quantization parameters)
// from a serialized TensorInfo table. Throws ParseException for a null
// tensor pointer or an unsupported data type.
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QAsymmS8:
            type = armnn::DataType::QAsymmS8;
            break;
        // QuantisedAsymm8 is the older name for the same representation;
        // both tags map to QAsymmU8.
        case DataType_QuantisedAsymm8:
        case DataType_QAsymmU8:
            type = armnn::DataType::QAsymmU8;
            break;
        // Likewise QuantisedSymm16 is the older tag for QSymmS16.
        case DataType_QSymmS16:
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QSymmS16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(
                    boost::format("Unsupported data type %1% = %2%. %3%") %
                    tensorPtr->dataType() %
                    EnumNameDataType(tensorPtr->dataType()) %
                    location.AsString()));
        }
    }
    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    // Copy the flatbuffer dimension vector into contiguous storage that
    // the TensorInfo constructor can consume.
    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}
557
// Builds an armnn::ConstTensor viewing the raw bytes stored in a serialized
// ConstTensor table, dispatching on the union tag for the element width.
// Each branch verifies that the stored element count matches the count
// implied by the tensor's shape. Throws ParseException for a null pointer,
// a size mismatch, or an unsupported data union tag.
// NOTE(review): the returned ConstTensor references memory owned by the
// flatbuffer — presumably the buffer must outlive it; confirm at call sites.
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(
                boost::str(boost::format("Unsupported data type %1% = %2%. %3%") %
                           constTensorPtr->data_type() %
                           EnumNameConstTensorData(constTensorPtr->data_type()) %
                           location.AsString()));
        }
    }
}
600
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000601Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000602 unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000603{
604 CHECK_LAYERS(graphPtr, 0, layerIndex);
605 auto layer = GetBaseLayer(graphPtr, layerIndex);
606 const auto& numInputs = layer->inputSlots()->size();
607
608 TensorRawPtrVector result(numInputs);
609
610 for (unsigned int i=0; i<numInputs; ++i)
611 {
612 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
613 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
614 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
615 }
616 return result;
617}
618
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000619Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000620 unsigned int layerIndex)
621{
622 CHECK_LAYERS(graphPtr, 0, layerIndex);
623 auto layer = GetBaseLayer(graphPtr, layerIndex);
624 const auto& numOutputs = layer->outputSlots()->size();
625
626 TensorRawPtrVector result(numOutputs);
627
628 for (unsigned int i=0; i<numOutputs; ++i)
629 {
630 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
631 }
632 return result;
633}
634
Derek Lamberti8ddae332019-02-21 16:29:43 +0000635void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000636{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000637 CHECK_LAYERS(graph, 0, layerIndex);
638 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Kevin May43a799c2019-02-08 16:31:42 +0000639 throw ParseException(
640 boost::str(
641 boost::format("Layer not supported. "
642 "layerIndex: %1% "
643 "layerName: %2% / %3%") %
644 layerIndex %
645 layerName %
646 CHECK_LOCATION().AsString()));
647}
648
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000649void Deserializer::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000650{
651 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000652 m_InputBindings.clear();
653 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000654}
655
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000656IDeserializer* IDeserializer::CreateRaw()
Kevin May43a799c2019-02-08 16:31:42 +0000657{
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000658 return new Deserializer();
Kevin May43a799c2019-02-08 16:31:42 +0000659}
660
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000661IDeserializerPtr IDeserializer::Create()
Kevin May43a799c2019-02-08 16:31:42 +0000662{
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000663 return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
Kevin May43a799c2019-02-08 16:31:42 +0000664}
665
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000666void IDeserializer::Destroy(IDeserializer* parser)
Kevin May43a799c2019-02-08 16:31:42 +0000667{
668 delete parser;
669}
670
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000671INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000672{
673 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000674 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
675 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000676}
677
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000678armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000679{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000680 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000681 std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
682 GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
683 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000684}
685
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000686Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000687{
688 if (binaryContent == nullptr)
689 {
690 throw InvalidArgumentException(boost::str(boost::format("Invalid (null) binary content %1%") %
691 CHECK_LOCATION().AsString()));
692 }
693 flatbuffers::Verifier verifier(binaryContent, len);
694 if (verifier.VerifyBuffer<SerializedGraph>() == false)
695 {
696 throw ParseException(
697 boost::str(boost::format("Buffer doesn't conform to the expected Armnn "
698 "flatbuffers format. size:%1% %2%") %
699 len %
700 CHECK_LOCATION().AsString()));
701 }
702 return GetSerializedGraph(binaryContent);
703}
704
Derek Lamberti8ddae332019-02-21 16:29:43 +0000705INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000706{
707 m_Network = INetwork::Create();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000708 BOOST_ASSERT(graph != nullptr);
Kevin May43a799c2019-02-08 16:31:42 +0000709 unsigned int layerIndex = 0;
Derek Lamberti8ddae332019-02-21 16:29:43 +0000710 for (AnyLayer const* layer : *graph->layers())
Kevin May43a799c2019-02-08 16:31:42 +0000711 {
712 if (layer->layer_type() != Layer_InputLayer &&
713 layer->layer_type() != Layer_OutputLayer)
714 {
715 // lookup and call the parser function
716 auto& parserFunction = m_ParserFunctions[layer->layer_type()];
Derek Lamberti8ddae332019-02-21 16:29:43 +0000717 (this->*parserFunction)(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000718 }
719 ++layerIndex;
720 }
721
Derek Lamberti8ddae332019-02-21 16:29:43 +0000722 SetupInputLayers(graph);
723 SetupOutputLayers(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000724
725 // establish the connections from the layer outputs to the inputs of the subsequent layers
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100726 for (auto&& graphIt : m_GraphConnections)
Kevin May43a799c2019-02-08 16:31:42 +0000727 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100728 Connections& connections = graphIt.second;
729 for (auto&& outputIt : connections.outputSlots)
Kevin May43a799c2019-02-08 16:31:42 +0000730 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100731 const unsigned int outputSlotIndex = outputIt.first;
732 IOutputSlot* outputSlot = outputIt.second;
733 if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
Kevin May43a799c2019-02-08 16:31:42 +0000734 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100735 for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000736 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100737 outputSlot->Connect(*inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000738 }
Kevin May43a799c2019-02-08 16:31:42 +0000739 }
740 }
741 }
742
743 return std::move(m_Network);
744}
745
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000746BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000747 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000748{
Jan Eilers8eb25602020-03-09 12:13:48 +0000749 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000750 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000751 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000752 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000753 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000754 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000755 }
756 }
757 throw ParseException(
758 boost::str(
759 boost::format("No input binding found for layer:%1% / %2%") %
760 name %
761 CHECK_LOCATION().AsString()));
762}
763
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000764BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000765 const std::string& name) const
766{
Jan Eilers8eb25602020-03-09 12:13:48 +0000767 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000768 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000769 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000770 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000771 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000772 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000773 }
774 }
775 throw ParseException(
776 boost::str(
777 boost::format("No output binding found for layer:%1% / %2%") %
778 name %
779 CHECK_LOCATION().AsString()));
780}
781
Tee Jungaa920c52019-11-05 10:48:25 +0000782unsigned int Deserializer::GetInputLayerInVector(GraphPtr graph, int targetId)
783{
784 for (unsigned int i = 0; i < graph->layers()->size(); i++)
785 {
786 auto layer = graph->layers()->Get(i);
787 if (layer->layer_type() == Layer::Layer_InputLayer)
788 {
789 auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
790 if (layerBindingId == targetId)
791 {
792 return i;
793 }
794 }
795 }
796 throw ParseException("Input layer with given layerBindingId not found");
797}
798
799unsigned int Deserializer::GetOutputLayerInVector(GraphPtr graph, int targetId)
800{
801 for (unsigned int i = 0; i < graph->layers()->size(); i++)
802 {
803 auto layer = graph->layers()->Get(i);
804 if (layer->layer_type() == Layer::Layer_OutputLayer)
805 {
806 auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
807 if (layerBindingId == targetId)
808 {
809 return i;
810 }
811 }
812 }
813 throw ParseException("Output layer with given layerBindingId not found");
814}
815
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100816unsigned int Deserializer::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
817{
818 for (unsigned int i = 0; i < graph->layers()->size(); i++)
819 {
820 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
821 if (layer->index() == targetIndex)
822 {
823 return i;
824 }
825 }
826 throw ParseException("Layer with given index not found");
827}
828
Tee Jungaa920c52019-11-05 10:48:25 +0000829Deserializer::FeatureVersions Deserializer::GetFeatureVersions(GraphPtr graph)
830{
831 Deserializer::FeatureVersions versions;
832
833 if (graph->featureVersions())
834 {
835 versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
836 }
837
838 return versions;
839}
840
Derek Lamberti8ddae332019-02-21 16:29:43 +0000841void Deserializer::SetupInputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000842{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000843 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100844 const unsigned int numInputs = graph->inputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000845 m_InputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100846 m_InputBindings.reserve(numInputs);
847
848 for (unsigned int i = 0; i < numInputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000849 {
Tee Jungaa920c52019-11-05 10:48:25 +0000850 unsigned int inputLayerIndex = 0xFFFFFFFF;
851 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
852 {
853 const unsigned int inputId = boost::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
854 inputLayerIndex = GetLayerIndexInVector(graph, inputId);
855 }
856 else
857 {
858 const int inputId = graph->inputIds()->Get(i);
859 inputLayerIndex = GetInputLayerInVector(graph, inputId);
860 }
861
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100862 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000863
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100864 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
865 LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
866 BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Kevin May43a799c2019-02-08 16:31:42 +0000867
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100868 IConnectableLayer* inputLayer =
869 m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000870
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100871 const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
872 inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
873 RegisterOutputSlots(graph, inputLayerIndex, inputLayer);
874
Derek Lamberti8ddae332019-02-21 16:29:43 +0000875 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100876 m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000877 }
878}
879
Derek Lamberti8ddae332019-02-21 16:29:43 +0000880void Deserializer::SetupOutputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000881{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000882 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100883 const unsigned int numOutputs = graph->outputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000884 m_OutputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100885 m_OutputBindings.reserve(numOutputs);
886
887 for (unsigned int i = 0; i < numOutputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000888 {
Tee Jungaa920c52019-11-05 10:48:25 +0000889 unsigned int outputLayerIndex = 0xFFFFFFFF;
890 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
891 {
892 const unsigned int outputId = boost::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
893 outputLayerIndex = GetLayerIndexInVector(graph, outputId);
894 }
895 else
896 {
897 const int outputId = graph->outputIds()->Get(i);
898 outputLayerIndex = GetOutputLayerInVector(graph, outputId);
899 }
900
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100901 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000902
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100903 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
904 LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
905 BOOST_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Derek Lamberti8ddae332019-02-21 16:29:43 +0000906
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100907 IConnectableLayer* outputLayer =
908 m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000909
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100910 RegisterInputSlots(graph, outputLayerIndex, outputLayer);
911
912 unsigned int sourceLayerIndex =
913 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
914 LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
915 const armnn::TensorInfo& tensorInfo = ToTensorInfo(sourceBaseLayer->outputSlots()->Get(0)->tensorInfo());
916
Derek Lamberti8ddae332019-02-21 16:29:43 +0000917 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100918 m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000919 }
920}
921
Derek Lamberti8ddae332019-02-21 16:29:43 +0000922void Deserializer::RegisterOutputSlots(GraphPtr graph,
923 uint32_t layerIndex,
924 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000925{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000926 CHECK_LAYERS(graph, 0, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000927 BOOST_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100928 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
929 if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
Kevin May43a799c2019-02-08 16:31:42 +0000930 {
931 throw ParseException(
932 boost::str(boost::format("The number of outputslots (%1%) does not match the number expected (%2%)"
933 " for layer index: %3% %4%") %
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100934 baseLayer->outputSlots()->size() %
Kevin May43a799c2019-02-08 16:31:42 +0000935 layer->GetNumOutputSlots() %
936 layerIndex %
937 CHECK_LOCATION().AsString()));
938 }
939
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100940 for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +0000941 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100942 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
943 armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
944 // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
945 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
Kevin May43a799c2019-02-08 16:31:42 +0000946 }
947}
948
Derek Lamberti8ddae332019-02-21 16:29:43 +0000949void Deserializer::RegisterInputSlots(GraphPtr graph,
950 uint32_t layerIndex,
951 armnn::IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000952{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000953 CHECK_LAYERS(graph, 0, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000954 BOOST_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100955 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
956 if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
Kevin May43a799c2019-02-08 16:31:42 +0000957 {
958 throw ParseException(
959 boost::str(boost::format("The number of inputslots (%1%) does not match the number expected (%2%)"
960 " for layer index:%3% %4%") %
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100961 baseLayer->inputSlots()->size() %
Kevin May43a799c2019-02-08 16:31:42 +0000962 layer->GetNumInputSlots() %
963 layerIndex %
964 CHECK_LOCATION().AsString()));
965 }
966
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100967 for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +0000968 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100969 auto fbInputSlot = baseLayer->inputSlots()->Get(i);
970 auto fbConnection = fbInputSlot->connection();
971 armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
972 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
Kevin May43a799c2019-02-08 16:31:42 +0000973 }
974}
975
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000976void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
977 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100978 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +0000979{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100980 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +0000981 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100982 m_GraphConnections[sourceLayerIndex] = Connections();
983 }
984
985 Connections& connections = m_GraphConnections[sourceLayerIndex];
986 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
987 {
988 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +0000989 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000990 else
991 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100992 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000993 }
994}
Kevin May43a799c2019-02-08 16:31:42 +0000995
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000996void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100997 uint32_t outputSlotIndex,
998 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000999{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001000 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1001 {
1002 m_GraphConnections[sourceLayerIndex] = Connections();
1003 }
1004
1005 Connections& connections = m_GraphConnections[sourceLayerIndex];
1006 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1007 {
1008 throw ParseException("Same output slot index processed twice");
1009 }
1010
1011 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +00001012}
1013
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001014void Deserializer::ParseAbs(armnnDeserializer::Deserializer::GraphPtr graph, unsigned int layerIndex)
1015{
1016 CHECK_LAYERS(graph, 0, layerIndex);
1017 auto inputs = GetInputs(graph, layerIndex);
1018 CHECK_LOCATION();
1019 CHECK_VALID_SIZE(inputs.size(), 1);
1020
1021 auto outputs = GetOutputs(graph, layerIndex);
1022 CHECK_VALID_SIZE(outputs.size(), 1);
1023
1024 auto layerName = GetLayerName(graph, layerIndex);
1025
josh minor4a3c6102020-01-06 16:40:46 -06001026 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
1027 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001028 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1029 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1030
1031 RegisterInputSlots(graph, layerIndex, layer);
1032 RegisterOutputSlots(graph, layerIndex, layer);
1033}
1034
Derek Lamberti8ddae332019-02-21 16:29:43 +00001035void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +00001036{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001037 CHECK_LAYERS(graph, 0, layerIndex);
1038 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001039 CHECK_LOCATION();
1040 CHECK_VALID_SIZE(inputs.size(), 1);
1041
Derek Lamberti8ddae332019-02-21 16:29:43 +00001042 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001043 CHECK_VALID_SIZE(outputs.size(), 1);
1044
Derek Lamberti8ddae332019-02-21 16:29:43 +00001045 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001046 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001047 auto serializerDescriptor = serializerLayer->descriptor();
1048
1049 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001050 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +00001051 descriptor.m_A = serializerDescriptor->a();
1052 descriptor.m_B = serializerDescriptor->b();
1053
1054 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
1055 layerName.c_str());
1056 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1057 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1058
Derek Lamberti8ddae332019-02-21 16:29:43 +00001059 RegisterInputSlots(graph, layerIndex, layer);
1060 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +00001061}
1062
Derek Lamberti8ddae332019-02-21 16:29:43 +00001063void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001064{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001065 CHECK_LAYERS(graph, 0, layerIndex);
1066 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001067 CHECK_LOCATION();
1068 CHECK_VALID_SIZE(inputs.size(), 2);
1069
Derek Lamberti8ddae332019-02-21 16:29:43 +00001070 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001071 CHECK_VALID_SIZE(outputs.size(), 1);
1072
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001073 auto layerName = GetLayerName(graph, layerIndex);
1074 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001075
1076 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1077 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1078
Derek Lamberti8ddae332019-02-21 16:29:43 +00001079 RegisterInputSlots(graph, layerIndex, layer);
1080 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001081}
1082
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001083void Deserializer::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
1084{
1085 CHECK_LAYERS(graph, 0, layerIndex);
1086 auto inputs = GetInputs(graph, layerIndex);
1087 CHECK_LOCATION();
1088 CHECK_VALID_SIZE(inputs.size(), 1);
1089
1090 auto outputs = GetOutputs(graph, layerIndex);
1091 CHECK_VALID_SIZE(outputs.size(), 1);
1092
1093 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1094 auto serializerDescriptor = serializerLayer->descriptor();
1095
1096 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001097 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001098 descriptor.m_Axis = serializerDescriptor->axis();
1099 auto layerName = GetLayerName(graph, layerIndex);
1100 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1101
1102 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1103 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1104
1105 RegisterInputSlots(graph, layerIndex, layer);
1106 RegisterOutputSlots(graph, layerIndex, layer);
1107}
1108
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001109void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
1110{
1111 CHECK_LAYERS(graph, 0, layerIndex);
1112
1113 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1114 CHECK_VALID_SIZE(inputs.size(), 1);
1115
1116 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1117 CHECK_VALID_SIZE(outputs.size(), 1);
1118
1119 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1120 auto flatBufferCrops = flatBufferDescriptor->crops();
1121 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1122
1123 if (flatBufferCrops->Length() % 2 != 0)
1124 {
1125 throw ParseException(boost::str(
1126 boost::format("The size of crops must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1127 }
1128
1129 std::vector<std::pair<unsigned int, unsigned int>> crops;
1130 crops.reserve(flatBufferCrops->Length() / 2);
1131 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
1132 {
1133 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1134 }
1135
1136 armnn::BatchToSpaceNdDescriptor descriptor;
1137 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1138 descriptor.m_BlockShape =
1139 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1140 descriptor.m_Crops = crops;
1141
1142 auto layerName = GetLayerName(graph, layerIndex);
1143 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1144
1145 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1146 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1147
1148 RegisterInputSlots(graph, layerIndex, layer);
1149 RegisterOutputSlots(graph, layerIndex, layer);
1150}
1151
ruoyan018e7fa232019-02-28 15:09:07 +00001152void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
1153{
1154 CHECK_LAYERS(graph, 0, layerIndex);
1155
1156 auto inputs = GetInputs(graph, layerIndex);
1157 CHECK_VALID_SIZE(inputs.size(), 1);
1158
1159 auto outputs = GetOutputs(graph, layerIndex);
1160 CHECK_VALID_SIZE(outputs.size(), 1);
1161 auto outputInfo = ToTensorInfo(outputs[0]);
1162
ruoyan015c7ab052019-03-04 14:48:02 +00001163 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001164
1165 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1166 auto serializerDescriptor = serializerLayer->descriptor();
1167
1168 armnn::BatchNormalizationDescriptor descriptor;
1169 descriptor.m_Eps = serializerDescriptor->eps();
1170 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1171
1172 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1173 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1174 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1175 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1176
1177 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1178 mean,
1179 variance,
1180 beta,
1181 gamma,
1182 layerName.c_str());
1183 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1184
1185 RegisterInputSlots(graph, layerIndex, layer);
1186 RegisterOutputSlots(graph, layerIndex, layer);
1187}
1188
Conor Kennedy76277882019-02-26 08:29:54 +00001189void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
1190{
1191 CHECK_LAYERS(graph, 0, layerIndex);
1192 CHECK_LOCATION();
1193
1194 auto outputs = GetOutputs(graph, layerIndex);
1195 CHECK_VALID_SIZE(outputs.size(), 1);
1196
1197 auto layerName = GetLayerName(graph, layerIndex);
1198
1199 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
1200 auto serializerInput = serializerLayer->input();
1201
1202 armnn::ConstTensor input = ToConstTensor(serializerInput);
1203
1204 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
1205
1206 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1207 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1208
1209 RegisterOutputSlots(graph, layerIndex, layer);
1210}
1211
Derek Lamberti8ddae332019-02-21 16:29:43 +00001212void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001213{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001214 CHECK_LAYERS(graph, 0, layerIndex);
1215 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001216 CHECK_LOCATION();
1217 CHECK_VALID_SIZE(inputs.size(), 1);
1218
Derek Lamberti8ddae332019-02-21 16:29:43 +00001219 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001220 CHECK_VALID_SIZE(outputs.size(), 1);
1221
Derek Lamberti8ddae332019-02-21 16:29:43 +00001222 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001223 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001224 auto serializerDescriptor = serializerLayer->descriptor();
1225
1226 armnn::Convolution2dDescriptor descriptor;
1227 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1228 descriptor.m_PadRight = serializerDescriptor->padRight();
1229 descriptor.m_PadTop = serializerDescriptor->padTop();
1230 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1231 descriptor.m_StrideX = serializerDescriptor->strideX();
1232 descriptor.m_StrideY = serializerDescriptor->strideY();;
Matthew Benthamacad04e2019-05-13 10:02:45 +01001233 descriptor.m_DilationX = serializerDescriptor->dilationX();
1234 descriptor.m_DilationY = serializerDescriptor->dilationY();;
Mike Kellya0766c32019-02-19 17:22:07 +00001235 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1236 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1237
1238 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1239 armnn::ConstTensor biases;
1240
Matteo Martincighfc598e12019-05-14 10:36:13 +01001241 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001242 if (descriptor.m_BiasEnabled)
1243 {
1244 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001245 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001246 }
1247 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1248 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001249 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001250 layerName.c_str());
1251 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1252 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1253
Derek Lamberti8ddae332019-02-21 16:29:43 +00001254 RegisterInputSlots(graph, layerIndex, layer);
1255 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001256}
1257
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001258void Deserializer::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
1259{
1260 CHECK_LAYERS(graph, 0, layerIndex);
1261
1262 auto inputs = GetInputs(graph, layerIndex);
1263 CHECK_VALID_SIZE(inputs.size(), 1);
1264
1265 auto outputs = GetOutputs(graph, layerIndex);
1266 CHECK_VALID_SIZE(outputs.size(), 1);
1267
1268 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1269
1270 armnn::DepthToSpaceDescriptor descriptor;
1271 descriptor.m_BlockSize = fbDescriptor->blockSize();
1272 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1273
1274 auto layerName = GetLayerName(graph, layerIndex);
1275 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1276
1277 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1278 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1279
1280 RegisterInputSlots(graph, layerIndex, layer);
1281 RegisterOutputSlots(graph, layerIndex, layer);
1282}
1283
Derek Lamberti8ddae332019-02-21 16:29:43 +00001284void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001285{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001286 CHECK_LAYERS(graph, 0, layerIndex);
1287 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001288 CHECK_LOCATION();
1289 CHECK_VALID_SIZE(inputs.size(), 1);
1290
Derek Lamberti8ddae332019-02-21 16:29:43 +00001291 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001292 CHECK_VALID_SIZE(outputs.size(), 1);
1293
Derek Lamberti8ddae332019-02-21 16:29:43 +00001294 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001295 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001296 auto serializerDescriptor = serializerLayer->descriptor();
1297
1298 armnn::DepthwiseConvolution2dDescriptor descriptor;
1299 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1300 descriptor.m_PadRight = serializerDescriptor->padRight();
1301 descriptor.m_PadTop = serializerDescriptor->padTop();
1302 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1303 descriptor.m_StrideX = serializerDescriptor->strideX();
Aron Virginas-Tar5e1b0cf2019-06-21 14:20:11 +01001304 descriptor.m_StrideY = serializerDescriptor->strideY();
1305 descriptor.m_DilationX = serializerDescriptor->dilationX();
1306 descriptor.m_DilationY = serializerDescriptor->dilationY();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001307 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1308 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1309
1310 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1311 armnn::ConstTensor biases;
1312
Matteo Martincighfc598e12019-05-14 10:36:13 +01001313 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001314 if (descriptor.m_BiasEnabled)
1315 {
1316 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001317 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001318 }
1319 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1320 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001321 optionalBiases,
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001322 layerName.c_str());
1323
1324 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1325 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1326
Derek Lamberti8ddae332019-02-21 16:29:43 +00001327 RegisterInputSlots(graph, layerIndex, layer);
1328 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001329}
1330
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001331void Deserializer::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
1332{
1333 CHECK_LAYERS(graph, 0, layerIndex);
1334 auto inputs = GetInputs(graph, layerIndex);
1335 CHECK_LOCATION();
1336 CHECK_VALID_SIZE(inputs.size(), 2);
1337
1338 auto outputs = GetOutputs(graph, layerIndex);
1339 CHECK_VALID_SIZE(outputs.size(), 4);
1340
1341 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1342 auto layerName = GetLayerName(graph, layerIndex);
1343 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1344
1345 armnn::DetectionPostProcessDescriptor descriptor;
1346 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1347 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1348 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1349 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1350 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1351 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1352 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1353 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1354 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1355 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1356 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1357
1358 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1359
1360 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1361 anchors,
1362 layerName.c_str());
1363
1364 for (unsigned int i = 0; i < 4; i++)
1365 {
1366 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1367 }
1368
1369 RegisterInputSlots(graph, layerIndex, layer);
1370 RegisterOutputSlots(graph, layerIndex, layer);
1371}
1372
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001373void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
1374{
1375 CHECK_LAYERS(graph, 0, layerIndex);
1376 auto inputs = GetInputs(graph, layerIndex);
1377 CHECK_LOCATION();
1378 CHECK_VALID_SIZE(inputs.size(), 2);
1379
1380 auto outputs = GetOutputs(graph, layerIndex);
1381 CHECK_VALID_SIZE(outputs.size(), 1);
1382
1383 auto layerName = GetLayerName(graph, layerIndex);
1384 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1385
1386 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1387 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1388
1389 RegisterInputSlots(graph, layerIndex, layer);
1390 RegisterOutputSlots(graph, layerIndex, layer);
1391}
1392
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001393void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
1394{
1395 CHECK_LAYERS(graph, 0, layerIndex);
1396 auto inputs = GetInputs(graph, layerIndex);
1397 CHECK_LOCATION();
1398 CHECK_VALID_SIZE(inputs.size(), 2);
1399
1400 auto outputs = GetOutputs(graph, layerIndex);
1401 CHECK_VALID_SIZE(outputs.size(), 1);
1402
1403 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001404 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1405 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001406
1407 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1408 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1409
1410 RegisterInputSlots(graph, layerIndex, layer);
1411 RegisterOutputSlots(graph, layerIndex, layer);
1412}
1413
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001414void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
1415{
1416 CHECK_LAYERS(graph, 0, layerIndex);
1417 auto inputs = GetInputs(graph, layerIndex);
1418 CHECK_LOCATION();
1419 CHECK_VALID_SIZE(inputs.size(), 2);
1420
1421 auto outputs = GetOutputs(graph, layerIndex);
1422 CHECK_VALID_SIZE(outputs.size(), 1);
1423
1424 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001425 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1426 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001427
1428 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1429 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1430
1431 RegisterInputSlots(graph, layerIndex, layer);
1432 RegisterOutputSlots(graph, layerIndex, layer);
1433}
1434
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001435void Deserializer::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
1436{
1437 CHECK_LAYERS(graph, 0, layerIndex);
1438
1439 auto inputs = GetInputs(graph, layerIndex);
1440 CHECK_VALID_SIZE(inputs.size(), 1);
1441
1442 auto outputs = GetOutputs(graph, layerIndex);
1443 CHECK_VALID_SIZE(outputs.size(), 1);
1444
1445 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1446 auto fbDescriptor = fbLayer->descriptor();
1447
1448 armnn::InstanceNormalizationDescriptor descriptor;
1449 descriptor.m_Gamma = fbDescriptor->gamma();
1450 descriptor.m_Beta = fbDescriptor->beta();
1451 descriptor.m_Eps = fbDescriptor->eps();
1452 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1453
1454 const std::string layerName = GetLayerName(graph, layerIndex);
1455 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1456
1457 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1458 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1459
1460 RegisterInputSlots(graph, layerIndex, layer);
1461 RegisterOutputSlots(graph, layerIndex, layer);
1462}
1463
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001464void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
1465{
1466 CHECK_LAYERS(graph, 0, layerIndex);
1467
1468 auto inputs = GetInputs(graph, layerIndex);
1469 CHECK_VALID_SIZE(inputs.size(), 1);
1470
1471 auto outputs = GetOutputs(graph, layerIndex);
1472 CHECK_VALID_SIZE(outputs.size(), 1);
1473 auto outputInfo = ToTensorInfo(outputs[0]);
1474
1475 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1476 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1477
1478 auto layerName = GetLayerName(graph, layerIndex);
1479 armnn::L2NormalizationDescriptor descriptor;
1480 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001481 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001482
1483 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1484 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1485
1486 RegisterInputSlots(graph, layerIndex, layer);
1487 RegisterOutputSlots(graph, layerIndex, layer);
1488}
1489
Sadik Armagan26257852019-10-14 13:00:47 +01001490void Deserializer::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
1491{
1492 CHECK_LAYERS(graph, 0, layerIndex);
1493
1494 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1495 CHECK_VALID_SIZE(inputs.size(), 1);
1496
1497 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1498 CHECK_VALID_SIZE(outputs.size(), 1);
1499
1500 armnn::LogSoftmaxDescriptor descriptor;
1501 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1502 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1503 auto layerName = GetLayerName(graph, layerIndex);
1504
1505 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1506
1507 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1508 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1509
1510 RegisterInputSlots(graph, layerIndex, layer);
1511 RegisterOutputSlots(graph, layerIndex, layer);
1512}
1513
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001514void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
1515{
1516 CHECK_LAYERS(graph, 0, layerIndex);
1517 auto inputs = GetInputs(graph, layerIndex);
1518 CHECK_LOCATION();
1519 CHECK_VALID_SIZE(inputs.size(), 2);
1520
1521 auto outputs = GetOutputs(graph, layerIndex);
1522 CHECK_VALID_SIZE(outputs.size(), 1);
1523
1524 auto layerName = GetLayerName(graph, layerIndex);
1525 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1526
1527 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1528 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1529
1530 RegisterInputSlots(graph, layerIndex, layer);
1531 RegisterOutputSlots(graph, layerIndex, layer);
1532}
1533
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001534void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
1535{
1536 CHECK_LAYERS(graph, 0, layerIndex);
1537 auto inputs = GetInputs(graph, layerIndex);
1538 CHECK_LOCATION();
1539 CHECK_VALID_SIZE(inputs.size(), 2);
1540
1541 auto outputs = GetOutputs(graph, layerIndex);
1542 CHECK_VALID_SIZE(outputs.size(), 1);
1543
1544 auto layerName = GetLayerName(graph, layerIndex);
1545 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1546
1547 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1548 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1549
1550 RegisterInputSlots(graph, layerIndex, layer);
1551 RegisterOutputSlots(graph, layerIndex, layer);
1552}
1553
Jim Flynne242f2d2019-05-22 14:24:13 +01001554const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1555 unsigned int layerIndex)
1556{
1557 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1558
1559 switch (layerType)
1560 {
1561 case Layer::Layer_ConcatLayer:
1562 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1563 case Layer::Layer_MergerLayer:
1564 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1565 default:
1566 throw armnn::Exception("unknown layer type, should be concat or merger");
1567 }
1568}
1569
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01001570void Deserializer::ParseComparison(GraphPtr graph, unsigned int layerIndex)
1571{
1572 CHECK_LAYERS(graph, 0, layerIndex);
1573 CHECK_LOCATION();
1574
1575 auto inputs = GetInputs(graph, layerIndex);
1576 CHECK_VALID_SIZE(inputs.size(), 2);
1577
1578 auto outputs = GetOutputs(graph, layerIndex);
1579 CHECK_VALID_SIZE(outputs.size(), 1);
1580
1581 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
1582 auto fbDescriptor = fbLayer->descriptor();
1583
1584 armnn::ComparisonDescriptor descriptor;
1585 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
1586
1587 const std::string& layerName = GetLayerName(graph, layerIndex);
1588 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1589
1590 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1591 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1592
1593 RegisterInputSlots(graph, layerIndex, layer);
1594 RegisterOutputSlots(graph, layerIndex, layer);
1595}
1596
josh minor4a3c6102020-01-06 16:40:46 -06001597void Deserializer::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
1598{
1599 CHECK_LAYERS(graph, 0, layerIndex);
1600 CHECK_LOCATION();
1601
1602 auto inputs = GetInputs(graph, layerIndex);
1603 CHECK_VALID_SIZE(inputs.size(), 1);
1604
1605 auto outputs = GetOutputs(graph, layerIndex);
1606 CHECK_VALID_SIZE(outputs.size(), 1);
1607
1608 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
1609 auto fbDescriptor = fbLayer->descriptor();
1610
1611 armnn::ElementwiseUnaryDescriptor descriptor;
1612 descriptor.m_Operation = ToUnaryOperation(fbDescriptor->operation());
1613
1614 const std::string& layerName = GetLayerName(graph, layerIndex);
1615 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
1616
1617 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1618 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1619
1620 RegisterInputSlots(graph, layerIndex, layer);
1621 RegisterOutputSlots(graph, layerIndex, layer);
1622}
1623
Jim Flynn906f9462019-05-10 13:55:21 +01001624void Deserializer::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001625{
1626 CHECK_LAYERS(graph, 0, layerIndex);
1627 CHECK_LOCATION();
1628
1629 auto outputs = GetOutputs(graph, layerIndex);
1630 CHECK_VALID_SIZE(outputs.size(), 1);
1631
Jim Flynnac25a1b2019-02-28 10:40:49 +00001632 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001633 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1634 unsigned int numViews = originsDescriptor->numViews();
1635 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001636
1637 // can now check the number of inputs == number of views
1638 auto inputs = GetInputs(graph, layerIndex);
1639 CHECK_VALID_SIZE(inputs.size(), numViews);
1640
1641 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001642 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001643 for (unsigned int v = 0; v < numViews; ++v)
1644 {
1645 auto originPtr = originsPtr->Get(v);
1646 for (unsigned int d = 0; d < numDimensions; ++d)
1647 {
1648 uint32_t value = originPtr->data()->Get(d);
1649 descriptor.SetViewOriginCoord(v, d, value);
1650 }
1651 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001652 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001653
Jim Flynn906f9462019-05-10 13:55:21 +01001654 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001655 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1656 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1657
1658 RegisterInputSlots(graph, layerIndex, layer);
1659 RegisterOutputSlots(graph, layerIndex, layer);
1660}
1661
Derek Lamberti8ddae332019-02-21 16:29:43 +00001662void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001663{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001664 CHECK_LAYERS(graph, 0, layerIndex);
1665 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001666 CHECK_LOCATION();
1667 CHECK_VALID_SIZE(inputs.size(), 2);
1668
Derek Lamberti8ddae332019-02-21 16:29:43 +00001669 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001670 CHECK_VALID_SIZE(outputs.size(), 1);
1671
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001672 auto layerName = GetLayerName(graph, layerIndex);
1673 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001674
1675 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1676 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1677
Derek Lamberti8ddae332019-02-21 16:29:43 +00001678 RegisterInputSlots(graph, layerIndex, layer);
1679 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001680}
1681
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001682void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
1683{
1684 CHECK_LAYERS(graph, 0, layerIndex);
1685 CHECK_LOCATION();
1686
1687 auto inputs = GetInputs(graph, layerIndex);
1688 CHECK_VALID_SIZE(inputs.size(), 1);
1689
1690 auto outputs = GetOutputs(graph, layerIndex);
1691 CHECK_VALID_SIZE(outputs.size(), 1);
1692
1693 auto layerName = GetLayerName(graph, layerIndex);
1694
1695 armnn::IConnectableLayer* layer;
1696
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001697 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001698
1699 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1700 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1701
1702 RegisterInputSlots(graph, layerIndex, layer);
1703 RegisterOutputSlots(graph, layerIndex, layer);
1704}
1705
Derek Lamberti8ddae332019-02-21 16:29:43 +00001706void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001707{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001708 CHECK_LAYERS(graph, 0, layerIndex);
1709 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001710 CHECK_LOCATION();
1711 CHECK_VALID_SIZE(inputs.size(), 1);
1712
Derek Lamberti8ddae332019-02-21 16:29:43 +00001713 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001714 CHECK_VALID_SIZE(outputs.size(), 1);
1715
Derek Lamberti8ddae332019-02-21 16:29:43 +00001716 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001717 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001718 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1719
1720 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1721 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1722 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1723
1724 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1725
1726 armnn::IConnectableLayer* layer;
Matteo Martincighfc598e12019-05-14 10:36:13 +01001727 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001728 if (flatBufferDescriptor->biasEnabled())
1729 {
1730 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001731 optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001732 }
Matteo Martincighfc598e12019-05-14 10:36:13 +01001733 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1734 weightsTensor,
1735 optionalBiases,
1736 layerName.c_str());
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001737
1738 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1739 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1740
Derek Lamberti8ddae332019-02-21 16:29:43 +00001741 RegisterInputSlots(graph, layerIndex, layer);
1742 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001743}
1744
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001745void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
1746{
1747 CHECK_LAYERS(graph, 0, layerIndex);
1748
1749 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1750 CHECK_VALID_SIZE(inputs.size(), 1);
1751
1752 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1753 CHECK_VALID_SIZE(outputs.size(), 1);
1754
1755 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1756 auto flatBufferPadList = flatBufferDescriptor->padList();
David Monahan34757812019-06-19 11:47:21 +01001757 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001758
1759 if (flatBufferPadList->Length() % 2 != 0)
1760 {
1761 throw ParseException(boost::str(
1762 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
1763 }
1764
1765 std::vector<std::pair<unsigned int, unsigned int>> padList;
1766 padList.reserve(flatBufferPadList->Length() / 2);
1767 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1768 {
1769 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1770 }
1771
David Monahan34757812019-06-19 11:47:21 +01001772 armnn::PadDescriptor descriptor(padList, padValue);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001773
1774 auto layerName = GetLayerName(graph, layerIndex);
1775 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1776
1777 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1778 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1779
1780 RegisterInputSlots(graph, layerIndex, layer);
1781 RegisterOutputSlots(graph, layerIndex, layer);
1782}
1783
Derek Lamberti8ddae332019-02-21 16:29:43 +00001784void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001785{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001786 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001787
1788 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001789 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001790
Derek Lamberti8ddae332019-02-21 16:29:43 +00001791 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001792 CHECK_VALID_SIZE(inputs.size(), 1);
1793
Derek Lamberti8ddae332019-02-21 16:29:43 +00001794 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001795 CHECK_VALID_SIZE(outputs.size(), 1);
1796 auto outputInfo = ToTensorInfo(outputs[0]);
1797
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001798 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001799 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1800
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001801 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001802 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1803
Derek Lamberti8ddae332019-02-21 16:29:43 +00001804 RegisterInputSlots(graph, layerIndex, layer);
1805 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001806}
1807
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001808armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001809 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001810{
Jan Eilers8eb25602020-03-09 12:13:48 +00001811 IgnoreUnused(layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001812 armnn::Pooling2dDescriptor desc;
1813
1814 switch (pooling2dDesc->poolType())
1815 {
1816 case PoolingAlgorithm_Average:
1817 {
1818 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001819 break;
1820 }
1821 case PoolingAlgorithm_Max:
1822 {
1823 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001824 break;
1825 }
1826 default:
1827 {
1828 BOOST_ASSERT_MSG(false, "Unsupported pooling algorithm");
1829 }
1830 }
1831
1832 switch (pooling2dDesc->outputShapeRounding())
1833 {
1834 case OutputShapeRounding_Floor:
1835 {
1836 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1837 break;
1838 }
1839 case OutputShapeRounding_Ceiling:
1840 {
1841 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
1842 break;
1843 }
1844 default:
1845 {
1846 BOOST_ASSERT_MSG(false, "Unsupported output shape rounding");
1847 }
1848 }
1849
1850 switch (pooling2dDesc->paddingMethod())
1851 {
1852 case PaddingMethod_Exclude:
1853 {
1854 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1855 break;
1856 }
1857 case PaddingMethod_IgnoreValue:
1858 {
1859 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
1860 break;
1861 }
1862 default:
1863 {
1864 BOOST_ASSERT_MSG(false, "Unsupported padding method");
1865 }
1866 }
1867
1868 switch (pooling2dDesc->dataLayout())
1869 {
1870 case DataLayout_NCHW:
1871 {
1872 desc.m_DataLayout = armnn::DataLayout::NCHW;
1873 break;
1874 }
1875 case DataLayout_NHWC:
1876 {
1877 desc.m_DataLayout = armnn::DataLayout::NHWC;
1878 break;
1879 }
1880 default:
1881 {
1882 BOOST_ASSERT_MSG(false, "Unsupported data layout");
1883 }
1884 }
1885
1886 desc.m_PadRight = pooling2dDesc->padRight();
1887 desc.m_PadLeft = pooling2dDesc->padLeft();
1888 desc.m_PadBottom = pooling2dDesc->padBottom();
1889 desc.m_PadTop = pooling2dDesc->padTop();
1890 desc.m_StrideX = pooling2dDesc->strideX();
1891 desc.m_StrideY = pooling2dDesc->strideY();
1892 desc.m_PoolWidth = pooling2dDesc->poolWidth();
1893 desc.m_PoolHeight = pooling2dDesc->poolHeight();
1894
1895 return desc;
1896}
1897
Derek Lamberti8ddae332019-02-21 16:29:43 +00001898void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001899{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001900 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001901
Derek Lamberti8ddae332019-02-21 16:29:43 +00001902 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001903 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001904 CHECK_VALID_SIZE(inputs.size(), 1);
1905
Derek Lamberti8ddae332019-02-21 16:29:43 +00001906 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001907 CHECK_VALID_SIZE(outputs.size(), 1);
1908 auto outputInfo = ToTensorInfo(outputs[0]);
1909
1910 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001911 auto layerName = GetLayerName(graph, layerIndex);
1912 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001913 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1914
Derek Lamberti8ddae332019-02-21 16:29:43 +00001915 RegisterInputSlots(graph, layerIndex, layer);
1916 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001917}
1918
Derek Lamberti87acb272019-03-27 16:51:31 +00001919void Deserializer::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
1920{
1921 CHECK_LAYERS(graph, 0, layerIndex);
1922
1923 auto inputs = GetInputs(graph, layerIndex);
1924 CHECK_VALID_SIZE(inputs.size(), 1);
1925
1926 auto outputs = GetOutputs(graph, layerIndex);
1927 CHECK_VALID_SIZE(outputs.size(), 1);
1928 auto outputInfo = ToTensorInfo(outputs[0]);
1929
1930 auto layerName = GetLayerName(graph, layerIndex);
1931 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
1932 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1933
1934 RegisterInputSlots(graph, layerIndex, layer);
1935 RegisterOutputSlots(graph, layerIndex, layer);
1936}
1937
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001938armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00001939 const std::vector<uint32_t>& targetDimsIn)
1940{
1941 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
1942 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
1943
1944 if (stretchDim != targetDimsIn.end())
1945 {
1946 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
1947 {
1948 throw ParseException(boost::str(
1949 boost::format("At most one component of shape can be -1 %1%") % CHECK_LOCATION().AsString()));
1950 }
1951
1952 auto targetNumElements =
1953 boost::numeric_cast<unsigned int>(
1954 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
1955
1956 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
1957 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
1958 }
1959
1960 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
1961
1962 armnn::TensorInfo reshapeInfo = inputTensorInfo;
1963 reshapeInfo.SetShape(outputShape);
1964
1965 return reshapeInfo;
1966}
1967
/// Deserializes a Reshape layer: resolves the target shape (including a
/// possible -1 "infer" dimension via OutputShapeOfReshape), validates it
/// against the serialized output dimensions, and adds the layer.
void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    // Resolve any -1 stretch dimension against the input's element count.
    armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    // NOTE(review): the shape consistency check only fires when more than one
    // input is present; with the usual single-input reshape it is skipped.
    // Looks intentional (shape-as-second-input case) but worth confirming.
    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    // The layer's advertised output info is the resolved shape, not the
    // serialized one, so inferred (-1) dimensions are concrete downstream.
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2010
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002011void Deserializer::ParseResize(GraphPtr graph, unsigned int layerIndex)
2012{
2013 CHECK_LAYERS(graph, 0, layerIndex);
2014
2015 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2016 CHECK_VALID_SIZE(inputs.size(), 1);
2017
2018 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2019 CHECK_VALID_SIZE(outputs.size(), 1);
2020
2021 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2022
2023 armnn::ResizeDescriptor descriptor;
2024 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
2025 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
2026 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
2027 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2028
2029 auto layerName = GetLayerName(graph, layerIndex);
2030 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2031
2032 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2033 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2034
2035 RegisterInputSlots(graph, layerIndex, layer);
2036 RegisterOutputSlots(graph, layerIndex, layer);
2037}
2038
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002039void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
2040{
2041 CHECK_LAYERS(graph, 0, layerIndex);
2042
2043 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2044 CHECK_VALID_SIZE(inputs.size(), 1);
2045
2046 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2047 CHECK_VALID_SIZE(outputs.size(), 1);
2048
2049 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2050
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002051 armnn::ResizeDescriptor descriptor;
2052 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002053 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002054 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
2055 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002056
2057 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002058 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002059
2060 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2061 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2062
2063 RegisterInputSlots(graph, layerIndex, layer);
2064 RegisterOutputSlots(graph, layerIndex, layer);
2065}
2066
Derek Lamberti8ddae332019-02-21 16:29:43 +00002067void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002068{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002069 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002070
Derek Lamberti8ddae332019-02-21 16:29:43 +00002071 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002072 CHECK_VALID_SIZE(inputs.size(), 1);
2073
Derek Lamberti8ddae332019-02-21 16:29:43 +00002074 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002075 CHECK_VALID_SIZE(outputs.size(), 1);
2076
2077 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00002078 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002079 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002080
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002081 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2082
2083 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2084 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2085
Derek Lamberti8ddae332019-02-21 16:29:43 +00002086 RegisterInputSlots(graph, layerIndex, layer);
2087 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00002088}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002089
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002090void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
2091{
2092 CHECK_LAYERS(graph, 0, layerIndex);
2093
2094 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2095 CHECK_VALID_SIZE(inputs.size(), 1);
2096
2097 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2098 CHECK_VALID_SIZE(outputs.size(), 1);
2099
2100 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2101 auto flatBufferPadList = flatBufferDescriptor->padList();
2102 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
2103
2104 if (flatBufferPadList->Length() % 2 != 0)
2105 {
2106 throw ParseException(boost::str(
2107 boost::format("The size of the pad list must be divisible by 2 %1%") % CHECK_LOCATION().AsString()));
2108 }
2109
2110 std::vector<std::pair<unsigned int, unsigned int>> padList;
2111 padList.reserve(flatBufferPadList->Length() / 2);
2112 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
2113 {
2114 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2115 }
2116
2117 armnn::SpaceToBatchNdDescriptor descriptor;
2118 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2119 descriptor.m_BlockShape =
2120 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2121 descriptor.m_PadList = padList;
2122
2123 auto layerName = GetLayerName(graph, layerIndex);
2124 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2125
2126 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2127 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2128
2129 RegisterInputSlots(graph, layerIndex, layer);
2130 RegisterOutputSlots(graph, layerIndex, layer);
2131}
2132
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002133void Deserializer::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
2134{
2135 CHECK_LAYERS(graph, 0, layerIndex);
2136
2137 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2138 CHECK_VALID_SIZE(inputs.size(), 1);
2139
2140 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2141 CHECK_VALID_SIZE(outputs.size(), 1);
2142
2143 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2144
2145 armnn::SpaceToDepthDescriptor descriptor;
2146 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2147 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2148
2149 auto layerName = GetLayerName(graph, layerIndex);
2150 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2151
2152 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2153 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2154
2155 RegisterInputSlots(graph, layerIndex, layer);
2156 RegisterOutputSlots(graph, layerIndex, layer);
2157}
2158
/// Translates a serialized normalization descriptor into its armnn equivalent.
/// Each switch maps one flatbuffer enum onto the corresponding armnn enum; on
/// an unknown value BOOST_ASSERT_MSG fires in debug builds, while release
/// builds (assert compiled out) silently keep the descriptor's default value.
armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
    Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
    unsigned int layerIndex)
{
    // layerIndex is kept in the signature for parity with other Get*Descriptor
    // helpers but is not needed here.
    IgnoreUnused(layerIndex);
    armnn::NormalizationDescriptor desc;

    // Normalization across channels vs. within a single channel.
    switch (normalizationDescriptor->normChannelType())
    {
        case NormalizationAlgorithmChannel_Across:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
            break;
        }
        case NormalizationAlgorithmChannel_Within:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported normalization channel type");
        }
    }

    // Normalization method (e.g. LRN-style local brightness / local contrast).
    switch (normalizationDescriptor->normMethodType())
    {
        case NormalizationAlgorithmMethod_LocalBrightness:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
            break;
        }
        case NormalizationAlgorithmMethod_LocalContrast:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported normalization method type");
        }
    }

    // Tensor data layout of the normalized input.
    switch (normalizationDescriptor->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            BOOST_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    // Scalar parameters are copied through unchanged.
    desc.m_Alpha = normalizationDescriptor->alpha();
    desc.m_Beta = normalizationDescriptor->beta();
    desc.m_K = normalizationDescriptor->k();
    desc.m_NormSize = normalizationDescriptor->normSize();

    return desc;
}
2227
2228void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
2229{
2230 CHECK_LAYERS(graph, 0, layerIndex);
2231
2232 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2233
2234 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2235 CHECK_VALID_SIZE(inputs.size(), 1);
2236
2237 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2238 CHECK_VALID_SIZE(outputs.size(), 1);
2239
2240 auto outputInfo = ToTensorInfo(outputs[0]);
2241
2242 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2243 auto layerName = GetLayerName(graph, layerIndex);
2244
2245 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2246 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2247
2248 RegisterInputSlots(graph, layerIndex, layer);
2249 RegisterOutputSlots(graph, layerIndex, layer);
2250}
2251
Sadik Armagan8b42a382019-03-01 14:24:49 +00002252void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
2253{
2254 CHECK_LAYERS(graph, 0, layerIndex);
2255 auto inputs = GetInputs(graph, layerIndex);
2256 CHECK_LOCATION();
2257 CHECK_VALID_SIZE(inputs.size(), 1);
2258
2259 auto outputs = GetOutputs(graph, layerIndex);
2260 CHECK_VALID_SIZE(outputs.size(), 1);
2261
2262 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagan8b42a382019-03-01 14:24:49 +00002263
josh minor4a3c6102020-01-06 16:40:46 -06002264 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
2265 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
Sadik Armagan8b42a382019-03-01 14:24:49 +00002266 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2267 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2268
2269 RegisterInputSlots(graph, layerIndex, layer);
2270 RegisterOutputSlots(graph, layerIndex, layer);
2271}
2272
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002273void Deserializer::ParseSlice(GraphPtr graph, unsigned int layerIndex)
2274{
2275 CHECK_LAYERS(graph, 0, layerIndex);
2276
2277 auto inputs = GetInputs(graph, layerIndex);
2278 CHECK_VALID_SIZE(inputs.size(), 1);
2279
2280 auto outputs = GetOutputs(graph, layerIndex);
2281 CHECK_VALID_SIZE(outputs.size(), 1);
2282
2283 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2284
2285 auto fbBegin = fbDescriptor->begin();
2286 auto fbSize = fbDescriptor->size();
2287
2288 if (fbBegin->Length() != fbSize->Length())
2289 {
2290 throw ParseException(boost::str(
2291 boost::format("Begin and size descriptors must have the same length %1%") % CHECK_LOCATION().AsString()));
2292 }
2293
2294 armnn::SliceDescriptor descriptor;
2295 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
2296 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
2297
2298 auto layerName = GetLayerName(graph, layerIndex);
2299 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2300
2301 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2302 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2303
2304 RegisterInputSlots(graph, layerIndex, layer);
2305 RegisterOutputSlots(graph, layerIndex, layer);
2306}
2307
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002308void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
2309{
2310 CHECK_LAYERS(graph, 0, layerIndex);
2311
2312 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2313 CHECK_VALID_SIZE(inputs.size(), 1);
2314
2315 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2316 CHECK_VALID_SIZE(outputs.size(), 1);
2317
2318 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2319
2320 auto flatBufferBegin = flatBufferDescriptor->begin();
2321 auto flatBufferEnd = flatBufferDescriptor->end();
2322 auto flatBufferStride = flatBufferDescriptor->stride();
2323
2324 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
2325 flatBufferBegin->Length() == flatBufferStride->Length()))
2326 {
2327 throw ParseException(boost::str(
2328 boost::format("The size of the begin, end, and stride must be equal %1%") % CHECK_LOCATION().AsString()));
2329 }
2330
2331 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2332 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2333 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
2334
2335 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
2336 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2337 descriptor.m_EndMask = flatBufferDescriptor->endMask();
2338 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
2339 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
2340 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
2341 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2342
2343 auto layerName = GetLayerName(graph, layerIndex);
2344 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
2345
2346 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2347 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2348
2349 RegisterInputSlots(graph, layerIndex, layer);
2350 RegisterOutputSlots(graph, layerIndex, layer);
2351}
2352
Conor Kennedyda1f9752019-03-01 14:37:12 +00002353void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
2354{
2355 CHECK_LAYERS(graph, 0, layerIndex);
2356 auto inputs = GetInputs(graph, layerIndex);
2357 CHECK_LOCATION();
2358 CHECK_VALID_SIZE(inputs.size(), 2);
2359
2360 auto outputs = GetOutputs(graph, layerIndex);
2361 CHECK_VALID_SIZE(outputs.size(), 1);
2362
2363 auto layerName = GetLayerName(graph, layerIndex);
2364 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
2365
2366 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2367 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2368
2369 RegisterInputSlots(graph, layerIndex, layer);
2370 RegisterOutputSlots(graph, layerIndex, layer);
2371}
2372
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002373void Deserializer::ParseGather(GraphPtr graph, unsigned int layerIndex)
2374{
2375 CHECK_LAYERS(graph, 0, layerIndex);
2376
2377 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2378 CHECK_VALID_SIZE(inputs.size(), 2);
2379
2380 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2381 CHECK_VALID_SIZE(outputs.size(), 1);
2382
2383 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002384 IConnectableLayer* layer = m_Network->AddGatherLayer(layerName.c_str());
2385
2386 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002387 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2388
2389 RegisterInputSlots(graph, layerIndex, layer);
2390 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002391}
2392
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002393void Deserializer::ParseMean(GraphPtr graph, unsigned int layerIndex)
2394{
2395 CHECK_LAYERS(graph, 0, layerIndex);
2396
2397 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2398 CHECK_VALID_SIZE(inputs.size(), 1);
2399
2400 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2401 CHECK_VALID_SIZE(outputs.size(), 1);
2402
2403 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
2404 auto flatBufferAxis = flatBufferDescriptor->axis();
2405 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
2406
2407 armnn::MeanDescriptor descriptor;
2408 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2409 descriptor.m_KeepDims = flatBufferKeepDims;
2410
2411 auto layerName = GetLayerName(graph, layerIndex);
2412 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
2413
2414 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2415 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2416
2417 RegisterInputSlots(graph, layerIndex, layer);
2418 RegisterOutputSlots(graph, layerIndex, layer);
2419}
2420
/// Deserializes a Splitter layer: rebuilds the ViewsDescriptor (per-view sizes
/// and origin coordinates) from the flatbuffer and assigns one output slot per
/// view.
void Deserializer::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    // NOTE(review): outputs.size() is not validated against numViews below;
    // a malformed file with fewer outputs than views would index past the end
    // of this vector — confirm upstream guarantees.
    Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);

    auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
    auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
    auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
    auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
    uint32_t numViews = flatBufferOriginsDescriptor->numViews();
    uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();

    // Check numViews and numDimensions corresponds to the ones already serialized ...
    // numViews == flatBufferViewSizes.size();
    // foreach: numDimensions == flatBufferViewSizes[x].size();

    // Copy each view's size and origin coordinate, dimension by dimension.
    armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
    for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
    {
        for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
        {
            viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
            viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
        }
    }

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());

    // I could have as many outputs as views ...
    for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
    {
        armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
        layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2464
Jim Flynn11af3752019-03-19 17:22:29 +00002465armnn::LstmDescriptor Deserializer::GetLstmDescriptor(Deserializer::LstmDescriptorPtr lstmDescriptor)
2466{
2467 armnn::LstmDescriptor desc;
2468
2469 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
2470 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
2471 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
2472 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
2473 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
2474 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01002475 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00002476
2477 return desc;
2478}
2479
/// Deserializes an LSTM layer: rebuilds the descriptor and all weight/bias
/// ConstTensors, wiring up only the parameter groups enabled by the
/// descriptor's CIFG / projection / peephole / layer-norm flags.
/// Note: LstmInputParams holds raw pointers into the local ConstTensor
/// objects below; they are only valid until AddLstmLayer (which copies the
/// data) returns, so the statement order here matters.
void Deserializer::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    // 3 inputs: input, outputStateIn, cellStateIn (per slot registration below).
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    // 4 outputs, assigned to slots 0..3 below.
    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters — always present regardless of feature flags.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate parameters exist only when CIFG (coupled input-forget gate)
    // is disabled. Declared outside the if so they outlive the pointer wiring.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Projection parameters — only when the projection layer is enabled.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Peephole connections — only when peepholes are enabled.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Layer-normalization weights — the input-gate norm weights are
    // additionally gated on CIFG being disabled (no input gate otherwise).
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (lstmDescriptor.m_LayerNormEnabled)
    {
        if (!lstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    // Four outputs: scratchBuffer, outputStateOut, cellStateOut, output.
    // TODO confirm ordering against the serializer.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2595
Jan Eilers5b01a892019-07-23 09:47:43 +01002596void Deserializer::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
2597{
2598 CHECK_LAYERS(graph, 0, layerIndex);
2599
2600 auto inputs = GetInputs(graph, layerIndex);
2601 CHECK_VALID_SIZE(inputs.size(), 3);
2602
2603 auto outputs = GetOutputs(graph, layerIndex);
2604 CHECK_VALID_SIZE(outputs.size(), 2);
2605
2606 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
2607 auto layerName = GetLayerName(graph, layerIndex);
2608 auto flatBufferInputParams = flatBufferLayer->inputParams();
2609
2610 armnn::QuantizedLstmInputParams lstmInputParams;
2611
2612 armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
2613 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
2614 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
2615 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
2616 armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
2617 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
2618 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
2619 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
2620 armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
2621 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
2622 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
2623 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
2624
2625 lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
2626 lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
2627 lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
2628 lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
2629 lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
2630 lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
2631 lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
2632 lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
2633 lstmInputParams.m_InputGateBias = &inputGateBias;
2634 lstmInputParams.m_ForgetGateBias = &forgetGateBias;
2635 lstmInputParams.m_CellBias = &cellBias;
2636 lstmInputParams.m_OutputGateBias = &outputGateBias;
2637
2638 IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());
2639
2640 armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
2641 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);
2642
2643 armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
2644 layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);
2645
2646 RegisterInputSlots(graph, layerIndex, layer);
2647 RegisterOutputSlots(graph, layerIndex, layer);
2648}
2649
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002650void Deserializer::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
2651{
2652 CHECK_LAYERS(graph, 0, layerIndex);
2653
2654 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2655 CHECK_VALID_SIZE(inputs.size(), 1);
2656
2657 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2658 CHECK_VALID_SIZE(outputs.size(), 1);
2659
2660 const std::string layerName = GetLayerName(graph, layerIndex);
2661 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
2662
2663 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2664 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2665
2666 RegisterInputSlots(graph, layerIndex, layer);
2667 RegisterOutputSlots(graph, layerIndex, layer);
2668}
2669
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01002670void Deserializer::ParseMerge(GraphPtr graph, unsigned int layerIndex)
2671{
2672 CHECK_LAYERS(graph, 0, layerIndex);
2673
2674 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2675 CHECK_VALID_SIZE(inputs.size(), 2);
2676
2677 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2678 CHECK_VALID_SIZE(outputs.size(), 1);
2679
2680 const std::string layerName = GetLayerName(graph, layerIndex);
2681 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
2682
2683 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2684 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2685
2686 RegisterInputSlots(graph, layerIndex, layer);
2687 RegisterOutputSlots(graph, layerIndex, layer);
2688}
2689
Sadik Armaganeff363d2019-04-05 15:25:46 +01002690void Deserializer::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
2691{
2692 CHECK_LAYERS(graph, 0, layerIndex);
2693 auto inputs = GetInputs(graph, layerIndex);
2694 CHECK_LOCATION();
2695 CHECK_VALID_SIZE(inputs.size(), 2);
2696
2697 auto outputs = GetOutputs(graph, layerIndex);
2698 CHECK_VALID_SIZE(outputs.size(), 2);
2699
2700 auto layerName = GetLayerName(graph, layerIndex);
2701 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
2702
2703 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
2704 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
2705
2706 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
2707 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
2708
2709 RegisterInputSlots(graph, layerIndex, layer);
2710 RegisterOutputSlots(graph, layerIndex, layer);
2711}
2712
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01002713void Deserializer::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
2714{
2715 CHECK_LAYERS(graph, 0, layerIndex);
2716 auto inputs = GetInputs(graph, layerIndex);
2717 CHECK_LOCATION();
2718 CHECK_VALID_SIZE(inputs.size(), 2);
2719
2720 auto outputs = GetOutputs(graph, layerIndex);
2721 CHECK_VALID_SIZE(outputs.size(), 1);
2722
2723 auto layerName = GetLayerName(graph, layerIndex);
2724 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
2725
2726 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2727 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2728
2729 RegisterInputSlots(graph, layerIndex, layer);
2730 RegisterOutputSlots(graph, layerIndex, layer);
2731}
2732
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002733void Deserializer::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
2734{
2735 CHECK_LAYERS(graph, 0, layerIndex);
2736
2737 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
2738
2739 auto inputs = GetInputs(graph, layerIndex);
2740 CHECK_VALID_SIZE(inputs.size(), 1);
2741
2742 auto outputs = GetOutputs(graph, layerIndex);
2743 CHECK_VALID_SIZE(outputs.size(), 1);
2744 auto outputInfo = ToTensorInfo(outputs[0]);
2745
2746 auto layerName = GetLayerName(graph, layerIndex);
2747 const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
2748
2749 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
2750 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2751
2752 RegisterInputSlots(graph, layerIndex, layer);
2753 RegisterOutputSlots(graph, layerIndex, layer);
2754}
2755
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01002756void Deserializer::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
2757{
2758 CHECK_LAYERS(graph, 0, layerIndex);
2759
2760 auto inputs = GetInputs(graph, layerIndex);
2761 CHECK_VALID_SIZE(inputs.size(), 1);
2762
2763 auto outputs = GetOutputs(graph, layerIndex);
2764 CHECK_VALID_SIZE(outputs.size(), 1);
2765
2766 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
2767 auto layerName = GetLayerName(graph, layerIndex);
2768 auto serializerDescriptor = serializerLayer->descriptor();
2769
2770 armnn::TransposeConvolution2dDescriptor descriptor;
2771 descriptor.m_PadLeft = serializerDescriptor->padLeft();
2772 descriptor.m_PadRight = serializerDescriptor->padRight();
2773 descriptor.m_PadTop = serializerDescriptor->padTop();
2774 descriptor.m_PadBottom = serializerDescriptor->padBottom();
2775 descriptor.m_StrideX = serializerDescriptor->strideX();
2776 descriptor.m_StrideY = serializerDescriptor->strideY();;
2777 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
2778 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
2779
2780 // weights & biases
2781 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
2782 armnn::Optional<armnn::ConstTensor> optionalBiases;
2783 if (descriptor.m_BiasEnabled)
2784 {
2785 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
2786 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
2787 }
2788
2789 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
2790 weights,
2791 optionalBiases,
2792 layerName.c_str());
2793
2794 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2795 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2796
2797 RegisterInputSlots(graph, layerIndex, layer);
2798 RegisterOutputSlots(graph, layerIndex, layer);
2799}
2800
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01002801void Deserializer::ParseStack(GraphPtr graph, unsigned int layerIndex)
2802{
2803 CHECK_LAYERS(graph, 0, layerIndex);
2804 auto inputs = GetInputs(graph, layerIndex);
2805
2806 auto outputs = GetOutputs(graph, layerIndex);
2807 CHECK_VALID_SIZE(outputs.size(), 1);
2808
2809 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
2810 unsigned int axis = flatBufferDescriptor->axis();
2811 unsigned int numInputs = flatBufferDescriptor->numInputs();
2812 CHECK_VALID_SIZE(inputs.size(), numInputs);
2813
2814 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
2815 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
2816 flatBufferInputShape->begin() + flatBufferInputShape->size());
2817
2818 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
2819 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
2820
2821 for (unsigned int i=0; i<inputs.size(); ++i)
2822 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01002823 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01002824 if (descriptor.m_InputShape != inputShape)
2825 {
2826 std::stringstream ss;
2827 ss << "Shape of input "
2828 << i
2829 << " "
2830 << inputShape
2831 << " does not equal defined input shape "
2832 << descriptor.m_InputShape
2833 << ": "
2834 << CHECK_LOCATION().AsString();
2835 throw ParseException(ss.str());
2836 }
2837 }
2838
2839 auto layerName = GetLayerName(graph, layerIndex);
2840 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
2841
2842 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2843 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2844
2845 RegisterInputSlots(graph, layerIndex, layer);
2846 RegisterOutputSlots(graph, layerIndex, layer);
2847}
2848
Aron Virginas-Tar85121a22019-10-23 10:41:35 +01002849void Deserializer::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
2850{
2851 CHECK_LAYERS(graph, 0, layerIndex);
2852
2853 auto inputs = GetInputs(graph, layerIndex);
2854 auto outputs = GetOutputs(graph, layerIndex);
2855
2856 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
2857 auto fbDescriptor = fbLayer->descriptor();
2858
2859 armnn::StandInDescriptor descriptor;
2860 descriptor.m_NumInputs = fbDescriptor->numInputs();
2861 descriptor.m_NumOutputs = fbDescriptor->numOutputs();
2862
2863 CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
2864 CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);
2865
2866 const std::string layerName = GetLayerName(graph, layerIndex);
2867 armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());
2868
2869 for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
2870 {
2871 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
2872 layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
2873 }
2874
2875 RegisterInputSlots(graph, layerIndex, layer);
2876 RegisterOutputSlots(graph, layerIndex, layer);
2877}
2878
Derek Lamberti0028d1b2019-02-20 13:57:42 +00002879} // namespace armnnDeserializer