blob: 074429b73ce2eb5727dab732c03ee77f23a13833 [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
Finn Williams2605b232020-06-10 15:53:46 +01002// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
Kevin May43a799c2019-02-08 16:31:42 +00003// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
Matthew Benthamff130e22020-01-17 11:47:42 +00008#include <armnn/Descriptors.hpp>
Kevin May43a799c2019-02-08 16:31:42 +00009#include <armnn/Exceptions.hpp>
Matthew Benthamff130e22020-01-17 11:47:42 +000010#include <armnn/TypesUtils.hpp>
11#include <armnn/LstmParams.hpp>
12#include <armnn/QuantizedLstmParams.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000013
Matteo Martincighe011d202019-11-28 11:35:47 +000014#include <armnnUtils/Permute.hpp>
Mike Kellyc9ea45a2020-02-28 18:11:58 +000015#include <armnnUtils/Transpose.hpp>
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +010016#include <armnn/utility/Assert.hpp>
Jan Eilers8eb25602020-03-09 12:13:48 +000017#include <armnn/utility/IgnoreUnused.hpp>
Matthew Sloyan0663d662020-09-14 11:47:26 +010018#include <armnn/utility/NumericCast.hpp>
Matteo Martincighe011d202019-11-28 11:35:47 +000019
Kevin May43a799c2019-02-08 16:31:42 +000020#include <ParserHelper.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000021#include <VerificationHelpers.hpp>
22
Colm Donelan5b5c2222020-09-09 12:48:16 +010023#include <fmt/format.h>
Kevin May43a799c2019-02-08 16:31:42 +000024
Kevin May43a799c2019-02-08 16:31:42 +000025#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000026#include <algorithm>
27#include <limits>
28#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000029
30using armnn::ParseException;
31using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000032using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000033
Derek Lamberti0028d1b2019-02-20 13:57:42 +000034namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000035{
Kevin May43a799c2019-02-08 16:31:42 +000036
// Public facade: all work is delegated to the pimpl DeserializerImpl.
IDeserializer::IDeserializer() : pDeserializerImpl(new DeserializerImpl()){}

// Defaulted in the .cpp (not the header) so the destructor is instantiated
// where DeserializerImpl is a complete type.
// NOTE(review): assumes pDeserializerImpl is a smart pointer declared in the
// header - confirm against Deserializer.hpp.
IDeserializer::~IDeserializer() = default;

// Raw-pointer factory; the caller must pair this with Destroy().
IDeserializer *IDeserializer::CreateRaw()
{
    return new IDeserializer();
}

// Smart-pointer factory: wraps CreateRaw() so Destroy() runs automatically.
IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

// Companion to CreateRaw(); deletes an instance created by it.
void IDeserializer::Destroy(IDeserializer *parser)
{
    delete parser;
}
55
56armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(const std::vector<uint8_t> &binaryContent)
57{
58 return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
59}
60
61armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(std::istream &binaryContent)
62{
63 return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
64}
65
66BindingPointInfo IDeserializer::GetNetworkInputBindingInfo(unsigned int layerId, const std::string &name) const
67{
68 return pDeserializerImpl->GetNetworkInputBindingInfo(layerId, name);
69}
70
71BindingPointInfo IDeserializer::GetNetworkOutputBindingInfo(unsigned int layerId, const std::string &name) const
72{
73 return pDeserializerImpl->GetNetworkOutputBindingInfo(layerId, name);
74}
75
namespace
{

// Sentinel layer index used for binding layers that do not occupy a real slot
// in the serialized layer list; CheckLayers treats it as always valid.
const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
80
Finn Williams85d36712021-01-26 22:30:06 +000081 void CheckGraph(const GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000082 unsigned int layersIndex,
83 const CheckLocation& location)
84{
85 if (graph->layers() == nullptr)
86 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010087 throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
88 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
89 "layers:{1} at {2}",
90 location.m_Function,
91 layersIndex,
92 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +000093 }
94 else if (layersIndex >= graph->layers()->size())
95 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010096 throw ParseException(fmt::format("{0} was called with an invalid layers index. layers:{1} at {2}",
97 location.m_Function,
98 layersIndex,
99 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000100 }
101}
102
// Validates that both layersIndex and layerIndex refer to a layer that exists
// in the deserialized graph; throws ParseException (with call-site location)
// on a null graph, an out-of-range layers index, or an out-of-range layer
// index.  VIRTUAL_LAYER_ID is exempt from the layer-index check.
void CheckLayers(const GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
                                         "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(fmt::format("{0} was called with an invalid layers index. "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    // NOTE(review): graph->layers() returns a pointer, so indexing it with
    // layersIndex does pointer arithmetic and only dereferences the intended
    // vector when layersIndex == 0 - confirm callers never pass a non-zero
    // layersIndex here (the preceding range check suggests they could).
    else if (layerIndex >= graph->layers()[layersIndex].size()
            && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(fmt::format("{0} was called with an invalid layer index. "
                                         "layers:{1} layer:{2} at {3}",
                                         location.m_Function,
                                         layersIndex,
                                         layerIndex,
                                         location.FileLine()));
    }
}
136
Finn Williams85d36712021-01-26 22:30:06 +0000137void CheckTensorPtr(TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000138 const CheckLocation& location)
139{
140 if (rawPtr == nullptr)
141 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100142 throw ParseException(fmt::format("{0} was called with a null tensor pointer. at {1}",
143 location.m_Function,
144 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000145 }
146}
147
Finn Williams85d36712021-01-26 22:30:06 +0000148void CheckConstTensorPtr(ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000149 const CheckLocation& location)
150{
151 if (rawPtr == nullptr)
152 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100153 throw ParseException(fmt::format("{0} was called with a null const tensor pointer. at {1}",
154 location.m_Function,
155 location.FileLine()));
Mike Kellya0766c32019-02-19 17:22:07 +0000156 }
157}
158
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000159void CheckConstTensorSize(const unsigned int constTensorSize,
160 const unsigned int tensorSize,
161 const CheckLocation& location)
162{
163 if (constTensorSize != tensorSize)
164 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100165 throw ParseException(fmt::format("{0} wrong number of components supplied to tensor. at:{1}",
166 location.m_Function,
167 location.FileLine()));
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000168 }
169}
170
// Convenience wrappers around the Check* helpers above: each captures the
// call site via CHECK_LOCATION() so ParseException messages report where the
// validation was requested.
#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
} // anonymous namespace
186
Saoirse Stewart263829c2019-02-19 15:54:14 +0000187bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
188{
189 const unsigned int actualSize = actual.GetNumDimensions();
190 if (actualSize != expected.size())
191 {
192 return false;
193 }
194
195 for (unsigned int i = 0u; i < actualSize; i++)
196 {
197 if (actual[i] != static_cast<unsigned int>(expected[i]))
198 {
199 return false;
200 }
201 }
202
203 return true;
204}
205
// Builds the layer-parsing dispatch table: every entry defaults to
// ParseUnsupportedLayer, then each layer type the deserializer understands is
// registered with its dedicated parse function.  m_Network starts out null and
// is created when a binary is parsed.
IDeserializer::DeserializerImpl::DeserializerImpl()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &IDeserializer::DeserializerImpl::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer]                    = &DeserializerImpl::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer]             = &DeserializerImpl::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer]               = &DeserializerImpl::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer]              = &DeserializerImpl::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer]         = &DeserializerImpl::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer]     = &DeserializerImpl::ParseBatchNormalization;
    m_ParserFunctions[Layer_ComparisonLayer]             = &DeserializerImpl::ParseComparison;
    m_ParserFunctions[Layer_CastLayer]                   = &DeserializerImpl::ParseCast;
    m_ParserFunctions[Layer_ConcatLayer]                 = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer]               = &DeserializerImpl::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer]          = &DeserializerImpl::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthToSpaceLayer]           = &DeserializerImpl::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &DeserializerImpl::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer]             = &DeserializerImpl::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer]   = &DeserializerImpl::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer]               = &DeserializerImpl::ParseDivision;
    m_ParserFunctions[Layer_ElementwiseUnaryLayer]       = &DeserializerImpl::ParseElementwiseUnary;
    m_ParserFunctions[Layer_EqualLayer]                  = &DeserializerImpl::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer]         = &DeserializerImpl::ParseFullyConnected;
    m_ParserFunctions[Layer_FillLayer]                   = &DeserializerImpl::ParseFill;
    m_ParserFunctions[Layer_FloorLayer]                  = &DeserializerImpl::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer]                 = &DeserializerImpl::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer]                = &DeserializerImpl::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer]  = &DeserializerImpl::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer]        = &DeserializerImpl::ParseL2Normalization;
    m_ParserFunctions[Layer_LogicalBinaryLayer]          = &DeserializerImpl::ParseLogicalBinary;
    m_ParserFunctions[Layer_LogSoftmaxLayer]             = &DeserializerImpl::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer]                   = &DeserializerImpl::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer]                = &DeserializerImpl::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer]                   = &DeserializerImpl::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer]                = &DeserializerImpl::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer]                  = &DeserializerImpl::ParseMerge;
    // MergerLayer is the deprecated alias of ConcatLayer; parse it the same way.
    m_ParserFunctions[Layer_MergerLayer]                 = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer]         = &DeserializerImpl::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer]          = &DeserializerImpl::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer]                    = &DeserializerImpl::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer]                = &DeserializerImpl::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer]              = &DeserializerImpl::ParsePooling2d;
    m_ParserFunctions[Layer_PreluLayer]                  = &DeserializerImpl::ParsePrelu;
    m_ParserFunctions[Layer_QLstmLayer]                  = &DeserializerImpl::ParseQLstm;
    m_ParserFunctions[Layer_QuantizeLayer]               = &DeserializerImpl::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer]          = &DeserializerImpl::ParseQuantizedLstm;
    m_ParserFunctions[Layer_RankLayer]                   = &DeserializerImpl::ParseRank;
    m_ParserFunctions[Layer_ReduceLayer]                 = &DeserializerImpl::ParseReduce;
    m_ParserFunctions[Layer_ReshapeLayer]                = &DeserializerImpl::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer]         = &DeserializerImpl::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer]                 = &DeserializerImpl::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer]                  = &DeserializerImpl::ParseRsqrt;
    m_ParserFunctions[Layer_ShapeLayer]                  = &DeserializerImpl::ParseShape;
    m_ParserFunctions[Layer_SliceLayer]                  = &DeserializerImpl::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer]                = &DeserializerImpl::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer]         = &DeserializerImpl::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer]           = &DeserializerImpl::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer]               = &DeserializerImpl::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer]                  = &DeserializerImpl::ParseStack;
    m_ParserFunctions[Layer_StandInLayer]                = &DeserializerImpl::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer]           = &DeserializerImpl::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer]            = &DeserializerImpl::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer]                 = &DeserializerImpl::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &DeserializerImpl::ParseTransposeConvolution2d;
    m_ParserFunctions[Layer_TransposeLayer]              = &DeserializerImpl::ParseTranspose;
    m_ParserFunctions[Layer_UnidirectionalSequenceLstmLayer] = &DeserializerImpl::ParseUnidirectionalSequenceLstm;
}
275
// Resolves the flatbuffers Layer union at layerIndex to its concrete table and
// returns that table's common LayerBase record.  Input and output layers wrap
// their LayerBase in an extra bind-layer table, hence the double base() calls.
// Throws ParseException for an unrecognized union tag.
LayerBaseRawPtr IDeserializer::DeserializerImpl::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_CastLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_CastLayer()->base();
        case Layer::Layer_ComparisonLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_ElementwiseUnaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FillLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FillLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            // Bind layer: LayerBase is nested one level deeper.
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_InstanceNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LogicalBinaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer()->base();
        case Layer::Layer_LogSoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            // Bind layer: LayerBase is nested one level deeper.
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_RankLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
        case Layer::Layer_ReduceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReduceLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_ShapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ShapeLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StandInLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_TransposeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
        case Layer::Layer_UnidirectionalSequenceLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(fmt::format("Layer type {} not recognized", layerType));
    }
}
415
Finn Williams85d36712021-01-26 22:30:06 +0000416std::string IDeserializer::DeserializerImpl::GetLayerName(const GraphPtr& graph, unsigned int index)
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000417{
418 auto layer = GetBaseLayer(graph, index);
419 assert(layer);
420 return layer->layerName()->str();
421}
422
Finn Williams85d36712021-01-26 22:30:06 +0000423int32_t IDeserializer::DeserializerImpl::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000424{
425 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
426
427 if (layerType == Layer::Layer_InputLayer)
428 {
429 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
430 }
431 else if ( layerType == Layer::Layer_OutputLayer )
432 {
433 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
434 }
435 return 0;
436}
437
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000438armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000439{
440 switch (dataLayout)
441 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000442 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000443 return armnn::DataLayout::NHWC;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000444 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000445 default:
446 return armnn::DataLayout::NCHW;
447 }
448}
449
// Maps the serialized activation-function enum onto armnn::ActivationFunction.
// Unknown values fall back to Sigmoid (the default case).
armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        case armnnSerializer::ActivationFunction_Elu:
            return armnn::ActivationFunction::Elu;
        case armnnSerializer::ActivationFunction_HardSwish:
            return armnn::ActivationFunction::HardSwish;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}
480
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100481armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
482{
483 switch (function)
484 {
485 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
486 return armnn::ArgMinMaxFunction::Max;
487 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
488 default:
489 return armnn::ArgMinMaxFunction::Min;
490 }
491}
492
// Maps the serialized comparison enum onto armnn::ComparisonOperation.
// Unknown values fall back to NotEqual (the default case).
armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
            return armnn::ComparisonOperation::Equal;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
            return armnn::ComparisonOperation::Greater;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
            return armnn::ComparisonOperation::GreaterOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
            return armnn::ComparisonOperation::Less;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
            return armnn::ComparisonOperation::LessOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
        default:
            return armnn::ComparisonOperation::NotEqual;
    }
}
512
// Maps the serialized reduce enum onto armnn::ReduceOperation.
// Unknown values fall back to Sum (the default case).
armnn::ReduceOperation ToReduceOperation(armnnSerializer::ReduceOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::ReduceOperation::ReduceOperation_Sum:
            return armnn::ReduceOperation::Sum;
        case armnnSerializer::ReduceOperation::ReduceOperation_Max:
            return armnn::ReduceOperation::Max;
        case armnnSerializer::ReduceOperation::ReduceOperation_Mean:
            return armnn::ReduceOperation::Mean;
        case armnnSerializer::ReduceOperation::ReduceOperation_Min:
            return armnn::ReduceOperation::Min;
        case armnnSerializer::ReduceOperation::ReduceOperation_Prod:
            return armnn::ReduceOperation::Prod;
        default:
            return armnn::ReduceOperation::Sum;
    }
}
531
James Conroyaba90cd2020-11-06 16:28:18 +0000532armnn::LogicalBinaryOperation ToLogicalBinaryOperation(armnnSerializer::LogicalBinaryOperation operation)
533{
534 switch (operation)
535 {
536 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalAnd:
537 return armnn::LogicalBinaryOperation::LogicalAnd;
538 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalOr:
539 return armnn::LogicalBinaryOperation::LogicalOr;
540 default:
541 throw armnn::InvalidArgumentException("Logical Binary operation unknown");
542 }
543}
544
// Maps the serialized elementwise-unary enum onto armnn::UnaryOperation.
// Unknown values throw InvalidArgumentException rather than defaulting.
armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
            return armnn::UnaryOperation::Abs;
        case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
            return armnn::UnaryOperation::Rsqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
            return armnn::UnaryOperation::Sqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
            return armnn::UnaryOperation::Exp;
        case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
            return armnn::UnaryOperation::Neg;
        case armnnSerializer::UnaryOperation::UnaryOperation_LogicalNot:
            return armnn::UnaryOperation::LogicalNot;
        case armnnSerializer::UnaryOperation::UnaryOperation_Log:
            return armnn::UnaryOperation::Log;
        case armnnSerializer::UnaryOperation::UnaryOperation_Sin:
            return armnn::UnaryOperation::Sin;
        default:
            throw armnn::InvalidArgumentException("Unary operation unknown");
    }
}
569
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100570armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
571{
572 switch (method)
573 {
574 case armnnSerializer::ResizeMethod_NearestNeighbor:
575 return armnn::ResizeMethod::NearestNeighbor;
576 case armnnSerializer::ResizeMethod_Bilinear:
Aron Virginas-Tar3c9b2702019-10-31 13:45:16 +0000577 return armnn::ResizeMethod::Bilinear;
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100578 default:
579 return armnn::ResizeMethod::NearestNeighbor;
580 }
581}
582
/// Rebuilds an armnn::TensorInfo from its serialized flatbuffer form.
/// Handles scalar and not-specified dimensionalities as special cases, and
/// selects per-axis quantization when a quantizationScales vector is present.
/// @throws ParseException for data types with no armnn equivalent.
armnn::TensorInfo ToTensorInfo(TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    // Map the serialized data type onto armnn's DataType. The deprecated
    // Quantised* names share cases with their modern Q* replacements.
    switch (tensorPtr->dataType())
    {
        case DataType_QAsymmS8:
            type = armnn::DataType::QAsymmS8;
            break;
        case DataType_QSymmS8:
            type = armnn::DataType::QSymmS8;
            break;
        case DataType_QuantisedAsymm8:
        case DataType_QAsymmU8:
            type = armnn::DataType::QAsymmU8;
            break;
        case DataType_QSymmS16:
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QSymmS16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Signed64:
            type = armnn::DataType::Signed64;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
                                             tensorPtr->dataType(),
                                             EnumNameDataType(tensorPtr->dataType()),
                                             location.AsString()));
        }
    }

    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    // Scalar and NotSpecified tensors have no dimension data to read; return
    // early with the corresponding sentinel TensorShape.
    if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::Scalar))
    {
        return armnn::TensorInfo(TensorShape{armnn::Dimensionality::Scalar},
                                 type,
                                 quantizationScale,
                                 quantizationOffset);
    }
    else if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::NotSpecified))
    {
        armnn::TensorInfo result(TensorShape{Dimensionality::NotSpecified},
                                 type,
                                 quantizationScale,
                                 quantizationOffset);
        return result;
    }

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
    bool dimensionsSpecificity[armnn::MaxNumOfTensorDimensions];
    std::fill_n(dimensionsSpecificity, armnn::MaxNumOfTensorDimensions, true);
    // For backwards compatibility check if the dimensionSpecificity vector is present first.
    // The default is to have dimensionSpecificity set to all true's anyway.
    if (tensorPtr->dimensionSpecificity() != nullptr)
    {
        auto dimensionSpecificity = tensorPtr->dimensionSpecificity();
        // NOTE(review): 'size' is overwritten with the specificity count here
        // and then used as the rank of the TensorShape below — this assumes
        // dimensionSpecificity and dimensions always have the same length;
        // confirm against the serializer.
        size = dimensionSpecificity->size();
        for (unsigned int i = 0; i < size; ++i)
        {
            dimensionsSpecificity[i] = dimensionSpecificity->Get(i);
        }
    }
    // Construct a TensorShape
    TensorShape shape(size, outputDims.data(), dimensionsSpecificity);

    // Per-axis quantization takes precedence when a scales vector is stored.
    auto quantizationScales = tensorPtr->quantizationScales();
    if (quantizationScales)
    {
        unsigned int quantizationScalesSize = quantizationScales->size();
        std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
        unsigned int quantizationDim = tensorPtr->quantizationDim();
        armnn::TensorInfo result(shape,
                                 type,
                                 scales,
                                 quantizationDim);
        return result;
    }

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(shape,
                             type,
                             quantizationScale,
                             quantizationOffset);

    return result;
}
688
/// Rebuilds an armnn::ConstTensor from its serialized form. The ConstTensor is
/// constructed over the raw flatbuffer payload pointer (presumably zero-copy —
/// callers should keep the flatbuffer alive while the tensor is in use).
/// @throws ParseException for payload types with no armnn equivalent.
armnn::ConstTensor ToConstTensor(ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());
    // Deserialized tensors are always marked constant.
    tensorInfo.SetConstant();

    // Each case validates the serialized element count against the tensor
    // info before wrapping the data pointer.
    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
                                             constTensorPtr->data_type(),
                                             EnumNameConstTensorData(constTensorPtr->data_type()),
                                             location.AsString()));
        }
    }
}
731
Finn Williams85d36712021-01-26 22:30:06 +0000732TensorRawPtrVector IDeserializer::DeserializerImpl::GetInputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000733{
734 CHECK_LAYERS(graphPtr, 0, layerIndex);
735 auto layer = GetBaseLayer(graphPtr, layerIndex);
736 const auto& numInputs = layer->inputSlots()->size();
737
738 TensorRawPtrVector result(numInputs);
739
740 for (unsigned int i=0; i<numInputs; ++i)
741 {
742 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
743 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
744 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
745 }
746 return result;
747}
748
Finn Williams85d36712021-01-26 22:30:06 +0000749TensorRawPtrVector IDeserializer::DeserializerImpl::GetOutputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000750{
751 CHECK_LAYERS(graphPtr, 0, layerIndex);
752 auto layer = GetBaseLayer(graphPtr, layerIndex);
753 const auto& numOutputs = layer->outputSlots()->size();
754
755 TensorRawPtrVector result(numOutputs);
756
757 for (unsigned int i=0; i<numOutputs; ++i)
758 {
759 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
760 }
761 return result;
762}
763
Finn Williams85d36712021-01-26 22:30:06 +0000764void IDeserializer::DeserializerImpl::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000765{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000766 CHECK_LAYERS(graph, 0, layerIndex);
767 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100768 throw ParseException(fmt::format("Layer not supported. layerIndex: {0} "
769 "layerName: {1} / {2}",
770 layerIndex,
771 layerName,
772 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000773}
774
Finn Williams85d36712021-01-26 22:30:06 +0000775void IDeserializer::DeserializerImpl::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000776{
777 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000778 m_InputBindings.clear();
779 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000780}
781
Kevin May43a799c2019-02-08 16:31:42 +0000782
Finn Williams85d36712021-01-26 22:30:06 +0000783INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000784{
785 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000786 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
787 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000788}
789
Finn Williams85d36712021-01-26 22:30:06 +0000790armnn::INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000791{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000792 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000793 std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
794 GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
795 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000796}
797
/// Verifies that 'binaryContent' holds a valid SerializedGraph flatbuffer and
/// returns the typed root pointer.
/// @throws InvalidArgumentException if binaryContent is null.
/// @throws ParseException if flatbuffer verification fails.
GraphPtr IDeserializer::DeserializerImpl::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(fmt::format("Invalid (null) binary content {}",
                                                   CHECK_LOCATION().AsString()));
    }
    // Run the flatbuffers structural verifier before trusting any offsets.
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(fmt::format("Buffer doesn't conform to the expected Armnn "
                                         "flatbuffers format. size:{0} {1}",
                                         len,
                                         CHECK_LOCATION().AsString()));
    }
    // NOTE(review): the returned GraphPtr presumably points into
    // binaryContent (flatbuffers accessors are zero-copy) — confirm callers
    // keep the buffer alive while the graph is used.
    return GetSerializedGraph(binaryContent);
}
815
/// Builds an INetwork from a deserialized graph: dispatches every non-IO layer
/// to its per-type parser, creates the bound input/output layers, then makes
/// all slot connections recorded during parsing.
INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    ARMNN_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    // Input/output layers are skipped here; they are created by
    // SetupInputLayers/SetupOutputLayers below, which also record bindings.
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            // An output slot may have no consumers; only connect those that do.
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    // Hand ownership of the built network to the caller; m_Network stays
    // moved-from until the next ResetParser()/CreateNetworkFromBinary call.
    return std::move(m_Network);
}
856
Finn Williams85d36712021-01-26 22:30:06 +0000857BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000858 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000859{
Jan Eilers8eb25602020-03-09 12:13:48 +0000860 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000861 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000862 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000863 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000864 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000865 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000866 }
867 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100868 throw ParseException(fmt::format("No input binding found for layer:{0} / {1}",
869 name,
870 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000871}
872
Finn Williams85d36712021-01-26 22:30:06 +0000873BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000874 const std::string& name) const
875{
Jan Eilers8eb25602020-03-09 12:13:48 +0000876 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000877 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000878 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000879 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000880 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000881 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000882 }
883 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100884 throw ParseException(fmt::format("No output binding found for layer:{0} / {1}",
885 name,
886 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000887}
888
Finn Williams85d36712021-01-26 22:30:06 +0000889unsigned int IDeserializer::DeserializerImpl::GetInputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000890{
891 for (unsigned int i = 0; i < graph->layers()->size(); i++)
892 {
893 auto layer = graph->layers()->Get(i);
894 if (layer->layer_type() == Layer::Layer_InputLayer)
895 {
896 auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
897 if (layerBindingId == targetId)
898 {
899 return i;
900 }
901 }
902 }
903 throw ParseException("Input layer with given layerBindingId not found");
904}
905
Finn Williams85d36712021-01-26 22:30:06 +0000906unsigned int IDeserializer::DeserializerImpl::GetOutputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000907{
908 for (unsigned int i = 0; i < graph->layers()->size(); i++)
909 {
910 auto layer = graph->layers()->Get(i);
911 if (layer->layer_type() == Layer::Layer_OutputLayer)
912 {
913 auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
914 if (layerBindingId == targetId)
915 {
916 return i;
917 }
918 }
919 }
920 throw ParseException("Output layer with given layerBindingId not found");
921}
922
Finn Williams85d36712021-01-26 22:30:06 +0000923unsigned int IDeserializer::DeserializerImpl::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100924{
925 for (unsigned int i = 0; i < graph->layers()->size(); i++)
926 {
927 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
928 if (layer->index() == targetIndex)
929 {
930 return i;
931 }
932 }
933 throw ParseException("Layer with given index not found");
934}
935
Finn Williams85d36712021-01-26 22:30:06 +0000936IDeserializer::DeserializerImpl::FeatureVersions IDeserializer::DeserializerImpl::GetFeatureVersions(GraphPtr graph)
Tee Jungaa920c52019-11-05 10:48:25 +0000937{
Finn Williams85d36712021-01-26 22:30:06 +0000938 IDeserializer::DeserializerImpl::FeatureVersions versions;
Tee Jungaa920c52019-11-05 10:48:25 +0000939
940 if (graph->featureVersions())
941 {
942 versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
Jan Eilers53ef7952021-06-02 12:01:25 +0100943 versions.m_WeightsLayoutScheme = graph->featureVersions()->weightsLayoutScheme();
Matthew Sloyan81beae32021-07-13 19:46:11 +0100944 versions.m_ConstTensorsAsInputs = graph->featureVersions()->constantTensorsAsInputs();
Tee Jungaa920c52019-11-05 10:48:25 +0000945 }
946
947 return versions;
948}
949
/// Creates an armnn InputLayer for every input id in the graph, registers its
/// output slots for later wiring, and records a (layerName, BindingPointInfo)
/// pair in m_InputBindings for client lookup by name.
void IDeserializer::DeserializerImpl::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numInputs = graph->inputIds()->size();
    m_InputBindings.clear();
    m_InputBindings.reserve(numInputs);

    for (unsigned int i = 0; i < numInputs; i++)
    {
        unsigned int inputLayerIndex = 0xFFFFFFFF;
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            // Legacy scheme (0): the stored id is the layer's serialized index.
            const unsigned int inputId = armnn::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
            inputLayerIndex = GetLayerIndexInVector(graph, inputId);
        }
        else
        {
            // Newer scheme: the stored id is the layer binding id.
            const int inputId = graph->inputIds()->Get(i);
            inputLayerIndex = GetInputLayerInVector(graph, inputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");

        IConnectableLayer* inputLayer =
            m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());

        // Const ref binds to the ToTensorInfo temporary (lifetime extended).
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
        inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
        RegisterOutputSlots(graph, inputLayerIndex, inputLayer);

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
988
/// Creates an armnn OutputLayer for every output id in the graph, registers
/// its input slot for later wiring, and records a (layerName, BindingPointInfo)
/// pair in m_OutputBindings. The tensor info is taken from the source layer's
/// output slot that feeds the output.
void IDeserializer::DeserializerImpl::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numOutputs = graph->outputIds()->size();
    m_OutputBindings.clear();
    m_OutputBindings.reserve(numOutputs);

    for (unsigned int i = 0; i < numOutputs; i++)
    {
        unsigned int outputLayerIndex = 0xFFFFFFFF;
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            // Legacy scheme (0): the stored id is the layer's serialized index.
            const unsigned int outputId = armnn::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
            outputLayerIndex = GetLayerIndexInVector(graph, outputId);
        }
        else
        {
            // Newer scheme: the stored id is the layer binding id.
            const int outputId = graph->outputIds()->Get(i);
            outputLayerIndex = GetOutputLayerInVector(graph, outputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Output has no name.");

        IConnectableLayer* outputLayer =
            m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());

        RegisterInputSlots(graph, outputLayerIndex, outputLayer);
        unsigned int sourceLayerIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
        // NOTE(review): GetLayerIndexInVector is applied to an *output slot
        // index* here, not a layer index — it only works if slot indices
        // coincide with layer indices for the values seen in practice; confirm
        // this is intentional.
        unsigned int outputSlotIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->outputSlotIndex());
        LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(
            sourceBaseLayer->outputSlots()->Get(outputSlotIndex)->tensorInfo());
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
1031
Finn Williams85d36712021-01-26 22:30:06 +00001032void IDeserializer::DeserializerImpl::RegisterOutputSlots(GraphPtr graph,
Derek Lamberti8ddae332019-02-21 16:29:43 +00001033 uint32_t layerIndex,
1034 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +00001035{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001036 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001037 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001038 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
1039 if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
Kevin May43a799c2019-02-08 16:31:42 +00001040 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001041 throw ParseException(fmt::format("The number of outputslots ({0}) does not match the number expected ({1})"
1042 " for layer index: {2} {3}",
1043 baseLayer->outputSlots()->size(),
1044 layer->GetNumOutputSlots(),
1045 layerIndex,
1046 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +00001047 }
1048
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001049 for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +00001050 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001051 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
1052 armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
1053 // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
1054 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
Kevin May43a799c2019-02-08 16:31:42 +00001055 }
1056}
1057
/// Records, for each input slot of 'layer', which serialized connection feeds
/// it, so CreateNetworkFromGraph can wire the slots once all layers exist.
/// @param ignoreSlots slot indices to skip — presumably slots already
///        satisfied elsewhere (e.g. constant tensors supplied as inputs);
///        confirm against callers.
/// @throws ParseException if the serialized slot count (plus ignored slots)
///         disagrees with the created layer's slot count.
void IDeserializer::DeserializerImpl::RegisterInputSlots(GraphPtr graph,
                                                         uint32_t layerIndex,
                                                         armnn::IConnectableLayer* layer,
                                                         std::vector<unsigned int> ignoreSlots)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    ARMNN_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);

    if (baseLayer->inputSlots()->size() != (layer->GetNumInputSlots() - ignoreSlots.size()))
    {
        throw ParseException(fmt::format("The number of inputslots ({0}) does not match the number expected ({1})"
                                         " for layer index:{2} {3}",
                                         baseLayer->inputSlots()->size(),
                                         layer->GetNumInputSlots(),
                                         layerIndex,
                                         CHECK_LOCATION().AsString()));
    }

    // NOTE(review): non-ignored armnn slot index 'i' is used to index the
    // serialized inputSlots directly — this assumes ignored slots do not shift
    // the alignment between the two lists; confirm with callers that pass a
    // non-empty ignoreSlots.
    for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
    {
        // Check if slot should be ignored.
        if (std::find(ignoreSlots.begin(), ignoreSlots.end(), i) == ignoreSlots.end())
        {
            auto fbInputSlot = baseLayer->inputSlots()->Get(i);
            auto fbConnection = fbInputSlot->connection();
            armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
            RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
        }
    }
}
1089
Finn Williams85d36712021-01-26 22:30:06 +00001090void IDeserializer::DeserializerImpl::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001091 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001092 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +00001093{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001094 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +00001095 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001096 m_GraphConnections[sourceLayerIndex] = Connections();
1097 }
1098
1099 Connections& connections = m_GraphConnections[sourceLayerIndex];
1100 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
1101 {
1102 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +00001103 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001104 else
1105 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001106 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001107 }
1108}
Kevin May43a799c2019-02-08 16:31:42 +00001109
Finn Williams85d36712021-01-26 22:30:06 +00001110void IDeserializer::DeserializerImpl::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001111 uint32_t outputSlotIndex,
1112 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001113{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001114 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1115 {
1116 m_GraphConnections[sourceLayerIndex] = Connections();
1117 }
1118
1119 Connections& connections = m_GraphConnections[sourceLayerIndex];
1120 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1121 {
1122 throw ParseException("Same output slot index processed twice");
1123 }
1124
1125 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +00001126}
1127
Finn Williams85d36712021-01-26 22:30:06 +00001128void IDeserializer::DeserializerImpl::ParseAbs(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001129{
1130 CHECK_LAYERS(graph, 0, layerIndex);
1131 auto inputs = GetInputs(graph, layerIndex);
1132 CHECK_LOCATION();
1133 CHECK_VALID_SIZE(inputs.size(), 1);
1134
1135 auto outputs = GetOutputs(graph, layerIndex);
1136 CHECK_VALID_SIZE(outputs.size(), 1);
1137
1138 auto layerName = GetLayerName(graph, layerIndex);
1139
josh minor4a3c6102020-01-06 16:40:46 -06001140 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
1141 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001142 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1143 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1144
1145 RegisterInputSlots(graph, layerIndex, layer);
1146 RegisterOutputSlots(graph, layerIndex, layer);
1147}
1148
Finn Williams85d36712021-01-26 22:30:06 +00001149void IDeserializer::DeserializerImpl::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +00001150{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001151 CHECK_LAYERS(graph, 0, layerIndex);
1152 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001153 CHECK_LOCATION();
1154 CHECK_VALID_SIZE(inputs.size(), 1);
1155
Derek Lamberti8ddae332019-02-21 16:29:43 +00001156 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001157 CHECK_VALID_SIZE(outputs.size(), 1);
1158
Derek Lamberti8ddae332019-02-21 16:29:43 +00001159 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001160 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001161 auto serializerDescriptor = serializerLayer->descriptor();
1162
1163 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001164 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +00001165 descriptor.m_A = serializerDescriptor->a();
1166 descriptor.m_B = serializerDescriptor->b();
1167
1168 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
1169 layerName.c_str());
1170 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1171 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1172
Derek Lamberti8ddae332019-02-21 16:29:43 +00001173 RegisterInputSlots(graph, layerIndex, layer);
1174 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +00001175}
1176
Finn Williams85d36712021-01-26 22:30:06 +00001177void IDeserializer::DeserializerImpl::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001178{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001179 CHECK_LAYERS(graph, 0, layerIndex);
1180 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001181 CHECK_LOCATION();
1182 CHECK_VALID_SIZE(inputs.size(), 2);
1183
Derek Lamberti8ddae332019-02-21 16:29:43 +00001184 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001185 CHECK_VALID_SIZE(outputs.size(), 1);
1186
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001187 auto layerName = GetLayerName(graph, layerIndex);
1188 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001189
1190 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1191 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1192
Derek Lamberti8ddae332019-02-21 16:29:43 +00001193 RegisterInputSlots(graph, layerIndex, layer);
1194 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001195}
1196
Finn Williams85d36712021-01-26 22:30:06 +00001197void IDeserializer::DeserializerImpl::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001198{
1199 CHECK_LAYERS(graph, 0, layerIndex);
1200 auto inputs = GetInputs(graph, layerIndex);
1201 CHECK_LOCATION();
1202 CHECK_VALID_SIZE(inputs.size(), 1);
1203
1204 auto outputs = GetOutputs(graph, layerIndex);
1205 CHECK_VALID_SIZE(outputs.size(), 1);
1206
1207 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1208 auto serializerDescriptor = serializerLayer->descriptor();
1209
1210 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001211 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001212 descriptor.m_Axis = serializerDescriptor->axis();
1213 auto layerName = GetLayerName(graph, layerIndex);
1214 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1215
1216 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1217 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1218
1219 RegisterInputSlots(graph, layerIndex, layer);
1220 RegisterOutputSlots(graph, layerIndex, layer);
1221}
1222
Finn Williams85d36712021-01-26 22:30:06 +00001223void IDeserializer::DeserializerImpl::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001224{
1225 CHECK_LAYERS(graph, 0, layerIndex);
1226
Finn Williams85d36712021-01-26 22:30:06 +00001227 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001228 CHECK_VALID_SIZE(inputs.size(), 1);
1229
Finn Williams85d36712021-01-26 22:30:06 +00001230 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001231 CHECK_VALID_SIZE(outputs.size(), 1);
1232
1233 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1234 auto flatBufferCrops = flatBufferDescriptor->crops();
1235 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1236
1237 if (flatBufferCrops->Length() % 2 != 0)
1238 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001239 throw ParseException(fmt::format("The size of crops must be divisible by 2 {}", CHECK_LOCATION().AsString()));
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001240 }
1241
1242 std::vector<std::pair<unsigned int, unsigned int>> crops;
1243 crops.reserve(flatBufferCrops->Length() / 2);
1244 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
1245 {
1246 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1247 }
1248
1249 armnn::BatchToSpaceNdDescriptor descriptor;
1250 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1251 descriptor.m_BlockShape =
1252 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1253 descriptor.m_Crops = crops;
1254
1255 auto layerName = GetLayerName(graph, layerIndex);
1256 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1257
1258 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1259 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1260
1261 RegisterInputSlots(graph, layerIndex, layer);
1262 RegisterOutputSlots(graph, layerIndex, layer);
1263}
1264
Finn Williams85d36712021-01-26 22:30:06 +00001265void IDeserializer::DeserializerImpl::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
ruoyan018e7fa232019-02-28 15:09:07 +00001266{
1267 CHECK_LAYERS(graph, 0, layerIndex);
1268
1269 auto inputs = GetInputs(graph, layerIndex);
1270 CHECK_VALID_SIZE(inputs.size(), 1);
1271
1272 auto outputs = GetOutputs(graph, layerIndex);
1273 CHECK_VALID_SIZE(outputs.size(), 1);
1274 auto outputInfo = ToTensorInfo(outputs[0]);
1275
ruoyan015c7ab052019-03-04 14:48:02 +00001276 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001277
1278 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1279 auto serializerDescriptor = serializerLayer->descriptor();
1280
1281 armnn::BatchNormalizationDescriptor descriptor;
1282 descriptor.m_Eps = serializerDescriptor->eps();
1283 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1284
1285 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1286 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1287 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1288 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1289
1290 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1291 mean,
1292 variance,
1293 beta,
1294 gamma,
1295 layerName.c_str());
1296 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1297
1298 RegisterInputSlots(graph, layerIndex, layer);
1299 RegisterOutputSlots(graph, layerIndex, layer);
1300}
1301
mathad01b392e982021-04-07 12:07:30 +01001302void IDeserializer::DeserializerImpl::ParseCast(GraphPtr graph, unsigned int layerIndex)
1303{
1304 CHECK_LAYERS(graph, 0, layerIndex);
1305 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1306 CHECK_LOCATION();
1307 CHECK_VALID_SIZE(inputs.size(), 1);
1308
1309 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1310 CHECK_VALID_SIZE(outputs.size(), 1);
1311
1312 auto layerName = GetLayerName(graph, layerIndex);
1313
1314 IConnectableLayer* layer = m_Network->AddCastLayer(layerName.c_str());
1315
1316 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1317 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1318
1319 RegisterInputSlots(graph, layerIndex, layer);
1320 RegisterOutputSlots(graph, layerIndex, layer);
1321}
1322
Finn Williams85d36712021-01-26 22:30:06 +00001323void IDeserializer::DeserializerImpl::ParseConstant(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy76277882019-02-26 08:29:54 +00001324{
1325 CHECK_LAYERS(graph, 0, layerIndex);
1326 CHECK_LOCATION();
1327
1328 auto outputs = GetOutputs(graph, layerIndex);
1329 CHECK_VALID_SIZE(outputs.size(), 1);
1330
1331 auto layerName = GetLayerName(graph, layerIndex);
1332
1333 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
1334 auto serializerInput = serializerLayer->input();
1335
1336 armnn::ConstTensor input = ToConstTensor(serializerInput);
1337
1338 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
1339
1340 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1341 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1342
1343 RegisterOutputSlots(graph, layerIndex, layer);
1344}
1345
Finn Williams85d36712021-01-26 22:30:06 +00001346void IDeserializer::DeserializerImpl::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001347{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001348 CHECK_LAYERS(graph, 0, layerIndex);
1349 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001350 CHECK_LOCATION();
1351 CHECK_VALID_SIZE(inputs.size(), 1);
1352
Derek Lamberti8ddae332019-02-21 16:29:43 +00001353 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001354 CHECK_VALID_SIZE(outputs.size(), 1);
1355
Derek Lamberti8ddae332019-02-21 16:29:43 +00001356 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001357 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001358 auto serializerDescriptor = serializerLayer->descriptor();
1359
1360 armnn::Convolution2dDescriptor descriptor;
1361 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1362 descriptor.m_PadRight = serializerDescriptor->padRight();
1363 descriptor.m_PadTop = serializerDescriptor->padTop();
1364 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1365 descriptor.m_StrideX = serializerDescriptor->strideX();
1366 descriptor.m_StrideY = serializerDescriptor->strideY();;
Matthew Benthamacad04e2019-05-13 10:02:45 +01001367 descriptor.m_DilationX = serializerDescriptor->dilationX();
1368 descriptor.m_DilationY = serializerDescriptor->dilationY();;
Mike Kellya0766c32019-02-19 17:22:07 +00001369 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
1370 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1371
1372 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1373 armnn::ConstTensor biases;
1374
Matteo Martincighfc598e12019-05-14 10:36:13 +01001375 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001376 if (descriptor.m_BiasEnabled)
1377 {
1378 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001379 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001380 }
1381 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1382 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001383 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001384 layerName.c_str());
1385 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1386 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1387
Derek Lamberti8ddae332019-02-21 16:29:43 +00001388 RegisterInputSlots(graph, layerIndex, layer);
1389 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001390}
1391
Finn Williams85d36712021-01-26 22:30:06 +00001392void IDeserializer::DeserializerImpl::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001393{
1394 CHECK_LAYERS(graph, 0, layerIndex);
1395
1396 auto inputs = GetInputs(graph, layerIndex);
1397 CHECK_VALID_SIZE(inputs.size(), 1);
1398
1399 auto outputs = GetOutputs(graph, layerIndex);
1400 CHECK_VALID_SIZE(outputs.size(), 1);
1401
1402 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1403
1404 armnn::DepthToSpaceDescriptor descriptor;
1405 descriptor.m_BlockSize = fbDescriptor->blockSize();
1406 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1407
1408 auto layerName = GetLayerName(graph, layerIndex);
1409 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1410
1411 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1412 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1413
1414 RegisterInputSlots(graph, layerIndex, layer);
1415 RegisterOutputSlots(graph, layerIndex, layer);
1416}
1417
// Deserializes a DepthwiseConvolution2d layer, including a weight-layout
// migration for files produced by older serializers: the weights' layout in
// ArmNN changed from [M,I,H,W] to [1,H,W,I*M], and files predating the
// change (feature version m_WeightsLayoutScheme <= 0) must have their
// weights permuted and reshaped before the layer is added.
void IDeserializer::DeserializerImpl::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    // Copy the serialized descriptor fields into the armnn descriptor.
    armnn::DepthwiseConvolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_DilationX = serializerDescriptor->dilationX();
    descriptor.m_DilationY = serializerDescriptor->dilationY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    // Assigned in one of the two branches below.
    IConnectableLayer* layer;

    // Biases are optional; only read them when the descriptor enables them.
    armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
    if (descriptor.m_BiasEnabled)
    {
        armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
        optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
    }

    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    // The data layout for weights in ArmNN used to be [M,I,H,W] but now it's changed to [1,H,W,I*M]
    // When reading older flatbuffer files we need to add a permutation to get to the new layout.
    if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
    {
        // Permute weights [ H, W, M, I ] --> [ 1, H, W, I*M ]
        // Step1: [ M, I, H, W ] --> [ H, W, I, M]
        PermutationVector permutationVector = { 3, 2, 0, 1 };
        armnn::TensorInfo weightsInfo = weights.GetInfo();
        // Scratch buffer holding the permuted weight data. It must stay
        // alive until AddDepthwiseConvolution2dLayer has consumed
        // weightsPermuted, which points into it.
        std::unique_ptr<unsigned char[]> permuteBuffer(new unsigned char[weightsInfo.GetNumBytes()]);
        weightsInfo = armnnUtils::Permuted(weightsInfo, permutationVector);
        armnnUtils::Permute(weightsInfo.GetShape(), permutationVector,
                            weights.GetMemoryArea(), permuteBuffer.get(),
                            GetDataTypeSize(weightsInfo.GetDataType()));

        // Step2: Reshape [ H, W, I, M] --> [ 1, H, W, I*M ]
        // The data does not move; only the shape metadata is rewritten.
        auto weightsShape = weightsInfo.GetShape();
        weightsInfo.SetShape({1,
                              weightsShape[0],
                              weightsShape[1],
                              weightsShape[2]*weightsShape[3]});

        armnn::ConstTensor weightsPermuted(weightsInfo, permuteBuffer.get());

        layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                          weightsPermuted,
                                                          optionalBiases,
                                                          layerName.c_str());
    }
    else
    {
        // Newer files already store weights in the [1,H,W,I*M] layout.
        layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                          weights,
                                                          optionalBiases,
                                                          layerName.c_str());
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1496
Finn Williams85d36712021-01-26 22:30:06 +00001497void IDeserializer::DeserializerImpl::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001498{
1499 CHECK_LAYERS(graph, 0, layerIndex);
1500 auto inputs = GetInputs(graph, layerIndex);
1501 CHECK_LOCATION();
1502 CHECK_VALID_SIZE(inputs.size(), 2);
1503
1504 auto outputs = GetOutputs(graph, layerIndex);
1505 CHECK_VALID_SIZE(outputs.size(), 4);
1506
1507 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1508 auto layerName = GetLayerName(graph, layerIndex);
1509 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1510
1511 armnn::DetectionPostProcessDescriptor descriptor;
1512 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1513 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1514 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1515 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1516 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1517 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1518 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1519 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1520 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1521 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1522 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1523
1524 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1525
1526 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1527 anchors,
1528 layerName.c_str());
1529
1530 for (unsigned int i = 0; i < 4; i++)
1531 {
1532 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1533 }
1534
1535 RegisterInputSlots(graph, layerIndex, layer);
1536 RegisterOutputSlots(graph, layerIndex, layer);
1537}
1538
Finn Williams85d36712021-01-26 22:30:06 +00001539void IDeserializer::DeserializerImpl::ParseDivision(GraphPtr graph, unsigned int layerIndex)
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001540{
1541 CHECK_LAYERS(graph, 0, layerIndex);
1542 auto inputs = GetInputs(graph, layerIndex);
1543 CHECK_LOCATION();
1544 CHECK_VALID_SIZE(inputs.size(), 2);
1545
1546 auto outputs = GetOutputs(graph, layerIndex);
1547 CHECK_VALID_SIZE(outputs.size(), 1);
1548
1549 auto layerName = GetLayerName(graph, layerIndex);
1550 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1551
1552 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1553 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1554
1555 RegisterInputSlots(graph, layerIndex, layer);
1556 RegisterOutputSlots(graph, layerIndex, layer);
1557}
1558
Finn Williams85d36712021-01-26 22:30:06 +00001559void IDeserializer::DeserializerImpl::ParseEqual(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001560{
1561 CHECK_LAYERS(graph, 0, layerIndex);
1562 auto inputs = GetInputs(graph, layerIndex);
1563 CHECK_LOCATION();
1564 CHECK_VALID_SIZE(inputs.size(), 2);
1565
1566 auto outputs = GetOutputs(graph, layerIndex);
1567 CHECK_VALID_SIZE(outputs.size(), 1);
1568
1569 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001570 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1571 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001572
1573 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1574 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1575
1576 RegisterInputSlots(graph, layerIndex, layer);
1577 RegisterOutputSlots(graph, layerIndex, layer);
1578}
1579
Finn Williams85d36712021-01-26 22:30:06 +00001580void IDeserializer::DeserializerImpl::ParseFill(GraphPtr graph, unsigned int layerIndex)
Keith Davis300ad562020-06-04 16:34:23 +01001581{
1582 CHECK_LAYERS(graph, 0, layerIndex);
1583 auto inputs = GetInputs(graph, layerIndex);
1584 CHECK_LOCATION();
1585 CHECK_VALID_SIZE(inputs.size(), 1);
1586
1587 auto outputs = GetOutputs(graph, layerIndex);
1588 CHECK_VALID_SIZE(outputs.size(), 1);
1589
1590 auto layerName = GetLayerName(graph, layerIndex);
1591 armnn::FillDescriptor descriptor(1.0f);
1592 IConnectableLayer* layer = m_Network->AddFillLayer(descriptor, layerName.c_str());
1593
1594 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1595 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1596
1597 RegisterInputSlots(graph, layerIndex, layer);
1598 RegisterOutputSlots(graph, layerIndex, layer);
1599}
1600
Finn Williams85d36712021-01-26 22:30:06 +00001601void IDeserializer::DeserializerImpl::ParseGreater(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001602{
1603 CHECK_LAYERS(graph, 0, layerIndex);
1604 auto inputs = GetInputs(graph, layerIndex);
1605 CHECK_LOCATION();
1606 CHECK_VALID_SIZE(inputs.size(), 2);
1607
1608 auto outputs = GetOutputs(graph, layerIndex);
1609 CHECK_VALID_SIZE(outputs.size(), 1);
1610
1611 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001612 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1613 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001614
1615 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1616 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1617
1618 RegisterInputSlots(graph, layerIndex, layer);
1619 RegisterOutputSlots(graph, layerIndex, layer);
1620}
1621
Finn Williams85d36712021-01-26 22:30:06 +00001622void IDeserializer::DeserializerImpl::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001623{
1624 CHECK_LAYERS(graph, 0, layerIndex);
1625
1626 auto inputs = GetInputs(graph, layerIndex);
1627 CHECK_VALID_SIZE(inputs.size(), 1);
1628
1629 auto outputs = GetOutputs(graph, layerIndex);
1630 CHECK_VALID_SIZE(outputs.size(), 1);
1631
1632 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1633 auto fbDescriptor = fbLayer->descriptor();
1634
1635 armnn::InstanceNormalizationDescriptor descriptor;
1636 descriptor.m_Gamma = fbDescriptor->gamma();
1637 descriptor.m_Beta = fbDescriptor->beta();
1638 descriptor.m_Eps = fbDescriptor->eps();
1639 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1640
1641 const std::string layerName = GetLayerName(graph, layerIndex);
1642 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1643
1644 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1645 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1646
1647 RegisterInputSlots(graph, layerIndex, layer);
1648 RegisterOutputSlots(graph, layerIndex, layer);
1649}
1650
Finn Williams85d36712021-01-26 22:30:06 +00001651void IDeserializer::DeserializerImpl::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001652{
1653 CHECK_LAYERS(graph, 0, layerIndex);
1654
1655 auto inputs = GetInputs(graph, layerIndex);
1656 CHECK_VALID_SIZE(inputs.size(), 1);
1657
1658 auto outputs = GetOutputs(graph, layerIndex);
1659 CHECK_VALID_SIZE(outputs.size(), 1);
1660 auto outputInfo = ToTensorInfo(outputs[0]);
1661
1662 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1663 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1664
1665 auto layerName = GetLayerName(graph, layerIndex);
1666 armnn::L2NormalizationDescriptor descriptor;
1667 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001668 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001669
1670 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1671 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1672
1673 RegisterInputSlots(graph, layerIndex, layer);
1674 RegisterOutputSlots(graph, layerIndex, layer);
1675}
1676
Finn Williams85d36712021-01-26 22:30:06 +00001677void IDeserializer::DeserializerImpl::ParseLogicalBinary(GraphPtr graph, unsigned int layerIndex)
James Conroyaba90cd2020-11-06 16:28:18 +00001678{
1679 CHECK_LAYERS(graph, 0, layerIndex);
1680 CHECK_LOCATION();
1681
1682 auto inputs = GetInputs(graph, layerIndex);
1683 CHECK_VALID_SIZE(inputs.size(), 2);
1684
1685 auto outputs = GetOutputs(graph, layerIndex);
1686 CHECK_VALID_SIZE(outputs.size(), 1);
1687
1688 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer();
1689 auto fbDescriptor = fbLayer->descriptor();
1690
1691 armnn::LogicalBinaryDescriptor descriptor;
1692 descriptor.m_Operation = ToLogicalBinaryOperation(fbDescriptor->operation());
1693
1694 const std::string& layerName = GetLayerName(graph, layerIndex);
1695 IConnectableLayer* layer = m_Network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1696
1697 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1698 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1699
1700 RegisterInputSlots(graph, layerIndex, layer);
1701 RegisterOutputSlots(graph, layerIndex, layer);
1702}
1703
Finn Williams85d36712021-01-26 22:30:06 +00001704void IDeserializer::DeserializerImpl::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan26257852019-10-14 13:00:47 +01001705{
1706 CHECK_LAYERS(graph, 0, layerIndex);
1707
Finn Williams85d36712021-01-26 22:30:06 +00001708 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001709 CHECK_VALID_SIZE(inputs.size(), 1);
1710
Finn Williams85d36712021-01-26 22:30:06 +00001711 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001712 CHECK_VALID_SIZE(outputs.size(), 1);
1713
1714 armnn::LogSoftmaxDescriptor descriptor;
1715 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1716 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1717 auto layerName = GetLayerName(graph, layerIndex);
1718
1719 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1720
1721 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1722 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1723
1724 RegisterInputSlots(graph, layerIndex, layer);
1725 RegisterOutputSlots(graph, layerIndex, layer);
1726}
1727
Finn Williams85d36712021-01-26 22:30:06 +00001728void IDeserializer::DeserializerImpl::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001729{
1730 CHECK_LAYERS(graph, 0, layerIndex);
1731 auto inputs = GetInputs(graph, layerIndex);
1732 CHECK_LOCATION();
1733 CHECK_VALID_SIZE(inputs.size(), 2);
1734
1735 auto outputs = GetOutputs(graph, layerIndex);
1736 CHECK_VALID_SIZE(outputs.size(), 1);
1737
1738 auto layerName = GetLayerName(graph, layerIndex);
1739 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1740
1741 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1742 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1743
1744 RegisterInputSlots(graph, layerIndex, layer);
1745 RegisterOutputSlots(graph, layerIndex, layer);
1746}
1747
Finn Williams85d36712021-01-26 22:30:06 +00001748void IDeserializer::DeserializerImpl::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001749{
1750 CHECK_LAYERS(graph, 0, layerIndex);
1751 auto inputs = GetInputs(graph, layerIndex);
1752 CHECK_LOCATION();
1753 CHECK_VALID_SIZE(inputs.size(), 2);
1754
1755 auto outputs = GetOutputs(graph, layerIndex);
1756 CHECK_VALID_SIZE(outputs.size(), 1);
1757
1758 auto layerName = GetLayerName(graph, layerIndex);
1759 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1760
1761 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1762 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1763
1764 RegisterInputSlots(graph, layerIndex, layer);
1765 RegisterOutputSlots(graph, layerIndex, layer);
1766}
1767
Jim Flynne242f2d2019-05-22 14:24:13 +01001768const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1769 unsigned int layerIndex)
1770{
1771 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1772
1773 switch (layerType)
1774 {
1775 case Layer::Layer_ConcatLayer:
1776 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1777 case Layer::Layer_MergerLayer:
1778 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1779 default:
1780 throw armnn::Exception("unknown layer type, should be concat or merger");
1781 }
1782}
1783
Finn Williams85d36712021-01-26 22:30:06 +00001784void IDeserializer::DeserializerImpl::ParseComparison(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01001785{
1786 CHECK_LAYERS(graph, 0, layerIndex);
1787 CHECK_LOCATION();
1788
1789 auto inputs = GetInputs(graph, layerIndex);
1790 CHECK_VALID_SIZE(inputs.size(), 2);
1791
1792 auto outputs = GetOutputs(graph, layerIndex);
1793 CHECK_VALID_SIZE(outputs.size(), 1);
1794
1795 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
1796 auto fbDescriptor = fbLayer->descriptor();
1797
1798 armnn::ComparisonDescriptor descriptor;
1799 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
1800
1801 const std::string& layerName = GetLayerName(graph, layerIndex);
1802 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1803
1804 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1805 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1806
1807 RegisterInputSlots(graph, layerIndex, layer);
1808 RegisterOutputSlots(graph, layerIndex, layer);
1809}
1810
Finn Williams85d36712021-01-26 22:30:06 +00001811void IDeserializer::DeserializerImpl::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
josh minor4a3c6102020-01-06 16:40:46 -06001812{
1813 CHECK_LAYERS(graph, 0, layerIndex);
1814 CHECK_LOCATION();
1815
1816 auto inputs = GetInputs(graph, layerIndex);
1817 CHECK_VALID_SIZE(inputs.size(), 1);
1818
1819 auto outputs = GetOutputs(graph, layerIndex);
1820 CHECK_VALID_SIZE(outputs.size(), 1);
1821
1822 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
1823 auto fbDescriptor = fbLayer->descriptor();
1824
1825 armnn::ElementwiseUnaryDescriptor descriptor;
1826 descriptor.m_Operation = ToUnaryOperation(fbDescriptor->operation());
1827
1828 const std::string& layerName = GetLayerName(graph, layerIndex);
1829 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
1830
1831 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1832 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1833
1834 RegisterInputSlots(graph, layerIndex, layer);
1835 RegisterOutputSlots(graph, layerIndex, layer);
1836}
1837
Finn Williams85d36712021-01-26 22:30:06 +00001838void IDeserializer::DeserializerImpl::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001839{
1840 CHECK_LAYERS(graph, 0, layerIndex);
1841 CHECK_LOCATION();
1842
1843 auto outputs = GetOutputs(graph, layerIndex);
1844 CHECK_VALID_SIZE(outputs.size(), 1);
1845
Jim Flynnac25a1b2019-02-28 10:40:49 +00001846 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001847 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1848 unsigned int numViews = originsDescriptor->numViews();
1849 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001850
1851 // can now check the number of inputs == number of views
1852 auto inputs = GetInputs(graph, layerIndex);
1853 CHECK_VALID_SIZE(inputs.size(), numViews);
1854
1855 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001856 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001857 for (unsigned int v = 0; v < numViews; ++v)
1858 {
1859 auto originPtr = originsPtr->Get(v);
1860 for (unsigned int d = 0; d < numDimensions; ++d)
1861 {
1862 uint32_t value = originPtr->data()->Get(d);
1863 descriptor.SetViewOriginCoord(v, d, value);
1864 }
1865 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001866 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001867
Jim Flynn906f9462019-05-10 13:55:21 +01001868 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001869 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1870 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1871
1872 RegisterInputSlots(graph, layerIndex, layer);
1873 RegisterOutputSlots(graph, layerIndex, layer);
1874}
1875
Finn Williams85d36712021-01-26 22:30:06 +00001876void IDeserializer::DeserializerImpl::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001877{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001878 CHECK_LAYERS(graph, 0, layerIndex);
1879 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001880 CHECK_LOCATION();
1881 CHECK_VALID_SIZE(inputs.size(), 2);
1882
Derek Lamberti8ddae332019-02-21 16:29:43 +00001883 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001884 CHECK_VALID_SIZE(outputs.size(), 1);
1885
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001886 auto layerName = GetLayerName(graph, layerIndex);
1887 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001888
1889 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1890 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1891
Derek Lamberti8ddae332019-02-21 16:29:43 +00001892 RegisterInputSlots(graph, layerIndex, layer);
1893 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001894}
1895
Finn Williams85d36712021-01-26 22:30:06 +00001896void IDeserializer::DeserializerImpl::ParseFloor(GraphPtr graph, unsigned int layerIndex)
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001897{
1898 CHECK_LAYERS(graph, 0, layerIndex);
1899 CHECK_LOCATION();
1900
1901 auto inputs = GetInputs(graph, layerIndex);
1902 CHECK_VALID_SIZE(inputs.size(), 1);
1903
1904 auto outputs = GetOutputs(graph, layerIndex);
1905 CHECK_VALID_SIZE(outputs.size(), 1);
1906
1907 auto layerName = GetLayerName(graph, layerIndex);
1908
1909 armnn::IConnectableLayer* layer;
1910
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001911 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001912
1913 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1914 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1915
1916 RegisterInputSlots(graph, layerIndex, layer);
1917 RegisterOutputSlots(graph, layerIndex, layer);
1918}
1919
// Deserializes a FullyConnected layer.
//
// Two serialization formats are supported, selected by the m_ConstTensorsAsInputs
// feature version of the serialized graph:
//  - old format (<= 0): weights/biases are stored inside the layer; they are
//    re-created here as ConstantLayers wired into input slots 1 and 2, and those
//    slots are excluded from normal input registration via 'ignoreSlots'.
//  - new format (> 0): weights/biases arrive as ordinary layer inputs.
void IDeserializer::DeserializerImpl::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    // Copy the serialized descriptor fields into the runtime descriptor.
    armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
    fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
    fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
    fullyConnectedDescriptor.m_ConstantWeights = flatBufferDescriptor->constantWeights();

    armnn::IConnectableLayer* layer;
    std::vector<unsigned int> ignoreSlots {};

    // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
    // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
    if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
    {
        // If the model stores weights and biases as members of the layer we have to read them from there
        // but add them to their own ConstantLayer for compatibility
        CHECK_VALID_SIZE(inputs.size(), 1);
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  layerName.c_str());

        // Slot 1 carries the weights; feed it from a new ConstantLayer.
        armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
        auto weightsLayer = m_Network->AddConstantLayer(weightsTensor);
        weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
        weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsTensor.GetInfo());
        ignoreSlots.emplace_back(1u);

        if (fullyConnectedDescriptor.m_BiasEnabled)
        {
            // Slot 2 carries the bias; same treatment as the weights.
            armnn::ConstTensor biasTensor = ToConstTensor(flatBufferLayer->biases());
            auto biasLayer = m_Network->AddConstantLayer(biasTensor);
            biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
            biasLayer->GetOutputSlot(0).SetTensorInfo(biasTensor.GetInfo());
            ignoreSlots.emplace_back(2u);
        }
    }
    else
    {
        // New format: weights (and optional bias) are ordinary inputs, so the
        // expected input count comes from the descriptor itself.
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  layerName.c_str());
        uint32_t numInputs = fullyConnectedDescriptor.GetNumInputs();
        CHECK_VALID_SIZE(inputs.size(), numInputs);
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1980
Finn Williams85d36712021-01-26 22:30:06 +00001981void IDeserializer::DeserializerImpl::ParsePad(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001982{
1983 CHECK_LAYERS(graph, 0, layerIndex);
1984
Finn Williams85d36712021-01-26 22:30:06 +00001985 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001986 CHECK_VALID_SIZE(inputs.size(), 1);
1987
Finn Williams85d36712021-01-26 22:30:06 +00001988 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001989 CHECK_VALID_SIZE(outputs.size(), 1);
1990
1991 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1992 auto flatBufferPadList = flatBufferDescriptor->padList();
David Monahan34757812019-06-19 11:47:21 +01001993 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001994
1995 if (flatBufferPadList->Length() % 2 != 0)
1996 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001997 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
1998 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001999 }
2000
2001 std::vector<std::pair<unsigned int, unsigned int>> padList;
2002 padList.reserve(flatBufferPadList->Length() / 2);
2003 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
2004 {
2005 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2006 }
2007
David Monahan34757812019-06-19 11:47:21 +01002008 armnn::PadDescriptor descriptor(padList, padValue);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002009
2010 auto layerName = GetLayerName(graph, layerIndex);
2011 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
2012
2013 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2014 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2015
2016 RegisterInputSlots(graph, layerIndex, layer);
2017 RegisterOutputSlots(graph, layerIndex, layer);
2018}
2019
Finn Williams85d36712021-01-26 22:30:06 +00002020void IDeserializer::DeserializerImpl::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002021{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002022 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002023
2024 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00002025 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002026
Derek Lamberti8ddae332019-02-21 16:29:43 +00002027 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002028 CHECK_VALID_SIZE(inputs.size(), 1);
2029
Derek Lamberti8ddae332019-02-21 16:29:43 +00002030 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002031 CHECK_VALID_SIZE(outputs.size(), 1);
2032 auto outputInfo = ToTensorInfo(outputs[0]);
2033
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002034 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002035 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
2036
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002037 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002038 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2039
Derek Lamberti8ddae332019-02-21 16:29:43 +00002040 RegisterInputSlots(graph, layerIndex, layer);
2041 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002042}
2043
// Converts a serialized (FlatBuffers) Pooling2dDescriptor into the equivalent
// armnn::Pooling2dDescriptor. The layerIndex parameter is unused; it is kept to
// mirror the signatures of the other descriptor helpers.
armnn::Pooling2dDescriptor IDeserializer::DeserializerImpl::GetPoolingDescriptor(PoolingDescriptor pooling2dDesc,
                                                                                 unsigned int layerIndex)
{
    IgnoreUnused(layerIndex);
    armnn::Pooling2dDescriptor desc;

    // Pooling algorithm: Average, Max or L2.
    switch (pooling2dDesc->poolType())
    {
        case PoolingAlgorithm_Average:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Average;
            break;
        }
        case PoolingAlgorithm_Max:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Max;
            break;
        }
        case PoolingAlgorithm_L2:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::L2;
            break;
        }
        default:
        {
            // NOTE(review): ARMNN_ASSERT_MSG may compile out in release builds, in which
            // case an unrecognised enum value falls through and the descriptor keeps its
            // default pool type — confirm this is acceptable.
            ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
        }
    }

    // Rounding mode used when deriving the output shape.
    switch (pooling2dDesc->outputShapeRounding())
    {
        case OutputShapeRounding_Floor:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
            break;
        }
        case OutputShapeRounding_Ceiling:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
            break;
        }
        default:
        {
            // Same release-build fall-through caveat as above.
            ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
        }
    }

    // How padded values participate in the pooling computation.
    switch (pooling2dDesc->paddingMethod())
    {
        case PaddingMethod_Exclude:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
            break;
        }
        case PaddingMethod_IgnoreValue:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
            break;
        }
        default:
        {
            // Same release-build fall-through caveat as above.
            ARMNN_ASSERT_MSG(false, "Unsupported padding method");
        }
    }

    // Tensor data layout.
    switch (pooling2dDesc->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            // Same release-build fall-through caveat as above.
            ARMNN_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    // Scalar fields: padding, strides and pool window size.
    desc.m_PadRight   = pooling2dDesc->padRight();
    desc.m_PadLeft    = pooling2dDesc->padLeft();
    desc.m_PadBottom  = pooling2dDesc->padBottom();
    desc.m_PadTop     = pooling2dDesc->padTop();
    desc.m_StrideX    = pooling2dDesc->strideX();
    desc.m_StrideY    = pooling2dDesc->strideY();
    desc.m_PoolWidth  = pooling2dDesc->poolWidth();
    desc.m_PoolHeight = pooling2dDesc->poolHeight();

    return desc;
}
2138
Finn Williams85d36712021-01-26 22:30:06 +00002139
2140
2141void IDeserializer::DeserializerImpl::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002142{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002143 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002144
Derek Lamberti8ddae332019-02-21 16:29:43 +00002145 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00002146 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002147 CHECK_VALID_SIZE(inputs.size(), 1);
2148
Derek Lamberti8ddae332019-02-21 16:29:43 +00002149 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002150 CHECK_VALID_SIZE(outputs.size(), 1);
2151 auto outputInfo = ToTensorInfo(outputs[0]);
2152
2153 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002154 auto layerName = GetLayerName(graph, layerIndex);
2155 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002156 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2157
Derek Lamberti8ddae332019-02-21 16:29:43 +00002158 RegisterInputSlots(graph, layerIndex, layer);
2159 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002160}
2161
Finn Williams85d36712021-01-26 22:30:06 +00002162void IDeserializer::DeserializerImpl::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
Derek Lamberti87acb272019-03-27 16:51:31 +00002163{
2164 CHECK_LAYERS(graph, 0, layerIndex);
2165
2166 auto inputs = GetInputs(graph, layerIndex);
2167 CHECK_VALID_SIZE(inputs.size(), 1);
2168
2169 auto outputs = GetOutputs(graph, layerIndex);
2170 CHECK_VALID_SIZE(outputs.size(), 1);
2171 auto outputInfo = ToTensorInfo(outputs[0]);
2172
2173 auto layerName = GetLayerName(graph, layerIndex);
2174 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
2175 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2176
2177 RegisterInputSlots(graph, layerIndex, layer);
2178 RegisterOutputSlots(graph, layerIndex, layer);
2179}
2180
Finn Williams85d36712021-01-26 22:30:06 +00002181armnn::TensorInfo IDeserializer::DeserializerImpl::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00002182 const std::vector<uint32_t>& targetDimsIn)
2183{
2184 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
2185 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
2186
2187 if (stretchDim != targetDimsIn.end())
2188 {
2189 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
2190 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002191 throw ParseException(fmt::format("At most one component of shape can be -1 {}",
2192 CHECK_LOCATION().AsString()));
Saoirse Stewart263829c2019-02-19 15:54:14 +00002193 }
2194
2195 auto targetNumElements =
Matthew Sloyan0663d662020-09-14 11:47:26 +01002196 armnn::numeric_cast<unsigned int>(
Saoirse Stewart263829c2019-02-19 15:54:14 +00002197 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
2198
2199 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
2200 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
2201 }
2202
2203 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
2204
2205 armnn::TensorInfo reshapeInfo = inputTensorInfo;
2206 reshapeInfo.SetShape(outputShape);
2207
2208 return reshapeInfo;
2209}
2210
Finn Williams85d36712021-01-26 22:30:06 +00002211void IDeserializer::DeserializerImpl::ParseRank(GraphPtr graph, unsigned int layerIndex)
Finn Williams2605b232020-06-10 15:53:46 +01002212{
2213 CHECK_LAYERS(graph, 0, layerIndex);
2214
Finn Williams85d36712021-01-26 22:30:06 +00002215 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002216 CHECK_VALID_SIZE(inputs.size(), 1);
2217
Finn Williams85d36712021-01-26 22:30:06 +00002218 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002219 CHECK_VALID_SIZE(outputs.size(), 1);
2220
2221 auto layerName = GetLayerName(graph, layerIndex);
2222 IConnectableLayer* layer = m_Network->AddRankLayer( layerName.c_str());
2223
2224 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2225 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2226
2227 RegisterInputSlots(graph, layerIndex, layer);
2228 RegisterOutputSlots(graph, layerIndex, layer);
2229}
2230
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002231void IDeserializer::DeserializerImpl::ParseReduce(GraphPtr graph, unsigned int layerIndex)
2232{
2233 CHECK_LAYERS(graph, 0, layerIndex);
2234 CHECK_LOCATION();
2235
2236 auto inputs = GetInputs(graph, layerIndex);
2237 CHECK_VALID_SIZE(inputs.size(), 1);
2238
2239 auto outputs = GetOutputs(graph, layerIndex);
2240 CHECK_VALID_SIZE(outputs.size(), 1);
2241
2242 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ReduceLayer();
2243 auto fbDescriptor = fbLayer->descriptor();
2244 auto flatBufferAxis = fbDescriptor->axis();
2245
2246 armnn::ReduceDescriptor descriptor;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002247 descriptor.m_KeepDims = fbDescriptor->keepDims();
2248 descriptor.m_vAxis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2249 descriptor.m_ReduceOperation = ToReduceOperation(fbDescriptor->reduceOperation());
2250
2251 const std::string& layerName = GetLayerName(graph, layerIndex);
2252 IConnectableLayer* layer = m_Network->AddReduceLayer(descriptor, layerName.c_str());
2253
2254 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2255 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2256
2257 RegisterInputSlots(graph, layerIndex, layer);
2258 RegisterOutputSlots(graph, layerIndex, layer);
2259}
2260
// Deserializes a Reshape layer: resolves the target shape (including a possible -1
// stretch dimension) via OutputShapeOfReshape and adds the layer to the network.
void IDeserializer::DeserializerImpl::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    // Resolve any -1 stretch dimension against the input's element count.
    armnn::TensorInfo reshapeOutputTensorInfo = DeserializerImpl::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    // NOTE(review): the consistency check between the resolved shape and the serialized
    // output shape only runs when there is more than one input, so the common
    // single-input reshape skips it — confirm this gate is intended.
    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2303
Finn Williams85d36712021-01-26 22:30:06 +00002304void IDeserializer::DeserializerImpl::ParseResize(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002305{
2306 CHECK_LAYERS(graph, 0, layerIndex);
2307
Finn Williams85d36712021-01-26 22:30:06 +00002308 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002309 CHECK_VALID_SIZE(inputs.size(), 1);
2310
Finn Williams85d36712021-01-26 22:30:06 +00002311 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002312 CHECK_VALID_SIZE(outputs.size(), 1);
2313
2314 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2315
2316 armnn::ResizeDescriptor descriptor;
2317 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
2318 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
2319 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
2320 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002321 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2322 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002323
2324 auto layerName = GetLayerName(graph, layerIndex);
2325 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2326
2327 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2328 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2329
2330 RegisterInputSlots(graph, layerIndex, layer);
2331 RegisterOutputSlots(graph, layerIndex, layer);
2332}
2333
Finn Williams85d36712021-01-26 22:30:06 +00002334void IDeserializer::DeserializerImpl::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002335{
2336 CHECK_LAYERS(graph, 0, layerIndex);
2337
Finn Williams85d36712021-01-26 22:30:06 +00002338 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002339 CHECK_VALID_SIZE(inputs.size(), 1);
2340
Finn Williams85d36712021-01-26 22:30:06 +00002341 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002342 CHECK_VALID_SIZE(outputs.size(), 1);
2343
2344 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2345
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002346 armnn::ResizeDescriptor descriptor;
2347 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002348 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002349 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
2350 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002351 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2352 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002353
2354 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002355 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002356
2357 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2358 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2359
2360 RegisterInputSlots(graph, layerIndex, layer);
2361 RegisterOutputSlots(graph, layerIndex, layer);
2362}
2363
Keith Davis3ae3f972021-05-21 16:33:48 +01002364void IDeserializer::DeserializerImpl::ParseShape(GraphPtr graph, unsigned int layerIndex)
2365{
2366 CHECK_LAYERS(graph, 0, layerIndex);
2367
2368 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2369 CHECK_VALID_SIZE(inputs.size(), 1);
2370
2371 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2372 CHECK_VALID_SIZE(outputs.size(), 1);
2373
2374 auto layerName = GetLayerName(graph, layerIndex);
2375 IConnectableLayer* layer = m_Network->AddShapeLayer( layerName.c_str());
2376
2377 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2378 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2379
2380 RegisterInputSlots(graph, layerIndex, layer);
2381 RegisterOutputSlots(graph, layerIndex, layer);
2382}
2383
Finn Williams85d36712021-01-26 22:30:06 +00002384void IDeserializer::DeserializerImpl::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002385{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002386 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002387
Finn Williams85d36712021-01-26 22:30:06 +00002388 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002389 CHECK_VALID_SIZE(inputs.size(), 1);
2390
Finn Williams85d36712021-01-26 22:30:06 +00002391 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002392 CHECK_VALID_SIZE(outputs.size(), 1);
2393
2394 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00002395 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002396 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002397
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002398 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2399
2400 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2401 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2402
Derek Lamberti8ddae332019-02-21 16:29:43 +00002403 RegisterInputSlots(graph, layerIndex, layer);
2404 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00002405}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002406
Finn Williams85d36712021-01-26 22:30:06 +00002407void IDeserializer::DeserializerImpl::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002408{
2409 CHECK_LAYERS(graph, 0, layerIndex);
2410
Finn Williams85d36712021-01-26 22:30:06 +00002411 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002412 CHECK_VALID_SIZE(inputs.size(), 1);
2413
Finn Williams85d36712021-01-26 22:30:06 +00002414 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002415 CHECK_VALID_SIZE(outputs.size(), 1);
2416
2417 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2418 auto flatBufferPadList = flatBufferDescriptor->padList();
2419 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
2420
2421 if (flatBufferPadList->Length() % 2 != 0)
2422 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002423 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2424 CHECK_LOCATION().AsString()));
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002425 }
2426
2427 std::vector<std::pair<unsigned int, unsigned int>> padList;
2428 padList.reserve(flatBufferPadList->Length() / 2);
2429 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
2430 {
2431 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2432 }
2433
2434 armnn::SpaceToBatchNdDescriptor descriptor;
2435 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2436 descriptor.m_BlockShape =
2437 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2438 descriptor.m_PadList = padList;
2439
2440 auto layerName = GetLayerName(graph, layerIndex);
2441 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2442
2443 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2444 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2445
2446 RegisterInputSlots(graph, layerIndex, layer);
2447 RegisterOutputSlots(graph, layerIndex, layer);
2448}
2449
Finn Williams85d36712021-01-26 22:30:06 +00002450void IDeserializer::DeserializerImpl::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002451{
2452 CHECK_LAYERS(graph, 0, layerIndex);
2453
Finn Williams85d36712021-01-26 22:30:06 +00002454 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002455 CHECK_VALID_SIZE(inputs.size(), 1);
2456
Finn Williams85d36712021-01-26 22:30:06 +00002457 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002458 CHECK_VALID_SIZE(outputs.size(), 1);
2459
2460 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2461
2462 armnn::SpaceToDepthDescriptor descriptor;
2463 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2464 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2465
2466 auto layerName = GetLayerName(graph, layerIndex);
2467 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2468
2469 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2470 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2471
2472 RegisterInputSlots(graph, layerIndex, layer);
2473 RegisterOutputSlots(graph, layerIndex, layer);
2474}
2475
Finn Williams85d36712021-01-26 22:30:06 +00002476armnn::NormalizationDescriptor IDeserializer::DeserializerImpl::GetNormalizationDescriptor(
2477 NormalizationDescriptorPtr normalizationDescriptor,
Nina Drozd57728782019-02-27 10:53:27 +00002478 unsigned int layerIndex)
2479{
Jan Eilers8eb25602020-03-09 12:13:48 +00002480 IgnoreUnused(layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002481 armnn::NormalizationDescriptor desc;
2482
2483 switch (normalizationDescriptor->normChannelType())
2484 {
2485 case NormalizationAlgorithmChannel_Across:
2486 {
2487 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
2488 break;
2489 }
2490 case NormalizationAlgorithmChannel_Within:
2491 {
2492 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
2493 break;
2494 }
2495 default:
2496 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002497 ARMNN_ASSERT_MSG(false, "Unsupported normalization channel type");
Nina Drozd57728782019-02-27 10:53:27 +00002498 }
2499 }
2500
2501 switch (normalizationDescriptor->normMethodType())
2502 {
2503 case NormalizationAlgorithmMethod_LocalBrightness:
2504 {
2505 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
2506 break;
2507 }
2508 case NormalizationAlgorithmMethod_LocalContrast:
2509 {
2510 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
2511 break;
2512 }
2513 default:
2514 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002515 ARMNN_ASSERT_MSG(false, "Unsupported normalization method type");
Nina Drozd57728782019-02-27 10:53:27 +00002516 }
2517 }
2518
2519 switch (normalizationDescriptor->dataLayout())
2520 {
2521 case DataLayout_NCHW:
2522 {
2523 desc.m_DataLayout = armnn::DataLayout::NCHW;
2524 break;
2525 }
2526 case DataLayout_NHWC:
2527 {
2528 desc.m_DataLayout = armnn::DataLayout::NHWC;
2529 break;
2530 }
2531 default:
2532 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002533 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Nina Drozd57728782019-02-27 10:53:27 +00002534 }
2535 }
2536
2537 desc.m_Alpha = normalizationDescriptor->alpha();
2538 desc.m_Beta = normalizationDescriptor->beta();
2539 desc.m_K = normalizationDescriptor->k();
2540 desc.m_NormSize = normalizationDescriptor->normSize();
2541
2542 return desc;
2543}
2544
Finn Williams85d36712021-01-26 22:30:06 +00002545void IDeserializer::DeserializerImpl::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
Nina Drozd57728782019-02-27 10:53:27 +00002546{
2547 CHECK_LAYERS(graph, 0, layerIndex);
2548
2549 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2550
Finn Williams85d36712021-01-26 22:30:06 +00002551 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002552 CHECK_VALID_SIZE(inputs.size(), 1);
2553
Finn Williams85d36712021-01-26 22:30:06 +00002554 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002555 CHECK_VALID_SIZE(outputs.size(), 1);
2556
2557 auto outputInfo = ToTensorInfo(outputs[0]);
2558
2559 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2560 auto layerName = GetLayerName(graph, layerIndex);
2561
2562 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2563 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2564
2565 RegisterInputSlots(graph, layerIndex, layer);
2566 RegisterOutputSlots(graph, layerIndex, layer);
2567}
2568
Finn Williams85d36712021-01-26 22:30:06 +00002569void IDeserializer::DeserializerImpl::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan8b42a382019-03-01 14:24:49 +00002570{
2571 CHECK_LAYERS(graph, 0, layerIndex);
2572 auto inputs = GetInputs(graph, layerIndex);
2573 CHECK_LOCATION();
2574 CHECK_VALID_SIZE(inputs.size(), 1);
2575
2576 auto outputs = GetOutputs(graph, layerIndex);
2577 CHECK_VALID_SIZE(outputs.size(), 1);
2578
2579 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagan8b42a382019-03-01 14:24:49 +00002580
josh minor4a3c6102020-01-06 16:40:46 -06002581 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
2582 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
Sadik Armagan8b42a382019-03-01 14:24:49 +00002583 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2584 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2585
2586 RegisterInputSlots(graph, layerIndex, layer);
2587 RegisterOutputSlots(graph, layerIndex, layer);
2588}
2589
Finn Williams85d36712021-01-26 22:30:06 +00002590void IDeserializer::DeserializerImpl::ParseSlice(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002591{
2592 CHECK_LAYERS(graph, 0, layerIndex);
2593
2594 auto inputs = GetInputs(graph, layerIndex);
2595 CHECK_VALID_SIZE(inputs.size(), 1);
2596
2597 auto outputs = GetOutputs(graph, layerIndex);
2598 CHECK_VALID_SIZE(outputs.size(), 1);
2599
2600 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2601
2602 auto fbBegin = fbDescriptor->begin();
2603 auto fbSize = fbDescriptor->size();
2604
2605 if (fbBegin->Length() != fbSize->Length())
2606 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002607 throw ParseException(fmt::format("Begin and size descriptors must have the same length {}",
2608 CHECK_LOCATION().AsString()));
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002609 }
2610
2611 armnn::SliceDescriptor descriptor;
2612 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
2613 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
2614
2615 auto layerName = GetLayerName(graph, layerIndex);
2616 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2617
2618 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2619 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2620
2621 RegisterInputSlots(graph, layerIndex, layer);
2622 RegisterOutputSlots(graph, layerIndex, layer);
2623}
2624
Finn Williams85d36712021-01-26 22:30:06 +00002625void IDeserializer::DeserializerImpl::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002626{
2627 CHECK_LAYERS(graph, 0, layerIndex);
2628
Finn Williams85d36712021-01-26 22:30:06 +00002629 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002630 CHECK_VALID_SIZE(inputs.size(), 1);
2631
Finn Williams85d36712021-01-26 22:30:06 +00002632 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002633 CHECK_VALID_SIZE(outputs.size(), 1);
2634
2635 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2636
2637 auto flatBufferBegin = flatBufferDescriptor->begin();
2638 auto flatBufferEnd = flatBufferDescriptor->end();
2639 auto flatBufferStride = flatBufferDescriptor->stride();
2640
2641 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
2642 flatBufferBegin->Length() == flatBufferStride->Length()))
2643 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002644 throw ParseException(fmt::format("The size of the begin, end, and stride must be equal {}",
2645 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002646 }
2647
2648 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2649 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2650 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
2651
2652 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
2653 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2654 descriptor.m_EndMask = flatBufferDescriptor->endMask();
2655 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
2656 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
2657 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
2658 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2659
2660 auto layerName = GetLayerName(graph, layerIndex);
2661 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
2662
2663 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2664 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2665
2666 RegisterInputSlots(graph, layerIndex, layer);
2667 RegisterOutputSlots(graph, layerIndex, layer);
2668}
2669
Finn Williams85d36712021-01-26 22:30:06 +00002670void IDeserializer::DeserializerImpl::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
Conor Kennedyda1f9752019-03-01 14:37:12 +00002671{
2672 CHECK_LAYERS(graph, 0, layerIndex);
2673 auto inputs = GetInputs(graph, layerIndex);
2674 CHECK_LOCATION();
2675 CHECK_VALID_SIZE(inputs.size(), 2);
2676
2677 auto outputs = GetOutputs(graph, layerIndex);
2678 CHECK_VALID_SIZE(outputs.size(), 1);
2679
2680 auto layerName = GetLayerName(graph, layerIndex);
2681 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
2682
2683 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2684 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2685
2686 RegisterInputSlots(graph, layerIndex, layer);
2687 RegisterOutputSlots(graph, layerIndex, layer);
2688}
2689
Finn Williams85d36712021-01-26 22:30:06 +00002690void IDeserializer::DeserializerImpl::ParseGather(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002691{
2692 CHECK_LAYERS(graph, 0, layerIndex);
2693
Finn Williams85d36712021-01-26 22:30:06 +00002694 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002695 CHECK_VALID_SIZE(inputs.size(), 2);
2696
Finn Williams85d36712021-01-26 22:30:06 +00002697 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002698 CHECK_VALID_SIZE(outputs.size(), 1);
2699
Teresa Charlin52664732020-06-29 16:27:03 +01002700 armnn::GatherDescriptor descriptor;
2701 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_GatherLayer()->descriptor()->axis();
2702
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002703 auto layerName = GetLayerName(graph, layerIndex);
Teresa Charlin52664732020-06-29 16:27:03 +01002704 IConnectableLayer* layer = m_Network->AddGatherLayer(descriptor, layerName.c_str());
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002705
2706 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002707 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2708
2709 RegisterInputSlots(graph, layerIndex, layer);
2710 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002711}
2712
Finn Williams85d36712021-01-26 22:30:06 +00002713void IDeserializer::DeserializerImpl::ParseMean(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002714{
2715 CHECK_LAYERS(graph, 0, layerIndex);
2716
Finn Williams85d36712021-01-26 22:30:06 +00002717 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002718 CHECK_VALID_SIZE(inputs.size(), 1);
2719
Finn Williams85d36712021-01-26 22:30:06 +00002720 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002721 CHECK_VALID_SIZE(outputs.size(), 1);
2722
2723 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
2724 auto flatBufferAxis = flatBufferDescriptor->axis();
2725 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
2726
2727 armnn::MeanDescriptor descriptor;
2728 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2729 descriptor.m_KeepDims = flatBufferKeepDims;
2730
2731 auto layerName = GetLayerName(graph, layerIndex);
2732 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
2733
2734 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2735 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2736
2737 RegisterInputSlots(graph, layerIndex, layer);
2738 RegisterOutputSlots(graph, layerIndex, layer);
2739}
2740
Finn Williams85d36712021-01-26 22:30:06 +00002741void IDeserializer::DeserializerImpl::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
Jim Flynn18ce3382019-03-08 11:08:30 +00002742{
2743 CHECK_LAYERS(graph, 0, layerIndex);
2744
Finn Williams85d36712021-01-26 22:30:06 +00002745 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00002746 CHECK_VALID_SIZE(inputs.size(), 1);
2747
Finn Williams85d36712021-01-26 22:30:06 +00002748 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00002749
2750 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
2751 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
2752 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
2753 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
2754 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
2755 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
2756
2757 // Check numViews and numDimensions corresponds to the ones already serialized ...
2758 // numViews == flatBufferViewSizes.size();
2759 // foreach: numDimensions == flatBufferViewSizes[x].size();
2760
2761 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
2762 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2763 {
2764 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
2765 {
2766 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
2767 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
2768 }
2769 }
2770
2771 auto layerName = GetLayerName(graph, layerIndex);
2772 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
2773
2774 // I could have as many outputs as views ...
2775 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2776 {
2777 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
2778 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
2779 }
2780
2781 RegisterInputSlots(graph, layerIndex, layer);
2782 RegisterOutputSlots(graph, layerIndex, layer);
2783}
2784
Finn Williams85d36712021-01-26 22:30:06 +00002785armnn::LstmDescriptor IDeserializer::DeserializerImpl::GetLstmDescriptor(LstmDescriptorPtr lstmDescriptor)
Jim Flynn11af3752019-03-19 17:22:29 +00002786{
2787 armnn::LstmDescriptor desc;
2788
2789 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
2790 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
2791 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
2792 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
2793 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
2794 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01002795 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00002796
2797 return desc;
2798}
2799
/// Deserializes an Lstm layer (3 inputs, 4 outputs) and adds it to the network.
/// Mandatory weight/bias tensors are always read; the CIFG, projection, peephole, and
/// layer-normalization groups are only read when the corresponding descriptor flag is set.
/// NOTE: all ConstTensor locals are deliberately declared at function scope (not inside the
/// conditional blocks) because LstmInputParams stores raw pointers to them, which must stay
/// valid until AddLstmLayer copies the data.
void IDeserializer::DeserializerImpl::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters - present regardless of descriptor flags.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Optional: input-gate parameters, only serialized when CIFG is disabled.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Optional: projection parameters.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Optional: peephole parameters.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Optional: layer-normalization weights; input-gate norm weights additionally
    // require CIFG to be disabled.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (lstmDescriptor.m_LayerNormEnabled)
    {
        if (!lstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    // Four outputs: scratch buffer, output state, cell state, and the layer output.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2915
Finn Williams85d36712021-01-26 22:30:06 +00002916armnn::QLstmDescriptor IDeserializer::DeserializerImpl::GetQLstmDescriptor(QLstmDescriptorPtr qLstmDescriptor)
James Conroy8d333182020-05-13 10:27:58 +01002917{
2918 armnn::QLstmDescriptor desc;
2919
2920 desc.m_CifgEnabled = qLstmDescriptor->cifgEnabled();
2921 desc.m_PeepholeEnabled = qLstmDescriptor->peepholeEnabled();
2922 desc.m_ProjectionEnabled = qLstmDescriptor->projectionEnabled();
2923 desc.m_LayerNormEnabled = qLstmDescriptor->layerNormEnabled();
2924
2925 desc.m_CellClip = qLstmDescriptor->cellClip();
2926 desc.m_ProjectionClip = qLstmDescriptor->projectionClip();
2927
2928 desc.m_InputIntermediateScale = qLstmDescriptor->inputIntermediateScale();
2929 desc.m_ForgetIntermediateScale = qLstmDescriptor->forgetIntermediateScale();
2930 desc.m_CellIntermediateScale = qLstmDescriptor->cellIntermediateScale();
2931 desc.m_OutputIntermediateScale = qLstmDescriptor->outputIntermediateScale();
2932
2933 desc.m_HiddenStateScale = qLstmDescriptor->hiddenStateScale();
2934 desc.m_HiddenStateZeroPoint = qLstmDescriptor->hiddenStateZeroPoint();
2935
2936 return desc;
2937}
2938
/// Deserializes a QLstm (quantized LSTM) layer (3 inputs, 3 outputs) and adds it to the network.
/// Mandatory weight/bias tensors are always read; the CIFG, projection, peephole, and
/// layer-normalization groups are only read when the corresponding descriptor flag is set.
/// NOTE: all ConstTensor locals are deliberately declared at function scope (not inside the
/// conditional blocks) because LstmInputParams stores raw pointers to them, which must stay
/// valid until AddQLstmLayer copies the data.
void IDeserializer::DeserializerImpl::ParseQLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 3);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto qLstmDescriptor = GetQLstmDescriptor(flatBufferDescriptor);
    armnn::LstmInputParams qLstmInputParams;

    // Mandatory params
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    qLstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    qLstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    qLstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    qLstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    qLstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    qLstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    qLstmInputParams.m_ForgetGateBias = &forgetGateBias;
    qLstmInputParams.m_CellBias = &cellBias;
    qLstmInputParams.m_OutputGateBias = &outputGateBias;

    // Optional CIFG params - only serialized when CIFG is disabled.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor inputGateBias;

    if (!qLstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        qLstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        qLstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        qLstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Optional projection params
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;

    if (qLstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        qLstmInputParams.m_ProjectionWeights = &projectionWeights;
        qLstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Optional peephole params; cell-to-input weights additionally require CIFG disabled.
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;

    if (qLstmDescriptor.m_PeepholeEnabled)
    {
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
            qLstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        }

        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        qLstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        qLstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Optional layer norm params; input norm weights additionally require CIFG disabled.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;

    if (qLstmDescriptor.m_LayerNormEnabled)
    {
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            qLstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }

        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        qLstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        qLstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        qLstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());

    // Three outputs: output state, cell state, and the layer output.
    armnn::TensorInfo outputStateOutInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputStateOutInfo);

    armnn::TensorInfo cellStateOutInfo = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(cellStateOutInfo);

    armnn::TensorInfo outputInfo = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3064
Finn Williams85d36712021-01-26 22:30:06 +00003065void IDeserializer::DeserializerImpl::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
Jan Eilers5b01a892019-07-23 09:47:43 +01003066{
3067 CHECK_LAYERS(graph, 0, layerIndex);
3068
3069 auto inputs = GetInputs(graph, layerIndex);
3070 CHECK_VALID_SIZE(inputs.size(), 3);
3071
3072 auto outputs = GetOutputs(graph, layerIndex);
3073 CHECK_VALID_SIZE(outputs.size(), 2);
3074
3075 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
3076 auto layerName = GetLayerName(graph, layerIndex);
3077 auto flatBufferInputParams = flatBufferLayer->inputParams();
3078
3079 armnn::QuantizedLstmInputParams lstmInputParams;
3080
3081 armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
3082 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
3083 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
3084 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
3085 armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
3086 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
3087 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
3088 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
3089 armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
3090 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
3091 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
3092 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
3093
3094 lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
3095 lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
3096 lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
3097 lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
3098 lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
3099 lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
3100 lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
3101 lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
3102 lstmInputParams.m_InputGateBias = &inputGateBias;
3103 lstmInputParams.m_ForgetGateBias = &forgetGateBias;
3104 lstmInputParams.m_CellBias = &cellBias;
3105 lstmInputParams.m_OutputGateBias = &outputGateBias;
3106
3107 IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());
3108
3109 armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
3110 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);
3111
3112 armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
3113 layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);
3114
3115 RegisterInputSlots(graph, layerIndex, layer);
3116 RegisterOutputSlots(graph, layerIndex, layer);
3117}
3118
Finn Williams85d36712021-01-26 22:30:06 +00003119void IDeserializer::DeserializerImpl::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003120{
3121 CHECK_LAYERS(graph, 0, layerIndex);
3122
Finn Williams85d36712021-01-26 22:30:06 +00003123 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003124 CHECK_VALID_SIZE(inputs.size(), 1);
3125
Finn Williams85d36712021-01-26 22:30:06 +00003126 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003127 CHECK_VALID_SIZE(outputs.size(), 1);
3128
3129 const std::string layerName = GetLayerName(graph, layerIndex);
3130 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
3131
3132 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3133 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3134
3135 RegisterInputSlots(graph, layerIndex, layer);
3136 RegisterOutputSlots(graph, layerIndex, layer);
3137}
3138
Finn Williams85d36712021-01-26 22:30:06 +00003139void IDeserializer::DeserializerImpl::ParseMerge(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003140{
3141 CHECK_LAYERS(graph, 0, layerIndex);
3142
Finn Williams85d36712021-01-26 22:30:06 +00003143 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003144 CHECK_VALID_SIZE(inputs.size(), 2);
3145
Finn Williams85d36712021-01-26 22:30:06 +00003146 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003147 CHECK_VALID_SIZE(outputs.size(), 1);
3148
3149 const std::string layerName = GetLayerName(graph, layerIndex);
3150 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
3151
3152 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3153 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3154
3155 RegisterInputSlots(graph, layerIndex, layer);
3156 RegisterOutputSlots(graph, layerIndex, layer);
3157}
3158
Finn Williams85d36712021-01-26 22:30:06 +00003159void IDeserializer::DeserializerImpl::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganeff363d2019-04-05 15:25:46 +01003160{
3161 CHECK_LAYERS(graph, 0, layerIndex);
3162 auto inputs = GetInputs(graph, layerIndex);
3163 CHECK_LOCATION();
3164 CHECK_VALID_SIZE(inputs.size(), 2);
3165
3166 auto outputs = GetOutputs(graph, layerIndex);
3167 CHECK_VALID_SIZE(outputs.size(), 2);
3168
3169 auto layerName = GetLayerName(graph, layerIndex);
3170 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
3171
3172 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
3173 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
3174
3175 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
3176 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
3177
3178 RegisterInputSlots(graph, layerIndex, layer);
3179 RegisterOutputSlots(graph, layerIndex, layer);
3180}
3181
Finn Williams85d36712021-01-26 22:30:06 +00003182void IDeserializer::DeserializerImpl::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01003183{
3184 CHECK_LAYERS(graph, 0, layerIndex);
3185 auto inputs = GetInputs(graph, layerIndex);
3186 CHECK_LOCATION();
3187 CHECK_VALID_SIZE(inputs.size(), 2);
3188
3189 auto outputs = GetOutputs(graph, layerIndex);
3190 CHECK_VALID_SIZE(outputs.size(), 1);
3191
3192 auto layerName = GetLayerName(graph, layerIndex);
3193 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
3194
3195 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3196 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3197
3198 RegisterInputSlots(graph, layerIndex, layer);
3199 RegisterOutputSlots(graph, layerIndex, layer);
3200}
3201
Finn Williams85d36712021-01-26 22:30:06 +00003202void IDeserializer::DeserializerImpl::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
Mike Kellyc9ea45a2020-02-28 18:11:58 +00003203{
3204 CHECK_LAYERS(graph, 0, layerIndex);
3205
3206 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
3207
3208 auto inputs = GetInputs(graph, layerIndex);
3209 CHECK_VALID_SIZE(inputs.size(), 1);
3210
3211 auto outputs = GetOutputs(graph, layerIndex);
3212 CHECK_VALID_SIZE(outputs.size(), 1);
3213 auto outputInfo = ToTensorInfo(outputs[0]);
3214
3215 auto layerName = GetLayerName(graph, layerIndex);
3216 const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
3217
3218 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
3219 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
3220
3221 RegisterInputSlots(graph, layerIndex, layer);
3222 RegisterOutputSlots(graph, layerIndex, layer);
3223}
3224
Finn Williams85d36712021-01-26 22:30:06 +00003225void IDeserializer::DeserializerImpl::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01003226{
3227 CHECK_LAYERS(graph, 0, layerIndex);
3228
3229 auto inputs = GetInputs(graph, layerIndex);
3230 CHECK_VALID_SIZE(inputs.size(), 1);
3231
3232 auto outputs = GetOutputs(graph, layerIndex);
3233 CHECK_VALID_SIZE(outputs.size(), 1);
3234
3235 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
3236 auto layerName = GetLayerName(graph, layerIndex);
3237 auto serializerDescriptor = serializerLayer->descriptor();
3238
3239 armnn::TransposeConvolution2dDescriptor descriptor;
3240 descriptor.m_PadLeft = serializerDescriptor->padLeft();
3241 descriptor.m_PadRight = serializerDescriptor->padRight();
3242 descriptor.m_PadTop = serializerDescriptor->padTop();
3243 descriptor.m_PadBottom = serializerDescriptor->padBottom();
3244 descriptor.m_StrideX = serializerDescriptor->strideX();
3245 descriptor.m_StrideY = serializerDescriptor->strideY();;
3246 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
3247 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
3248
3249 // weights & biases
3250 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
3251 armnn::Optional<armnn::ConstTensor> optionalBiases;
3252 if (descriptor.m_BiasEnabled)
3253 {
3254 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
3255 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
3256 }
3257
3258 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
3259 weights,
3260 optionalBiases,
3261 layerName.c_str());
3262
3263 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3264 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3265
3266 RegisterInputSlots(graph, layerIndex, layer);
3267 RegisterOutputSlots(graph, layerIndex, layer);
3268}
3269
Finn Williams85d36712021-01-26 22:30:06 +00003270void IDeserializer::DeserializerImpl::ParseStack(GraphPtr graph, unsigned int layerIndex)
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003271{
3272 CHECK_LAYERS(graph, 0, layerIndex);
3273 auto inputs = GetInputs(graph, layerIndex);
3274
3275 auto outputs = GetOutputs(graph, layerIndex);
3276 CHECK_VALID_SIZE(outputs.size(), 1);
3277
3278 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
3279 unsigned int axis = flatBufferDescriptor->axis();
3280 unsigned int numInputs = flatBufferDescriptor->numInputs();
3281 CHECK_VALID_SIZE(inputs.size(), numInputs);
3282
3283 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
3284 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
3285 flatBufferInputShape->begin() + flatBufferInputShape->size());
3286
3287 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
3288 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
3289
3290 for (unsigned int i=0; i<inputs.size(); ++i)
3291 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01003292 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003293 if (descriptor.m_InputShape != inputShape)
3294 {
3295 std::stringstream ss;
3296 ss << "Shape of input "
3297 << i
3298 << " "
3299 << inputShape
3300 << " does not equal defined input shape "
3301 << descriptor.m_InputShape
3302 << ": "
3303 << CHECK_LOCATION().AsString();
3304 throw ParseException(ss.str());
3305 }
3306 }
3307
3308 auto layerName = GetLayerName(graph, layerIndex);
3309 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
3310
3311 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3312 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3313
3314 RegisterInputSlots(graph, layerIndex, layer);
3315 RegisterOutputSlots(graph, layerIndex, layer);
3316}
3317
Finn Williams85d36712021-01-26 22:30:06 +00003318void IDeserializer::DeserializerImpl::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar85121a22019-10-23 10:41:35 +01003319{
3320 CHECK_LAYERS(graph, 0, layerIndex);
3321
3322 auto inputs = GetInputs(graph, layerIndex);
3323 auto outputs = GetOutputs(graph, layerIndex);
3324
3325 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
3326 auto fbDescriptor = fbLayer->descriptor();
3327
3328 armnn::StandInDescriptor descriptor;
3329 descriptor.m_NumInputs = fbDescriptor->numInputs();
3330 descriptor.m_NumOutputs = fbDescriptor->numOutputs();
3331
3332 CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
3333 CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);
3334
3335 const std::string layerName = GetLayerName(graph, layerIndex);
3336 armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());
3337
3338 for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
3339 {
3340 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
3341 layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
3342 }
3343
3344 RegisterInputSlots(graph, layerIndex, layer);
3345 RegisterOutputSlots(graph, layerIndex, layer);
3346}
3347
Narumol Prangnawarata0162e12021-07-23 14:47:49 +01003348armnn::UnidirectionalSequenceLstmDescriptor IDeserializer::DeserializerImpl::GetUnidirectionalSequenceLstmDescriptor(
3349 UnidirectionalSequenceLstmDescriptorPtr descriptor)
3350{
3351 armnn::UnidirectionalSequenceLstmDescriptor desc;
3352
3353 desc.m_ActivationFunc = descriptor->activationFunc();
3354 desc.m_ClippingThresCell = descriptor->clippingThresCell();
3355 desc.m_ClippingThresProj = descriptor->clippingThresProj();
3356 desc.m_CifgEnabled = descriptor->cifgEnabled();
3357 desc.m_PeepholeEnabled = descriptor->peepholeEnabled();
3358 desc.m_ProjectionEnabled = descriptor->projectionEnabled();
3359 desc.m_LayerNormEnabled = descriptor->layerNormEnabled();
3360 desc.m_TimeMajor = descriptor->timeMajor();
3361
3362 return desc;
3363}
3364
/// Deserializes a UnidirectionalSequenceLstm layer and adds the equivalent
/// armnn layer to m_Network.
///
/// The mandatory gate tensors are always read; the remaining tensors are read
/// only when the corresponding descriptor flag enables them:
///   - input-gate tensors      when CIFG is disabled
///   - projection tensors      when projection is enabled
///   - peephole tensors        when peephole is enabled
///   - layer-norm tensors      when layer normalisation is enabled
///
/// @param graph      Deserialized flatbuffer graph.
/// @param layerIndex Index of the layer to parse within the graph.
void IDeserializer::DeserializerImpl::ParseUnidirectionalSequenceLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    // The serialized layer must have exactly 3 input slots and 1 output slot.
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto descriptor = GetUnidirectionalSequenceLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    // Mandatory tensors: forget/cell/output gate weights and biases.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    // NOTE(review): the params struct holds pointers into this stack frame, so
    // AddUnidirectionalSequenceLstmLayer is presumably copying the tensors
    // before this function returns — confirm.
    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate tensors exist only when CIFG (coupled input-forget gate) is
    // disabled. These locals are declared outside the 'if' so they outlive it
    // and the pointers stored in lstmInputParams stay valid until the Add call.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!descriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;

        // cellToInputWeights additionally requires peephole to be enabled.
        if (descriptor.m_PeepholeEnabled)
        {
            cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
            lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        }
    }

    // Projection tensors, read only when projection is enabled.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (descriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Peephole tensors for the forget and output gates.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (descriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Layer-normalisation tensors; the input-gate norm weights are skipped
    // when CIFG is enabled (no input gate to normalise).
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (descriptor.m_LayerNormEnabled)
    {
        if (!descriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddUnidirectionalSequenceLstmLayer(descriptor,
                                                                             lstmInputParams,
                                                                             layerName.c_str());

    // Single output slot carries the deserialized tensor info.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3477
Derek Lamberti0028d1b2019-02-20 13:57:42 +00003478} // namespace armnnDeserializer