//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "Deserializer.hpp"

#include <armnn/Descriptors.hpp>
#include <armnn/Exceptions.hpp>
#include <armnn/TypesUtils.hpp>
#include <armnn/LstmParams.hpp>
#include <armnn/QuantizedLstmParams.hpp>

#include <armnnUtils/Permute.hpp>
#include <armnnUtils/Transpose.hpp>
#include <armnn/utility/Assert.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/NumericCast.hpp>

#include <ParserHelper.hpp>
#include <VerificationHelpers.hpp>

#include <fmt/format.h>

#include <fstream>
#include <algorithm>
#include <limits>
#include <numeric>

using armnn::ParseException;
using namespace armnn;
using namespace armnnSerializer;

namespace armnnDeserializer
{

IDeserializer::IDeserializer() : pDeserializerImpl(new DeserializerImpl()){}

IDeserializer::~IDeserializer() = default;

IDeserializer *IDeserializer::CreateRaw()
{
    return new IDeserializer();
}

IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

void IDeserializer::Destroy(IDeserializer *parser)
{
    delete parser;
}

armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(const std::vector<uint8_t> &binaryContent)
{
    return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
}

armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(std::istream &binaryContent)
{
    return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
}

BindingPointInfo IDeserializer::GetNetworkInputBindingInfo(unsigned int layerId, const std::string &name) const
{
    return pDeserializerImpl->GetNetworkInputBindingInfo(layerId, name);
}

BindingPointInfo IDeserializer::GetNetworkOutputBindingInfo(unsigned int layerId, const std::string &name) const
{
    return pDeserializerImpl->GetNetworkOutputBindingInfo(layerId, name);
}

namespace
{

const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();

void CheckGraph(const GraphPtr& graph,
                unsigned int layersIndex,
                const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
                                         "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(fmt::format("{0} was called with an invalid layers index. layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
}

void CheckLayers(const GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
                                         "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(fmt::format("{0} was called with an invalid layers index. "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    else if (layerIndex >= graph->layers()[layersIndex].size()
             && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(fmt::format("{0} was called with an invalid layer index. "
                                         "layers:{1} layer:{2} at {3}",
                                         location.m_Function,
                                         layersIndex,
                                         layerIndex,
                                         location.FileLine()));
    }
}

void CheckTensorPtr(TensorRawPtr rawPtr,
                    const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with a null tensor pointer. at {1}",
                                         location.m_Function,
                                         location.FileLine()));
    }
}

void CheckConstTensorPtr(ConstTensorRawPtr rawPtr,
                         const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with a null const tensor pointer. at {1}",
                                         location.m_Function,
                                         location.FileLine()));
    }
}

void CheckConstTensorSize(const unsigned int constTensorSize,
                          const unsigned int tensorSize,
                          const CheckLocation& location)
{
    if (constTensorSize != tensorSize)
    {
        throw ParseException(fmt::format("{0} wrong number of components supplied to tensor. at:{1}",
                                         location.m_Function,
                                         location.FileLine()));
    }
}

#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
}

bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
{
    const unsigned int actualSize = actual.GetNumDimensions();
    if (actualSize != expected.size())
    {
        return false;
    }

    for (unsigned int i = 0u; i < actualSize; i++)
    {
        if (actual[i] != static_cast<unsigned int>(expected[i]))
        {
            return false;
        }
    }

    return true;
}

IDeserializer::DeserializerImpl::DeserializerImpl()
: m_Network(nullptr, nullptr),
// May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &IDeserializer::DeserializerImpl::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer] = &DeserializerImpl::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer] = &DeserializerImpl::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &DeserializerImpl::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer] = &DeserializerImpl::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &DeserializerImpl::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &DeserializerImpl::ParseBatchNormalization;
    m_ParserFunctions[Layer_ComparisonLayer] = &DeserializerImpl::ParseComparison;
    m_ParserFunctions[Layer_ConcatLayer] = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer] = &DeserializerImpl::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &DeserializerImpl::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthToSpaceLayer] = &DeserializerImpl::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &DeserializerImpl::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &DeserializerImpl::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &DeserializerImpl::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &DeserializerImpl::ParseDivision;
    m_ParserFunctions[Layer_ElementwiseUnaryLayer] = &DeserializerImpl::ParseElementwiseUnary;
    m_ParserFunctions[Layer_EqualLayer] = &DeserializerImpl::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &DeserializerImpl::ParseFullyConnected;
    m_ParserFunctions[Layer_FillLayer] = &DeserializerImpl::ParseFill;
    m_ParserFunctions[Layer_FloorLayer] = &DeserializerImpl::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &DeserializerImpl::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer] = &DeserializerImpl::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer] = &DeserializerImpl::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &DeserializerImpl::ParseL2Normalization;
    m_ParserFunctions[Layer_LogicalBinaryLayer] = &DeserializerImpl::ParseLogicalBinary;
    m_ParserFunctions[Layer_LogSoftmaxLayer] = &DeserializerImpl::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer] = &DeserializerImpl::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &DeserializerImpl::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &DeserializerImpl::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &DeserializerImpl::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer] = &DeserializerImpl::ParseMerge;
    m_ParserFunctions[Layer_MergerLayer] = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer] = &DeserializerImpl::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &DeserializerImpl::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &DeserializerImpl::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &DeserializerImpl::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &DeserializerImpl::ParsePooling2d;
    m_ParserFunctions[Layer_PreluLayer] = &DeserializerImpl::ParsePrelu;
    m_ParserFunctions[Layer_QLstmLayer] = &DeserializerImpl::ParseQLstm;
    m_ParserFunctions[Layer_QuantizeLayer] = &DeserializerImpl::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer] = &DeserializerImpl::ParseQuantizedLstm;
    m_ParserFunctions[Layer_RankLayer] = &DeserializerImpl::ParseRank;
    m_ParserFunctions[Layer_ReduceLayer] = &DeserializerImpl::ParseReduce;
    m_ParserFunctions[Layer_ReshapeLayer] = &DeserializerImpl::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &DeserializerImpl::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer] = &DeserializerImpl::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer] = &DeserializerImpl::ParseRsqrt;
    m_ParserFunctions[Layer_SliceLayer] = &DeserializerImpl::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer] = &DeserializerImpl::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &DeserializerImpl::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer] = &DeserializerImpl::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer] = &DeserializerImpl::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer] = &DeserializerImpl::ParseStack;
    m_ParserFunctions[Layer_StandInLayer] = &DeserializerImpl::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer] = &DeserializerImpl::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &DeserializerImpl::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer] = &DeserializerImpl::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &DeserializerImpl::ParseTransposeConvolution2d;
    m_ParserFunctions[Layer_TransposeLayer] = &DeserializerImpl::ParseTranspose;
}

LayerBaseRawPtr IDeserializer::DeserializerImpl::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_ComparisonLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_ElementwiseUnaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FillLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FillLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_InstanceNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LogicalBinaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer()->base();
        case Layer::Layer_LogSoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_RankLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
        case Layer::Layer_ReduceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReduceLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StandInLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_TransposeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(fmt::format("Layer type {} not recognized", layerType));
    }
}

std::string IDeserializer::DeserializerImpl::GetLayerName(const GraphPtr& graph, unsigned int index)
{
    auto layer = GetBaseLayer(graph, index);
    assert(layer);
    return layer->layerName()->str();
}

int32_t IDeserializer::DeserializerImpl::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    if (layerType == Layer::Layer_InputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
    }
    else if ( layerType == Layer::Layer_OutputLayer )
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
    }
    return 0;
}

armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
{
    switch (dataLayout)
    {
        case armnnSerializer::DataLayout::DataLayout_NHWC:
            return armnn::DataLayout::NHWC;
        case armnnSerializer::DataLayout::DataLayout_NCHW:
        default:
            return armnn::DataLayout::NCHW;
    }
}

armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        case armnnSerializer::ActivationFunction_Elu:
            return armnn::ActivationFunction::Elu;
        case armnnSerializer::ActivationFunction_HardSwish:
            return armnn::ActivationFunction::HardSwish;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}

armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
{
    switch (function)
    {
        case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
            return armnn::ArgMinMaxFunction::Max;
        case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
        default:
            return armnn::ArgMinMaxFunction::Min;
    }
}

armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
            return armnn::ComparisonOperation::Equal;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
            return armnn::ComparisonOperation::Greater;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
            return armnn::ComparisonOperation::GreaterOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
            return armnn::ComparisonOperation::Less;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
            return armnn::ComparisonOperation::LessOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
        default:
            return armnn::ComparisonOperation::NotEqual;
    }
}

armnn::ReduceOperation ToReduceOperation(armnnSerializer::ReduceOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::ReduceOperation::ReduceOperation_Sum:
            return armnn::ReduceOperation::Sum;
        case armnnSerializer::ReduceOperation::ReduceOperation_Max:
            return armnn::ReduceOperation::Max;
        case armnnSerializer::ReduceOperation::ReduceOperation_Mean:
            return armnn::ReduceOperation::Mean;
        case armnnSerializer::ReduceOperation::ReduceOperation_Min:
            return armnn::ReduceOperation::Min;
        default:
            return armnn::ReduceOperation::Sum;
    }
}

armnn::LogicalBinaryOperation ToLogicalBinaryOperation(armnnSerializer::LogicalBinaryOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalAnd:
            return armnn::LogicalBinaryOperation::LogicalAnd;
        case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalOr:
            return armnn::LogicalBinaryOperation::LogicalOr;
        default:
            throw armnn::InvalidArgumentException("Logical Binary operation unknown");
    }
}

armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
            return armnn::UnaryOperation::Abs;
        case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
            return armnn::UnaryOperation::Rsqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
            return armnn::UnaryOperation::Sqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
            return armnn::UnaryOperation::Exp;
        case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
            return armnn::UnaryOperation::Neg;
        case armnnSerializer::UnaryOperation::UnaryOperation_LogicalNot:
            return armnn::UnaryOperation::LogicalNot;
        default:
            throw armnn::InvalidArgumentException("Unary operation unknown");
    }
}

armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
{
    switch (method)
    {
        case armnnSerializer::ResizeMethod_NearestNeighbor:
            return armnn::ResizeMethod::NearestNeighbor;
        case armnnSerializer::ResizeMethod_Bilinear:
            return armnn::ResizeMethod::Bilinear;
        default:
            return armnn::ResizeMethod::NearestNeighbor;
    }
}

armnn::TensorInfo ToTensorInfo(TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QAsymmS8:
            type = armnn::DataType::QAsymmS8;
            break;
        case DataType_QSymmS8:
            type = armnn::DataType::QSymmS8;
            break;
        case DataType_QuantisedAsymm8:
        case DataType_QAsymmU8:
            type = armnn::DataType::QAsymmU8;
            break;
        case DataType_QSymmS16:
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QSymmS16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
                                             tensorPtr->dataType(),
                                             EnumNameDataType(tensorPtr->dataType()),
                                             location.AsString()));
        }
    }

    if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::Scalar))
    {
        float quantizationScale = tensorPtr->quantizationScale();
        int32_t quantizationOffset = tensorPtr->quantizationOffset();

        return armnn::TensorInfo(armnn::TensorShape{armnn::Dimensionality::Scalar},
                                 type,
                                 quantizationScale,
                                 quantizationOffset);
    }

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    auto quantizationScales = tensorPtr->quantizationScales();

    if (quantizationScales)
    {
        unsigned int quantizationScalesSize = quantizationScales->size();
        std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
        unsigned int quantizationDim = tensorPtr->quantizationDim();
        armnn::TensorInfo result(size,
                                 outputDims.data(),
                                 type,
                                 scales,
                                 quantizationDim);
        return result;
    }

    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}

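// Note: the ConstTensor returned below wraps the flatbuffer-owned payload rather than copying it,
// so it remains valid only while the SerializedGraph buffer it was read from is kept alive.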
armnn::ConstTensor ToConstTensor(ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
                                             constTensorPtr->data_type(),
                                             EnumNameConstTensorData(constTensorPtr->data_type()),
                                             location.AsString()));
        }
    }
}

TensorRawPtrVector IDeserializer::DeserializerImpl::GetInputs(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
                       (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}

TensorRawPtrVector IDeserializer::DeserializerImpl::GetOutputs(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numOutputs = layer->outputSlots()->size();

    TensorRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        result[i] = layer->outputSlots()->Get(i)->tensorInfo();
    }
    return result;
}

void IDeserializer::DeserializerImpl::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(fmt::format("Layer not supported. layerIndex: {0} "
                                     "layerName: {1} / {2}",
                                     layerIndex,
                                     layerName,
                                     CHECK_LOCATION().AsString()));
}

void IDeserializer::DeserializerImpl::ResetParser()
{
    m_Network = armnn::INetworkPtr(nullptr, nullptr);
    m_InputBindings.clear();
    m_OutputBindings.clear();
}


INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph(graph);
}

armnn::INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
    return CreateNetworkFromGraph(graph);
}

GraphPtr IDeserializer::DeserializerImpl::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(fmt::format("Invalid (null) binary content {}",
                                                   CHECK_LOCATION().AsString()));
    }
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(fmt::format("Buffer doesn't conform to the expected Armnn "
                                         "flatbuffers format. size:{0} {1}",
                                         len,
                                         CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}

INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    ARMNN_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    return std::move(m_Network);
}

BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkInputBindingInfo(unsigned int layerIndex,
                                                                             const std::string& name) const
{
    IgnoreUnused(layerIndex);
    for (auto inputBinding : m_InputBindings)
    {
        if (inputBinding.first == name)
        {
            return inputBinding.second;
        }
    }
    throw ParseException(fmt::format("No input binding found for layer:{0} / {1}",
                                     name,
                                     CHECK_LOCATION().AsString()));
}

BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkOutputBindingInfo(unsigned int layerIndex,
                                                                              const std::string& name) const
{
    IgnoreUnused(layerIndex);
    for (auto outputBinding : m_OutputBindings)
    {
        if (outputBinding.first == name)
        {
            return outputBinding.second;
        }
    }
    throw ParseException(fmt::format("No output binding found for layer:{0} / {1}",
                                     name,
                                     CHECK_LOCATION().AsString()));
}

unsigned int IDeserializer::DeserializerImpl::GetInputLayerInVector(GraphPtr graph, int targetId)
{
    for (unsigned int i = 0; i < graph->layers()->size(); i++)
    {
        auto layer = graph->layers()->Get(i);
        if (layer->layer_type() == Layer::Layer_InputLayer)
        {
            auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
            if (layerBindingId == targetId)
            {
                return i;
            }
        }
    }
    throw ParseException("Input layer with given layerBindingId not found");
}

unsigned int IDeserializer::DeserializerImpl::GetOutputLayerInVector(GraphPtr graph, int targetId)
{
    for (unsigned int i = 0; i < graph->layers()->size(); i++)
    {
        auto layer = graph->layers()->Get(i);
        if (layer->layer_type() == Layer::Layer_OutputLayer)
        {
            auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
            if (layerBindingId == targetId)
            {
                return i;
            }
        }
    }
    throw ParseException("Output layer with given layerBindingId not found");
}

unsigned int IDeserializer::DeserializerImpl::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
{
    for (unsigned int i = 0; i < graph->layers()->size(); i++)
    {
        LayerBaseRawPtr layer = GetBaseLayer(graph, i);
        if (layer->index() == targetIndex)
        {
            return i;
        }
    }
    throw ParseException("Layer with given index not found");
}

IDeserializer::DeserializerImpl::FeatureVersions IDeserializer::DeserializerImpl::GetFeatureVersions(GraphPtr graph)
{
    IDeserializer::DeserializerImpl::FeatureVersions versions;

    if (graph->featureVersions())
    {
        versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
    }

    return versions;
}

void IDeserializer::DeserializerImpl::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numInputs = graph->inputIds()->size();
    m_InputBindings.clear();
    m_InputBindings.reserve(numInputs);

    for (unsigned int i = 0; i < numInputs; i++)
    {
        unsigned int inputLayerIndex = 0xFFFFFFFF;
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            const unsigned int inputId = armnn::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
            inputLayerIndex = GetLayerIndexInVector(graph, inputId);
        }
        else
        {
            const int inputId = graph->inputIds()->Get(i);
            inputLayerIndex = GetInputLayerInVector(graph, inputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");

        IConnectableLayer* inputLayer =
            m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());

        const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
        inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
        RegisterOutputSlots(graph, inputLayerIndex, inputLayer);

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}

void IDeserializer::DeserializerImpl::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numOutputs = graph->outputIds()->size();
    m_OutputBindings.clear();
    m_OutputBindings.reserve(numOutputs);

    for (unsigned int i = 0; i < numOutputs; i++)
    {
        unsigned int outputLayerIndex = 0xFFFFFFFF;
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            const unsigned int outputId = armnn::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
            outputLayerIndex = GetLayerIndexInVector(graph, outputId);
        }
        else
        {
            const int outputId = graph->outputIds()->Get(i);
            outputLayerIndex = GetOutputLayerInVector(graph, outputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Output has no name.");

        IConnectableLayer* outputLayer =
            m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());

        RegisterInputSlots(graph, outputLayerIndex, outputLayer);

        unsigned int sourceLayerIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
        LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(sourceBaseLayer->outputSlots()->Get(i)->tensorInfo());

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}

void IDeserializer::DeserializerImpl::RegisterOutputSlots(GraphPtr graph,
                                                          uint32_t layerIndex,
                                                          IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    ARMNN_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
    if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(fmt::format("The number of outputslots ({0}) does not match the number expected ({1})"
                                         " for layer index: {2} {3}",
                                         baseLayer->outputSlots()->size(),
                                         layer->GetNumOutputSlots(),
                                         layerIndex,
                                         CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
    {
        const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
        armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
        // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
        RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
    }
}

void IDeserializer::DeserializerImpl::RegisterInputSlots(GraphPtr graph,
                                                         uint32_t layerIndex,
                                                         armnn::IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    ARMNN_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
    if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
    {
        throw ParseException(fmt::format("The number of inputslots ({0}) does not match the number expected ({1})"
                                         " for layer index:{2} {3}",
                                         baseLayer->inputSlots()->size(),
                                         layer->GetNumInputSlots(),
                                         layerIndex,
                                         CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
    {
        auto fbInputSlot = baseLayer->inputSlots()->Get(i);
        auto fbConnection = fbInputSlot->connection();
        armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
        RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
    }
}

Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001045 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001046 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +00001047{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001048 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +00001049 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001050 m_GraphConnections[sourceLayerIndex] = Connections();
1051 }
1052
1053 Connections& connections = m_GraphConnections[sourceLayerIndex];
1054 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
1055 {
1056 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +00001057 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001058 else
1059 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001060 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001061 }
1062}
Kevin May43a799c2019-02-08 16:31:42 +00001063
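// Stores the producing output slot in m_GraphConnections. Each output slot index may only be
// registered once; a second registration indicates a malformed graph and throws.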
Finn Williams85d36712021-01-26 22:30:06 +00001064void IDeserializer::DeserializerImpl::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001065 uint32_t outputSlotIndex,
1066 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001067{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001068 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1069 {
1070 m_GraphConnections[sourceLayerIndex] = Connections();
1071 }
1072
1073 Connections& connections = m_GraphConnections[sourceLayerIndex];
1074 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1075 {
1076 throw ParseException("Same output slot index processed twice");
1077 }
1078
1079 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +00001080}
1081
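// Abs layers in the serialized graph are recreated as ElementwiseUnary layers using
// UnaryOperation::Abs.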
Finn Williams85d36712021-01-26 22:30:06 +00001082void IDeserializer::DeserializerImpl::ParseAbs(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001083{
1084 CHECK_LAYERS(graph, 0, layerIndex);
1085 auto inputs = GetInputs(graph, layerIndex);
1086 CHECK_LOCATION();
1087 CHECK_VALID_SIZE(inputs.size(), 1);
1088
1089 auto outputs = GetOutputs(graph, layerIndex);
1090 CHECK_VALID_SIZE(outputs.size(), 1);
1091
1092 auto layerName = GetLayerName(graph, layerIndex);
1093
josh minor4a3c6102020-01-06 16:40:46 -06001094 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
1095 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001096 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1097 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1098
1099 RegisterInputSlots(graph, layerIndex, layer);
1100 RegisterOutputSlots(graph, layerIndex, layer);
1101}
1102
Finn Williams85d36712021-01-26 22:30:06 +00001103void IDeserializer::DeserializerImpl::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +00001104{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001105 CHECK_LAYERS(graph, 0, layerIndex);
1106 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001107 CHECK_LOCATION();
1108 CHECK_VALID_SIZE(inputs.size(), 1);
1109
Derek Lamberti8ddae332019-02-21 16:29:43 +00001110 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001111 CHECK_VALID_SIZE(outputs.size(), 1);
1112
Derek Lamberti8ddae332019-02-21 16:29:43 +00001113 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001114 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001115 auto serializerDescriptor = serializerLayer->descriptor();
1116
1117 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001118 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +00001119 descriptor.m_A = serializerDescriptor->a();
1120 descriptor.m_B = serializerDescriptor->b();
1121
1122 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
1123 layerName.c_str());
1124 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1125 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1126
Derek Lamberti8ddae332019-02-21 16:29:43 +00001127 RegisterInputSlots(graph, layerIndex, layer);
1128 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +00001129}
1130
Finn Williams85d36712021-01-26 22:30:06 +00001131void IDeserializer::DeserializerImpl::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001132{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001133 CHECK_LAYERS(graph, 0, layerIndex);
1134 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001135 CHECK_LOCATION();
1136 CHECK_VALID_SIZE(inputs.size(), 2);
1137
Derek Lamberti8ddae332019-02-21 16:29:43 +00001138 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001139 CHECK_VALID_SIZE(outputs.size(), 1);
1140
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001141 auto layerName = GetLayerName(graph, layerIndex);
1142 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001143
1144 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1145 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1146
Derek Lamberti8ddae332019-02-21 16:29:43 +00001147 RegisterInputSlots(graph, layerIndex, layer);
1148 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001149}
1150
Finn Williams85d36712021-01-26 22:30:06 +00001151void IDeserializer::DeserializerImpl::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001152{
1153 CHECK_LAYERS(graph, 0, layerIndex);
1154 auto inputs = GetInputs(graph, layerIndex);
1155 CHECK_LOCATION();
1156 CHECK_VALID_SIZE(inputs.size(), 1);
1157
1158 auto outputs = GetOutputs(graph, layerIndex);
1159 CHECK_VALID_SIZE(outputs.size(), 1);
1160
1161 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1162 auto serializerDescriptor = serializerLayer->descriptor();
1163
1164 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001165 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001166 descriptor.m_Axis = serializerDescriptor->axis();
1167 auto layerName = GetLayerName(graph, layerIndex);
1168 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1169
1170 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1171 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1172
1173 RegisterInputSlots(graph, layerIndex, layer);
1174 RegisterOutputSlots(graph, layerIndex, layer);
1175}
1176
Finn Williams85d36712021-01-26 22:30:06 +00001177void IDeserializer::DeserializerImpl::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001178{
1179 CHECK_LAYERS(graph, 0, layerIndex);
1180
Finn Williams85d36712021-01-26 22:30:06 +00001181 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001182 CHECK_VALID_SIZE(inputs.size(), 1);
1183
Finn Williams85d36712021-01-26 22:30:06 +00001184 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001185 CHECK_VALID_SIZE(outputs.size(), 1);
1186
1187 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1188 auto flatBufferCrops = flatBufferDescriptor->crops();
1189 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1190
1191 if (flatBufferCrops->Length() % 2 != 0)
1192 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001193 throw ParseException(fmt::format("The size of crops must be divisible by 2 {}", CHECK_LOCATION().AsString()));
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001194 }
1195
1196 std::vector<std::pair<unsigned int, unsigned int>> crops;
1197 crops.reserve(flatBufferCrops->Length() / 2);
1198    for (unsigned int i = 0; i + 1 < flatBufferCrops->Length(); i += 2)
1199 {
1200 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1201 }
1202
1203 armnn::BatchToSpaceNdDescriptor descriptor;
1204 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1205 descriptor.m_BlockShape =
1206 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1207 descriptor.m_Crops = crops;
1208
1209 auto layerName = GetLayerName(graph, layerIndex);
1210 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1211
1212 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1213 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1214
1215 RegisterInputSlots(graph, layerIndex, layer);
1216 RegisterOutputSlots(graph, layerIndex, layer);
1217}
1218
Finn Williams85d36712021-01-26 22:30:06 +00001219void IDeserializer::DeserializerImpl::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
ruoyan018e7fa232019-02-28 15:09:07 +00001220{
1221 CHECK_LAYERS(graph, 0, layerIndex);
1222
1223 auto inputs = GetInputs(graph, layerIndex);
1224 CHECK_VALID_SIZE(inputs.size(), 1);
1225
1226 auto outputs = GetOutputs(graph, layerIndex);
1227 CHECK_VALID_SIZE(outputs.size(), 1);
1228 auto outputInfo = ToTensorInfo(outputs[0]);
1229
ruoyan015c7ab052019-03-04 14:48:02 +00001230 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001231
1232 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1233 auto serializerDescriptor = serializerLayer->descriptor();
1234
1235 armnn::BatchNormalizationDescriptor descriptor;
1236 descriptor.m_Eps = serializerDescriptor->eps();
1237 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1238
1239 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1240 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1241 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1242 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1243
1244 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1245 mean,
1246 variance,
1247 beta,
1248 gamma,
1249 layerName.c_str());
1250 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1251
1252 RegisterInputSlots(graph, layerIndex, layer);
1253 RegisterOutputSlots(graph, layerIndex, layer);
1254}
1255
Finn Williams85d36712021-01-26 22:30:06 +00001256void IDeserializer::DeserializerImpl::ParseConstant(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy76277882019-02-26 08:29:54 +00001257{
1258 CHECK_LAYERS(graph, 0, layerIndex);
1259 CHECK_LOCATION();
1260
1261 auto outputs = GetOutputs(graph, layerIndex);
1262 CHECK_VALID_SIZE(outputs.size(), 1);
1263
1264 auto layerName = GetLayerName(graph, layerIndex);
1265
1266 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
1267 auto serializerInput = serializerLayer->input();
1268
1269 armnn::ConstTensor input = ToConstTensor(serializerInput);
1270
1271 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
1272
1273 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1274 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1275
1276 RegisterOutputSlots(graph, layerIndex, layer);
1277}
1278
Finn Williams85d36712021-01-26 22:30:06 +00001279void IDeserializer::DeserializerImpl::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001280{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001281 CHECK_LAYERS(graph, 0, layerIndex);
1282 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001283 CHECK_LOCATION();
1284 CHECK_VALID_SIZE(inputs.size(), 1);
1285
Derek Lamberti8ddae332019-02-21 16:29:43 +00001286 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001287 CHECK_VALID_SIZE(outputs.size(), 1);
1288
Derek Lamberti8ddae332019-02-21 16:29:43 +00001289 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001290 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001291 auto serializerDescriptor = serializerLayer->descriptor();
1292
1293 armnn::Convolution2dDescriptor descriptor;
1294 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1295 descriptor.m_PadRight = serializerDescriptor->padRight();
1296 descriptor.m_PadTop = serializerDescriptor->padTop();
1297 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1298 descriptor.m_StrideX = serializerDescriptor->strideX();
1299    descriptor.m_StrideY = serializerDescriptor->strideY();
Matthew Benthamacad04e2019-05-13 10:02:45 +01001300    descriptor.m_DilationX = serializerDescriptor->dilationX();
1301    descriptor.m_DilationY = serializerDescriptor->dilationY();
Mike Kellya0766c32019-02-19 17:22:07 +00001302    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
1303 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1304
1305 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1306 armnn::ConstTensor biases;
1307
Matteo Martincighfc598e12019-05-14 10:36:13 +01001308 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001309 if (descriptor.m_BiasEnabled)
1310 {
1311 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001312 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001313 }
1314 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1315 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001316 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001317 layerName.c_str());
1318 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1319 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1320
Derek Lamberti8ddae332019-02-21 16:29:43 +00001321 RegisterInputSlots(graph, layerIndex, layer);
1322 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001323}
1324
Finn Williams85d36712021-01-26 22:30:06 +00001325void IDeserializer::DeserializerImpl::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001326{
1327 CHECK_LAYERS(graph, 0, layerIndex);
1328
1329 auto inputs = GetInputs(graph, layerIndex);
1330 CHECK_VALID_SIZE(inputs.size(), 1);
1331
1332 auto outputs = GetOutputs(graph, layerIndex);
1333 CHECK_VALID_SIZE(outputs.size(), 1);
1334
1335 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1336
1337 armnn::DepthToSpaceDescriptor descriptor;
1338 descriptor.m_BlockSize = fbDescriptor->blockSize();
1339 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1340
1341 auto layerName = GetLayerName(graph, layerIndex);
1342 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1343
1344 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1345 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1346
1347 RegisterInputSlots(graph, layerIndex, layer);
1348 RegisterOutputSlots(graph, layerIndex, layer);
1349}
1350
Finn Williams85d36712021-01-26 22:30:06 +00001351void IDeserializer::DeserializerImpl::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001352{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001353 CHECK_LAYERS(graph, 0, layerIndex);
1354 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001355 CHECK_LOCATION();
1356 CHECK_VALID_SIZE(inputs.size(), 1);
1357
Derek Lamberti8ddae332019-02-21 16:29:43 +00001358 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001359 CHECK_VALID_SIZE(outputs.size(), 1);
1360
Derek Lamberti8ddae332019-02-21 16:29:43 +00001361 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001362 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001363 auto serializerDescriptor = serializerLayer->descriptor();
1364
1365 armnn::DepthwiseConvolution2dDescriptor descriptor;
1366 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1367 descriptor.m_PadRight = serializerDescriptor->padRight();
1368 descriptor.m_PadTop = serializerDescriptor->padTop();
1369 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1370 descriptor.m_StrideX = serializerDescriptor->strideX();
Aron Virginas-Tar5e1b0cf2019-06-21 14:20:11 +01001371 descriptor.m_StrideY = serializerDescriptor->strideY();
1372 descriptor.m_DilationX = serializerDescriptor->dilationX();
1373 descriptor.m_DilationY = serializerDescriptor->dilationY();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001374    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
1375 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1376
1377 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1378 armnn::ConstTensor biases;
1379
Matteo Martincighfc598e12019-05-14 10:36:13 +01001380 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001381 if (descriptor.m_BiasEnabled)
1382 {
1383 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001384 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001385 }
1386 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1387 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001388 optionalBiases,
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001389 layerName.c_str());
1390
1391 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1392 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1393
Derek Lamberti8ddae332019-02-21 16:29:43 +00001394 RegisterInputSlots(graph, layerIndex, layer);
1395 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001396}
1397
Finn Williams85d36712021-01-26 22:30:06 +00001398void IDeserializer::DeserializerImpl::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001399{
1400 CHECK_LAYERS(graph, 0, layerIndex);
1401 auto inputs = GetInputs(graph, layerIndex);
1402 CHECK_LOCATION();
1403 CHECK_VALID_SIZE(inputs.size(), 2);
1404
1405 auto outputs = GetOutputs(graph, layerIndex);
1406 CHECK_VALID_SIZE(outputs.size(), 4);
1407
1408 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1409 auto layerName = GetLayerName(graph, layerIndex);
1410 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1411
1412 armnn::DetectionPostProcessDescriptor descriptor;
1413 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1414 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1415 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1416 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1417 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1418 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1419 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1420 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1421 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1422 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1423 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1424
1425 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1426
1427 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1428 anchors,
1429 layerName.c_str());
1430
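    // DetectionPostProcess produces four outputs (in serialized graphs these are typically the
    // detection boxes, classes, scores and the number of valid detections).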
1431 for (unsigned int i = 0; i < 4; i++)
1432 {
1433 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1434 }
1435
1436 RegisterInputSlots(graph, layerIndex, layer);
1437 RegisterOutputSlots(graph, layerIndex, layer);
1438}
1439
Finn Williams85d36712021-01-26 22:30:06 +00001440void IDeserializer::DeserializerImpl::ParseDivision(GraphPtr graph, unsigned int layerIndex)
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001441{
1442 CHECK_LAYERS(graph, 0, layerIndex);
1443 auto inputs = GetInputs(graph, layerIndex);
1444 CHECK_LOCATION();
1445 CHECK_VALID_SIZE(inputs.size(), 2);
1446
1447 auto outputs = GetOutputs(graph, layerIndex);
1448 CHECK_VALID_SIZE(outputs.size(), 1);
1449
1450 auto layerName = GetLayerName(graph, layerIndex);
1451 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1452
1453 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1454 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1455
1456 RegisterInputSlots(graph, layerIndex, layer);
1457 RegisterOutputSlots(graph, layerIndex, layer);
1458}
1459
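// Equal layers in the serialized graph are recreated as Comparison layers using
// ComparisonOperation::Equal.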
Finn Williams85d36712021-01-26 22:30:06 +00001460void IDeserializer::DeserializerImpl::ParseEqual(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001461{
1462 CHECK_LAYERS(graph, 0, layerIndex);
1463 auto inputs = GetInputs(graph, layerIndex);
1464 CHECK_LOCATION();
1465 CHECK_VALID_SIZE(inputs.size(), 2);
1466
1467 auto outputs = GetOutputs(graph, layerIndex);
1468 CHECK_VALID_SIZE(outputs.size(), 1);
1469
1470 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001471 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1472 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001473
1474 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1475 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1476
1477 RegisterInputSlots(graph, layerIndex, layer);
1478 RegisterOutputSlots(graph, layerIndex, layer);
1479}
1480
Finn Williams85d36712021-01-26 22:30:06 +00001481void IDeserializer::DeserializerImpl::ParseFill(GraphPtr graph, unsigned int layerIndex)
Keith Davis300ad562020-06-04 16:34:23 +01001482{
1483 CHECK_LAYERS(graph, 0, layerIndex);
1484 auto inputs = GetInputs(graph, layerIndex);
1485 CHECK_LOCATION();
1486 CHECK_VALID_SIZE(inputs.size(), 1);
1487
1488 auto outputs = GetOutputs(graph, layerIndex);
1489 CHECK_VALID_SIZE(outputs.size(), 1);
1490
1491 auto layerName = GetLayerName(graph, layerIndex);
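    // Note: the fill value is not read from the serialized descriptor here; the layer is created
    // with a FillDescriptor fixed at 1.0f.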
1492 armnn::FillDescriptor descriptor(1.0f);
1493 IConnectableLayer* layer = m_Network->AddFillLayer(descriptor, layerName.c_str());
1494
1495 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1496 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1497
1498 RegisterInputSlots(graph, layerIndex, layer);
1499 RegisterOutputSlots(graph, layerIndex, layer);
1500}
1501
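// Greater layers in the serialized graph are recreated as Comparison layers using
// ComparisonOperation::Greater.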
Finn Williams85d36712021-01-26 22:30:06 +00001502void IDeserializer::DeserializerImpl::ParseGreater(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001503{
1504 CHECK_LAYERS(graph, 0, layerIndex);
1505 auto inputs = GetInputs(graph, layerIndex);
1506 CHECK_LOCATION();
1507 CHECK_VALID_SIZE(inputs.size(), 2);
1508
1509 auto outputs = GetOutputs(graph, layerIndex);
1510 CHECK_VALID_SIZE(outputs.size(), 1);
1511
1512 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001513 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1514 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001515
1516 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1517 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1518
1519 RegisterInputSlots(graph, layerIndex, layer);
1520 RegisterOutputSlots(graph, layerIndex, layer);
1521}
1522
Finn Williams85d36712021-01-26 22:30:06 +00001523void IDeserializer::DeserializerImpl::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001524{
1525 CHECK_LAYERS(graph, 0, layerIndex);
1526
1527 auto inputs = GetInputs(graph, layerIndex);
1528 CHECK_VALID_SIZE(inputs.size(), 1);
1529
1530 auto outputs = GetOutputs(graph, layerIndex);
1531 CHECK_VALID_SIZE(outputs.size(), 1);
1532
1533 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1534 auto fbDescriptor = fbLayer->descriptor();
1535
1536 armnn::InstanceNormalizationDescriptor descriptor;
1537 descriptor.m_Gamma = fbDescriptor->gamma();
1538 descriptor.m_Beta = fbDescriptor->beta();
1539 descriptor.m_Eps = fbDescriptor->eps();
1540 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1541
1542 const std::string layerName = GetLayerName(graph, layerIndex);
1543 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1544
1545 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1546 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1547
1548 RegisterInputSlots(graph, layerIndex, layer);
1549 RegisterOutputSlots(graph, layerIndex, layer);
1550}
1551
Finn Williams85d36712021-01-26 22:30:06 +00001552void IDeserializer::DeserializerImpl::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001553{
1554 CHECK_LAYERS(graph, 0, layerIndex);
1555
1556 auto inputs = GetInputs(graph, layerIndex);
1557 CHECK_VALID_SIZE(inputs.size(), 1);
1558
1559 auto outputs = GetOutputs(graph, layerIndex);
1560 CHECK_VALID_SIZE(outputs.size(), 1);
1561 auto outputInfo = ToTensorInfo(outputs[0]);
1562
1563 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1564 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1565
1566 auto layerName = GetLayerName(graph, layerIndex);
1567 armnn::L2NormalizationDescriptor descriptor;
1568 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001569 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001570
1571 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1572 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1573
1574 RegisterInputSlots(graph, layerIndex, layer);
1575 RegisterOutputSlots(graph, layerIndex, layer);
1576}
1577
Finn Williams85d36712021-01-26 22:30:06 +00001578void IDeserializer::DeserializerImpl::ParseLogicalBinary(GraphPtr graph, unsigned int layerIndex)
James Conroyaba90cd2020-11-06 16:28:18 +00001579{
1580 CHECK_LAYERS(graph, 0, layerIndex);
1581 CHECK_LOCATION();
1582
1583 auto inputs = GetInputs(graph, layerIndex);
1584 CHECK_VALID_SIZE(inputs.size(), 2);
1585
1586 auto outputs = GetOutputs(graph, layerIndex);
1587 CHECK_VALID_SIZE(outputs.size(), 1);
1588
1589 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer();
1590 auto fbDescriptor = fbLayer->descriptor();
1591
1592 armnn::LogicalBinaryDescriptor descriptor;
1593 descriptor.m_Operation = ToLogicalBinaryOperation(fbDescriptor->operation());
1594
1595 const std::string& layerName = GetLayerName(graph, layerIndex);
1596 IConnectableLayer* layer = m_Network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1597
1598 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1599 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1600
1601 RegisterInputSlots(graph, layerIndex, layer);
1602 RegisterOutputSlots(graph, layerIndex, layer);
1603}
1604
Finn Williams85d36712021-01-26 22:30:06 +00001605void IDeserializer::DeserializerImpl::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan26257852019-10-14 13:00:47 +01001606{
1607 CHECK_LAYERS(graph, 0, layerIndex);
1608
Finn Williams85d36712021-01-26 22:30:06 +00001609 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001610 CHECK_VALID_SIZE(inputs.size(), 1);
1611
Finn Williams85d36712021-01-26 22:30:06 +00001612 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001613 CHECK_VALID_SIZE(outputs.size(), 1);
1614
1615 armnn::LogSoftmaxDescriptor descriptor;
1616 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1617 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1618 auto layerName = GetLayerName(graph, layerIndex);
1619
1620 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1621
1622 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1623 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1624
1625 RegisterInputSlots(graph, layerIndex, layer);
1626 RegisterOutputSlots(graph, layerIndex, layer);
1627}
1628
Finn Williams85d36712021-01-26 22:30:06 +00001629void IDeserializer::DeserializerImpl::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001630{
1631 CHECK_LAYERS(graph, 0, layerIndex);
1632 auto inputs = GetInputs(graph, layerIndex);
1633 CHECK_LOCATION();
1634 CHECK_VALID_SIZE(inputs.size(), 2);
1635
1636 auto outputs = GetOutputs(graph, layerIndex);
1637 CHECK_VALID_SIZE(outputs.size(), 1);
1638
1639 auto layerName = GetLayerName(graph, layerIndex);
1640 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1641
1642 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1643 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1644
1645 RegisterInputSlots(graph, layerIndex, layer);
1646 RegisterOutputSlots(graph, layerIndex, layer);
1647}
1648
Finn Williams85d36712021-01-26 22:30:06 +00001649void IDeserializer::DeserializerImpl::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001650{
1651 CHECK_LAYERS(graph, 0, layerIndex);
1652 auto inputs = GetInputs(graph, layerIndex);
1653 CHECK_LOCATION();
1654 CHECK_VALID_SIZE(inputs.size(), 2);
1655
1656 auto outputs = GetOutputs(graph, layerIndex);
1657 CHECK_VALID_SIZE(outputs.size(), 1);
1658
1659 auto layerName = GetLayerName(graph, layerIndex);
1660 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1661
1662 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1663 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1664
1665 RegisterInputSlots(graph, layerIndex, layer);
1666 RegisterOutputSlots(graph, layerIndex, layer);
1667}
1668
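// Returns the OriginsDescriptor of either a ConcatLayer or a legacy MergerLayer, so that both
// serialized forms can be handled by ParseConcat.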
Jim Flynne242f2d2019-05-22 14:24:13 +01001669const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1670 unsigned int layerIndex)
1671{
1672 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1673
1674 switch (layerType)
1675 {
1676 case Layer::Layer_ConcatLayer:
1677 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1678 case Layer::Layer_MergerLayer:
1679 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1680 default:
1681            throw armnn::Exception("Unknown layer type: expected a ConcatLayer or a MergerLayer");
1682 }
1683}
1684
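// Generic handler for Comparison layers; the ComparisonOperation is taken from the serialized
// descriptor.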
Finn Williams85d36712021-01-26 22:30:06 +00001685void IDeserializer::DeserializerImpl::ParseComparison(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01001686{
1687 CHECK_LAYERS(graph, 0, layerIndex);
1688 CHECK_LOCATION();
1689
1690 auto inputs = GetInputs(graph, layerIndex);
1691 CHECK_VALID_SIZE(inputs.size(), 2);
1692
1693 auto outputs = GetOutputs(graph, layerIndex);
1694 CHECK_VALID_SIZE(outputs.size(), 1);
1695
1696 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
1697 auto fbDescriptor = fbLayer->descriptor();
1698
1699 armnn::ComparisonDescriptor descriptor;
1700 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
1701
1702 const std::string& layerName = GetLayerName(graph, layerIndex);
1703 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1704
1705 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1706 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1707
1708 RegisterInputSlots(graph, layerIndex, layer);
1709 RegisterOutputSlots(graph, layerIndex, layer);
1710}
1711
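// Generic handler for ElementwiseUnary layers; the UnaryOperation is taken from the serialized
// descriptor.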
Finn Williams85d36712021-01-26 22:30:06 +00001712void IDeserializer::DeserializerImpl::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
josh minor4a3c6102020-01-06 16:40:46 -06001713{
1714 CHECK_LAYERS(graph, 0, layerIndex);
1715 CHECK_LOCATION();
1716
1717 auto inputs = GetInputs(graph, layerIndex);
1718 CHECK_VALID_SIZE(inputs.size(), 1);
1719
1720 auto outputs = GetOutputs(graph, layerIndex);
1721 CHECK_VALID_SIZE(outputs.size(), 1);
1722
1723 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
1724 auto fbDescriptor = fbLayer->descriptor();
1725
1726 armnn::ElementwiseUnaryDescriptor descriptor;
1727 descriptor.m_Operation = ToUnaryOperation(fbDescriptor->operation());
1728
1729 const std::string& layerName = GetLayerName(graph, layerIndex);
1730 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
1731
1732 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1733 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1734
1735 RegisterInputSlots(graph, layerIndex, layer);
1736 RegisterOutputSlots(graph, layerIndex, layer);
1737}
1738
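// Handles both ConcatLayer and legacy MergerLayer graphs via GetOriginsDescriptor.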
Finn Williams85d36712021-01-26 22:30:06 +00001739void IDeserializer::DeserializerImpl::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001740{
1741 CHECK_LAYERS(graph, 0, layerIndex);
1742 CHECK_LOCATION();
1743
1744 auto outputs = GetOutputs(graph, layerIndex);
1745 CHECK_VALID_SIZE(outputs.size(), 1);
1746
Jim Flynnac25a1b2019-02-28 10:40:49 +00001747 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001748 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1749 unsigned int numViews = originsDescriptor->numViews();
1750 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001751
1752 // can now check the number of inputs == number of views
1753 auto inputs = GetInputs(graph, layerIndex);
1754 CHECK_VALID_SIZE(inputs.size(), numViews);
1755
1756 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001757 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001758 for (unsigned int v = 0; v < numViews; ++v)
1759 {
1760 auto originPtr = originsPtr->Get(v);
1761 for (unsigned int d = 0; d < numDimensions; ++d)
1762 {
1763 uint32_t value = originPtr->data()->Get(d);
1764 descriptor.SetViewOriginCoord(v, d, value);
1765 }
1766 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001767 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001768
Jim Flynn906f9462019-05-10 13:55:21 +01001769 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001770 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1771 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1772
1773 RegisterInputSlots(graph, layerIndex, layer);
1774 RegisterOutputSlots(graph, layerIndex, layer);
1775}
1776
Finn Williams85d36712021-01-26 22:30:06 +00001777void IDeserializer::DeserializerImpl::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001778{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001779 CHECK_LAYERS(graph, 0, layerIndex);
1780 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001781 CHECK_LOCATION();
1782 CHECK_VALID_SIZE(inputs.size(), 2);
1783
Derek Lamberti8ddae332019-02-21 16:29:43 +00001784 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001785 CHECK_VALID_SIZE(outputs.size(), 1);
1786
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001787 auto layerName = GetLayerName(graph, layerIndex);
1788 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001789
1790 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1791 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1792
Derek Lamberti8ddae332019-02-21 16:29:43 +00001793 RegisterInputSlots(graph, layerIndex, layer);
1794 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001795}
1796
Finn Williams85d36712021-01-26 22:30:06 +00001797void IDeserializer::DeserializerImpl::ParseFloor(GraphPtr graph, unsigned int layerIndex)
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001798{
1799 CHECK_LAYERS(graph, 0, layerIndex);
1800 CHECK_LOCATION();
1801
1802 auto inputs = GetInputs(graph, layerIndex);
1803 CHECK_VALID_SIZE(inputs.size(), 1);
1804
1805 auto outputs = GetOutputs(graph, layerIndex);
1806 CHECK_VALID_SIZE(outputs.size(), 1);
1807
1808 auto layerName = GetLayerName(graph, layerIndex);
1809
1810 armnn::IConnectableLayer* layer;
1811
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001812 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001813
1814 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1815 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1816
1817 RegisterInputSlots(graph, layerIndex, layer);
1818 RegisterOutputSlots(graph, layerIndex, layer);
1819}
1820
Finn Williams85d36712021-01-26 22:30:06 +00001821void IDeserializer::DeserializerImpl::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001822{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001823 CHECK_LAYERS(graph, 0, layerIndex);
1824 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001825 CHECK_LOCATION();
1826 CHECK_VALID_SIZE(inputs.size(), 1);
1827
Derek Lamberti8ddae332019-02-21 16:29:43 +00001828 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001829 CHECK_VALID_SIZE(outputs.size(), 1);
1830
Derek Lamberti8ddae332019-02-21 16:29:43 +00001831 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001832 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001833 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1834
1835 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1836 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1837 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1838
1839 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1840
1841 armnn::IConnectableLayer* layer;
Matteo Martincighfc598e12019-05-14 10:36:13 +01001842 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001843 if (flatBufferDescriptor->biasEnabled())
1844 {
1845 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001846 optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001847 }
Matteo Martincighfc598e12019-05-14 10:36:13 +01001848 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1849 weightsTensor,
1850 optionalBiases,
1851 layerName.c_str());
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001852
1853 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1854 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1855
Derek Lamberti8ddae332019-02-21 16:29:43 +00001856 RegisterInputSlots(graph, layerIndex, layer);
1857 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001858}
1859
Finn Williams85d36712021-01-26 22:30:06 +00001860void IDeserializer::DeserializerImpl::ParsePad(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001861{
1862 CHECK_LAYERS(graph, 0, layerIndex);
1863
Finn Williams85d36712021-01-26 22:30:06 +00001864 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001865 CHECK_VALID_SIZE(inputs.size(), 1);
1866
Finn Williams85d36712021-01-26 22:30:06 +00001867 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001868 CHECK_VALID_SIZE(outputs.size(), 1);
1869
1870 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1871 auto flatBufferPadList = flatBufferDescriptor->padList();
David Monahan34757812019-06-19 11:47:21 +01001872 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001873
1874 if (flatBufferPadList->Length() % 2 != 0)
1875 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001876 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
1877 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001878 }
1879
1880 std::vector<std::pair<unsigned int, unsigned int>> padList;
1881 padList.reserve(flatBufferPadList->Length() / 2);
1882    for (unsigned int i = 0; i + 1 < flatBufferPadList->Length(); i += 2)
1883 {
1884 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1885 }
1886
David Monahan34757812019-06-19 11:47:21 +01001887 armnn::PadDescriptor descriptor(padList, padValue);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001888
1889 auto layerName = GetLayerName(graph, layerIndex);
1890 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1891
1892 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1893 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1894
1895 RegisterInputSlots(graph, layerIndex, layer);
1896 RegisterOutputSlots(graph, layerIndex, layer);
1897}
1898
Finn Williams85d36712021-01-26 22:30:06 +00001899void IDeserializer::DeserializerImpl::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001900{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001901 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001902
1903 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001904 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001905
Derek Lamberti8ddae332019-02-21 16:29:43 +00001906 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001907 CHECK_VALID_SIZE(inputs.size(), 1);
1908
Derek Lamberti8ddae332019-02-21 16:29:43 +00001909 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001910 CHECK_VALID_SIZE(outputs.size(), 1);
1911 auto outputInfo = ToTensorInfo(outputs[0]);
1912
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001913 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001914 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1915
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001916 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001917 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1918
Derek Lamberti8ddae332019-02-21 16:29:43 +00001919 RegisterInputSlots(graph, layerIndex, layer);
1920 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001921}
1922
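// Translates a serialized Pooling2dDescriptor into an armnn::Pooling2dDescriptor; unsupported
// enum values are caught by asserts.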
Finn Williams85d36712021-01-26 22:30:06 +00001923armnn::Pooling2dDescriptor IDeserializer::DeserializerImpl::GetPoolingDescriptor(PoolingDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001924 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001925{
Jan Eilers8eb25602020-03-09 12:13:48 +00001926 IgnoreUnused(layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001927 armnn::Pooling2dDescriptor desc;
1928
1929 switch (pooling2dDesc->poolType())
1930 {
1931 case PoolingAlgorithm_Average:
1932 {
1933 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001934 break;
1935 }
1936 case PoolingAlgorithm_Max:
1937 {
1938 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001939 break;
1940 }
1941 default:
1942 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001943 ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001944 }
1945 }
1946
1947 switch (pooling2dDesc->outputShapeRounding())
1948 {
1949 case OutputShapeRounding_Floor:
1950 {
1951 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1952 break;
1953 }
1954 case OutputShapeRounding_Ceiling:
1955 {
1956 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
1957 break;
1958 }
1959 default:
1960 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001961 ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001962 }
1963 }
1964
1965 switch (pooling2dDesc->paddingMethod())
1966 {
1967 case PaddingMethod_Exclude:
1968 {
1969 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1970 break;
1971 }
1972 case PaddingMethod_IgnoreValue:
1973 {
1974 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
1975 break;
1976 }
1977 default:
1978 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001979 ARMNN_ASSERT_MSG(false, "Unsupported padding method");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001980 }
1981 }
1982
1983 switch (pooling2dDesc->dataLayout())
1984 {
1985 case DataLayout_NCHW:
1986 {
1987 desc.m_DataLayout = armnn::DataLayout::NCHW;
1988 break;
1989 }
1990 case DataLayout_NHWC:
1991 {
1992 desc.m_DataLayout = armnn::DataLayout::NHWC;
1993 break;
1994 }
1995 default:
1996 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001997 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001998 }
1999 }
2000
2001 desc.m_PadRight = pooling2dDesc->padRight();
2002 desc.m_PadLeft = pooling2dDesc->padLeft();
2003 desc.m_PadBottom = pooling2dDesc->padBottom();
2004 desc.m_PadTop = pooling2dDesc->padTop();
2005 desc.m_StrideX = pooling2dDesc->strideX();
2006 desc.m_StrideY = pooling2dDesc->strideY();
2007 desc.m_PoolWidth = pooling2dDesc->poolWidth();
2008 desc.m_PoolHeight = pooling2dDesc->poolHeight();
2009
2010 return desc;
2011}
2012
Finn Williams85d36712021-01-26 22:30:06 +00002013
2014
2015void IDeserializer::DeserializerImpl::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002016{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002017 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002018
Derek Lamberti8ddae332019-02-21 16:29:43 +00002019 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00002020 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002021 CHECK_VALID_SIZE(inputs.size(), 1);
2022
Derek Lamberti8ddae332019-02-21 16:29:43 +00002023 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002024 CHECK_VALID_SIZE(outputs.size(), 1);
2025 auto outputInfo = ToTensorInfo(outputs[0]);
2026
2027 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002028 auto layerName = GetLayerName(graph, layerIndex);
2029 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002030 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2031
Derek Lamberti8ddae332019-02-21 16:29:43 +00002032 RegisterInputSlots(graph, layerIndex, layer);
2033 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002034}
2035
Finn Williams85d36712021-01-26 22:30:06 +00002036void IDeserializer::DeserializerImpl::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
Derek Lamberti87acb272019-03-27 16:51:31 +00002037{
2038 CHECK_LAYERS(graph, 0, layerIndex);
2039
2040 auto inputs = GetInputs(graph, layerIndex);
2041 CHECK_VALID_SIZE(inputs.size(), 1);
2042
2043 auto outputs = GetOutputs(graph, layerIndex);
2044 CHECK_VALID_SIZE(outputs.size(), 1);
2045 auto outputInfo = ToTensorInfo(outputs[0]);
2046
2047 auto layerName = GetLayerName(graph, layerIndex);
2048 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
2049 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2050
2051 RegisterInputSlots(graph, layerIndex, layer);
2052 RegisterOutputSlots(graph, layerIndex, layer);
2053}
2054
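// Computes the output TensorInfo of a Reshape, resolving at most one -1 ("stretch") dimension
// from the input element count. For example, an input of 24 elements reshaped with target
// dimensions {-1, 6} resolves the stretch dimension to 4, giving the shape {4, 6}.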
Finn Williams85d36712021-01-26 22:30:06 +00002055armnn::TensorInfo IDeserializer::DeserializerImpl::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00002056 const std::vector<uint32_t>& targetDimsIn)
2057{
2058 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
2059 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
2060
2061 if (stretchDim != targetDimsIn.end())
2062 {
2063 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
2064 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002065 throw ParseException(fmt::format("At most one component of shape can be -1 {}",
2066 CHECK_LOCATION().AsString()));
Saoirse Stewart263829c2019-02-19 15:54:14 +00002067 }
2068
2069 auto targetNumElements =
Matthew Sloyan0663d662020-09-14 11:47:26 +01002070 armnn::numeric_cast<unsigned int>(
Saoirse Stewart263829c2019-02-19 15:54:14 +00002071 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
2072
2073 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
2074 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
2075 }
2076
2077 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
2078
2079 armnn::TensorInfo reshapeInfo = inputTensorInfo;
2080 reshapeInfo.SetShape(outputShape);
2081
2082 return reshapeInfo;
2083}
2084
Finn Williams85d36712021-01-26 22:30:06 +00002085void IDeserializer::DeserializerImpl::ParseRank(GraphPtr graph, unsigned int layerIndex)
Finn Williams2605b232020-06-10 15:53:46 +01002086{
2087 CHECK_LAYERS(graph, 0, layerIndex);
2088
Finn Williams85d36712021-01-26 22:30:06 +00002089 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002090 CHECK_VALID_SIZE(inputs.size(), 1);
2091
Finn Williams85d36712021-01-26 22:30:06 +00002092 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002093 CHECK_VALID_SIZE(outputs.size(), 1);
2094
2095 auto layerName = GetLayerName(graph, layerIndex);
2096    IConnectableLayer* layer = m_Network->AddRankLayer(layerName.c_str());
2097
2098 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2099 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2100
2101 RegisterInputSlots(graph, layerIndex, layer);
2102 RegisterOutputSlots(graph, layerIndex, layer);
2103}
2104
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002105void IDeserializer::DeserializerImpl::ParseReduce(GraphPtr graph, unsigned int layerIndex)
2106{
2107 CHECK_LAYERS(graph, 0, layerIndex);
2108 CHECK_LOCATION();
2109
2110 auto inputs = GetInputs(graph, layerIndex);
2111 CHECK_VALID_SIZE(inputs.size(), 1);
2112
2113 auto outputs = GetOutputs(graph, layerIndex);
2114 CHECK_VALID_SIZE(outputs.size(), 1);
2115
2116 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ReduceLayer();
2117 auto fbDescriptor = fbLayer->descriptor();
2118 auto flatBufferAxis = fbDescriptor->axis();
2119
2120 armnn::ReduceDescriptor descriptor;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002121 descriptor.m_KeepDims = fbDescriptor->keepDims();
2122 descriptor.m_vAxis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2123 descriptor.m_ReduceOperation = ToReduceOperation(fbDescriptor->reduceOperation());
2124
2125 const std::string& layerName = GetLayerName(graph, layerIndex);
2126 IConnectableLayer* layer = m_Network->AddReduceLayer(descriptor, layerName.c_str());
2127
2128 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2129 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2130
2131 RegisterInputSlots(graph, layerIndex, layer);
2132 RegisterOutputSlots(graph, layerIndex, layer);
2133}
2134
Finn Williams85d36712021-01-26 22:30:06 +00002135void IDeserializer::DeserializerImpl::ParseReshape(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart263829c2019-02-19 15:54:14 +00002136{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002137 CHECK_LAYERS(graph, 0, layerIndex);
2138 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002139
Derek Lamberti8ddae332019-02-21 16:29:43 +00002140 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002141 CHECK_VALID_SIZE(outputs.size(), 1);
2142
2143 armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
2144 armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);
2145
Derek Lamberti8ddae332019-02-21 16:29:43 +00002146 const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
Saoirse Stewart263829c2019-02-19 15:54:14 +00002147 std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());
2148
Finn Williams85d36712021-01-26 22:30:06 +00002149 armnn::TensorInfo reshapeOutputTensorInfo = DeserializerImpl::OutputShapeOfReshape(inputTensorInfo, outputDims);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002150 const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
2151
2152 const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
2153 outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());
2154
2155 if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
2156 {
2157 std::stringstream ss;
2158 ss << "New shape defined in reshape parameters "
2159 << reshapeOutputTensorShape
2160 << " does not equal output shape "
2161 << actualOutputTensorInfo.GetShape()
2162 << ": "
2163 << CHECK_LOCATION().AsString();
2164 throw ParseException(ss.str());
2165 }
2166
2167 armnn::ReshapeDescriptor reshapeDesc;
2168 reshapeDesc.m_TargetShape = reshapeOutputTensorShape;
2169
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002170 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002171 IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
2172 layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);
2173
Derek Lamberti8ddae332019-02-21 16:29:43 +00002174 RegisterInputSlots(graph, layerIndex, layer);
2175 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002176}
2177
void IDeserializer::DeserializerImpl::ParseResize(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();

    armnn::ResizeDescriptor descriptor;
    descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
    descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
    descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
    descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
    descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

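// Deserializes the legacy ResizeBilinearLayer. It is mapped onto a ResizeLayer with
// ResizeMethod::Bilinear, which keeps older serialized models loading through the current
// Resize implementation.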
void IDeserializer::DeserializerImpl::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();

    armnn::ResizeDescriptor descriptor;
    descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
    descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
    descriptor.m_Method = armnn::ResizeMethod::Bilinear;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
    descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
    descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void IDeserializer::DeserializerImpl::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::SoftmaxDescriptor descriptor;
    descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
    auto layerName = GetLayerName(graph, layerIndex);

    IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

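// Deserializes a SpaceToBatchNdLayer. The flat pad list is read as (before, after) pairs,
// e.g. a serialized list [1, 1, 2, 2] yields padList = {{1, 1}, {2, 2}}, one pair per spatial
// dimension, which is why its length must be even.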
void IDeserializer::DeserializerImpl::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
    auto flatBufferPadList = flatBufferDescriptor->padList();
    auto flatBufferBlockShape = flatBufferDescriptor->blockShape();

    if (flatBufferPadList->Length() % 2 != 0)
    {
        throw ParseException(fmt::format("The size of the pad list must be divisible by 2: {}",
                                         CHECK_LOCATION().AsString()));
    }

    std::vector<std::pair<unsigned int, unsigned int>> padList;
    padList.reserve(flatBufferPadList->Length() / 2);
    for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
    {
        padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
    }

    armnn::SpaceToBatchNdDescriptor descriptor;
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
    descriptor.m_BlockShape =
        std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
    descriptor.m_PadList = padList;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void IDeserializer::DeserializerImpl::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();

    armnn::SpaceToDepthDescriptor descriptor;
    descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

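// Translates a serialized NormalizationDescriptor into its armnn equivalent. Note that an
// unsupported channel type, method type or data layout only triggers ARMNN_ASSERT_MSG; if the
// assert is compiled out (e.g. release builds), the corresponding field is left at its default.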
armnn::NormalizationDescriptor IDeserializer::DeserializerImpl::GetNormalizationDescriptor(
    NormalizationDescriptorPtr normalizationDescriptor,
    unsigned int layerIndex)
{
    IgnoreUnused(layerIndex);
    armnn::NormalizationDescriptor desc;

    switch (normalizationDescriptor->normChannelType())
    {
        case NormalizationAlgorithmChannel_Across:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
            break;
        }
        case NormalizationAlgorithmChannel_Within:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported normalization channel type");
        }
    }

    switch (normalizationDescriptor->normMethodType())
    {
        case NormalizationAlgorithmMethod_LocalBrightness:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
            break;
        }
        case NormalizationAlgorithmMethod_LocalContrast:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported normalization method type");
        }
    }

    switch (normalizationDescriptor->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    desc.m_Alpha = normalizationDescriptor->alpha();
    desc.m_Beta = normalizationDescriptor->beta();
    desc.m_K = normalizationDescriptor->k();
    desc.m_NormSize = normalizationDescriptor->normSize();

    return desc;
}

void IDeserializer::DeserializerImpl::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();

    TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto outputInfo = ToTensorInfo(outputs[0]);

    auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
    auto layerName = GetLayerName(graph, layerIndex);

    IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

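// Deserializes the standalone RsqrtLayer by adding an ElementwiseUnary layer configured with
// UnaryOperation::Rsqrt.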
void IDeserializer::DeserializerImpl::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
    IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void IDeserializer::DeserializerImpl::ParseSlice(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();

    auto fbBegin = fbDescriptor->begin();
    auto fbSize = fbDescriptor->size();

    if (fbBegin->Length() != fbSize->Length())
    {
        throw ParseException(fmt::format("Begin and size descriptors must have the same length: {}",
                                         CHECK_LOCATION().AsString()));
    }

    armnn::SliceDescriptor descriptor;
    descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
    descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

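// Deserializes a StridedSliceLayer. The begin, end and stride vectors must all have the same
// length; the begin/end/shrink-axis/ellipsis/new-axis masks are copied through unchanged.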
void IDeserializer::DeserializerImpl::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();

    auto flatBufferBegin = flatBufferDescriptor->begin();
    auto flatBufferEnd = flatBufferDescriptor->end();
    auto flatBufferStride = flatBufferDescriptor->stride();

    if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
          flatBufferBegin->Length() == flatBufferStride->Length()))
    {
        throw ParseException(fmt::format("The begin, end, and stride lists must have the same length: {}",
                                         CHECK_LOCATION().AsString()));
    }

    std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
    std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
    std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());

    armnn::StridedSliceDescriptor descriptor(begin, end, stride);
    descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
    descriptor.m_EndMask = flatBufferDescriptor->endMask();
    descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
    descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
    descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
    descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void IDeserializer::DeserializerImpl::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void IDeserializer::DeserializerImpl::ParseGather(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 2);

    TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::GatherDescriptor descriptor;
    descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_GatherLayer()->descriptor()->axis();

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddGatherLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void IDeserializer::DeserializerImpl::ParseMean(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
    auto flatBufferAxis = flatBufferDescriptor->axis();
    auto flatBufferKeepDims = flatBufferDescriptor->keepDims();

    armnn::MeanDescriptor descriptor;
    descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
    descriptor.m_KeepDims = flatBufferKeepDims;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

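// Deserializes a SplitterLayer by rebuilding the ViewsDescriptor: for each view, the origin
// coordinates and view sizes are copied element by element from the flatbuffer
// origins/viewSizes tables.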
void IDeserializer::DeserializerImpl::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);

    auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
    auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
    auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
    auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
    uint32_t numViews = flatBufferOriginsDescriptor->numViews();
    uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();

    // Check that numViews and numDimensions correspond to the serialized values:
    // numViews == flatBufferViewSizes.size();
    // for each view: numDimensions == flatBufferViewSizes[x].size();

    armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
    for (unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
    {
        for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
        {
            viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
            viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
        }
    }

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());

    // The layer has as many outputs as there are views.
    for (unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
    {
        armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
        layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

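// Copies the serialized LSTM options (activation function, cell/projection clipping and the
// CIFG, peephole, projection and layer-norm flags) into an armnn::LstmDescriptor.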
armnn::LstmDescriptor IDeserializer::DeserializerImpl::GetLstmDescriptor(LstmDescriptorPtr lstmDescriptor)
{
    armnn::LstmDescriptor desc;

    desc.m_ActivationFunc = lstmDescriptor->activationFunc();
    desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
    desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
    desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
    desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
    desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
    desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();

    return desc;
}

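// Deserializes an LstmLayer. The nine basic weight and bias tensors are always read; the CIFG,
// projection, peephole and layer-normalization tensors are read only when the corresponding
// descriptor flag is set, as they are only expected to be present in the serialized input
// params in that case.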
void IDeserializer::DeserializerImpl::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (lstmDescriptor.m_LayerNormEnabled)
    {
        if (!lstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

armnn::QLstmDescriptor IDeserializer::DeserializerImpl::GetQLstmDescriptor(QLstmDescriptorPtr qLstmDescriptor)
{
    armnn::QLstmDescriptor desc;

    desc.m_CifgEnabled = qLstmDescriptor->cifgEnabled();
    desc.m_PeepholeEnabled = qLstmDescriptor->peepholeEnabled();
    desc.m_ProjectionEnabled = qLstmDescriptor->projectionEnabled();
    desc.m_LayerNormEnabled = qLstmDescriptor->layerNormEnabled();

    desc.m_CellClip = qLstmDescriptor->cellClip();
    desc.m_ProjectionClip = qLstmDescriptor->projectionClip();

    desc.m_InputIntermediateScale = qLstmDescriptor->inputIntermediateScale();
    desc.m_ForgetIntermediateScale = qLstmDescriptor->forgetIntermediateScale();
    desc.m_CellIntermediateScale = qLstmDescriptor->cellIntermediateScale();
    desc.m_OutputIntermediateScale = qLstmDescriptor->outputIntermediateScale();

    desc.m_HiddenStateScale = qLstmDescriptor->hiddenStateScale();
    desc.m_HiddenStateZeroPoint = qLstmDescriptor->hiddenStateZeroPoint();

    return desc;
}

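// Deserializes a QLstmLayer. As with ParseLstm, only the mandatory weights and biases are read
// unconditionally; the CIFG, projection, peephole and layer-norm tensors are read only when the
// matching descriptor flag is enabled. The layer exposes three outputs, set from the three
// serialized output tensor infos below.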
void IDeserializer::DeserializerImpl::ParseQLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 3);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto qLstmDescriptor = GetQLstmDescriptor(flatBufferDescriptor);
    armnn::LstmInputParams qLstmInputParams;

    // Mandatory params
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    qLstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    qLstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    qLstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    qLstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    qLstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    qLstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    qLstmInputParams.m_ForgetGateBias = &forgetGateBias;
    qLstmInputParams.m_CellBias = &cellBias;
    qLstmInputParams.m_OutputGateBias = &outputGateBias;

    // Optional CIFG params
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor inputGateBias;

    if (!qLstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        qLstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        qLstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        qLstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Optional projection params
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;

    if (qLstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        qLstmInputParams.m_ProjectionWeights = &projectionWeights;
        qLstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Optional peephole params
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;

    if (qLstmDescriptor.m_PeepholeEnabled)
    {
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
            qLstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        }

        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        qLstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        qLstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Optional layer norm params
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;

    if (qLstmDescriptor.m_LayerNormEnabled)
    {
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            qLstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }

        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        qLstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        qLstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        qLstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());

    armnn::TensorInfo outputStateOutInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputStateOutInfo);

    armnn::TensorInfo cellStateOutInfo = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(cellStateOutInfo);

    armnn::TensorInfo outputInfo = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

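// Deserializes a QuantizedLstmLayer. This variant has no optional parameters: all twelve
// weight and bias tensors are read unconditionally and the layer produces two outputs.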
void IDeserializer::DeserializerImpl::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    armnn::QuantizedLstmInputParams lstmInputParams;

    armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_InputGateBias = &inputGateBias;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void IDeserializer::DeserializerImpl::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    const std::string layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void IDeserializer::DeserializerImpl::ParseMerge(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 2);

    TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    const std::string layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void IDeserializer::DeserializerImpl::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());

    armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);

    armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void IDeserializer::DeserializerImpl::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

void IDeserializer::DeserializerImpl::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);
    auto outputInfo = ToTensorInfo(outputs[0]);

    auto layerName = GetLayerName(graph, layerIndex);
    const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));

    IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
    layer->GetOutputSlot(0).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

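// Deserializes a TransposeConvolution2dLayer. The weights are always restored as a ConstTensor;
// the biases are read and passed as an armnn::Optional only when m_BiasEnabled is set in the
// serialized descriptor.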
void IDeserializer::DeserializerImpl::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 1);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::TransposeConvolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    // Weights & biases
    armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
    armnn::Optional<armnn::ConstTensor> optionalBiases;
    if (descriptor.m_BiasEnabled)
    {
        armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
        optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
    }

    IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
                                                                         weights,
                                                                         optionalBiases,
                                                                         layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

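// Deserializes a StackLayer. Each input tensor's shape is checked against the descriptor's
// m_InputShape and a ParseException is thrown on the first mismatch.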
void IDeserializer::DeserializerImpl::ParseStack(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
    unsigned int axis = flatBufferDescriptor->axis();
    unsigned int numInputs = flatBufferDescriptor->numInputs();
    CHECK_VALID_SIZE(inputs.size(), numInputs);

    auto flatBufferInputShape = flatBufferDescriptor->inputShape();
    std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
                                           flatBufferInputShape->begin() + flatBufferInputShape->size());

    TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
    armnn::StackDescriptor descriptor(axis, numInputs, inputShape);

    for (unsigned int i = 0; i < inputs.size(); ++i)
    {
        armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
        if (descriptor.m_InputShape != inputShape)
        {
            std::stringstream ss;
            ss << "Shape of input "
               << i
               << " "
               << inputShape
               << " does not equal defined input shape "
               << descriptor.m_InputShape
               << ": "
               << CHECK_LOCATION().AsString();
            throw ParseException(ss.str());
        }
    }

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

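// Deserializes a StandInLayer, ArmNN's placeholder for an operation it cannot execute natively.
// The input and output counts come from the serialized descriptor and each output slot has its
// tensor info restored.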
void IDeserializer::DeserializerImpl::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    auto outputs = GetOutputs(graph, layerIndex);

    auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
    auto fbDescriptor = fbLayer->descriptor();

    armnn::StandInDescriptor descriptor;
    descriptor.m_NumInputs = fbDescriptor->numInputs();
    descriptor.m_NumOutputs = fbDescriptor->numOutputs();

    CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
    CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);

    const std::string layerName = GetLayerName(graph, layerIndex);
    armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());

    for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
    {
        armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
        layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}

} // namespace armnnDeserializer