blob: c34797725f882646436e17dff38d057b5291d3b7 [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
Finn Williams2605b232020-06-10 15:53:46 +01002// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
Kevin May43a799c2019-02-08 16:31:42 +00003// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
Matthew Benthamff130e22020-01-17 11:47:42 +00008#include <armnn/Descriptors.hpp>
Kevin May43a799c2019-02-08 16:31:42 +00009#include <armnn/Exceptions.hpp>
Matthew Benthamff130e22020-01-17 11:47:42 +000010#include <armnn/TypesUtils.hpp>
11#include <armnn/LstmParams.hpp>
12#include <armnn/QuantizedLstmParams.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000013
Matteo Martincighe011d202019-11-28 11:35:47 +000014#include <armnnUtils/Permute.hpp>
Mike Kellyc9ea45a2020-02-28 18:11:58 +000015#include <armnnUtils/Transpose.hpp>
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +010016#include <armnn/utility/Assert.hpp>
Jan Eilers8eb25602020-03-09 12:13:48 +000017#include <armnn/utility/IgnoreUnused.hpp>
Matthew Sloyan0663d662020-09-14 11:47:26 +010018#include <armnn/utility/NumericCast.hpp>
Matteo Martincighe011d202019-11-28 11:35:47 +000019
Kevin May43a799c2019-02-08 16:31:42 +000020#include <ParserHelper.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000021#include <VerificationHelpers.hpp>
22
Colm Donelan5b5c2222020-09-09 12:48:16 +010023#include <fmt/format.h>
Kevin May43a799c2019-02-08 16:31:42 +000024
Kevin May43a799c2019-02-08 16:31:42 +000025#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000026#include <algorithm>
27#include <limits>
28#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000029
30using armnn::ParseException;
31using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000032using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000033
Derek Lamberti0028d1b2019-02-20 13:57:42 +000034namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000035{
Kevin May43a799c2019-02-08 16:31:42 +000036
Finn Williams85d36712021-01-26 22:30:06 +000037IDeserializer::IDeserializer() : pDeserializerImpl(new DeserializerImpl()){}
38
39IDeserializer::~IDeserializer() = default;
40
41IDeserializer *IDeserializer::CreateRaw()
42{
43 return new IDeserializer();
44}
45
46IDeserializerPtr IDeserializer::Create()
47{
48 return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
49}
50
51void IDeserializer::Destroy(IDeserializer *parser)
52{
53 delete parser;
54}
55
56armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(const std::vector<uint8_t> &binaryContent)
57{
58 return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
59}
60
61armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(std::istream &binaryContent)
62{
63 return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
64}
65
66BindingPointInfo IDeserializer::GetNetworkInputBindingInfo(unsigned int layerId, const std::string &name) const
67{
68 return pDeserializerImpl->GetNetworkInputBindingInfo(layerId, name);
69}
70
71BindingPointInfo IDeserializer::GetNetworkOutputBindingInfo(unsigned int layerId, const std::string &name) const
72{
73 return pDeserializerImpl->GetNetworkOutputBindingInfo(layerId, name);
74}
75
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000076namespace
77{
Kevin May43a799c2019-02-08 16:31:42 +000078
79const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
80
Finn Williams85d36712021-01-26 22:30:06 +000081 void CheckGraph(const GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000082 unsigned int layersIndex,
83 const CheckLocation& location)
84{
85 if (graph->layers() == nullptr)
86 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010087 throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
88 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
89 "layers:{1} at {2}",
90 location.m_Function,
91 layersIndex,
92 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +000093 }
94 else if (layersIndex >= graph->layers()->size())
95 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010096 throw ParseException(fmt::format("{0} was called with an invalid layers index. layers:{1} at {2}",
97 location.m_Function,
98 layersIndex,
99 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000100 }
101}
102
Finn Williams85d36712021-01-26 22:30:06 +0000103void CheckLayers(const GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +0000104 unsigned int layersIndex,
105 unsigned int layerIndex,
106 const CheckLocation& location)
107{
108 if (graph->layers() == nullptr)
109 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100110 throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
111 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
112 "layers:{1} at {2}",
113 location.m_Function,
114 layersIndex,
115 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000116 }
117 else if (layersIndex >= graph->layers()->size())
118 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100119 throw ParseException(fmt::format("{0} was called with an invalid layers index. "
120 "layers:{1} at {2}",
121 location.m_Function,
122 layersIndex,
123 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000124 }
125 else if (layerIndex >= graph->layers()[layersIndex].size()
126 && layerIndex != VIRTUAL_LAYER_ID)
127 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100128 throw ParseException(fmt::format("{0} was called with an invalid layer index. "
129 "layers:{1} layer:{2} at {3}",
130 location.m_Function,
131 layersIndex,
132 layerIndex,
133 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000134 }
135}
136
Finn Williams85d36712021-01-26 22:30:06 +0000137void CheckTensorPtr(TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000138 const CheckLocation& location)
139{
140 if (rawPtr == nullptr)
141 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100142 throw ParseException(fmt::format("{0} was called with a null tensor pointer. at {1}",
143 location.m_Function,
144 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000145 }
146}
147
Finn Williams85d36712021-01-26 22:30:06 +0000148void CheckConstTensorPtr(ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000149 const CheckLocation& location)
150{
151 if (rawPtr == nullptr)
152 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100153 throw ParseException(fmt::format("{0} was called with a null const tensor pointer. at {1}",
154 location.m_Function,
155 location.FileLine()));
Mike Kellya0766c32019-02-19 17:22:07 +0000156 }
157}
158
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000159void CheckConstTensorSize(const unsigned int constTensorSize,
160 const unsigned int tensorSize,
161 const CheckLocation& location)
162{
163 if (constTensorSize != tensorSize)
164 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100165 throw ParseException(fmt::format("{0} wrong number of components supplied to tensor. at:{1}",
166 location.m_Function,
167 location.FileLine()));
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000168 }
169}
170
Kevin May43a799c2019-02-08 16:31:42 +0000171#define CHECK_TENSOR_PTR(TENSOR_PTR) \
172 CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())
173
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000174#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
175 CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())
176
Mike Kellya0766c32019-02-19 17:22:07 +0000177#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
178 CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())
179
Kevin May43a799c2019-02-08 16:31:42 +0000180#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
181 CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())
182
183#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
184 CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
185}
186
Saoirse Stewart263829c2019-02-19 15:54:14 +0000187bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
188{
189 const unsigned int actualSize = actual.GetNumDimensions();
190 if (actualSize != expected.size())
191 {
192 return false;
193 }
194
195 for (unsigned int i = 0u; i < actualSize; i++)
196 {
197 if (actual[i] != static_cast<unsigned int>(expected[i]))
198 {
199 return false;
200 }
201 }
202
203 return true;
204}
205
Finn Williams85d36712021-01-26 22:30:06 +0000206IDeserializer::DeserializerImpl::DeserializerImpl()
Kevin May43a799c2019-02-08 16:31:42 +0000207: m_Network(nullptr, nullptr),
208//May require LayerType_Max to be included
Finn Williams85d36712021-01-26 22:30:06 +0000209m_ParserFunctions(Layer_MAX+1, &IDeserializer::DeserializerImpl::ParseUnsupportedLayer)
Kevin May43a799c2019-02-08 16:31:42 +0000210{
211 // register supported layers
Finn Williams85d36712021-01-26 22:30:06 +0000212 m_ParserFunctions[Layer_AbsLayer] = &DeserializerImpl::ParseAbs;
213 m_ParserFunctions[Layer_ActivationLayer] = &DeserializerImpl::ParseActivation;
214 m_ParserFunctions[Layer_AdditionLayer] = &DeserializerImpl::ParseAdd;
215 m_ParserFunctions[Layer_ArgMinMaxLayer] = &DeserializerImpl::ParseArgMinMax;
216 m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &DeserializerImpl::ParseBatchToSpaceNd;
217 m_ParserFunctions[Layer_BatchNormalizationLayer] = &DeserializerImpl::ParseBatchNormalization;
218 m_ParserFunctions[Layer_ComparisonLayer] = &DeserializerImpl::ParseComparison;
219 m_ParserFunctions[Layer_ConcatLayer] = &DeserializerImpl::ParseConcat;
220 m_ParserFunctions[Layer_ConstantLayer] = &DeserializerImpl::ParseConstant;
221 m_ParserFunctions[Layer_Convolution2dLayer] = &DeserializerImpl::ParseConvolution2d;
222 m_ParserFunctions[Layer_DepthToSpaceLayer] = &DeserializerImpl::ParseDepthToSpace;
223 m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &DeserializerImpl::ParseDepthwiseConvolution2d;
224 m_ParserFunctions[Layer_DequantizeLayer] = &DeserializerImpl::ParseDequantize;
225 m_ParserFunctions[Layer_DetectionPostProcessLayer] = &DeserializerImpl::ParseDetectionPostProcess;
226 m_ParserFunctions[Layer_DivisionLayer] = &DeserializerImpl::ParseDivision;
227 m_ParserFunctions[Layer_ElementwiseUnaryLayer] = &DeserializerImpl::ParseElementwiseUnary;
228 m_ParserFunctions[Layer_EqualLayer] = &DeserializerImpl::ParseEqual;
229 m_ParserFunctions[Layer_FullyConnectedLayer] = &DeserializerImpl::ParseFullyConnected;
230 m_ParserFunctions[Layer_FillLayer] = &DeserializerImpl::ParseFill;
231 m_ParserFunctions[Layer_FloorLayer] = &DeserializerImpl::ParseFloor;
232 m_ParserFunctions[Layer_GatherLayer] = &DeserializerImpl::ParseGather;
233 m_ParserFunctions[Layer_GreaterLayer] = &DeserializerImpl::ParseGreater;
234 m_ParserFunctions[Layer_InstanceNormalizationLayer] = &DeserializerImpl::ParseInstanceNormalization;
235 m_ParserFunctions[Layer_L2NormalizationLayer] = &DeserializerImpl::ParseL2Normalization;
236 m_ParserFunctions[Layer_LogicalBinaryLayer] = &DeserializerImpl::ParseLogicalBinary;
237 m_ParserFunctions[Layer_LogSoftmaxLayer] = &DeserializerImpl::ParseLogSoftmax;
238 m_ParserFunctions[Layer_LstmLayer] = &DeserializerImpl::ParseLstm;
239 m_ParserFunctions[Layer_MaximumLayer] = &DeserializerImpl::ParseMaximum;
240 m_ParserFunctions[Layer_MeanLayer] = &DeserializerImpl::ParseMean;
241 m_ParserFunctions[Layer_MinimumLayer] = &DeserializerImpl::ParseMinimum;
242 m_ParserFunctions[Layer_MergeLayer] = &DeserializerImpl::ParseMerge;
243 m_ParserFunctions[Layer_MergerLayer] = &DeserializerImpl::ParseConcat;
244 m_ParserFunctions[Layer_MultiplicationLayer] = &DeserializerImpl::ParseMultiplication;
245 m_ParserFunctions[Layer_NormalizationLayer] = &DeserializerImpl::ParseNormalization;
246 m_ParserFunctions[Layer_PadLayer] = &DeserializerImpl::ParsePad;
247 m_ParserFunctions[Layer_PermuteLayer] = &DeserializerImpl::ParsePermute;
248 m_ParserFunctions[Layer_Pooling2dLayer] = &DeserializerImpl::ParsePooling2d;
249 m_ParserFunctions[Layer_PreluLayer] = &DeserializerImpl::ParsePrelu;
250 m_ParserFunctions[Layer_QLstmLayer] = &DeserializerImpl::ParseQLstm;
251 m_ParserFunctions[Layer_QuantizeLayer] = &DeserializerImpl::ParseQuantize;
252 m_ParserFunctions[Layer_QuantizedLstmLayer] = &DeserializerImpl::ParseQuantizedLstm;
253 m_ParserFunctions[Layer_RankLayer] = &DeserializerImpl::ParseRank;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +0000254 m_ParserFunctions[Layer_ReduceLayer] = &DeserializerImpl::ParseReduce;
Finn Williams85d36712021-01-26 22:30:06 +0000255 m_ParserFunctions[Layer_ReshapeLayer] = &DeserializerImpl::ParseReshape;
256 m_ParserFunctions[Layer_ResizeBilinearLayer] = &DeserializerImpl::ParseResizeBilinear;
257 m_ParserFunctions[Layer_ResizeLayer] = &DeserializerImpl::ParseResize;
258 m_ParserFunctions[Layer_RsqrtLayer] = &DeserializerImpl::ParseRsqrt;
259 m_ParserFunctions[Layer_SliceLayer] = &DeserializerImpl::ParseSlice;
260 m_ParserFunctions[Layer_SoftmaxLayer] = &DeserializerImpl::ParseSoftmax;
261 m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &DeserializerImpl::ParseSpaceToBatchNd;
262 m_ParserFunctions[Layer_SpaceToDepthLayer] = &DeserializerImpl::ParseSpaceToDepth;
263 m_ParserFunctions[Layer_SplitterLayer] = &DeserializerImpl::ParseSplitter;
264 m_ParserFunctions[Layer_StackLayer] = &DeserializerImpl::ParseStack;
265 m_ParserFunctions[Layer_StandInLayer] = &DeserializerImpl::ParseStandIn;
266 m_ParserFunctions[Layer_StridedSliceLayer] = &DeserializerImpl::ParseStridedSlice;
267 m_ParserFunctions[Layer_SubtractionLayer] = &DeserializerImpl::ParseSubtraction;
268 m_ParserFunctions[Layer_SwitchLayer] = &DeserializerImpl::ParseSwitch;
269 m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &DeserializerImpl::ParseTransposeConvolution2d;
270 m_ParserFunctions[Layer_TransposeLayer] = &DeserializerImpl::ParseTranspose;
Kevin May43a799c2019-02-08 16:31:42 +0000271}
272
Finn Williams85d36712021-01-26 22:30:06 +0000273LayerBaseRawPtr IDeserializer::DeserializerImpl::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000274{
275 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
276
277 switch(layerType)
278 {
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +0100279 case Layer::Layer_AbsLayer:
280 return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
Mike Kellyaf484012019-02-20 16:53:11 +0000281 case Layer::Layer_ActivationLayer:
282 return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000283 case Layer::Layer_AdditionLayer:
284 return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100285 case Layer::Layer_ArgMinMaxLayer:
286 return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000287 case Layer::Layer_BatchToSpaceNdLayer:
288 return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
ruoyan018e7fa232019-02-28 15:09:07 +0000289 case Layer::Layer_BatchNormalizationLayer:
290 return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +0100291 case Layer::Layer_ComparisonLayer:
292 return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
Jim Flynne242f2d2019-05-22 14:24:13 +0100293 case Layer::Layer_ConcatLayer:
294 return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
Conor Kennedy76277882019-02-26 08:29:54 +0000295 case Layer::Layer_ConstantLayer:
296 return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
Mike Kellya0766c32019-02-19 17:22:07 +0000297 case Layer::Layer_Convolution2dLayer:
298 return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +0100299 case Layer::Layer_DepthToSpaceLayer:
300 return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000301 case Layer::Layer_DepthwiseConvolution2dLayer:
302 return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000303 case Layer::Layer_DequantizeLayer:
304 return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +0000305 case Layer::Layer_DetectionPostProcessLayer:
306 return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
Éanna Ó Catháin58885892019-02-27 16:16:39 +0000307 case Layer::Layer_DivisionLayer:
308 return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +0000309 case Layer::Layer_EqualLayer:
310 return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
James Conroyaba90cd2020-11-06 16:28:18 +0000311 case Layer::Layer_ElementwiseUnaryLayer:
312 return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer()->base();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +0000313 case Layer::Layer_FullyConnectedLayer:
314 return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
Keith Davis300ad562020-06-04 16:34:23 +0100315 case Layer::Layer_FillLayer:
316 return graphPtr->layers()->Get(layerIndex)->layer_as_FillLayer()->base();
Finn Williamsdd2ba7e2019-03-01 11:51:52 +0000317 case Layer::Layer_FloorLayer:
318 return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000319 case Layer::Layer_GatherLayer:
320 return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
Conor Kennedy79ffdf52019-03-01 14:24:54 +0000321 case Layer::Layer_GreaterLayer:
322 return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000323 case Layer::Layer_InputLayer:
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +0000324 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
Aron Virginas-Tar781ced92019-10-03 11:15:39 +0100325 case Layer::Layer_InstanceNormalizationLayer:
326 return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +0000327 case Layer::Layer_L2NormalizationLayer:
328 return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
James Conroyaba90cd2020-11-06 16:28:18 +0000329 case Layer::Layer_LogicalBinaryLayer:
330 return graphPtr->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer()->base();
Sadik Armagan26257852019-10-14 13:00:47 +0100331 case Layer::Layer_LogSoftmaxLayer:
332 return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
Jim Flynn11af3752019-03-19 17:22:29 +0000333 case Layer::Layer_LstmLayer:
334 return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
Sadik Armaganac97c8c2019-03-04 17:44:21 +0000335 case Layer::Layer_MeanLayer:
336 return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +0000337 case Layer::Layer_MinimumLayer:
338 return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
Aron Virginas-Tar377351e2019-02-27 14:42:31 +0000339 case Layer::Layer_MaximumLayer:
340 return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100341 case Layer::Layer_MergeLayer:
342 return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
Jim Flynnac25a1b2019-02-28 10:40:49 +0000343 case Layer::Layer_MergerLayer:
344 return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
Sadik Armagan5f450272019-02-12 14:31:45 +0000345 case Layer::Layer_MultiplicationLayer:
346 return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
Nina Drozd57728782019-02-27 10:53:27 +0000347 case Layer::Layer_NormalizationLayer:
348 return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000349 case Layer::Layer_OutputLayer:
350 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +0000351 case Layer::Layer_PadLayer:
352 return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +0000353 case Layer::Layer_PermuteLayer:
354 return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
Saoirse Stewart3166c3e2019-02-18 15:24:53 +0000355 case Layer::Layer_Pooling2dLayer:
356 return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
Ellen Norris-Thompson51982472019-06-19 11:46:21 +0100357 case Layer::Layer_PreluLayer:
358 return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
James Conroy8d333182020-05-13 10:27:58 +0100359 case Layer::Layer_QLstmLayer:
360 return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
Derek Lamberti87acb272019-03-27 16:51:31 +0000361 case Layer::Layer_QuantizeLayer:
362 return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
Jan Eilers5b01a892019-07-23 09:47:43 +0100363 case Layer::Layer_QuantizedLstmLayer:
364 return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
Finn Williams2605b232020-06-10 15:53:46 +0100365 case Layer::Layer_RankLayer:
366 return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +0000367 case Layer::Layer_ReduceLayer:
368 return graphPtr->layers()->Get(layerIndex)->layer_as_ReduceLayer()->base();
Saoirse Stewart263829c2019-02-19 15:54:14 +0000369 case Layer::Layer_ReshapeLayer:
370 return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +0000371 case Layer::Layer_ResizeBilinearLayer:
372 return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100373 case Layer::Layer_ResizeLayer:
374 return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
Sadik Armagan8b42a382019-03-01 14:24:49 +0000375 case Layer::Layer_RsqrtLayer:
376 return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +0100377 case Layer::Layer_SliceLayer:
378 return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +0000379 case Layer::Layer_SoftmaxLayer:
380 return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
Nattapat Chaimanowong45286992019-02-26 15:53:02 +0000381 case Layer::Layer_SpaceToBatchNdLayer:
382 return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
Aron Virginas-Taraa067142019-06-11 16:01:44 +0100383 case Layer::Layer_SpaceToDepthLayer:
384 return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
Jim Flynn18ce3382019-03-08 11:08:30 +0000385 case Layer::Layer_SplitterLayer:
386 return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +0100387 case Layer::Layer_StackLayer:
388 return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
Aron Virginas-Tar85121a22019-10-23 10:41:35 +0100389 case Layer::Layer_StandInLayer:
390 return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +0000391 case Layer::Layer_StridedSliceLayer:
392 return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
Conor Kennedyda1f9752019-03-01 14:37:12 +0000393 case Layer::Layer_SubtractionLayer:
394 return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
Sadik Armaganeff363d2019-04-05 15:25:46 +0100395 case Layer::Layer_SwitchLayer:
396 return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
Aron Virginas-Tarcb549302019-06-21 13:53:38 +0100397 case Layer::Layer_TransposeConvolution2dLayer:
398 return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
Mike Kellyc9ea45a2020-02-28 18:11:58 +0000399 case Layer::Layer_TransposeLayer:
400 return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000401 case Layer::Layer_NONE:
402 default:
Colm Donelan5b5c2222020-09-09 12:48:16 +0100403 throw ParseException(fmt::format("Layer type {} not recognized", layerType));
Kevin May43a799c2019-02-08 16:31:42 +0000404 }
405}
406
Finn Williams85d36712021-01-26 22:30:06 +0000407std::string IDeserializer::DeserializerImpl::GetLayerName(const GraphPtr& graph, unsigned int index)
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000408{
409 auto layer = GetBaseLayer(graph, index);
410 assert(layer);
411 return layer->layerName()->str();
412}
413
Finn Williams85d36712021-01-26 22:30:06 +0000414int32_t IDeserializer::DeserializerImpl::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000415{
416 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
417
418 if (layerType == Layer::Layer_InputLayer)
419 {
420 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
421 }
422 else if ( layerType == Layer::Layer_OutputLayer )
423 {
424 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
425 }
426 return 0;
427}
428
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000429armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000430{
431 switch (dataLayout)
432 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000433 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000434 return armnn::DataLayout::NHWC;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000435 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000436 default:
437 return armnn::DataLayout::NCHW;
438 }
439}
440
Mike Kellyaf484012019-02-20 16:53:11 +0000441armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
442{
443 switch (function)
444 {
445 case armnnSerializer::ActivationFunction_Sigmoid:
446 return armnn::ActivationFunction::Sigmoid;
447 case armnnSerializer::ActivationFunction_TanH:
448 return armnn::ActivationFunction::TanH;
449 case armnnSerializer::ActivationFunction_Linear:
450 return armnn::ActivationFunction::Linear;
451 case armnnSerializer::ActivationFunction_ReLu:
452 return armnn::ActivationFunction::ReLu;
453 case armnnSerializer::ActivationFunction_BoundedReLu:
454 return armnn::ActivationFunction::BoundedReLu;
455 case armnnSerializer::ActivationFunction_LeakyReLu:
456 return armnn::ActivationFunction::LeakyReLu;
457 case armnnSerializer::ActivationFunction_Abs:
458 return armnn::ActivationFunction::Abs;
459 case armnnSerializer::ActivationFunction_Sqrt:
460 return armnn::ActivationFunction::Sqrt;
461 case armnnSerializer::ActivationFunction_Square:
462 return armnn::ActivationFunction::Square;
David Monahan3b3c3812020-02-25 09:03:29 +0000463 case armnnSerializer::ActivationFunction_Elu:
464 return armnn::ActivationFunction::Elu;
Colm Donelan03fbeaf2020-02-26 15:39:23 +0000465 case armnnSerializer::ActivationFunction_HardSwish:
466 return armnn::ActivationFunction::HardSwish;
Mike Kellyaf484012019-02-20 16:53:11 +0000467 default:
468 return armnn::ActivationFunction::Sigmoid;
469 }
470}
471
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100472armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
473{
474 switch (function)
475 {
476 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
477 return armnn::ArgMinMaxFunction::Max;
478 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
479 default:
480 return armnn::ArgMinMaxFunction::Min;
481 }
482}
483
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +0100484armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
485{
486 switch (operation)
487 {
488 case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
489 return armnn::ComparisonOperation::Equal;
490 case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
491 return armnn::ComparisonOperation::Greater;
492 case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
493 return armnn::ComparisonOperation::GreaterOrEqual;
494 case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
495 return armnn::ComparisonOperation::Less;
496 case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
497 return armnn::ComparisonOperation::LessOrEqual;
498 case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
499 default:
500 return armnn::ComparisonOperation::NotEqual;
501 }
502}
503
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +0000504armnn::ReduceOperation ToReduceOperation(armnnSerializer::ReduceOperation operation)
505{
506 switch (operation)
507 {
508 case armnnSerializer::ReduceOperation::ReduceOperation_Sum:
509 return armnn::ReduceOperation::Sum;
510 case armnnSerializer::ReduceOperation::ReduceOperation_Max:
511 return armnn::ReduceOperation::Max;
512 case armnnSerializer::ReduceOperation::ReduceOperation_Mean:
513 return armnn::ReduceOperation::Mean;
514 case armnnSerializer::ReduceOperation::ReduceOperation_Min:
515 return armnn::ReduceOperation::Min;
516 default:
517 return armnn::ReduceOperation::Sum;
518 }
519}
520
James Conroyaba90cd2020-11-06 16:28:18 +0000521armnn::LogicalBinaryOperation ToLogicalBinaryOperation(armnnSerializer::LogicalBinaryOperation operation)
522{
523 switch (operation)
524 {
525 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalAnd:
526 return armnn::LogicalBinaryOperation::LogicalAnd;
527 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalOr:
528 return armnn::LogicalBinaryOperation::LogicalOr;
529 default:
530 throw armnn::InvalidArgumentException("Logical Binary operation unknown");
531 }
532}
533
josh minor4a3c6102020-01-06 16:40:46 -0600534armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
535{
536 switch (operation)
537 {
538 case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
539 return armnn::UnaryOperation::Abs;
540 case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
541 return armnn::UnaryOperation::Rsqrt;
542 case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
543 return armnn::UnaryOperation::Sqrt;
544 case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
545 return armnn::UnaryOperation::Exp;
546 case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
547 return armnn::UnaryOperation::Neg;
James Conroyaba90cd2020-11-06 16:28:18 +0000548 case armnnSerializer::UnaryOperation::UnaryOperation_LogicalNot:
549 return armnn::UnaryOperation::LogicalNot;
josh minor4a3c6102020-01-06 16:40:46 -0600550 default:
551 throw armnn::InvalidArgumentException("Unary operation unknown");
552 }
553}
554
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100555armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
556{
557 switch (method)
558 {
559 case armnnSerializer::ResizeMethod_NearestNeighbor:
560 return armnn::ResizeMethod::NearestNeighbor;
561 case armnnSerializer::ResizeMethod_Bilinear:
Aron Virginas-Tar3c9b2702019-10-31 13:45:16 +0000562 return armnn::ResizeMethod::Bilinear;
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100563 default:
564 return armnn::ResizeMethod::NearestNeighbor;
565 }
566}
567
Finn Williams85d36712021-01-26 22:30:06 +0000568armnn::TensorInfo ToTensorInfo(TensorRawPtr tensorPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000569{
570 armnn::DataType type;
571 CHECK_TENSOR_PTR(tensorPtr);
572
573 switch (tensorPtr->dataType())
574 {
Francis Murtaghddb1d062020-03-10 13:51:45 +0000575 case DataType_QAsymmS8:
576 type = armnn::DataType::QAsymmS8;
577 break;
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000578 case DataType_QSymmS8:
579 type = armnn::DataType::QSymmS8;
580 break;
Kevin May43a799c2019-02-08 16:31:42 +0000581 case DataType_QuantisedAsymm8:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000582 case DataType_QAsymmU8:
583 type = armnn::DataType::QAsymmU8;
Kevin May43a799c2019-02-08 16:31:42 +0000584 break;
Derek Lambertif90c56d2020-01-10 17:14:08 +0000585 case DataType_QSymmS16:
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000586 case DataType_QuantisedSymm16:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000587 type = armnn::DataType::QSymmS16;
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000588 break;
Mike Kellya0766c32019-02-19 17:22:07 +0000589 case DataType_Signed32:
590 type = armnn::DataType::Signed32;
591 break;
Kevin May43a799c2019-02-08 16:31:42 +0000592 case DataType_Float32:
593 type = armnn::DataType::Float32;
594 break;
595 case DataType_Float16:
596 type = armnn::DataType::Float16;
597 break;
598 case DataType_Boolean:
599 type = armnn::DataType::Boolean;
600 break;
601 default:
602 {
603 CheckLocation location = CHECK_LOCATION();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100604 throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
605 tensorPtr->dataType(),
606 EnumNameDataType(tensorPtr->dataType()),
607 location.AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000608 }
609 }
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000610
Colm Donelan800b2812021-02-12 12:43:35 +0000611 float quantizationScale = tensorPtr->quantizationScale();
612 int32_t quantizationOffset = tensorPtr->quantizationOffset();
613
Finn Williams2605b232020-06-10 15:53:46 +0100614 if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::Scalar))
615 {
Colm Donelan800b2812021-02-12 12:43:35 +0000616 return armnn::TensorInfo(TensorShape{armnn::Dimensionality::Scalar},
Finn Williams2605b232020-06-10 15:53:46 +0100617 type,
618 quantizationScale,
619 quantizationOffset);
620 }
Colm Donelan800b2812021-02-12 12:43:35 +0000621 else if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::NotSpecified))
622 {
623 armnn::TensorInfo result(TensorShape{Dimensionality::NotSpecified},
624 type,
625 quantizationScale,
626 quantizationOffset);
627 return result;
628 }
Kevin May43a799c2019-02-08 16:31:42 +0000629
630 auto dimensions = tensorPtr->dimensions();
631 unsigned int size = dimensions->size();
632 std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
Colm Donelan800b2812021-02-12 12:43:35 +0000633 bool dimensionsSpecificity[armnn::MaxNumOfTensorDimensions];
634 std::fill_n(dimensionsSpecificity, armnn::MaxNumOfTensorDimensions, true);
635 // For backwards compatibility check if the dimensionSpecificity vector is present first.
636 // The default is to have dimensionSpecificity set to all true's anyway.
637 if (tensorPtr->dimensionSpecificity() != nullptr)
638 {
639 auto dimensionSpecificity = tensorPtr->dimensionSpecificity();
640 size = dimensionSpecificity->size();
641 for (unsigned int i = 0; i < size; ++i)
642 {
643 dimensionsSpecificity[i] = dimensionSpecificity->Get(i);
644 }
645 }
646 // Construct a TensorShape
647 TensorShape shape(size, outputDims.data(), dimensionsSpecificity);
Kevin May43a799c2019-02-08 16:31:42 +0000648
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000649 auto quantizationScales = tensorPtr->quantizationScales();
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000650 if (quantizationScales)
651 {
652 unsigned int quantizationScalesSize = quantizationScales->size();
653 std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
654 unsigned int quantizationDim = tensorPtr->quantizationDim();
Colm Donelan800b2812021-02-12 12:43:35 +0000655 armnn::TensorInfo result(shape,
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000656 type,
657 scales,
658 quantizationDim);
659 return result;
660 }
661
Kevin May43a799c2019-02-08 16:31:42 +0000662 // two statements (on purpose) for easier debugging:
Colm Donelan800b2812021-02-12 12:43:35 +0000663 armnn::TensorInfo result(shape,
Kevin May43a799c2019-02-08 16:31:42 +0000664 type,
665 quantizationScale,
666 quantizationOffset);
Colm Donelan800b2812021-02-12 12:43:35 +0000667
Kevin May43a799c2019-02-08 16:31:42 +0000668 return result;
669}
670
Finn Williams85d36712021-01-26 22:30:06 +0000671armnn::ConstTensor ToConstTensor(ConstTensorRawPtr constTensorPtr)
Mike Kellya0766c32019-02-19 17:22:07 +0000672{
673 CHECK_CONST_TENSOR_PTR(constTensorPtr);
674 armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());
675
676 switch (constTensorPtr->data_type())
677 {
678 case ConstTensorData_ByteData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000679 {
680 auto byteData = constTensorPtr->data_as_ByteData()->data();
681 CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
682 return armnn::ConstTensor(tensorInfo, byteData->data());
683 }
Mike Kellya0766c32019-02-19 17:22:07 +0000684 case ConstTensorData_ShortData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000685 {
686 auto shortData = constTensorPtr->data_as_ShortData()->data();
687 CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
688 return armnn::ConstTensor(tensorInfo, shortData->data());
689 }
Mike Kellya0766c32019-02-19 17:22:07 +0000690 case ConstTensorData_IntData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000691 {
692 auto intData = constTensorPtr->data_as_IntData()->data();
693 CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
694 return armnn::ConstTensor(tensorInfo, intData->data());
695 }
Mike Kellya0766c32019-02-19 17:22:07 +0000696 case ConstTensorData_LongData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000697 {
698 auto longData = constTensorPtr->data_as_LongData()->data();
699 CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
700 return armnn::ConstTensor(tensorInfo, longData->data());
701 }
Mike Kellya0766c32019-02-19 17:22:07 +0000702 default:
703 {
704 CheckLocation location = CHECK_LOCATION();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100705 throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
706 constTensorPtr->data_type(),
707 EnumNameConstTensorData(constTensorPtr->data_type()),
708 location.AsString()));
Mike Kellya0766c32019-02-19 17:22:07 +0000709 }
710 }
711}
712
Finn Williams85d36712021-01-26 22:30:06 +0000713TensorRawPtrVector IDeserializer::DeserializerImpl::GetInputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000714{
715 CHECK_LAYERS(graphPtr, 0, layerIndex);
716 auto layer = GetBaseLayer(graphPtr, layerIndex);
717 const auto& numInputs = layer->inputSlots()->size();
718
719 TensorRawPtrVector result(numInputs);
720
721 for (unsigned int i=0; i<numInputs; ++i)
722 {
723 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
724 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
725 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
726 }
727 return result;
728}
729
Finn Williams85d36712021-01-26 22:30:06 +0000730TensorRawPtrVector IDeserializer::DeserializerImpl::GetOutputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000731{
732 CHECK_LAYERS(graphPtr, 0, layerIndex);
733 auto layer = GetBaseLayer(graphPtr, layerIndex);
734 const auto& numOutputs = layer->outputSlots()->size();
735
736 TensorRawPtrVector result(numOutputs);
737
738 for (unsigned int i=0; i<numOutputs; ++i)
739 {
740 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
741 }
742 return result;
743}
744
Finn Williams85d36712021-01-26 22:30:06 +0000745void IDeserializer::DeserializerImpl::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000746{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000747 CHECK_LAYERS(graph, 0, layerIndex);
748 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100749 throw ParseException(fmt::format("Layer not supported. layerIndex: {0} "
750 "layerName: {1} / {2}",
751 layerIndex,
752 layerName,
753 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000754}
755
Finn Williams85d36712021-01-26 22:30:06 +0000756void IDeserializer::DeserializerImpl::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000757{
758 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000759 m_InputBindings.clear();
760 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000761}
762
Kevin May43a799c2019-02-08 16:31:42 +0000763
Finn Williams85d36712021-01-26 22:30:06 +0000764INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000765{
766 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000767 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
768 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000769}
770
Finn Williams85d36712021-01-26 22:30:06 +0000771armnn::INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000772{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000773 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000774 std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
775 GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
776 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000777}
778
Finn Williams85d36712021-01-26 22:30:06 +0000779GraphPtr IDeserializer::DeserializerImpl::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000780{
781 if (binaryContent == nullptr)
782 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100783 throw InvalidArgumentException(fmt::format("Invalid (null) binary content {}",
784 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000785 }
786 flatbuffers::Verifier verifier(binaryContent, len);
787 if (verifier.VerifyBuffer<SerializedGraph>() == false)
788 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100789 throw ParseException(fmt::format("Buffer doesn't conform to the expected Armnn "
790 "flatbuffers format. size:{0} {1}",
791 len,
792 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000793 }
794 return GetSerializedGraph(binaryContent);
795}
796
Finn Williams85d36712021-01-26 22:30:06 +0000797INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromGraph(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000798{
799 m_Network = INetwork::Create();
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100800 ARMNN_ASSERT(graph != nullptr);
Kevin May43a799c2019-02-08 16:31:42 +0000801 unsigned int layerIndex = 0;
Derek Lamberti8ddae332019-02-21 16:29:43 +0000802 for (AnyLayer const* layer : *graph->layers())
Kevin May43a799c2019-02-08 16:31:42 +0000803 {
804 if (layer->layer_type() != Layer_InputLayer &&
805 layer->layer_type() != Layer_OutputLayer)
806 {
807 // lookup and call the parser function
808 auto& parserFunction = m_ParserFunctions[layer->layer_type()];
Derek Lamberti8ddae332019-02-21 16:29:43 +0000809 (this->*parserFunction)(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000810 }
811 ++layerIndex;
812 }
813
Derek Lamberti8ddae332019-02-21 16:29:43 +0000814 SetupInputLayers(graph);
815 SetupOutputLayers(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000816
817 // establish the connections from the layer outputs to the inputs of the subsequent layers
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100818 for (auto&& graphIt : m_GraphConnections)
Kevin May43a799c2019-02-08 16:31:42 +0000819 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100820 Connections& connections = graphIt.second;
821 for (auto&& outputIt : connections.outputSlots)
Kevin May43a799c2019-02-08 16:31:42 +0000822 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100823 const unsigned int outputSlotIndex = outputIt.first;
824 IOutputSlot* outputSlot = outputIt.second;
825 if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
Kevin May43a799c2019-02-08 16:31:42 +0000826 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100827 for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000828 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100829 outputSlot->Connect(*inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000830 }
Kevin May43a799c2019-02-08 16:31:42 +0000831 }
832 }
833 }
834
835 return std::move(m_Network);
836}
837
Finn Williams85d36712021-01-26 22:30:06 +0000838BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000839 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000840{
Jan Eilers8eb25602020-03-09 12:13:48 +0000841 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000842 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000843 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000844 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000845 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000846 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000847 }
848 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100849 throw ParseException(fmt::format("No input binding found for layer:{0} / {1}",
850 name,
851 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000852}
853
Finn Williams85d36712021-01-26 22:30:06 +0000854BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000855 const std::string& name) const
856{
Jan Eilers8eb25602020-03-09 12:13:48 +0000857 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000858 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000859 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000860 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000861 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000862 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000863 }
864 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100865 throw ParseException(fmt::format("No output binding found for layer:{0} / {1}",
866 name,
867 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000868}
869
Finn Williams85d36712021-01-26 22:30:06 +0000870unsigned int IDeserializer::DeserializerImpl::GetInputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000871{
872 for (unsigned int i = 0; i < graph->layers()->size(); i++)
873 {
874 auto layer = graph->layers()->Get(i);
875 if (layer->layer_type() == Layer::Layer_InputLayer)
876 {
877 auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
878 if (layerBindingId == targetId)
879 {
880 return i;
881 }
882 }
883 }
884 throw ParseException("Input layer with given layerBindingId not found");
885}
886
Finn Williams85d36712021-01-26 22:30:06 +0000887unsigned int IDeserializer::DeserializerImpl::GetOutputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000888{
889 for (unsigned int i = 0; i < graph->layers()->size(); i++)
890 {
891 auto layer = graph->layers()->Get(i);
892 if (layer->layer_type() == Layer::Layer_OutputLayer)
893 {
894 auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
895 if (layerBindingId == targetId)
896 {
897 return i;
898 }
899 }
900 }
901 throw ParseException("Output layer with given layerBindingId not found");
902}
903
Finn Williams85d36712021-01-26 22:30:06 +0000904unsigned int IDeserializer::DeserializerImpl::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100905{
906 for (unsigned int i = 0; i < graph->layers()->size(); i++)
907 {
908 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
909 if (layer->index() == targetIndex)
910 {
911 return i;
912 }
913 }
914 throw ParseException("Layer with given index not found");
915}
916
Finn Williams85d36712021-01-26 22:30:06 +0000917IDeserializer::DeserializerImpl::FeatureVersions IDeserializer::DeserializerImpl::GetFeatureVersions(GraphPtr graph)
Tee Jungaa920c52019-11-05 10:48:25 +0000918{
Finn Williams85d36712021-01-26 22:30:06 +0000919 IDeserializer::DeserializerImpl::FeatureVersions versions;
Tee Jungaa920c52019-11-05 10:48:25 +0000920
921 if (graph->featureVersions())
922 {
923 versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
924 }
925
926 return versions;
927}
928
Finn Williams85d36712021-01-26 22:30:06 +0000929void IDeserializer::DeserializerImpl::SetupInputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000930{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000931 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100932 const unsigned int numInputs = graph->inputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000933 m_InputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100934 m_InputBindings.reserve(numInputs);
935
936 for (unsigned int i = 0; i < numInputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000937 {
Tee Jungaa920c52019-11-05 10:48:25 +0000938 unsigned int inputLayerIndex = 0xFFFFFFFF;
939 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
940 {
Matthew Sloyan0663d662020-09-14 11:47:26 +0100941 const unsigned int inputId = armnn::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
Tee Jungaa920c52019-11-05 10:48:25 +0000942 inputLayerIndex = GetLayerIndexInVector(graph, inputId);
943 }
944 else
945 {
946 const int inputId = graph->inputIds()->Get(i);
947 inputLayerIndex = GetInputLayerInVector(graph, inputId);
948 }
949
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100950 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000951
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100952 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
953 LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100954 ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Kevin May43a799c2019-02-08 16:31:42 +0000955
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100956 IConnectableLayer* inputLayer =
957 m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000958
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100959 const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
960 inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
961 RegisterOutputSlots(graph, inputLayerIndex, inputLayer);
962
Derek Lamberti8ddae332019-02-21 16:29:43 +0000963 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100964 m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +0000965 }
966}
967
Finn Williams85d36712021-01-26 22:30:06 +0000968void IDeserializer::DeserializerImpl::SetupOutputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000969{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000970 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100971 const unsigned int numOutputs = graph->outputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000972 m_OutputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100973 m_OutputBindings.reserve(numOutputs);
974
975 for (unsigned int i = 0; i < numOutputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000976 {
Tee Jungaa920c52019-11-05 10:48:25 +0000977 unsigned int outputLayerIndex = 0xFFFFFFFF;
978 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
979 {
Matthew Sloyan0663d662020-09-14 11:47:26 +0100980 const unsigned int outputId = armnn::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
Tee Jungaa920c52019-11-05 10:48:25 +0000981 outputLayerIndex = GetLayerIndexInVector(graph, outputId);
982 }
983 else
984 {
985 const int outputId = graph->outputIds()->Get(i);
986 outputLayerIndex = GetOutputLayerInVector(graph, outputId);
987 }
988
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100989 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000990
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100991 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
992 LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
Teresa Charlin4441d942021-03-12 16:29:03 +0000993 ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Derek Lamberti8ddae332019-02-21 16:29:43 +0000994
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100995 IConnectableLayer* outputLayer =
996 m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +0000997
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100998 RegisterInputSlots(graph, outputLayerIndex, outputLayer);
999
1000 unsigned int sourceLayerIndex =
1001 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
1002 LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
Teresa Charlin4441d942021-03-12 16:29:03 +00001003 const armnn::TensorInfo& tensorInfo = ToTensorInfo(sourceBaseLayer->outputSlots()->Get(0)->tensorInfo());
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001004
Derek Lamberti8ddae332019-02-21 16:29:43 +00001005 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001006 m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +00001007 }
1008}
1009
Finn Williams85d36712021-01-26 22:30:06 +00001010void IDeserializer::DeserializerImpl::RegisterOutputSlots(GraphPtr graph,
Derek Lamberti8ddae332019-02-21 16:29:43 +00001011 uint32_t layerIndex,
1012 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +00001013{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001014 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001015 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001016 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
1017 if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
Kevin May43a799c2019-02-08 16:31:42 +00001018 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001019 throw ParseException(fmt::format("The number of outputslots ({0}) does not match the number expected ({1})"
1020 " for layer index: {2} {3}",
1021 baseLayer->outputSlots()->size(),
1022 layer->GetNumOutputSlots(),
1023 layerIndex,
1024 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +00001025 }
1026
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001027 for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +00001028 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001029 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
1030 armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
1031 // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
1032 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
Kevin May43a799c2019-02-08 16:31:42 +00001033 }
1034}
1035
Finn Williams85d36712021-01-26 22:30:06 +00001036void IDeserializer::DeserializerImpl::RegisterInputSlots(GraphPtr graph,
Derek Lamberti8ddae332019-02-21 16:29:43 +00001037 uint32_t layerIndex,
1038 armnn::IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +00001039{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001040 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001041 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001042 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
1043 if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
Kevin May43a799c2019-02-08 16:31:42 +00001044 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001045 throw ParseException(fmt::format("The number of inputslots ({0}) does not match the number expected ({1})"
1046 " for layer index:{2} {3}",
1047 baseLayer->inputSlots()->size(),
1048 layer->GetNumInputSlots(),
1049 layerIndex,
1050 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +00001051 }
1052
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001053 for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +00001054 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001055 auto fbInputSlot = baseLayer->inputSlots()->Get(i);
1056 auto fbConnection = fbInputSlot->connection();
1057 armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
1058 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
Kevin May43a799c2019-02-08 16:31:42 +00001059 }
1060}
1061
Finn Williams85d36712021-01-26 22:30:06 +00001062void IDeserializer::DeserializerImpl::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001063 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001064 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +00001065{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001066 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +00001067 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001068 m_GraphConnections[sourceLayerIndex] = Connections();
1069 }
1070
1071 Connections& connections = m_GraphConnections[sourceLayerIndex];
1072 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
1073 {
1074 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +00001075 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001076 else
1077 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001078 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001079 }
1080}
Kevin May43a799c2019-02-08 16:31:42 +00001081
Finn Williams85d36712021-01-26 22:30:06 +00001082void IDeserializer::DeserializerImpl::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001083 uint32_t outputSlotIndex,
1084 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001085{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001086 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1087 {
1088 m_GraphConnections[sourceLayerIndex] = Connections();
1089 }
1090
1091 Connections& connections = m_GraphConnections[sourceLayerIndex];
1092 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1093 {
1094 throw ParseException("Same output slot index processed twice");
1095 }
1096
1097 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +00001098}
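// The connections recorded by the two helpers above are not wired up immediately; once every
// layer has been created, the deserializer walks m_GraphConnections and connects each registered
// output slot to its input slots. Conceptually that pass looks something like the sketch below
// (illustrative only, not the exact implementation):
//
//     for (auto& graphIt : m_GraphConnections)
//     {
//         Connections& connections = graphIt.second;
//         for (auto& outputIt : connections.outputSlots)
//         {
//             for (armnn::IInputSlot* inputSlot : connections.inputSlots[outputIt.first])
//             {
//                 outputIt.second->Connect(*inputSlot);
//             }
//         }
//     }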
1099
Finn Williams85d36712021-01-26 22:30:06 +00001100void IDeserializer::DeserializerImpl::ParseAbs(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001101{
1102 CHECK_LAYERS(graph, 0, layerIndex);
1103 auto inputs = GetInputs(graph, layerIndex);
1104 CHECK_LOCATION();
1105 CHECK_VALID_SIZE(inputs.size(), 1);
1106
1107 auto outputs = GetOutputs(graph, layerIndex);
1108 CHECK_VALID_SIZE(outputs.size(), 1);
1109
1110 auto layerName = GetLayerName(graph, layerIndex);
1111
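    // The stand-alone Abs layer is mapped onto the generic elementwise-unary layer, presumably so
    // that models serialized before that layer type existed still deserialize correctly.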
josh minor4a3c6102020-01-06 16:40:46 -06001112 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
1113 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001114 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1115 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1116
1117 RegisterInputSlots(graph, layerIndex, layer);
1118 RegisterOutputSlots(graph, layerIndex, layer);
1119}
1120
Finn Williams85d36712021-01-26 22:30:06 +00001121void IDeserializer::DeserializerImpl::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +00001122{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001123 CHECK_LAYERS(graph, 0, layerIndex);
1124 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001125 CHECK_LOCATION();
1126 CHECK_VALID_SIZE(inputs.size(), 1);
1127
Derek Lamberti8ddae332019-02-21 16:29:43 +00001128 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001129 CHECK_VALID_SIZE(outputs.size(), 1);
1130
Derek Lamberti8ddae332019-02-21 16:29:43 +00001131 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001132 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001133 auto serializerDescriptor = serializerLayer->descriptor();
1134
1135 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001136 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +00001137 descriptor.m_A = serializerDescriptor->a();
1138 descriptor.m_B = serializerDescriptor->b();
1139
1140 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
1141 layerName.c_str());
1142 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1143 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1144
Derek Lamberti8ddae332019-02-21 16:29:43 +00001145 RegisterInputSlots(graph, layerIndex, layer);
1146 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +00001147}
1148
Finn Williams85d36712021-01-26 22:30:06 +00001149void IDeserializer::DeserializerImpl::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001150{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001151 CHECK_LAYERS(graph, 0, layerIndex);
1152 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001153 CHECK_LOCATION();
1154 CHECK_VALID_SIZE(inputs.size(), 2);
1155
Derek Lamberti8ddae332019-02-21 16:29:43 +00001156 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001157 CHECK_VALID_SIZE(outputs.size(), 1);
1158
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001159 auto layerName = GetLayerName(graph, layerIndex);
1160 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001161
1162 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1163 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1164
Derek Lamberti8ddae332019-02-21 16:29:43 +00001165 RegisterInputSlots(graph, layerIndex, layer);
1166 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001167}
1168
Finn Williams85d36712021-01-26 22:30:06 +00001169void IDeserializer::DeserializerImpl::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001170{
1171 CHECK_LAYERS(graph, 0, layerIndex);
1172 auto inputs = GetInputs(graph, layerIndex);
1173 CHECK_LOCATION();
1174 CHECK_VALID_SIZE(inputs.size(), 1);
1175
1176 auto outputs = GetOutputs(graph, layerIndex);
1177 CHECK_VALID_SIZE(outputs.size(), 1);
1178
1179 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1180 auto serializerDescriptor = serializerLayer->descriptor();
1181
1182 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001183 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001184 descriptor.m_Axis = serializerDescriptor->axis();
1185 auto layerName = GetLayerName(graph, layerIndex);
1186 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1187
1188 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1189 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1190
1191 RegisterInputSlots(graph, layerIndex, layer);
1192 RegisterOutputSlots(graph, layerIndex, layer);
1193}
1194
Finn Williams85d36712021-01-26 22:30:06 +00001195void IDeserializer::DeserializerImpl::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001196{
1197 CHECK_LAYERS(graph, 0, layerIndex);
1198
Finn Williams85d36712021-01-26 22:30:06 +00001199 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001200 CHECK_VALID_SIZE(inputs.size(), 1);
1201
Finn Williams85d36712021-01-26 22:30:06 +00001202 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001203 CHECK_VALID_SIZE(outputs.size(), 1);
1204
1205 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1206 auto flatBufferCrops = flatBufferDescriptor->crops();
1207 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1208
1209 if (flatBufferCrops->Length() % 2 != 0)
1210 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001211 throw ParseException(fmt::format("The size of crops must be divisible by 2 {}", CHECK_LOCATION().AsString()));
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001212 }
1213
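    // The serialized crops are stored flat as (begin, end) pairs, one pair per spatial dimension.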
1214 std::vector<std::pair<unsigned int, unsigned int>> crops;
1215 crops.reserve(flatBufferCrops->Length() / 2);
1216 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
1217 {
1218 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1219 }
1220
1221 armnn::BatchToSpaceNdDescriptor descriptor;
1222 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1223 descriptor.m_BlockShape =
1224 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1225 descriptor.m_Crops = crops;
1226
1227 auto layerName = GetLayerName(graph, layerIndex);
1228 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1229
1230 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1231 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1232
1233 RegisterInputSlots(graph, layerIndex, layer);
1234 RegisterOutputSlots(graph, layerIndex, layer);
1235}
1236
Finn Williams85d36712021-01-26 22:30:06 +00001237void IDeserializer::DeserializerImpl::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
ruoyan018e7fa232019-02-28 15:09:07 +00001238{
1239 CHECK_LAYERS(graph, 0, layerIndex);
1240
1241 auto inputs = GetInputs(graph, layerIndex);
1242 CHECK_VALID_SIZE(inputs.size(), 1);
1243
1244 auto outputs = GetOutputs(graph, layerIndex);
1245 CHECK_VALID_SIZE(outputs.size(), 1);
1246 auto outputInfo = ToTensorInfo(outputs[0]);
1247
ruoyan015c7ab052019-03-04 14:48:02 +00001248 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001249
1250 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1251 auto serializerDescriptor = serializerLayer->descriptor();
1252
1253 armnn::BatchNormalizationDescriptor descriptor;
1254 descriptor.m_Eps = serializerDescriptor->eps();
1255 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1256
1257 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1258 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1259 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1260 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1261
1262 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1263 mean,
1264 variance,
1265 beta,
1266 gamma,
1267 layerName.c_str());
1268 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1269
1270 RegisterInputSlots(graph, layerIndex, layer);
1271 RegisterOutputSlots(graph, layerIndex, layer);
1272}
1273
Finn Williams85d36712021-01-26 22:30:06 +00001274void IDeserializer::DeserializerImpl::ParseConstant(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy76277882019-02-26 08:29:54 +00001275{
1276 CHECK_LAYERS(graph, 0, layerIndex);
1277 CHECK_LOCATION();
1278
1279 auto outputs = GetOutputs(graph, layerIndex);
1280 CHECK_VALID_SIZE(outputs.size(), 1);
1281
1282 auto layerName = GetLayerName(graph, layerIndex);
1283
1284 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
1285 auto serializerInput = serializerLayer->input();
1286
1287 armnn::ConstTensor input = ToConstTensor(serializerInput);
1288
1289 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
1290
1291 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1292 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1293
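    // A constant layer has no input slots, so only its output slots need registering.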
1294 RegisterOutputSlots(graph, layerIndex, layer);
1295}
1296
Finn Williams85d36712021-01-26 22:30:06 +00001297void IDeserializer::DeserializerImpl::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001298{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001299 CHECK_LAYERS(graph, 0, layerIndex);
1300 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001301 CHECK_LOCATION();
1302 CHECK_VALID_SIZE(inputs.size(), 1);
1303
Derek Lamberti8ddae332019-02-21 16:29:43 +00001304 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001305 CHECK_VALID_SIZE(outputs.size(), 1);
1306
Derek Lamberti8ddae332019-02-21 16:29:43 +00001307 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001308 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001309 auto serializerDescriptor = serializerLayer->descriptor();
1310
1311 armnn::Convolution2dDescriptor descriptor;
1312 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1313 descriptor.m_PadRight = serializerDescriptor->padRight();
1314 descriptor.m_PadTop = serializerDescriptor->padTop();
1315 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1316 descriptor.m_StrideX = serializerDescriptor->strideX();
1317 descriptor.m_StrideY = serializerDescriptor->strideY();
Matthew Benthamacad04e2019-05-13 10:02:45 +01001318 descriptor.m_DilationX = serializerDescriptor->dilationX();
1319 descriptor.m_DilationY = serializerDescriptor->dilationY();
Mike Kellya0766c32019-02-19 17:22:07 +00001320 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
1321 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1322
1323 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1324 armnn::ConstTensor biases;
1325
Matteo Martincighfc598e12019-05-14 10:36:13 +01001326 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001327 if (descriptor.m_BiasEnabled)
1328 {
1329 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001330 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001331 }
1332 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1333 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001334 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001335 layerName.c_str());
1336 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1337 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1338
Derek Lamberti8ddae332019-02-21 16:29:43 +00001339 RegisterInputSlots(graph, layerIndex, layer);
1340 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001341}
1342
Finn Williams85d36712021-01-26 22:30:06 +00001343void IDeserializer::DeserializerImpl::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001344{
1345 CHECK_LAYERS(graph, 0, layerIndex);
1346
1347 auto inputs = GetInputs(graph, layerIndex);
1348 CHECK_VALID_SIZE(inputs.size(), 1);
1349
1350 auto outputs = GetOutputs(graph, layerIndex);
1351 CHECK_VALID_SIZE(outputs.size(), 1);
1352
1353 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1354
1355 armnn::DepthToSpaceDescriptor descriptor;
1356 descriptor.m_BlockSize = fbDescriptor->blockSize();
1357 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1358
1359 auto layerName = GetLayerName(graph, layerIndex);
1360 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1361
1362 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1363 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1364
1365 RegisterInputSlots(graph, layerIndex, layer);
1366 RegisterOutputSlots(graph, layerIndex, layer);
1367}
1368
Finn Williams85d36712021-01-26 22:30:06 +00001369void IDeserializer::DeserializerImpl::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001370{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001371 CHECK_LAYERS(graph, 0, layerIndex);
1372 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001373 CHECK_LOCATION();
1374 CHECK_VALID_SIZE(inputs.size(), 1);
1375
Derek Lamberti8ddae332019-02-21 16:29:43 +00001376 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001377 CHECK_VALID_SIZE(outputs.size(), 1);
1378
Derek Lamberti8ddae332019-02-21 16:29:43 +00001379 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001380 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001381 auto serializerDescriptor = serializerLayer->descriptor();
1382
1383 armnn::DepthwiseConvolution2dDescriptor descriptor;
1384 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1385 descriptor.m_PadRight = serializerDescriptor->padRight();
1386 descriptor.m_PadTop = serializerDescriptor->padTop();
1387 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1388 descriptor.m_StrideX = serializerDescriptor->strideX();
Aron Virginas-Tar5e1b0cf2019-06-21 14:20:11 +01001389 descriptor.m_StrideY = serializerDescriptor->strideY();
1390 descriptor.m_DilationX = serializerDescriptor->dilationX();
1391 descriptor.m_DilationY = serializerDescriptor->dilationY();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001392 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
1393 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1394
1395 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1396 armnn::ConstTensor biases;
1397
Matteo Martincighfc598e12019-05-14 10:36:13 +01001398 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001399 if (descriptor.m_BiasEnabled)
1400 {
1401 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001402 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001403 }
1404 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1405 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001406 optionalBiases,
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001407 layerName.c_str());
1408
1409 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1410 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1411
Derek Lamberti8ddae332019-02-21 16:29:43 +00001412 RegisterInputSlots(graph, layerIndex, layer);
1413 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001414}
1415
Finn Williams85d36712021-01-26 22:30:06 +00001416void IDeserializer::DeserializerImpl::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001417{
1418 CHECK_LAYERS(graph, 0, layerIndex);
1419 auto inputs = GetInputs(graph, layerIndex);
1420 CHECK_LOCATION();
1421 CHECK_VALID_SIZE(inputs.size(), 2);
1422
1423 auto outputs = GetOutputs(graph, layerIndex);
1424 CHECK_VALID_SIZE(outputs.size(), 4);
1425
1426 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1427 auto layerName = GetLayerName(graph, layerIndex);
1428 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1429
1430 armnn::DetectionPostProcessDescriptor descriptor;
1431 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1432 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1433 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1434 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1435 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1436 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1437 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1438 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1439 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1440 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1441 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1442
1443 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1444
1445 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1446 anchors,
1447 layerName.c_str());
1448
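    // The detection post-process layer has four outputs (typically the detection boxes, classes,
    // scores and the number of valid detections); set the tensor info on each of them.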
1449 for (unsigned int i = 0; i < 4; i++)
1450 {
1451 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1452 }
1453
1454 RegisterInputSlots(graph, layerIndex, layer);
1455 RegisterOutputSlots(graph, layerIndex, layer);
1456}
1457
Finn Williams85d36712021-01-26 22:30:06 +00001458void IDeserializer::DeserializerImpl::ParseDivision(GraphPtr graph, unsigned int layerIndex)
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001459{
1460 CHECK_LAYERS(graph, 0, layerIndex);
1461 auto inputs = GetInputs(graph, layerIndex);
1462 CHECK_LOCATION();
1463 CHECK_VALID_SIZE(inputs.size(), 2);
1464
1465 auto outputs = GetOutputs(graph, layerIndex);
1466 CHECK_VALID_SIZE(outputs.size(), 1);
1467
1468 auto layerName = GetLayerName(graph, layerIndex);
1469 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1470
1471 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1472 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1473
1474 RegisterInputSlots(graph, layerIndex, layer);
1475 RegisterOutputSlots(graph, layerIndex, layer);
1476}
1477
Finn Williams85d36712021-01-26 22:30:06 +00001478void IDeserializer::DeserializerImpl::ParseEqual(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001479{
1480 CHECK_LAYERS(graph, 0, layerIndex);
1481 auto inputs = GetInputs(graph, layerIndex);
1482 CHECK_LOCATION();
1483 CHECK_VALID_SIZE(inputs.size(), 2);
1484
1485 auto outputs = GetOutputs(graph, layerIndex);
1486 CHECK_VALID_SIZE(outputs.size(), 1);
1487
1488 auto layerName = GetLayerName(graph, layerIndex);
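    // The stand-alone Equal layer is mapped onto the generic comparison layer
    // (ParseGreater below does the same for Greater).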
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001489 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1490 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001491
1492 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1493 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1494
1495 RegisterInputSlots(graph, layerIndex, layer);
1496 RegisterOutputSlots(graph, layerIndex, layer);
1497}
1498
Finn Williams85d36712021-01-26 22:30:06 +00001499void IDeserializer::DeserializerImpl::ParseFill(GraphPtr graph, unsigned int layerIndex)
Keith Davis300ad562020-06-04 16:34:23 +01001500{
1501 CHECK_LAYERS(graph, 0, layerIndex);
1502 auto inputs = GetInputs(graph, layerIndex);
1503 CHECK_LOCATION();
1504 CHECK_VALID_SIZE(inputs.size(), 1);
1505
1506 auto outputs = GetOutputs(graph, layerIndex);
1507 CHECK_VALID_SIZE(outputs.size(), 1);
1508
1509 auto layerName = GetLayerName(graph, layerIndex);
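    // Note: the descriptor is constructed with a fixed fill value of 1.0f here.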
1510 armnn::FillDescriptor descriptor(1.0f);
1511 IConnectableLayer* layer = m_Network->AddFillLayer(descriptor, layerName.c_str());
1512
1513 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1514 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1515
1516 RegisterInputSlots(graph, layerIndex, layer);
1517 RegisterOutputSlots(graph, layerIndex, layer);
1518}
1519
Finn Williams85d36712021-01-26 22:30:06 +00001520void IDeserializer::DeserializerImpl::ParseGreater(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001521{
1522 CHECK_LAYERS(graph, 0, layerIndex);
1523 auto inputs = GetInputs(graph, layerIndex);
1524 CHECK_LOCATION();
1525 CHECK_VALID_SIZE(inputs.size(), 2);
1526
1527 auto outputs = GetOutputs(graph, layerIndex);
1528 CHECK_VALID_SIZE(outputs.size(), 1);
1529
1530 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001531 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1532 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001533
1534 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1535 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1536
1537 RegisterInputSlots(graph, layerIndex, layer);
1538 RegisterOutputSlots(graph, layerIndex, layer);
1539}
1540
Finn Williams85d36712021-01-26 22:30:06 +00001541void IDeserializer::DeserializerImpl::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001542{
1543 CHECK_LAYERS(graph, 0, layerIndex);
1544
1545 auto inputs = GetInputs(graph, layerIndex);
1546 CHECK_VALID_SIZE(inputs.size(), 1);
1547
1548 auto outputs = GetOutputs(graph, layerIndex);
1549 CHECK_VALID_SIZE(outputs.size(), 1);
1550
1551 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1552 auto fbDescriptor = fbLayer->descriptor();
1553
1554 armnn::InstanceNormalizationDescriptor descriptor;
1555 descriptor.m_Gamma = fbDescriptor->gamma();
1556 descriptor.m_Beta = fbDescriptor->beta();
1557 descriptor.m_Eps = fbDescriptor->eps();
1558 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1559
1560 const std::string layerName = GetLayerName(graph, layerIndex);
1561 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1562
1563 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1564 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1565
1566 RegisterInputSlots(graph, layerIndex, layer);
1567 RegisterOutputSlots(graph, layerIndex, layer);
1568}
1569
Finn Williams85d36712021-01-26 22:30:06 +00001570void IDeserializer::DeserializerImpl::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001571{
1572 CHECK_LAYERS(graph, 0, layerIndex);
1573
1574 auto inputs = GetInputs(graph, layerIndex);
1575 CHECK_VALID_SIZE(inputs.size(), 1);
1576
1577 auto outputs = GetOutputs(graph, layerIndex);
1578 CHECK_VALID_SIZE(outputs.size(), 1);
1579 auto outputInfo = ToTensorInfo(outputs[0]);
1580
1581 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1582 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1583
1584 auto layerName = GetLayerName(graph, layerIndex);
1585 armnn::L2NormalizationDescriptor descriptor;
1586 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001587 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001588
1589 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1590 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1591
1592 RegisterInputSlots(graph, layerIndex, layer);
1593 RegisterOutputSlots(graph, layerIndex, layer);
1594}
1595
Finn Williams85d36712021-01-26 22:30:06 +00001596void IDeserializer::DeserializerImpl::ParseLogicalBinary(GraphPtr graph, unsigned int layerIndex)
James Conroyaba90cd2020-11-06 16:28:18 +00001597{
1598 CHECK_LAYERS(graph, 0, layerIndex);
1599 CHECK_LOCATION();
1600
1601 auto inputs = GetInputs(graph, layerIndex);
1602 CHECK_VALID_SIZE(inputs.size(), 2);
1603
1604 auto outputs = GetOutputs(graph, layerIndex);
1605 CHECK_VALID_SIZE(outputs.size(), 1);
1606
1607 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer();
1608 auto fbDescriptor = fbLayer->descriptor();
1609
1610 armnn::LogicalBinaryDescriptor descriptor;
1611 descriptor.m_Operation = ToLogicalBinaryOperation(fbDescriptor->operation());
1612
1613 const std::string& layerName = GetLayerName(graph, layerIndex);
1614 IConnectableLayer* layer = m_Network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1615
1616 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1617 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1618
1619 RegisterInputSlots(graph, layerIndex, layer);
1620 RegisterOutputSlots(graph, layerIndex, layer);
1621}
1622
Finn Williams85d36712021-01-26 22:30:06 +00001623void IDeserializer::DeserializerImpl::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan26257852019-10-14 13:00:47 +01001624{
1625 CHECK_LAYERS(graph, 0, layerIndex);
1626
Finn Williams85d36712021-01-26 22:30:06 +00001627 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001628 CHECK_VALID_SIZE(inputs.size(), 1);
1629
Finn Williams85d36712021-01-26 22:30:06 +00001630 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001631 CHECK_VALID_SIZE(outputs.size(), 1);
1632
1633 armnn::LogSoftmaxDescriptor descriptor;
1634 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1635 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1636 auto layerName = GetLayerName(graph, layerIndex);
1637
1638 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1639
1640 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1641 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1642
1643 RegisterInputSlots(graph, layerIndex, layer);
1644 RegisterOutputSlots(graph, layerIndex, layer);
1645}
1646
Finn Williams85d36712021-01-26 22:30:06 +00001647void IDeserializer::DeserializerImpl::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001648{
1649 CHECK_LAYERS(graph, 0, layerIndex);
1650 auto inputs = GetInputs(graph, layerIndex);
1651 CHECK_LOCATION();
1652 CHECK_VALID_SIZE(inputs.size(), 2);
1653
1654 auto outputs = GetOutputs(graph, layerIndex);
1655 CHECK_VALID_SIZE(outputs.size(), 1);
1656
1657 auto layerName = GetLayerName(graph, layerIndex);
1658 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1659
1660 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1661 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1662
1663 RegisterInputSlots(graph, layerIndex, layer);
1664 RegisterOutputSlots(graph, layerIndex, layer);
1665}
1666
Finn Williams85d36712021-01-26 22:30:06 +00001667void IDeserializer::DeserializerImpl::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001668{
1669 CHECK_LAYERS(graph, 0, layerIndex);
1670 auto inputs = GetInputs(graph, layerIndex);
1671 CHECK_LOCATION();
1672 CHECK_VALID_SIZE(inputs.size(), 2);
1673
1674 auto outputs = GetOutputs(graph, layerIndex);
1675 CHECK_VALID_SIZE(outputs.size(), 1);
1676
1677 auto layerName = GetLayerName(graph, layerIndex);
1678 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1679
1680 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1681 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1682
1683 RegisterInputSlots(graph, layerIndex, layer);
1684 RegisterOutputSlots(graph, layerIndex, layer);
1685}
1686
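// Helper shared by the concat and merger parsing paths; MergerLayer appears to be the legacy
// serialized name for what is now the ConcatLayer, so both layer types are accepted.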
Jim Flynne242f2d2019-05-22 14:24:13 +01001687const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1688 unsigned int layerIndex)
1689{
1690 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1691
1692 switch (layerType)
1693 {
1694 case Layer::Layer_ConcatLayer:
1695 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1696 case Layer::Layer_MergerLayer:
1697 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1698 default:
1699 throw armnn::Exception("unknown layer type, should be concat or merger");
1700 }
1701}
1702
Finn Williams85d36712021-01-26 22:30:06 +00001703void IDeserializer::DeserializerImpl::ParseComparison(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01001704{
1705 CHECK_LAYERS(graph, 0, layerIndex);
1706 CHECK_LOCATION();
1707
1708 auto inputs = GetInputs(graph, layerIndex);
1709 CHECK_VALID_SIZE(inputs.size(), 2);
1710
1711 auto outputs = GetOutputs(graph, layerIndex);
1712 CHECK_VALID_SIZE(outputs.size(), 1);
1713
1714 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
1715 auto fbDescriptor = fbLayer->descriptor();
1716
1717 armnn::ComparisonDescriptor descriptor;
1718 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
1719
1720 const std::string& layerName = GetLayerName(graph, layerIndex);
1721 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1722
1723 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1724 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1725
1726 RegisterInputSlots(graph, layerIndex, layer);
1727 RegisterOutputSlots(graph, layerIndex, layer);
1728}
1729
Finn Williams85d36712021-01-26 22:30:06 +00001730void IDeserializer::DeserializerImpl::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
josh minor4a3c6102020-01-06 16:40:46 -06001731{
1732 CHECK_LAYERS(graph, 0, layerIndex);
1733 CHECK_LOCATION();
1734
1735 auto inputs = GetInputs(graph, layerIndex);
1736 CHECK_VALID_SIZE(inputs.size(), 1);
1737
1738 auto outputs = GetOutputs(graph, layerIndex);
1739 CHECK_VALID_SIZE(outputs.size(), 1);
1740
1741 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
1742 auto fbDescriptor = fbLayer->descriptor();
1743
1744 armnn::ElementwiseUnaryDescriptor descriptor;
1745 descriptor.m_Operation = ToUnaryOperation(fbDescriptor->operation());
1746
1747 const std::string& layerName = GetLayerName(graph, layerIndex);
1748 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
1749
1750 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1751 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1752
1753 RegisterInputSlots(graph, layerIndex, layer);
1754 RegisterOutputSlots(graph, layerIndex, layer);
1755}
1756
Finn Williams85d36712021-01-26 22:30:06 +00001757void IDeserializer::DeserializerImpl::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001758{
1759 CHECK_LAYERS(graph, 0, layerIndex);
1760 CHECK_LOCATION();
1761
1762 auto outputs = GetOutputs(graph, layerIndex);
1763 CHECK_VALID_SIZE(outputs.size(), 1);
1764
Jim Flynnac25a1b2019-02-28 10:40:49 +00001765 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001766 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1767 unsigned int numViews = originsDescriptor->numViews();
1768 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001769
1770 // can now check the number of inputs == number of views
1771 auto inputs = GetInputs(graph, layerIndex);
1772 CHECK_VALID_SIZE(inputs.size(), numViews);
1773
1774 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001775 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001776 for (unsigned int v = 0; v < numViews; ++v)
1777 {
1778 auto originPtr = originsPtr->Get(v);
1779 for (unsigned int d = 0; d < numDimensions; ++d)
1780 {
1781 uint32_t value = originPtr->data()->Get(d);
1782 descriptor.SetViewOriginCoord(v, d, value);
1783 }
1784 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001785 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001786
Jim Flynn906f9462019-05-10 13:55:21 +01001787 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001788 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1789 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1790
1791 RegisterInputSlots(graph, layerIndex, layer);
1792 RegisterOutputSlots(graph, layerIndex, layer);
1793}
1794
Finn Williams85d36712021-01-26 22:30:06 +00001795void IDeserializer::DeserializerImpl::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001796{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001797 CHECK_LAYERS(graph, 0, layerIndex);
1798 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001799 CHECK_LOCATION();
1800 CHECK_VALID_SIZE(inputs.size(), 2);
1801
Derek Lamberti8ddae332019-02-21 16:29:43 +00001802 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001803 CHECK_VALID_SIZE(outputs.size(), 1);
1804
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001805 auto layerName = GetLayerName(graph, layerIndex);
1806 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001807
1808 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1809 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1810
Derek Lamberti8ddae332019-02-21 16:29:43 +00001811 RegisterInputSlots(graph, layerIndex, layer);
1812 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001813}
1814
Finn Williams85d36712021-01-26 22:30:06 +00001815void IDeserializer::DeserializerImpl::ParseFloor(GraphPtr graph, unsigned int layerIndex)
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001816{
1817 CHECK_LAYERS(graph, 0, layerIndex);
1818 CHECK_LOCATION();
1819
1820 auto inputs = GetInputs(graph, layerIndex);
1821 CHECK_VALID_SIZE(inputs.size(), 1);
1822
1823 auto outputs = GetOutputs(graph, layerIndex);
1824 CHECK_VALID_SIZE(outputs.size(), 1);
1825
1826 auto layerName = GetLayerName(graph, layerIndex);
1827
1828 armnn::IConnectableLayer* layer;
1829
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00001830 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001831
1832 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1833 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1834
1835 RegisterInputSlots(graph, layerIndex, layer);
1836 RegisterOutputSlots(graph, layerIndex, layer);
1837}
1838
Finn Williams85d36712021-01-26 22:30:06 +00001839void IDeserializer::DeserializerImpl::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001840{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001841 CHECK_LAYERS(graph, 0, layerIndex);
1842 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001843 CHECK_LOCATION();
1844 CHECK_VALID_SIZE(inputs.size(), 1);
1845
Derek Lamberti8ddae332019-02-21 16:29:43 +00001846 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001847 CHECK_VALID_SIZE(outputs.size(), 1);
1848
Derek Lamberti8ddae332019-02-21 16:29:43 +00001849 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001850 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001851 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1852
1853 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1854 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1855 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1856
1857 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1858
1859 armnn::IConnectableLayer* layer;
Matteo Martincighfc598e12019-05-14 10:36:13 +01001860 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001861 if (flatBufferDescriptor->biasEnabled())
1862 {
1863 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001864 optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001865 }
Matteo Martincighfc598e12019-05-14 10:36:13 +01001866 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1867 weightsTensor,
1868 optionalBiases,
1869 layerName.c_str());
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001870
1871 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1872 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1873
Derek Lamberti8ddae332019-02-21 16:29:43 +00001874 RegisterInputSlots(graph, layerIndex, layer);
1875 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001876}
1877
Finn Williams85d36712021-01-26 22:30:06 +00001878void IDeserializer::DeserializerImpl::ParsePad(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001879{
1880 CHECK_LAYERS(graph, 0, layerIndex);
1881
Finn Williams85d36712021-01-26 22:30:06 +00001882 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001883 CHECK_VALID_SIZE(inputs.size(), 1);
1884
Finn Williams85d36712021-01-26 22:30:06 +00001885 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001886 CHECK_VALID_SIZE(outputs.size(), 1);
1887
1888 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1889 auto flatBufferPadList = flatBufferDescriptor->padList();
David Monahan34757812019-06-19 11:47:21 +01001890 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001891
1892 if (flatBufferPadList->Length() % 2 != 0)
1893 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001894 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
1895 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001896 }
1897
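    // Like the crops in ParseBatchToSpaceNd above, the pad list is serialized flat as
    // (before, after) pairs, one pair per dimension.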
1898 std::vector<std::pair<unsigned int, unsigned int>> padList;
1899 padList.reserve(flatBufferPadList->Length() / 2);
1900 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1901 {
1902 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1903 }
1904
David Monahan34757812019-06-19 11:47:21 +01001905 armnn::PadDescriptor descriptor(padList, padValue);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001906
1907 auto layerName = GetLayerName(graph, layerIndex);
1908 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1909
1910 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1911 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1912
1913 RegisterInputSlots(graph, layerIndex, layer);
1914 RegisterOutputSlots(graph, layerIndex, layer);
1915}
1916
Finn Williams85d36712021-01-26 22:30:06 +00001917void IDeserializer::DeserializerImpl::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001918{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001919 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001920
1921 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001922 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001923
Derek Lamberti8ddae332019-02-21 16:29:43 +00001924 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001925 CHECK_VALID_SIZE(inputs.size(), 1);
1926
Derek Lamberti8ddae332019-02-21 16:29:43 +00001927 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001928 CHECK_VALID_SIZE(outputs.size(), 1);
1929 auto outputInfo = ToTensorInfo(outputs[0]);
1930
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001931 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001932 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1933
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001934 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001935 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1936
Derek Lamberti8ddae332019-02-21 16:29:43 +00001937 RegisterInputSlots(graph, layerIndex, layer);
1938 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001939}
1940
Finn Williams85d36712021-01-26 22:30:06 +00001941armnn::Pooling2dDescriptor IDeserializer::DeserializerImpl::GetPoolingDescriptor(PoolingDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001942 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001943{
Jan Eilers8eb25602020-03-09 12:13:48 +00001944 IgnoreUnused(layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001945 armnn::Pooling2dDescriptor desc;
1946
1947 switch (pooling2dDesc->poolType())
1948 {
1949 case PoolingAlgorithm_Average:
1950 {
1951 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001952 break;
1953 }
1954 case PoolingAlgorithm_Max:
1955 {
1956 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001957 break;
1958 }
Teresa Charlin4c600de2021-03-11 21:59:43 +00001959 case PoolingAlgorithm_L2:
1960 {
1961 desc.m_PoolType = armnn::PoolingAlgorithm::L2;
1962 break;
1963 }
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001964 default:
1965 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001966 ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001967 }
1968 }
1969
1970 switch (pooling2dDesc->outputShapeRounding())
1971 {
1972 case OutputShapeRounding_Floor:
1973 {
1974 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1975 break;
1976 }
1977 case OutputShapeRounding_Ceiling:
1978 {
1979 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
1980 break;
1981 }
1982 default:
1983 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001984 ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001985 }
1986 }
1987
1988 switch (pooling2dDesc->paddingMethod())
1989 {
1990 case PaddingMethod_Exclude:
1991 {
1992 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1993 break;
1994 }
1995 case PaddingMethod_IgnoreValue:
1996 {
1997 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
1998 break;
1999 }
2000 default:
2001 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002002 ARMNN_ASSERT_MSG(false, "Unsupported padding method");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002003 }
2004 }
2005
2006 switch (pooling2dDesc->dataLayout())
2007 {
2008 case DataLayout_NCHW:
2009 {
2010 desc.m_DataLayout = armnn::DataLayout::NCHW;
2011 break;
2012 }
2013 case DataLayout_NHWC:
2014 {
2015 desc.m_DataLayout = armnn::DataLayout::NHWC;
2016 break;
2017 }
2018 default:
2019 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002020 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002021 }
2022 }
2023
2024 desc.m_PadRight = pooling2dDesc->padRight();
2025 desc.m_PadLeft = pooling2dDesc->padLeft();
2026 desc.m_PadBottom = pooling2dDesc->padBottom();
2027 desc.m_PadTop = pooling2dDesc->padTop();
2028 desc.m_StrideX = pooling2dDesc->strideX();
2029 desc.m_StrideY = pooling2dDesc->strideY();
2030 desc.m_PoolWidth = pooling2dDesc->poolWidth();
2031 desc.m_PoolHeight = pooling2dDesc->poolHeight();
2032
2033 return desc;
2034}
2035
Finn Williams85d36712021-01-26 22:30:06 +00002036
2037
2038void IDeserializer::DeserializerImpl::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002039{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002040 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002041
Derek Lamberti8ddae332019-02-21 16:29:43 +00002042 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00002043 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002044 CHECK_VALID_SIZE(inputs.size(), 1);
2045
Derek Lamberti8ddae332019-02-21 16:29:43 +00002046 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002047 CHECK_VALID_SIZE(outputs.size(), 1);
2048 auto outputInfo = ToTensorInfo(outputs[0]);
2049
2050 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002051 auto layerName = GetLayerName(graph, layerIndex);
2052 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002053 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2054
Derek Lamberti8ddae332019-02-21 16:29:43 +00002055 RegisterInputSlots(graph, layerIndex, layer);
2056 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002057}
2058
Finn Williams85d36712021-01-26 22:30:06 +00002059void IDeserializer::DeserializerImpl::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
Derek Lamberti87acb272019-03-27 16:51:31 +00002060{
2061 CHECK_LAYERS(graph, 0, layerIndex);
2062
2063 auto inputs = GetInputs(graph, layerIndex);
2064 CHECK_VALID_SIZE(inputs.size(), 1);
2065
2066 auto outputs = GetOutputs(graph, layerIndex);
2067 CHECK_VALID_SIZE(outputs.size(), 1);
2068 auto outputInfo = ToTensorInfo(outputs[0]);
2069
2070 auto layerName = GetLayerName(graph, layerIndex);
2071 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
2072 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2073
2074 RegisterInputSlots(graph, layerIndex, layer);
2075 RegisterOutputSlots(graph, layerIndex, layer);
2076}
2077
Finn Williams85d36712021-01-26 22:30:06 +00002078armnn::TensorInfo IDeserializer::DeserializerImpl::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00002079 const std::vector<uint32_t>& targetDimsIn)
2080{
2081 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
2082 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
2083
2084 if (stretchDim != targetDimsIn.end())
2085 {
2086 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
2087 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002088 throw ParseException(fmt::format("At most one component of shape can be -1 {}",
2089 CHECK_LOCATION().AsString()));
Saoirse Stewart263829c2019-02-19 15:54:14 +00002090 }
2091
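        // For example, given an input of 24 elements and a target shape of {-1, 6}: the accumulate
        // below starts at -1, so with exactly one -1 in the target the result is the product of the
        // remaining dimensions (6 here), and the stretch dimension resolves to 24 / 6 = 4.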
2092 auto targetNumElements =
Matthew Sloyan0663d662020-09-14 11:47:26 +01002093 armnn::numeric_cast<unsigned int>(
Saoirse Stewart263829c2019-02-19 15:54:14 +00002094 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
2095
2096 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
2097 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
2098 }
2099
2100 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
2101
2102 armnn::TensorInfo reshapeInfo = inputTensorInfo;
2103 reshapeInfo.SetShape(outputShape);
2104
2105 return reshapeInfo;
2106}
2107
Finn Williams85d36712021-01-26 22:30:06 +00002108void IDeserializer::DeserializerImpl::ParseRank(GraphPtr graph, unsigned int layerIndex)
Finn Williams2605b232020-06-10 15:53:46 +01002109{
2110 CHECK_LAYERS(graph, 0, layerIndex);
2111
Finn Williams85d36712021-01-26 22:30:06 +00002112 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002113 CHECK_VALID_SIZE(inputs.size(), 1);
2114
Finn Williams85d36712021-01-26 22:30:06 +00002115 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002116 CHECK_VALID_SIZE(outputs.size(), 1);
2117
2118 auto layerName = GetLayerName(graph, layerIndex);
2119 IConnectableLayer* layer = m_Network->AddRankLayer(layerName.c_str());
2120
2121 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2122 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2123
2124 RegisterInputSlots(graph, layerIndex, layer);
2125 RegisterOutputSlots(graph, layerIndex, layer);
2126}
2127
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002128void IDeserializer::DeserializerImpl::ParseReduce(GraphPtr graph, unsigned int layerIndex)
2129{
2130 CHECK_LAYERS(graph, 0, layerIndex);
2131 CHECK_LOCATION();
2132
2133 auto inputs = GetInputs(graph, layerIndex);
2134 CHECK_VALID_SIZE(inputs.size(), 1);
2135
2136 auto outputs = GetOutputs(graph, layerIndex);
2137 CHECK_VALID_SIZE(outputs.size(), 1);
2138
2139 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ReduceLayer();
2140 auto fbDescriptor = fbLayer->descriptor();
2141 auto flatBufferAxis = fbDescriptor->axis();
2142
2143 armnn::ReduceDescriptor descriptor;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002144 descriptor.m_KeepDims = fbDescriptor->keepDims();
2145 descriptor.m_vAxis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2146 descriptor.m_ReduceOperation = ToReduceOperation(fbDescriptor->reduceOperation());
2147
2148 const std::string& layerName = GetLayerName(graph, layerIndex);
2149 IConnectableLayer* layer = m_Network->AddReduceLayer(descriptor, layerName.c_str());
2150
2151 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2152 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2153
2154 RegisterInputSlots(graph, layerIndex, layer);
2155 RegisterOutputSlots(graph, layerIndex, layer);
2156}
2157
Finn Williams85d36712021-01-26 22:30:06 +00002158void IDeserializer::DeserializerImpl::ParseReshape(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart263829c2019-02-19 15:54:14 +00002159{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002160 CHECK_LAYERS(graph, 0, layerIndex);
2161 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002162
Derek Lamberti8ddae332019-02-21 16:29:43 +00002163 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002164 CHECK_VALID_SIZE(outputs.size(), 1);
2165
2166 armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
2167 armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);
2168
Derek Lamberti8ddae332019-02-21 16:29:43 +00002169 const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
Saoirse Stewart263829c2019-02-19 15:54:14 +00002170 std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());
2171
Finn Williams85d36712021-01-26 22:30:06 +00002172 armnn::TensorInfo reshapeOutputTensorInfo = DeserializerImpl::OutputShapeOfReshape(inputTensorInfo, outputDims);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002173 const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
2174
2175 const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
2176 outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());
2177
2178 if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
2179 {
2180 std::stringstream ss;
2181 ss << "New shape defined in reshape parameters "
2182 << reshapeOutputTensorShape
2183 << " does not equal output shape "
2184 << actualOutputTensorInfo.GetShape()
2185 << ": "
2186 << CHECK_LOCATION().AsString();
2187 throw ParseException(ss.str());
2188 }
2189
2190 armnn::ReshapeDescriptor reshapeDesc;
2191 reshapeDesc.m_TargetShape = reshapeOutputTensorShape;
2192
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002193 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002194 IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
2195 layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);
2196
Derek Lamberti8ddae332019-02-21 16:29:43 +00002197 RegisterInputSlots(graph, layerIndex, layer);
2198 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002199}
2200
Finn Williams85d36712021-01-26 22:30:06 +00002201void IDeserializer::DeserializerImpl::ParseResize(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002202{
2203 CHECK_LAYERS(graph, 0, layerIndex);
2204
Finn Williams85d36712021-01-26 22:30:06 +00002205 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002206 CHECK_VALID_SIZE(inputs.size(), 1);
2207
Finn Williams85d36712021-01-26 22:30:06 +00002208 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002209 CHECK_VALID_SIZE(outputs.size(), 1);
2210
2211 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2212
2213 armnn::ResizeDescriptor descriptor;
2214 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
2215 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
2216 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
2217 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002218 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2219 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002220
2221 auto layerName = GetLayerName(graph, layerIndex);
2222 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2223
2224 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2225 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2226
2227 RegisterInputSlots(graph, layerIndex, layer);
2228 RegisterOutputSlots(graph, layerIndex, layer);
2229}
2230
Finn Williams85d36712021-01-26 22:30:06 +00002231void IDeserializer::DeserializerImpl::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002232{
2233 CHECK_LAYERS(graph, 0, layerIndex);
2234
Finn Williams85d36712021-01-26 22:30:06 +00002235 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002236 CHECK_VALID_SIZE(inputs.size(), 1);
2237
Finn Williams85d36712021-01-26 22:30:06 +00002238 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002239 CHECK_VALID_SIZE(outputs.size(), 1);
2240
2241 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2242
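    // ResizeBilinear layers are restored as Resize layers using the Bilinear resize method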
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002243 armnn::ResizeDescriptor descriptor;
2244 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002245 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002246 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
2247 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002248 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2249 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002250
2251 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002252 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002253
2254 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2255 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2256
2257 RegisterInputSlots(graph, layerIndex, layer);
2258 RegisterOutputSlots(graph, layerIndex, layer);
2259}
2260
Finn Williams85d36712021-01-26 22:30:06 +00002261void IDeserializer::DeserializerImpl::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002262{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002263 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002264
Finn Williams85d36712021-01-26 22:30:06 +00002265 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002266 CHECK_VALID_SIZE(inputs.size(), 1);
2267
Finn Williams85d36712021-01-26 22:30:06 +00002268 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002269 CHECK_VALID_SIZE(outputs.size(), 1);
2270
2271 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00002272 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002273 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002274
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002275 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2276
2277 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2278 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2279
Derek Lamberti8ddae332019-02-21 16:29:43 +00002280 RegisterInputSlots(graph, layerIndex, layer);
2281 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00002282}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002283
Finn Williams85d36712021-01-26 22:30:06 +00002284void IDeserializer::DeserializerImpl::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002285{
2286 CHECK_LAYERS(graph, 0, layerIndex);
2287
Finn Williams85d36712021-01-26 22:30:06 +00002288 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002289 CHECK_VALID_SIZE(inputs.size(), 1);
2290
Finn Williams85d36712021-01-26 22:30:06 +00002291 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002292 CHECK_VALID_SIZE(outputs.size(), 1);
2293
2294 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2295 auto flatBufferPadList = flatBufferDescriptor->padList();
2296 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
2297
2298 if (flatBufferPadList->Length() % 2 != 0)
2299 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002300 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2301 CHECK_LOCATION().AsString()));
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002302 }
2303
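    // The serialized pad list is flattened; consecutive elements form (padBefore, padAfter) pairs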
2304 std::vector<std::pair<unsigned int, unsigned int>> padList;
2305 padList.reserve(flatBufferPadList->Length() / 2);
2306 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
2307 {
2308 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2309 }
2310
2311 armnn::SpaceToBatchNdDescriptor descriptor;
2312 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2313 descriptor.m_BlockShape =
2314 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2315 descriptor.m_PadList = padList;
2316
2317 auto layerName = GetLayerName(graph, layerIndex);
2318 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2319
2320 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2321 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2322
2323 RegisterInputSlots(graph, layerIndex, layer);
2324 RegisterOutputSlots(graph, layerIndex, layer);
2325}
2326
Finn Williams85d36712021-01-26 22:30:06 +00002327void IDeserializer::DeserializerImpl::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002328{
2329 CHECK_LAYERS(graph, 0, layerIndex);
2330
Finn Williams85d36712021-01-26 22:30:06 +00002331 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002332 CHECK_VALID_SIZE(inputs.size(), 1);
2333
Finn Williams85d36712021-01-26 22:30:06 +00002334 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002335 CHECK_VALID_SIZE(outputs.size(), 1);
2336
2337 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2338
2339 armnn::SpaceToDepthDescriptor descriptor;
2340 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2341 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2342
2343 auto layerName = GetLayerName(graph, layerIndex);
2344 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2345
2346 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2347 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2348
2349 RegisterInputSlots(graph, layerIndex, layer);
2350 RegisterOutputSlots(graph, layerIndex, layer);
2351}
2352
Finn Williams85d36712021-01-26 22:30:06 +00002353armnn::NormalizationDescriptor IDeserializer::DeserializerImpl::GetNormalizationDescriptor(
2354 NormalizationDescriptorPtr normalizationDescriptor,
Nina Drozd57728782019-02-27 10:53:27 +00002355 unsigned int layerIndex)
2356{
Jan Eilers8eb25602020-03-09 12:13:48 +00002357 IgnoreUnused(layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002358 armnn::NormalizationDescriptor desc;
2359
2360 switch (normalizationDescriptor->normChannelType())
2361 {
2362 case NormalizationAlgorithmChannel_Across:
2363 {
2364 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
2365 break;
2366 }
2367 case NormalizationAlgorithmChannel_Within:
2368 {
2369 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
2370 break;
2371 }
2372 default:
2373 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002374 ARMNN_ASSERT_MSG(false, "Unsupported normalization channel type");
Nina Drozd57728782019-02-27 10:53:27 +00002375 }
2376 }
2377
2378 switch (normalizationDescriptor->normMethodType())
2379 {
2380 case NormalizationAlgorithmMethod_LocalBrightness:
2381 {
2382 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
2383 break;
2384 }
2385 case NormalizationAlgorithmMethod_LocalContrast:
2386 {
2387 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
2388 break;
2389 }
2390 default:
2391 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002392 ARMNN_ASSERT_MSG(false, "Unsupported normalization method type");
Nina Drozd57728782019-02-27 10:53:27 +00002393 }
2394 }
2395
2396 switch (normalizationDescriptor->dataLayout())
2397 {
2398 case DataLayout_NCHW:
2399 {
2400 desc.m_DataLayout = armnn::DataLayout::NCHW;
2401 break;
2402 }
2403 case DataLayout_NHWC:
2404 {
2405 desc.m_DataLayout = armnn::DataLayout::NHWC;
2406 break;
2407 }
2408 default:
2409 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002410 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Nina Drozd57728782019-02-27 10:53:27 +00002411 }
2412 }
2413
2414 desc.m_Alpha = normalizationDescriptor->alpha();
2415 desc.m_Beta = normalizationDescriptor->beta();
2416 desc.m_K = normalizationDescriptor->k();
2417 desc.m_NormSize = normalizationDescriptor->normSize();
2418
2419 return desc;
2420}
2421
Finn Williams85d36712021-01-26 22:30:06 +00002422void IDeserializer::DeserializerImpl::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
Nina Drozd57728782019-02-27 10:53:27 +00002423{
2424 CHECK_LAYERS(graph, 0, layerIndex);
2425
2426 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2427
Finn Williams85d36712021-01-26 22:30:06 +00002428 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002429 CHECK_VALID_SIZE(inputs.size(), 1);
2430
Finn Williams85d36712021-01-26 22:30:06 +00002431 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002432 CHECK_VALID_SIZE(outputs.size(), 1);
2433
2434 auto outputInfo = ToTensorInfo(outputs[0]);
2435
2436 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2437 auto layerName = GetLayerName(graph, layerIndex);
2438
2439 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2440 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2441
2442 RegisterInputSlots(graph, layerIndex, layer);
2443 RegisterOutputSlots(graph, layerIndex, layer);
2444}
2445
Finn Williams85d36712021-01-26 22:30:06 +00002446void IDeserializer::DeserializerImpl::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan8b42a382019-03-01 14:24:49 +00002447{
2448 CHECK_LAYERS(graph, 0, layerIndex);
2449 auto inputs = GetInputs(graph, layerIndex);
2450 CHECK_LOCATION();
2451 CHECK_VALID_SIZE(inputs.size(), 1);
2452
2453 auto outputs = GetOutputs(graph, layerIndex);
2454 CHECK_VALID_SIZE(outputs.size(), 1);
2455
2456 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagan8b42a382019-03-01 14:24:49 +00002457
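    // Rsqrt is deserialized as an ElementwiseUnary layer using the Rsqrt operation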
josh minor4a3c6102020-01-06 16:40:46 -06002458 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
2459 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
Sadik Armagan8b42a382019-03-01 14:24:49 +00002460 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2461 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2462
2463 RegisterInputSlots(graph, layerIndex, layer);
2464 RegisterOutputSlots(graph, layerIndex, layer);
2465}
2466
Finn Williams85d36712021-01-26 22:30:06 +00002467void IDeserializer::DeserializerImpl::ParseSlice(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002468{
2469 CHECK_LAYERS(graph, 0, layerIndex);
2470
2471 auto inputs = GetInputs(graph, layerIndex);
2472 CHECK_VALID_SIZE(inputs.size(), 1);
2473
2474 auto outputs = GetOutputs(graph, layerIndex);
2475 CHECK_VALID_SIZE(outputs.size(), 1);
2476
2477 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2478
2479 auto fbBegin = fbDescriptor->begin();
2480 auto fbSize = fbDescriptor->size();
2481
2482 if (fbBegin->Length() != fbSize->Length())
2483 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002484 throw ParseException(fmt::format("Begin and size descriptors must have the same length {}",
2485 CHECK_LOCATION().AsString()));
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002486 }
2487
2488 armnn::SliceDescriptor descriptor;
2489 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
2490 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
2491
2492 auto layerName = GetLayerName(graph, layerIndex);
2493 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2494
2495 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2496 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2497
2498 RegisterInputSlots(graph, layerIndex, layer);
2499 RegisterOutputSlots(graph, layerIndex, layer);
2500}
2501
Finn Williams85d36712021-01-26 22:30:06 +00002502void IDeserializer::DeserializerImpl::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002503{
2504 CHECK_LAYERS(graph, 0, layerIndex);
2505
Finn Williams85d36712021-01-26 22:30:06 +00002506 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002507 CHECK_VALID_SIZE(inputs.size(), 1);
2508
Finn Williams85d36712021-01-26 22:30:06 +00002509 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002510 CHECK_VALID_SIZE(outputs.size(), 1);
2511
2512 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2513
2514 auto flatBufferBegin = flatBufferDescriptor->begin();
2515 auto flatBufferEnd = flatBufferDescriptor->end();
2516 auto flatBufferStride = flatBufferDescriptor->stride();
2517
2518 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
2519 flatBufferBegin->Length() == flatBufferStride->Length()))
2520 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002521        throw ParseException(fmt::format("The begin, end, and stride lists must have the same length {}",
2522 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002523 }
2524
2525 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2526 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2527 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
2528
2529 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
2530 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2531 descriptor.m_EndMask = flatBufferDescriptor->endMask();
2532 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
2533 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
2534 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
2535 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2536
2537 auto layerName = GetLayerName(graph, layerIndex);
2538 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
2539
2540 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2541 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2542
2543 RegisterInputSlots(graph, layerIndex, layer);
2544 RegisterOutputSlots(graph, layerIndex, layer);
2545}
2546
Finn Williams85d36712021-01-26 22:30:06 +00002547void IDeserializer::DeserializerImpl::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
Conor Kennedyda1f9752019-03-01 14:37:12 +00002548{
2549 CHECK_LAYERS(graph, 0, layerIndex);
2550 auto inputs = GetInputs(graph, layerIndex);
2551 CHECK_LOCATION();
2552 CHECK_VALID_SIZE(inputs.size(), 2);
2553
2554 auto outputs = GetOutputs(graph, layerIndex);
2555 CHECK_VALID_SIZE(outputs.size(), 1);
2556
2557 auto layerName = GetLayerName(graph, layerIndex);
2558 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
2559
2560 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2561 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2562
2563 RegisterInputSlots(graph, layerIndex, layer);
2564 RegisterOutputSlots(graph, layerIndex, layer);
2565}
2566
Finn Williams85d36712021-01-26 22:30:06 +00002567void IDeserializer::DeserializerImpl::ParseGather(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002568{
2569 CHECK_LAYERS(graph, 0, layerIndex);
2570
Finn Williams85d36712021-01-26 22:30:06 +00002571 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002572 CHECK_VALID_SIZE(inputs.size(), 2);
2573
Finn Williams85d36712021-01-26 22:30:06 +00002574 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002575 CHECK_VALID_SIZE(outputs.size(), 1);
2576
Teresa Charlin52664732020-06-29 16:27:03 +01002577 armnn::GatherDescriptor descriptor;
2578 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_GatherLayer()->descriptor()->axis();
2579
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002580 auto layerName = GetLayerName(graph, layerIndex);
Teresa Charlin52664732020-06-29 16:27:03 +01002581 IConnectableLayer* layer = m_Network->AddGatherLayer(descriptor, layerName.c_str());
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002582
2583 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002584 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2585
2586 RegisterInputSlots(graph, layerIndex, layer);
2587 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002588}
2589
Finn Williams85d36712021-01-26 22:30:06 +00002590void IDeserializer::DeserializerImpl::ParseMean(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002591{
2592 CHECK_LAYERS(graph, 0, layerIndex);
2593
Finn Williams85d36712021-01-26 22:30:06 +00002594 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002595 CHECK_VALID_SIZE(inputs.size(), 1);
2596
Finn Williams85d36712021-01-26 22:30:06 +00002597 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002598 CHECK_VALID_SIZE(outputs.size(), 1);
2599
2600 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
2601 auto flatBufferAxis = flatBufferDescriptor->axis();
2602 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
2603
2604 armnn::MeanDescriptor descriptor;
2605 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2606 descriptor.m_KeepDims = flatBufferKeepDims;
2607
2608 auto layerName = GetLayerName(graph, layerIndex);
2609 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
2610
2611 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2612 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2613
2614 RegisterInputSlots(graph, layerIndex, layer);
2615 RegisterOutputSlots(graph, layerIndex, layer);
2616}
2617
Finn Williams85d36712021-01-26 22:30:06 +00002618void IDeserializer::DeserializerImpl::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
Jim Flynn18ce3382019-03-08 11:08:30 +00002619{
2620 CHECK_LAYERS(graph, 0, layerIndex);
2621
Finn Williams85d36712021-01-26 22:30:06 +00002622 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00002623 CHECK_VALID_SIZE(inputs.size(), 1);
2624
Finn Williams85d36712021-01-26 22:30:06 +00002625 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00002626
2627 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
2628 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
2629 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
2630 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
2631 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
2632 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
2633
2634    // Check that numViews and numDimensions correspond to the ones already serialized ...
2635 // numViews == flatBufferViewSizes.size();
2636 // foreach: numDimensions == flatBufferViewSizes[x].size();
2637
2638 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
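    // Copy the serialized view sizes and view origin coordinates into the ViewsDescriptor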
2639 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2640 {
2641 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
2642 {
2643 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
2644 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
2645 }
2646 }
2647
2648 auto layerName = GetLayerName(graph, layerIndex);
2649 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
2650
2651    // There can be as many outputs as there are views ...
2652 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2653 {
2654 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
2655 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
2656 }
2657
2658 RegisterInputSlots(graph, layerIndex, layer);
2659 RegisterOutputSlots(graph, layerIndex, layer);
2660}
2661
Finn Williams85d36712021-01-26 22:30:06 +00002662armnn::LstmDescriptor IDeserializer::DeserializerImpl::GetLstmDescriptor(LstmDescriptorPtr lstmDescriptor)
Jim Flynn11af3752019-03-19 17:22:29 +00002663{
2664 armnn::LstmDescriptor desc;
2665
2666 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
2667 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
2668 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
2669 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
2670 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
2671 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01002672 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00002673
2674 return desc;
2675}
2676
Finn Williams85d36712021-01-26 22:30:06 +00002677void IDeserializer::DeserializerImpl::ParseLstm(GraphPtr graph, unsigned int layerIndex)
Jim Flynn11af3752019-03-19 17:22:29 +00002678{
2679 CHECK_LAYERS(graph, 0, layerIndex);
2680
2681 auto inputs = GetInputs(graph, layerIndex);
2682 CHECK_VALID_SIZE(inputs.size(), 3);
2683
2684 auto outputs = GetOutputs(graph, layerIndex);
2685 CHECK_VALID_SIZE(outputs.size(), 4);
2686
2687 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
2688 auto layerName = GetLayerName(graph, layerIndex);
2689 auto flatBufferDescriptor = flatBufferLayer->descriptor();
2690 auto flatBufferInputParams = flatBufferLayer->inputParams();
2691
2692 auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);
2693
2694 armnn::LstmInputParams lstmInputParams;
2695
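    // Mandatory params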
2696 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
2697 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
2698 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
2699 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
2700 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
2701 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
2702 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
2703 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
2704 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
2705
2706 lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
2707 lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
2708 lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
2709 lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
2710 lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
2711 lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
2712 lstmInputParams.m_ForgetGateBias = &forgetGateBias;
2713 lstmInputParams.m_CellBias = &cellBias;
2714 lstmInputParams.m_OutputGateBias = &outputGateBias;
2715
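    // Optional CIFG params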
2716 armnn::ConstTensor inputToInputWeights;
2717 armnn::ConstTensor recurrentToInputWeights;
2718 armnn::ConstTensor cellToInputWeights;
2719 armnn::ConstTensor inputGateBias;
2720 if (!lstmDescriptor.m_CifgEnabled)
2721 {
2722 inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
2723 recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
2724 cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
2725 inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
2726
2727 lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
2728 lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
2729 lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
2730 lstmInputParams.m_InputGateBias = &inputGateBias;
2731 }
2732
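    // Optional projection params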
2733 armnn::ConstTensor projectionWeights;
2734 armnn::ConstTensor projectionBias;
2735 if (lstmDescriptor.m_ProjectionEnabled)
2736 {
2737 projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
2738 projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());
2739
2740 lstmInputParams.m_ProjectionWeights = &projectionWeights;
2741 lstmInputParams.m_ProjectionBias = &projectionBias;
2742 }
2743
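    // Optional peephole params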
2744 armnn::ConstTensor cellToForgetWeights;
2745 armnn::ConstTensor cellToOutputWeights;
2746 if (lstmDescriptor.m_PeepholeEnabled)
2747 {
2748 cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
2749 cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());
2750
2751 lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
2752 lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
2753 }
2754
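    // Optional layer norm params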
Jan Eilersf8c62972019-07-17 11:07:49 +01002755 armnn::ConstTensor inputLayerNormWeights;
2756 armnn::ConstTensor forgetLayerNormWeights;
2757 armnn::ConstTensor cellLayerNormWeights;
2758 armnn::ConstTensor outputLayerNormWeights;
2759 if (lstmDescriptor.m_LayerNormEnabled)
2760 {
2761 if (!lstmDescriptor.m_CifgEnabled)
2762 {
2763 inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
2764 lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
2765 }
2766 forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
2767 cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
2768 outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
2769
2770 lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
2771 lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
2772 lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
2773 }
2774
Jim Flynn11af3752019-03-19 17:22:29 +00002775 IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());
2776
2777 armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
2778 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);
2779
2780 armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
2781 layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);
2782
2783 armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
2784 layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);
2785
2786 armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
2787 layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);
2788
2789 RegisterInputSlots(graph, layerIndex, layer);
2790 RegisterOutputSlots(graph, layerIndex, layer);
2791}
2792
Finn Williams85d36712021-01-26 22:30:06 +00002793armnn::QLstmDescriptor IDeserializer::DeserializerImpl::GetQLstmDescriptor(QLstmDescriptorPtr qLstmDescriptor)
James Conroy8d333182020-05-13 10:27:58 +01002794{
2795 armnn::QLstmDescriptor desc;
2796
2797 desc.m_CifgEnabled = qLstmDescriptor->cifgEnabled();
2798 desc.m_PeepholeEnabled = qLstmDescriptor->peepholeEnabled();
2799 desc.m_ProjectionEnabled = qLstmDescriptor->projectionEnabled();
2800 desc.m_LayerNormEnabled = qLstmDescriptor->layerNormEnabled();
2801
2802 desc.m_CellClip = qLstmDescriptor->cellClip();
2803 desc.m_ProjectionClip = qLstmDescriptor->projectionClip();
2804
2805 desc.m_InputIntermediateScale = qLstmDescriptor->inputIntermediateScale();
2806 desc.m_ForgetIntermediateScale = qLstmDescriptor->forgetIntermediateScale();
2807 desc.m_CellIntermediateScale = qLstmDescriptor->cellIntermediateScale();
2808 desc.m_OutputIntermediateScale = qLstmDescriptor->outputIntermediateScale();
2809
2810 desc.m_HiddenStateScale = qLstmDescriptor->hiddenStateScale();
2811 desc.m_HiddenStateZeroPoint = qLstmDescriptor->hiddenStateZeroPoint();
2812
2813 return desc;
2814}
2815
Finn Williams85d36712021-01-26 22:30:06 +00002816void IDeserializer::DeserializerImpl::ParseQLstm(GraphPtr graph, unsigned int layerIndex)
James Conroy8d333182020-05-13 10:27:58 +01002817{
2818 CHECK_LAYERS(graph, 0, layerIndex);
2819
2820 auto inputs = GetInputs(graph, layerIndex);
2821 CHECK_VALID_SIZE(inputs.size(), 3);
2822
2823 auto outputs = GetOutputs(graph, layerIndex);
2824 CHECK_VALID_SIZE(outputs.size(), 3);
2825
2826 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
2827 auto layerName = GetLayerName(graph, layerIndex);
2828 auto flatBufferDescriptor = flatBufferLayer->descriptor();
2829 auto flatBufferInputParams = flatBufferLayer->inputParams();
2830
2831 auto qLstmDescriptor = GetQLstmDescriptor(flatBufferDescriptor);
2832 armnn::LstmInputParams qLstmInputParams;
2833
2834 // Mandatory params
2835 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
2836 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
2837 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
2838 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
2839 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
2840 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
2841 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
2842 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
2843 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
2844
2845 qLstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
2846 qLstmInputParams.m_InputToCellWeights = &inputToCellWeights;
2847 qLstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
2848 qLstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
2849 qLstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
2850 qLstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
2851 qLstmInputParams.m_ForgetGateBias = &forgetGateBias;
2852 qLstmInputParams.m_CellBias = &cellBias;
2853 qLstmInputParams.m_OutputGateBias = &outputGateBias;
2854
2855 // Optional CIFG params
2856 armnn::ConstTensor inputToInputWeights;
2857 armnn::ConstTensor recurrentToInputWeights;
2858 armnn::ConstTensor inputGateBias;
2859
2860 if (!qLstmDescriptor.m_CifgEnabled)
2861 {
2862 inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
2863 recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
2864 inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
2865
2866 qLstmInputParams.m_InputToInputWeights = &inputToInputWeights;
2867 qLstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
2868 qLstmInputParams.m_InputGateBias = &inputGateBias;
2869 }
2870
2871 // Optional projection params
2872 armnn::ConstTensor projectionWeights;
2873 armnn::ConstTensor projectionBias;
2874
2875 if (qLstmDescriptor.m_ProjectionEnabled)
2876 {
2877 projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
2878 projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());
2879
2880 qLstmInputParams.m_ProjectionWeights = &projectionWeights;
2881 qLstmInputParams.m_ProjectionBias = &projectionBias;
2882 }
2883
2884 // Optional peephole params
2885 armnn::ConstTensor cellToInputWeights;
2886 armnn::ConstTensor cellToForgetWeights;
2887 armnn::ConstTensor cellToOutputWeights;
2888
2889 if (qLstmDescriptor.m_PeepholeEnabled)
2890 {
2891 if (!qLstmDescriptor.m_CifgEnabled)
2892 {
2893 cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
2894 qLstmInputParams.m_CellToInputWeights = &cellToInputWeights;
2895 }
2896
2897 cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
2898 cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());
2899
2900 qLstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
2901 qLstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
2902 }
2903
2904 // Optional layer norm params
2905 armnn::ConstTensor inputLayerNormWeights;
2906 armnn::ConstTensor forgetLayerNormWeights;
2907 armnn::ConstTensor cellLayerNormWeights;
2908 armnn::ConstTensor outputLayerNormWeights;
2909
2910 if (qLstmDescriptor.m_LayerNormEnabled)
2911 {
2912 if (!qLstmDescriptor.m_CifgEnabled)
2913 {
2914 inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
2915 qLstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
2916 }
2917
2918 forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
2919 cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
2920 outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
2921
2922 qLstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
2923 qLstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
2924 qLstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
2925 }
2926
2927 IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());
2928
2929 armnn::TensorInfo outputStateOutInfo = ToTensorInfo(outputs[0]);
2930 layer->GetOutputSlot(0).SetTensorInfo(outputStateOutInfo);
2931
2932 armnn::TensorInfo cellStateOutInfo = ToTensorInfo(outputs[1]);
2933 layer->GetOutputSlot(1).SetTensorInfo(cellStateOutInfo);
2934
2935 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[2]);
2936 layer->GetOutputSlot(2).SetTensorInfo(outputInfo);
2937
2938 RegisterInputSlots(graph, layerIndex, layer);
2939 RegisterOutputSlots(graph, layerIndex, layer);
2940}
2941
Finn Williams85d36712021-01-26 22:30:06 +00002942void IDeserializer::DeserializerImpl::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
Jan Eilers5b01a892019-07-23 09:47:43 +01002943{
2944 CHECK_LAYERS(graph, 0, layerIndex);
2945
2946 auto inputs = GetInputs(graph, layerIndex);
2947 CHECK_VALID_SIZE(inputs.size(), 3);
2948
2949 auto outputs = GetOutputs(graph, layerIndex);
2950 CHECK_VALID_SIZE(outputs.size(), 2);
2951
2952 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
2953 auto layerName = GetLayerName(graph, layerIndex);
2954 auto flatBufferInputParams = flatBufferLayer->inputParams();
2955
2956 armnn::QuantizedLstmInputParams lstmInputParams;
2957
2958 armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
2959 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
2960 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
2961 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
2962 armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
2963 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
2964 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
2965 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
2966 armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
2967 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
2968 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
2969 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
2970
2971 lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
2972 lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
2973 lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
2974 lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
2975 lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
2976 lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
2977 lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
2978 lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
2979 lstmInputParams.m_InputGateBias = &inputGateBias;
2980 lstmInputParams.m_ForgetGateBias = &forgetGateBias;
2981 lstmInputParams.m_CellBias = &cellBias;
2982 lstmInputParams.m_OutputGateBias = &outputGateBias;
2983
2984 IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());
2985
2986 armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
2987 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);
2988
2989 armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
2990 layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);
2991
2992 RegisterInputSlots(graph, layerIndex, layer);
2993 RegisterOutputSlots(graph, layerIndex, layer);
2994}
2995
Finn Williams85d36712021-01-26 22:30:06 +00002996void IDeserializer::DeserializerImpl::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002997{
2998 CHECK_LAYERS(graph, 0, layerIndex);
2999
Finn Williams85d36712021-01-26 22:30:06 +00003000 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003001 CHECK_VALID_SIZE(inputs.size(), 1);
3002
Finn Williams85d36712021-01-26 22:30:06 +00003003 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003004 CHECK_VALID_SIZE(outputs.size(), 1);
3005
3006 const std::string layerName = GetLayerName(graph, layerIndex);
3007 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
3008
3009 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3010 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3011
3012 RegisterInputSlots(graph, layerIndex, layer);
3013 RegisterOutputSlots(graph, layerIndex, layer);
3014}
3015
Finn Williams85d36712021-01-26 22:30:06 +00003016void IDeserializer::DeserializerImpl::ParseMerge(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003017{
3018 CHECK_LAYERS(graph, 0, layerIndex);
3019
Finn Williams85d36712021-01-26 22:30:06 +00003020 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003021 CHECK_VALID_SIZE(inputs.size(), 2);
3022
Finn Williams85d36712021-01-26 22:30:06 +00003023 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003024 CHECK_VALID_SIZE(outputs.size(), 1);
3025
3026 const std::string layerName = GetLayerName(graph, layerIndex);
3027 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
3028
3029 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3030 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3031
3032 RegisterInputSlots(graph, layerIndex, layer);
3033 RegisterOutputSlots(graph, layerIndex, layer);
3034}
3035
Finn Williams85d36712021-01-26 22:30:06 +00003036void IDeserializer::DeserializerImpl::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganeff363d2019-04-05 15:25:46 +01003037{
3038 CHECK_LAYERS(graph, 0, layerIndex);
3039 auto inputs = GetInputs(graph, layerIndex);
3040 CHECK_LOCATION();
3041 CHECK_VALID_SIZE(inputs.size(), 2);
3042
3043 auto outputs = GetOutputs(graph, layerIndex);
3044 CHECK_VALID_SIZE(outputs.size(), 2);
3045
3046 auto layerName = GetLayerName(graph, layerIndex);
3047 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
3048
3049 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
3050 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
3051
3052 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
3053 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
3054
3055 RegisterInputSlots(graph, layerIndex, layer);
3056 RegisterOutputSlots(graph, layerIndex, layer);
3057}
3058
Finn Williams85d36712021-01-26 22:30:06 +00003059void IDeserializer::DeserializerImpl::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01003060{
3061 CHECK_LAYERS(graph, 0, layerIndex);
3062 auto inputs = GetInputs(graph, layerIndex);
3063 CHECK_LOCATION();
3064 CHECK_VALID_SIZE(inputs.size(), 2);
3065
3066 auto outputs = GetOutputs(graph, layerIndex);
3067 CHECK_VALID_SIZE(outputs.size(), 1);
3068
3069 auto layerName = GetLayerName(graph, layerIndex);
3070 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
3071
3072 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3073 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3074
3075 RegisterInputSlots(graph, layerIndex, layer);
3076 RegisterOutputSlots(graph, layerIndex, layer);
3077}
3078
Finn Williams85d36712021-01-26 22:30:06 +00003079void IDeserializer::DeserializerImpl::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
Mike Kellyc9ea45a2020-02-28 18:11:58 +00003080{
3081 CHECK_LAYERS(graph, 0, layerIndex);
3082
3083 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
3084
3085 auto inputs = GetInputs(graph, layerIndex);
3086 CHECK_VALID_SIZE(inputs.size(), 1);
3087
3088 auto outputs = GetOutputs(graph, layerIndex);
3089 CHECK_VALID_SIZE(outputs.size(), 1);
3090 auto outputInfo = ToTensorInfo(outputs[0]);
3091
3092 auto layerName = GetLayerName(graph, layerIndex);
3093 const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
3094
3095 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
3096 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
3097
3098 RegisterInputSlots(graph, layerIndex, layer);
3099 RegisterOutputSlots(graph, layerIndex, layer);
3100}
3101
Finn Williams85d36712021-01-26 22:30:06 +00003102void IDeserializer::DeserializerImpl::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01003103{
3104 CHECK_LAYERS(graph, 0, layerIndex);
3105
3106 auto inputs = GetInputs(graph, layerIndex);
3107 CHECK_VALID_SIZE(inputs.size(), 1);
3108
3109 auto outputs = GetOutputs(graph, layerIndex);
3110 CHECK_VALID_SIZE(outputs.size(), 1);
3111
3112 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
3113 auto layerName = GetLayerName(graph, layerIndex);
3114 auto serializerDescriptor = serializerLayer->descriptor();
3115
3116 armnn::TransposeConvolution2dDescriptor descriptor;
3117 descriptor.m_PadLeft = serializerDescriptor->padLeft();
3118 descriptor.m_PadRight = serializerDescriptor->padRight();
3119 descriptor.m_PadTop = serializerDescriptor->padTop();
3120 descriptor.m_PadBottom = serializerDescriptor->padBottom();
3121 descriptor.m_StrideX = serializerDescriptor->strideX();
3122    descriptor.m_StrideY = serializerDescriptor->strideY();
3123    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
3124 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
3125
3126 // weights & biases
3127 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
3128 armnn::Optional<armnn::ConstTensor> optionalBiases;
3129 if (descriptor.m_BiasEnabled)
3130 {
3131 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
3132 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
3133 }
3134
3135 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
3136 weights,
3137 optionalBiases,
3138 layerName.c_str());
3139
3140 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3141 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3142
3143 RegisterInputSlots(graph, layerIndex, layer);
3144 RegisterOutputSlots(graph, layerIndex, layer);
3145}
3146
Finn Williams85d36712021-01-26 22:30:06 +00003147void IDeserializer::DeserializerImpl::ParseStack(GraphPtr graph, unsigned int layerIndex)
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003148{
3149 CHECK_LAYERS(graph, 0, layerIndex);
3150 auto inputs = GetInputs(graph, layerIndex);
3151
3152 auto outputs = GetOutputs(graph, layerIndex);
3153 CHECK_VALID_SIZE(outputs.size(), 1);
3154
3155 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
3156 unsigned int axis = flatBufferDescriptor->axis();
3157 unsigned int numInputs = flatBufferDescriptor->numInputs();
3158 CHECK_VALID_SIZE(inputs.size(), numInputs);
3159
3160 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
3161 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
3162 flatBufferInputShape->begin() + flatBufferInputShape->size());
3163
3164 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
3165 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
3166
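    // Check that each input shape matches the input shape defined in the descriptor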
3167 for (unsigned int i=0; i<inputs.size(); ++i)
3168 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01003169 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003170 if (descriptor.m_InputShape != inputShape)
3171 {
3172 std::stringstream ss;
3173 ss << "Shape of input "
3174 << i
3175 << " "
3176 << inputShape
3177 << " does not equal defined input shape "
3178 << descriptor.m_InputShape
3179 << ": "
3180 << CHECK_LOCATION().AsString();
3181 throw ParseException(ss.str());
3182 }
3183 }
3184
3185 auto layerName = GetLayerName(graph, layerIndex);
3186 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
3187
3188 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3189 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3190
3191 RegisterInputSlots(graph, layerIndex, layer);
3192 RegisterOutputSlots(graph, layerIndex, layer);
3193}
3194
Finn Williams85d36712021-01-26 22:30:06 +00003195void IDeserializer::DeserializerImpl::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar85121a22019-10-23 10:41:35 +01003196{
3197 CHECK_LAYERS(graph, 0, layerIndex);
3198
3199 auto inputs = GetInputs(graph, layerIndex);
3200 auto outputs = GetOutputs(graph, layerIndex);
3201
3202 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
3203 auto fbDescriptor = fbLayer->descriptor();
3204
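    // StandIn layers act as placeholders; only the input and output counts are deserialized here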
3205 armnn::StandInDescriptor descriptor;
3206 descriptor.m_NumInputs = fbDescriptor->numInputs();
3207 descriptor.m_NumOutputs = fbDescriptor->numOutputs();
3208
3209 CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
3210 CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);
3211
3212 const std::string layerName = GetLayerName(graph, layerIndex);
3213 armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());
3214
3215 for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
3216 {
3217 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
3218 layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
3219 }
3220
3221 RegisterInputSlots(graph, layerIndex, layer);
3222 RegisterOutputSlots(graph, layerIndex, layer);
3223}
3224
Derek Lamberti0028d1b2019-02-20 13:57:42 +00003225} // namespace armnnDeserializer