blob: 79cc3987d5357d754d8d283fb86dae1d5c934c28 [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
Mike Kelly3ec30772023-03-08 13:47:17 +00002// Copyright © 2017,2019-2023 Arm Ltd and Contributors. All rights reserved.
Kevin May43a799c2019-02-08 16:31:42 +00003// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
Matthew Benthamff130e22020-01-17 11:47:42 +00008#include <armnn/Descriptors.hpp>
Kevin May43a799c2019-02-08 16:31:42 +00009#include <armnn/Exceptions.hpp>
Matthew Benthamff130e22020-01-17 11:47:42 +000010#include <armnn/TypesUtils.hpp>
11#include <armnn/LstmParams.hpp>
12#include <armnn/QuantizedLstmParams.hpp>
Simon Obutedb5804e2022-04-14 15:49:52 +010013#include <armnn/Logging.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000014
Matteo Martincighe011d202019-11-28 11:35:47 +000015#include <armnnUtils/Permute.hpp>
Mike Kellyc9ea45a2020-02-28 18:11:58 +000016#include <armnnUtils/Transpose.hpp>
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +010017#include <armnn/utility/Assert.hpp>
Jan Eilers8eb25602020-03-09 12:13:48 +000018#include <armnn/utility/IgnoreUnused.hpp>
Matthew Sloyan0663d662020-09-14 11:47:26 +010019#include <armnn/utility/NumericCast.hpp>
Matteo Martincighe011d202019-11-28 11:35:47 +000020
Kevin May43a799c2019-02-08 16:31:42 +000021#include <ParserHelper.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000022#include <VerificationHelpers.hpp>
23
Colm Donelan5b5c2222020-09-09 12:48:16 +010024#include <fmt/format.h>
Kevin May43a799c2019-02-08 16:31:42 +000025
Kevin May43a799c2019-02-08 16:31:42 +000026#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000027#include <algorithm>
28#include <limits>
29#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000030
31using armnn::ParseException;
32using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000033using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000034
Derek Lamberti0028d1b2019-02-20 13:57:42 +000035namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000036{
Kevin May43a799c2019-02-08 16:31:42 +000037
// Pimpl idiom: the public IDeserializer owns a DeserializerImpl and forwards
// every call to it, keeping the ABI surface of the interface class stable.
// NOTE(review): raw `new` here presumably feeds a smart-pointer member
// (pDeserializerImpl) — confirm against the header before changing to make_unique.
IDeserializer::IDeserializer() : pDeserializerImpl(new DeserializerImpl()){}

// Defaulted out-of-line so DeserializerImpl is a complete type at destruction.
IDeserializer::~IDeserializer() = default;
41
42IDeserializer *IDeserializer::CreateRaw()
43{
44 return new IDeserializer();
45}
46
47IDeserializerPtr IDeserializer::Create()
48{
49 return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
50}
51
52void IDeserializer::Destroy(IDeserializer *parser)
53{
54 delete parser;
55}
56
57armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(const std::vector<uint8_t> &binaryContent)
58{
59 return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
60}
61
62armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(std::istream &binaryContent)
63{
64 return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
65}
66
67BindingPointInfo IDeserializer::GetNetworkInputBindingInfo(unsigned int layerId, const std::string &name) const
68{
69 return pDeserializerImpl->GetNetworkInputBindingInfo(layerId, name);
70}
71
72BindingPointInfo IDeserializer::GetNetworkOutputBindingInfo(unsigned int layerId, const std::string &name) const
73{
74 return pDeserializerImpl->GetNetworkOutputBindingInfo(layerId, name);
75}
76
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000077namespace
78{
Kevin May43a799c2019-02-08 16:31:42 +000079
80const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
81
Finn Williams85d36712021-01-26 22:30:06 +000082 void CheckGraph(const GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000083 unsigned int layersIndex,
84 const CheckLocation& location)
85{
86 if (graph->layers() == nullptr)
87 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010088 throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
89 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
90 "layers:{1} at {2}",
91 location.m_Function,
92 layersIndex,
93 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +000094 }
95 else if (layersIndex >= graph->layers()->size())
96 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010097 throw ParseException(fmt::format("{0} was called with an invalid layers index. layers:{1} at {2}",
98 location.m_Function,
99 layersIndex,
100 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000101 }
102}
103
Finn Williams85d36712021-01-26 22:30:06 +0000104void CheckLayers(const GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +0000105 unsigned int layersIndex,
106 unsigned int layerIndex,
107 const CheckLocation& location)
108{
109 if (graph->layers() == nullptr)
110 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100111 throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
112 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
113 "layers:{1} at {2}",
114 location.m_Function,
115 layersIndex,
116 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000117 }
118 else if (layersIndex >= graph->layers()->size())
119 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100120 throw ParseException(fmt::format("{0} was called with an invalid layers index. "
121 "layers:{1} at {2}",
122 location.m_Function,
123 layersIndex,
124 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000125 }
126 else if (layerIndex >= graph->layers()[layersIndex].size()
127 && layerIndex != VIRTUAL_LAYER_ID)
128 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100129 throw ParseException(fmt::format("{0} was called with an invalid layer index. "
130 "layers:{1} layer:{2} at {3}",
131 location.m_Function,
132 layersIndex,
133 layerIndex,
134 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000135 }
136}
137
Finn Williams85d36712021-01-26 22:30:06 +0000138void CheckTensorPtr(TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000139 const CheckLocation& location)
140{
141 if (rawPtr == nullptr)
142 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100143 throw ParseException(fmt::format("{0} was called with a null tensor pointer. at {1}",
144 location.m_Function,
145 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000146 }
147}
148
Finn Williams85d36712021-01-26 22:30:06 +0000149void CheckConstTensorPtr(ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000150 const CheckLocation& location)
151{
152 if (rawPtr == nullptr)
153 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100154 throw ParseException(fmt::format("{0} was called with a null const tensor pointer. at {1}",
155 location.m_Function,
156 location.FileLine()));
Mike Kellya0766c32019-02-19 17:22:07 +0000157 }
158}
159
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000160void CheckConstTensorSize(const unsigned int constTensorSize,
161 const unsigned int tensorSize,
162 const CheckLocation& location)
163{
164 if (constTensorSize != tensorSize)
165 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100166 throw ParseException(fmt::format("{0} wrong number of components supplied to tensor. at:{1}",
167 location.m_Function,
168 location.FileLine()));
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000169 }
170}
171
Kevin May43a799c2019-02-08 16:31:42 +0000172#define CHECK_TENSOR_PTR(TENSOR_PTR) \
173 CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())
174
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000175#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
176 CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())
177
Mike Kellya0766c32019-02-19 17:22:07 +0000178#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
179 CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())
180
Kevin May43a799c2019-02-08 16:31:42 +0000181#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
182 CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())
183
184#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
185 CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
186}
187
Saoirse Stewart263829c2019-02-19 15:54:14 +0000188bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
189{
190 const unsigned int actualSize = actual.GetNumDimensions();
191 if (actualSize != expected.size())
192 {
193 return false;
194 }
195
196 for (unsigned int i = 0u; i < actualSize; i++)
197 {
198 if (actual[i] != static_cast<unsigned int>(expected[i]))
199 {
200 return false;
201 }
202 }
203
204 return true;
205}
206
// Builds the layer-type -> parser-member-function dispatch table. Every slot
// defaults to ParseUnsupportedLayer so that any layer type not registered
// below is reported instead of silently skipped.
IDeserializer::DeserializerImpl::DeserializerImpl()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &IDeserializer::DeserializerImpl::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer]                    = &DeserializerImpl::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer]             = &DeserializerImpl::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer]               = &DeserializerImpl::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer]              = &DeserializerImpl::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchMatMulLayer]            = &DeserializerImpl::ParseBatchMatMul;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer]         = &DeserializerImpl::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer]     = &DeserializerImpl::ParseBatchNormalization;
    m_ParserFunctions[Layer_CastLayer]                   = &DeserializerImpl::ParseCast;
    m_ParserFunctions[Layer_ChannelShuffleLayer]         = &DeserializerImpl::ParseChannelShuffle;
    m_ParserFunctions[Layer_ComparisonLayer]             = &DeserializerImpl::ParseComparison;
    m_ParserFunctions[Layer_ConcatLayer]                 = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer]               = &DeserializerImpl::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer]          = &DeserializerImpl::ParseConvolution2d;
    m_ParserFunctions[Layer_Convolution3dLayer]          = &DeserializerImpl::ParseConvolution3d;
    m_ParserFunctions[Layer_DepthToSpaceLayer]           = &DeserializerImpl::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &DeserializerImpl::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer]             = &DeserializerImpl::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer]   = &DeserializerImpl::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer]               = &DeserializerImpl::ParseDivision;
    m_ParserFunctions[Layer_ElementwiseBinaryLayer]      = &DeserializerImpl::ParseElementwiseBinary;
    m_ParserFunctions[Layer_ElementwiseUnaryLayer]       = &DeserializerImpl::ParseElementwiseUnary;
    m_ParserFunctions[Layer_EqualLayer]                  = &DeserializerImpl::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer]         = &DeserializerImpl::ParseFullyConnected;
    m_ParserFunctions[Layer_FillLayer]                   = &DeserializerImpl::ParseFill;
    m_ParserFunctions[Layer_FloorLayer]                  = &DeserializerImpl::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer]                 = &DeserializerImpl::ParseGather;
    m_ParserFunctions[Layer_GatherNdLayer]               = &DeserializerImpl::ParseGatherNd;
    m_ParserFunctions[Layer_GreaterLayer]                = &DeserializerImpl::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer]  = &DeserializerImpl::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer]        = &DeserializerImpl::ParseL2Normalization;
    m_ParserFunctions[Layer_LogicalBinaryLayer]          = &DeserializerImpl::ParseLogicalBinary;
    m_ParserFunctions[Layer_LogSoftmaxLayer]             = &DeserializerImpl::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer]                   = &DeserializerImpl::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer]                = &DeserializerImpl::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer]                   = &DeserializerImpl::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer]                = &DeserializerImpl::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer]                  = &DeserializerImpl::ParseMerge;
    // MergerLayer is handled by the same parser as ConcatLayer.
    m_ParserFunctions[Layer_MergerLayer]                 = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer]         = &DeserializerImpl::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer]          = &DeserializerImpl::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer]                    = &DeserializerImpl::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer]                = &DeserializerImpl::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer]              = &DeserializerImpl::ParsePooling2d;
    m_ParserFunctions[Layer_Pooling3dLayer]              = &DeserializerImpl::ParsePooling3d;
    m_ParserFunctions[Layer_PreluLayer]                  = &DeserializerImpl::ParsePrelu;
    m_ParserFunctions[Layer_QLstmLayer]                  = &DeserializerImpl::ParseQLstm;
    m_ParserFunctions[Layer_QuantizeLayer]               = &DeserializerImpl::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer]          = &DeserializerImpl::ParseQuantizedLstm;
    m_ParserFunctions[Layer_RankLayer]                   = &DeserializerImpl::ParseRank;
    m_ParserFunctions[Layer_ReduceLayer]                 = &DeserializerImpl::ParseReduce;
    m_ParserFunctions[Layer_ReshapeLayer]                = &DeserializerImpl::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer]         = &DeserializerImpl::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer]                 = &DeserializerImpl::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer]                  = &DeserializerImpl::ParseRsqrt;
    m_ParserFunctions[Layer_ShapeLayer]                  = &DeserializerImpl::ParseShape;
    m_ParserFunctions[Layer_SliceLayer]                  = &DeserializerImpl::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer]                = &DeserializerImpl::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer]         = &DeserializerImpl::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer]           = &DeserializerImpl::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer]               = &DeserializerImpl::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer]                  = &DeserializerImpl::ParseStack;
    m_ParserFunctions[Layer_StandInLayer]                = &DeserializerImpl::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer]           = &DeserializerImpl::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer]            = &DeserializerImpl::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer]                 = &DeserializerImpl::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &DeserializerImpl::ParseTransposeConvolution2d;
    m_ParserFunctions[Layer_TransposeLayer]              = &DeserializerImpl::ParseTranspose;
    m_ParserFunctions[Layer_UnidirectionalSequenceLstmLayer] = &DeserializerImpl::ParseUnidirectionalSequenceLstm;
}
282
// Returns the common LayerBase of the flatbuffer layer at layerIndex,
// dispatching on the union's layer_type(). Input/Output layers wrap their
// LayerBase in a bindable base, hence the extra ->base() hop for those two.
// Throws ParseException for Layer_NONE or any unrecognized layer type.
LayerBaseRawPtr IDeserializer::DeserializerImpl::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchMatMulLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchMatMulLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_CastLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_CastLayer()->base();
        case Layer::Layer_ChannelShuffleLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->base();
        case Layer::Layer_ComparisonLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_Convolution3dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution3dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_ElementwiseBinaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseBinaryLayer()->base();
        case Layer::Layer_ElementwiseUnaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FillLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FillLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GatherNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherNdLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            // Bindable layer: unwrap the bindable base to reach the LayerBase.
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_InstanceNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LogicalBinaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer()->base();
        case Layer::Layer_LogSoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            // Bindable layer: unwrap the bindable base to reach the LayerBase.
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_Pooling3dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_RankLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
        case Layer::Layer_ReduceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReduceLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_ShapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ShapeLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StandInLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_TransposeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
        case Layer::Layer_UnidirectionalSequenceLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(fmt::format("Layer type {} not recognized", layerType));
    }
}
434
Finn Williams85d36712021-01-26 22:30:06 +0000435std::string IDeserializer::DeserializerImpl::GetLayerName(const GraphPtr& graph, unsigned int index)
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000436{
437 auto layer = GetBaseLayer(graph, index);
438 assert(layer);
439 return layer->layerName()->str();
440}
441
Finn Williams85d36712021-01-26 22:30:06 +0000442int32_t IDeserializer::DeserializerImpl::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000443{
444 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
445
446 if (layerType == Layer::Layer_InputLayer)
447 {
448 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
449 }
450 else if ( layerType == Layer::Layer_OutputLayer )
451 {
452 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
453 }
454 return 0;
455}
456
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000457armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000458{
459 switch (dataLayout)
460 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000461 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000462 return armnn::DataLayout::NHWC;
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100463 case armnnSerializer::DataLayout::DataLayout_NDHWC:
464 return armnn::DataLayout::NDHWC;
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +0100465 case armnnSerializer::DataLayout::DataLayout_NCDHW:
466 return armnn::DataLayout::NCDHW;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000467 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000468 default:
469 return armnn::DataLayout::NCHW;
470 }
471}
472
Mike Kellyaf484012019-02-20 16:53:11 +0000473armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
474{
475 switch (function)
476 {
477 case armnnSerializer::ActivationFunction_Sigmoid:
478 return armnn::ActivationFunction::Sigmoid;
479 case armnnSerializer::ActivationFunction_TanH:
480 return armnn::ActivationFunction::TanH;
481 case armnnSerializer::ActivationFunction_Linear:
482 return armnn::ActivationFunction::Linear;
483 case armnnSerializer::ActivationFunction_ReLu:
484 return armnn::ActivationFunction::ReLu;
485 case armnnSerializer::ActivationFunction_BoundedReLu:
486 return armnn::ActivationFunction::BoundedReLu;
487 case armnnSerializer::ActivationFunction_LeakyReLu:
488 return armnn::ActivationFunction::LeakyReLu;
489 case armnnSerializer::ActivationFunction_Abs:
490 return armnn::ActivationFunction::Abs;
491 case armnnSerializer::ActivationFunction_Sqrt:
492 return armnn::ActivationFunction::Sqrt;
493 case armnnSerializer::ActivationFunction_Square:
494 return armnn::ActivationFunction::Square;
David Monahan3b3c3812020-02-25 09:03:29 +0000495 case armnnSerializer::ActivationFunction_Elu:
496 return armnn::ActivationFunction::Elu;
Colm Donelan03fbeaf2020-02-26 15:39:23 +0000497 case armnnSerializer::ActivationFunction_HardSwish:
498 return armnn::ActivationFunction::HardSwish;
Mike Kellyaf484012019-02-20 16:53:11 +0000499 default:
500 return armnn::ActivationFunction::Sigmoid;
501 }
502}
503
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100504armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
505{
506 switch (function)
507 {
508 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
509 return armnn::ArgMinMaxFunction::Max;
510 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
511 default:
512 return armnn::ArgMinMaxFunction::Min;
513 }
514}
515
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +0100516armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
517{
518 switch (operation)
519 {
520 case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
521 return armnn::ComparisonOperation::Equal;
522 case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
523 return armnn::ComparisonOperation::Greater;
524 case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
525 return armnn::ComparisonOperation::GreaterOrEqual;
526 case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
527 return armnn::ComparisonOperation::Less;
528 case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
529 return armnn::ComparisonOperation::LessOrEqual;
530 case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
531 default:
532 return armnn::ComparisonOperation::NotEqual;
533 }
534}
535
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +0000536armnn::ReduceOperation ToReduceOperation(armnnSerializer::ReduceOperation operation)
537{
538 switch (operation)
539 {
540 case armnnSerializer::ReduceOperation::ReduceOperation_Sum:
541 return armnn::ReduceOperation::Sum;
542 case armnnSerializer::ReduceOperation::ReduceOperation_Max:
543 return armnn::ReduceOperation::Max;
544 case armnnSerializer::ReduceOperation::ReduceOperation_Mean:
545 return armnn::ReduceOperation::Mean;
546 case armnnSerializer::ReduceOperation::ReduceOperation_Min:
547 return armnn::ReduceOperation::Min;
Teresa Charlin4e3e8312021-08-05 12:34:37 +0100548 case armnnSerializer::ReduceOperation::ReduceOperation_Prod:
549 return armnn::ReduceOperation::Prod;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +0000550 default:
551 return armnn::ReduceOperation::Sum;
552 }
553}
554
James Conroyaba90cd2020-11-06 16:28:18 +0000555armnn::LogicalBinaryOperation ToLogicalBinaryOperation(armnnSerializer::LogicalBinaryOperation operation)
556{
557 switch (operation)
558 {
559 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalAnd:
560 return armnn::LogicalBinaryOperation::LogicalAnd;
561 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalOr:
562 return armnn::LogicalBinaryOperation::LogicalOr;
563 default:
564 throw armnn::InvalidArgumentException("Logical Binary operation unknown");
565 }
566}
567
Mike Kelly3ec30772023-03-08 13:47:17 +0000568armnn::BinaryOperation ToElementwiseBinaryOperation(armnnSerializer::BinaryOperation operation)
569{
570 switch (operation)
571 {
572 case armnnSerializer::BinaryOperation::BinaryOperation_Add:
573 return armnn::BinaryOperation::Add;
574 case armnnSerializer::BinaryOperation::BinaryOperation_Div:
575 return armnn::BinaryOperation::Div;
576 case armnnSerializer::BinaryOperation::BinaryOperation_Maximum:
577 return armnn::BinaryOperation::Maximum;
578 case armnnSerializer::BinaryOperation::BinaryOperation_Minimum:
579 return armnn::BinaryOperation::Minimum;
580 case armnnSerializer::BinaryOperation::BinaryOperation_Mul:
581 return armnn::BinaryOperation::Mul;
582 case armnnSerializer::BinaryOperation::BinaryOperation_Sub:
583 return armnn::BinaryOperation::Sub;
John Mcloughlin0ec00872023-05-15 17:03:49 +0100584 case armnnSerializer::BinaryOperation::BinaryOperation_SqDiff:
585 return armnn::BinaryOperation::SqDiff;
586 case armnnSerializer::BinaryOperation::BinaryOperation_Power:
587 return armnn::BinaryOperation::Power;
Mike Kelly3ec30772023-03-08 13:47:17 +0000588 default:
589 throw armnn::InvalidArgumentException("Binary operation unknown");
590 }
591}
592
593armnn::UnaryOperation ToElementwiseUnaryOperation(armnnSerializer::UnaryOperation operation)
josh minor4a3c6102020-01-06 16:40:46 -0600594{
595 switch (operation)
596 {
597 case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
598 return armnn::UnaryOperation::Abs;
Teresa Charlin93f0ad02023-03-23 15:28:02 +0000599 case armnnSerializer::UnaryOperation::UnaryOperation_Ceil:
600 return armnn::UnaryOperation::Ceil;
josh minor4a3c6102020-01-06 16:40:46 -0600601 case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
602 return armnn::UnaryOperation::Rsqrt;
603 case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
604 return armnn::UnaryOperation::Sqrt;
605 case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
606 return armnn::UnaryOperation::Exp;
607 case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
608 return armnn::UnaryOperation::Neg;
James Conroyaba90cd2020-11-06 16:28:18 +0000609 case armnnSerializer::UnaryOperation::UnaryOperation_LogicalNot:
610 return armnn::UnaryOperation::LogicalNot;
Teresa Charlin50de4fa2021-05-31 18:47:33 +0100611 case armnnSerializer::UnaryOperation::UnaryOperation_Log:
612 return armnn::UnaryOperation::Log;
613 case armnnSerializer::UnaryOperation::UnaryOperation_Sin:
614 return armnn::UnaryOperation::Sin;
josh minor4a3c6102020-01-06 16:40:46 -0600615 default:
616 throw armnn::InvalidArgumentException("Unary operation unknown");
617 }
618}
619
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +0100620armnn::PaddingMode ToPaddingMode(armnnSerializer::PaddingMode paddingMode)
621{
622 switch (paddingMode)
623 {
624 case armnnSerializer::PaddingMode::PaddingMode_Reflect:
625 return armnn::PaddingMode::Reflect;
626 case armnnSerializer::PaddingMode::PaddingMode_Symmetric:
627 return armnn::PaddingMode::Symmetric;
628 default:
629 return armnn::PaddingMode::Constant;
630 }
631}
632
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100633armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
634{
635 switch (method)
636 {
637 case armnnSerializer::ResizeMethod_NearestNeighbor:
638 return armnn::ResizeMethod::NearestNeighbor;
639 case armnnSerializer::ResizeMethod_Bilinear:
Aron Virginas-Tar3c9b2702019-10-31 13:45:16 +0000640 return armnn::ResizeMethod::Bilinear;
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100641 default:
642 return armnn::ResizeMethod::NearestNeighbor;
643 }
644}
645
Finn Williams85d36712021-01-26 22:30:06 +0000646armnn::TensorInfo ToTensorInfo(TensorRawPtr tensorPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000647{
648 armnn::DataType type;
649 CHECK_TENSOR_PTR(tensorPtr);
650
651 switch (tensorPtr->dataType())
652 {
Francis Murtaghddb1d062020-03-10 13:51:45 +0000653 case DataType_QAsymmS8:
654 type = armnn::DataType::QAsymmS8;
655 break;
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000656 case DataType_QSymmS8:
657 type = armnn::DataType::QSymmS8;
658 break;
Kevin May43a799c2019-02-08 16:31:42 +0000659 case DataType_QuantisedAsymm8:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000660 case DataType_QAsymmU8:
661 type = armnn::DataType::QAsymmU8;
Kevin May43a799c2019-02-08 16:31:42 +0000662 break;
Derek Lambertif90c56d2020-01-10 17:14:08 +0000663 case DataType_QSymmS16:
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000664 case DataType_QuantisedSymm16:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000665 type = armnn::DataType::QSymmS16;
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000666 break;
Mike Kellya0766c32019-02-19 17:22:07 +0000667 case DataType_Signed32:
668 type = armnn::DataType::Signed32;
669 break;
Mike Kelly1f140f72021-04-06 12:25:55 +0100670 case DataType_Signed64:
671 type = armnn::DataType::Signed64;
672 break;
Kevin May43a799c2019-02-08 16:31:42 +0000673 case DataType_Float32:
674 type = armnn::DataType::Float32;
675 break;
676 case DataType_Float16:
677 type = armnn::DataType::Float16;
678 break;
679 case DataType_Boolean:
680 type = armnn::DataType::Boolean;
681 break;
682 default:
683 {
684 CheckLocation location = CHECK_LOCATION();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100685 throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
686 tensorPtr->dataType(),
687 EnumNameDataType(tensorPtr->dataType()),
688 location.AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000689 }
690 }
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000691
Colm Donelan800b2812021-02-12 12:43:35 +0000692 float quantizationScale = tensorPtr->quantizationScale();
693 int32_t quantizationOffset = tensorPtr->quantizationOffset();
694
Finn Williams2605b232020-06-10 15:53:46 +0100695 if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::Scalar))
696 {
Colm Donelan800b2812021-02-12 12:43:35 +0000697 return armnn::TensorInfo(TensorShape{armnn::Dimensionality::Scalar},
Finn Williams2605b232020-06-10 15:53:46 +0100698 type,
699 quantizationScale,
700 quantizationOffset);
701 }
Colm Donelan800b2812021-02-12 12:43:35 +0000702 else if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::NotSpecified))
703 {
704 armnn::TensorInfo result(TensorShape{Dimensionality::NotSpecified},
705 type,
706 quantizationScale,
707 quantizationOffset);
708 return result;
709 }
Kevin May43a799c2019-02-08 16:31:42 +0000710
711 auto dimensions = tensorPtr->dimensions();
712 unsigned int size = dimensions->size();
713 std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
Colm Donelan800b2812021-02-12 12:43:35 +0000714 bool dimensionsSpecificity[armnn::MaxNumOfTensorDimensions];
715 std::fill_n(dimensionsSpecificity, armnn::MaxNumOfTensorDimensions, true);
716 // For backwards compatibility check if the dimensionSpecificity vector is present first.
717 // The default is to have dimensionSpecificity set to all true's anyway.
718 if (tensorPtr->dimensionSpecificity() != nullptr)
719 {
720 auto dimensionSpecificity = tensorPtr->dimensionSpecificity();
721 size = dimensionSpecificity->size();
722 for (unsigned int i = 0; i < size; ++i)
723 {
724 dimensionsSpecificity[i] = dimensionSpecificity->Get(i);
725 }
726 }
727 // Construct a TensorShape
728 TensorShape shape(size, outputDims.data(), dimensionsSpecificity);
Kevin May43a799c2019-02-08 16:31:42 +0000729
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000730 auto quantizationScales = tensorPtr->quantizationScales();
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000731 if (quantizationScales)
732 {
733 unsigned int quantizationScalesSize = quantizationScales->size();
734 std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
735 unsigned int quantizationDim = tensorPtr->quantizationDim();
Colm Donelan800b2812021-02-12 12:43:35 +0000736 armnn::TensorInfo result(shape,
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000737 type,
738 scales,
739 quantizationDim);
740 return result;
741 }
742
Kevin May43a799c2019-02-08 16:31:42 +0000743 // two statements (on purpose) for easier debugging:
Colm Donelan800b2812021-02-12 12:43:35 +0000744 armnn::TensorInfo result(shape,
Kevin May43a799c2019-02-08 16:31:42 +0000745 type,
746 quantizationScale,
747 quantizationOffset);
Colm Donelan800b2812021-02-12 12:43:35 +0000748
Kevin May43a799c2019-02-08 16:31:42 +0000749 return result;
750}
751
Finn Williams85d36712021-01-26 22:30:06 +0000752armnn::ConstTensor ToConstTensor(ConstTensorRawPtr constTensorPtr)
Mike Kellya0766c32019-02-19 17:22:07 +0000753{
754 CHECK_CONST_TENSOR_PTR(constTensorPtr);
755 armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());
Matthew Sloyan81beae32021-07-13 19:46:11 +0100756 tensorInfo.SetConstant();
Mike Kellya0766c32019-02-19 17:22:07 +0000757
758 switch (constTensorPtr->data_type())
759 {
760 case ConstTensorData_ByteData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000761 {
762 auto byteData = constTensorPtr->data_as_ByteData()->data();
763 CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
764 return armnn::ConstTensor(tensorInfo, byteData->data());
765 }
Mike Kellya0766c32019-02-19 17:22:07 +0000766 case ConstTensorData_ShortData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000767 {
768 auto shortData = constTensorPtr->data_as_ShortData()->data();
769 CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
770 return armnn::ConstTensor(tensorInfo, shortData->data());
771 }
Mike Kellya0766c32019-02-19 17:22:07 +0000772 case ConstTensorData_IntData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000773 {
774 auto intData = constTensorPtr->data_as_IntData()->data();
775 CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
776 return armnn::ConstTensor(tensorInfo, intData->data());
777 }
Mike Kellya0766c32019-02-19 17:22:07 +0000778 case ConstTensorData_LongData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000779 {
780 auto longData = constTensorPtr->data_as_LongData()->data();
781 CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
782 return armnn::ConstTensor(tensorInfo, longData->data());
783 }
Mike Kellya0766c32019-02-19 17:22:07 +0000784 default:
785 {
786 CheckLocation location = CHECK_LOCATION();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100787 throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
788 constTensorPtr->data_type(),
789 EnumNameConstTensorData(constTensorPtr->data_type()),
790 location.AsString()));
Mike Kellya0766c32019-02-19 17:22:07 +0000791 }
792 }
793}
794
Finn Williams85d36712021-01-26 22:30:06 +0000795TensorRawPtrVector IDeserializer::DeserializerImpl::GetInputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000796{
797 CHECK_LAYERS(graphPtr, 0, layerIndex);
798 auto layer = GetBaseLayer(graphPtr, layerIndex);
799 const auto& numInputs = layer->inputSlots()->size();
800
801 TensorRawPtrVector result(numInputs);
802
803 for (unsigned int i=0; i<numInputs; ++i)
804 {
805 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
806 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
807 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
808 }
809 return result;
810}
811
Finn Williams85d36712021-01-26 22:30:06 +0000812TensorRawPtrVector IDeserializer::DeserializerImpl::GetOutputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000813{
814 CHECK_LAYERS(graphPtr, 0, layerIndex);
815 auto layer = GetBaseLayer(graphPtr, layerIndex);
816 const auto& numOutputs = layer->outputSlots()->size();
817
818 TensorRawPtrVector result(numOutputs);
819
820 for (unsigned int i=0; i<numOutputs; ++i)
821 {
822 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
823 }
824 return result;
825}
826
Finn Williams85d36712021-01-26 22:30:06 +0000827void IDeserializer::DeserializerImpl::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000828{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000829 CHECK_LAYERS(graph, 0, layerIndex);
830 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100831 throw ParseException(fmt::format("Layer not supported. layerIndex: {0} "
832 "layerName: {1} / {2}",
833 layerIndex,
834 layerName,
835 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000836}
837
Finn Williams85d36712021-01-26 22:30:06 +0000838void IDeserializer::DeserializerImpl::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000839{
840 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000841 m_InputBindings.clear();
842 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000843}
844
Kevin May43a799c2019-02-08 16:31:42 +0000845
Finn Williams85d36712021-01-26 22:30:06 +0000846INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000847{
848 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000849 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
850 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000851}
852
Finn Williams85d36712021-01-26 22:30:06 +0000853armnn::INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000854{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000855 ResetParser();
Simon Obutedb5804e2022-04-14 15:49:52 +0100856 if (binaryContent.fail()) {
857 ARMNN_LOG(error) << (std::string("Cannot read input"));
858 throw ParseException("Unable to read Input stream data");
859 }
860 binaryContent.seekg(0, std::ios::end);
861 const std::streamoff size = binaryContent.tellg();
862 std::vector<char> content(static_cast<size_t>(size));
863 binaryContent.seekg(0);
864 binaryContent.read(content.data(), static_cast<std::streamsize>(size));
865 GraphPtr graph = LoadGraphFromBinary(reinterpret_cast<uint8_t*>(content.data()), static_cast<size_t>(size));
Derek Lamberti8ddae332019-02-21 16:29:43 +0000866 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000867}
868
Finn Williams85d36712021-01-26 22:30:06 +0000869GraphPtr IDeserializer::DeserializerImpl::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000870{
871 if (binaryContent == nullptr)
872 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100873 throw InvalidArgumentException(fmt::format("Invalid (null) binary content {}",
874 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000875 }
876 flatbuffers::Verifier verifier(binaryContent, len);
877 if (verifier.VerifyBuffer<SerializedGraph>() == false)
878 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100879 throw ParseException(fmt::format("Buffer doesn't conform to the expected Armnn "
880 "flatbuffers format. size:{0} {1}",
881 len,
882 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000883 }
884 return GetSerializedGraph(binaryContent);
885}
886
// Translate a verified serialized graph into an armnn INetwork.
//
// Pass 1: run the registered parser for every layer except the graph's
//         input/output layers (those are created in SetupInput/OutputLayers).
// Pass 2: create input and output layers with their binding info.
// Pass 3: connect every recorded output slot to its consumer input slots.
INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    ARMNN_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            // Parsers add the armnn layer and register its slots in
            // m_GraphConnections; unknown types hit ParseUnsupportedLayer.
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            // An output slot may fan out to several input slots; connect each.
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    // Hand ownership of the completed network to the caller.
    return std::move(m_Network);
}
927
Finn Williams85d36712021-01-26 22:30:06 +0000928BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000929 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000930{
Jan Eilers8eb25602020-03-09 12:13:48 +0000931 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000932 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000933 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000934 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000935 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000936 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000937 }
938 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100939 throw ParseException(fmt::format("No input binding found for layer:{0} / {1}",
940 name,
941 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000942}
943
Finn Williams85d36712021-01-26 22:30:06 +0000944BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000945 const std::string& name) const
946{
Jan Eilers8eb25602020-03-09 12:13:48 +0000947 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000948 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000949 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000950 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000951 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000952 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000953 }
954 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100955 throw ParseException(fmt::format("No output binding found for layer:{0} / {1}",
956 name,
957 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000958}
959
Finn Williams85d36712021-01-26 22:30:06 +0000960unsigned int IDeserializer::DeserializerImpl::GetInputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000961{
962 for (unsigned int i = 0; i < graph->layers()->size(); i++)
963 {
964 auto layer = graph->layers()->Get(i);
965 if (layer->layer_type() == Layer::Layer_InputLayer)
966 {
967 auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
968 if (layerBindingId == targetId)
969 {
970 return i;
971 }
972 }
973 }
974 throw ParseException("Input layer with given layerBindingId not found");
975}
976
Finn Williams85d36712021-01-26 22:30:06 +0000977unsigned int IDeserializer::DeserializerImpl::GetOutputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000978{
979 for (unsigned int i = 0; i < graph->layers()->size(); i++)
980 {
981 auto layer = graph->layers()->Get(i);
982 if (layer->layer_type() == Layer::Layer_OutputLayer)
983 {
984 auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
985 if (layerBindingId == targetId)
986 {
987 return i;
988 }
989 }
990 }
991 throw ParseException("Output layer with given layerBindingId not found");
992}
993
Finn Williams85d36712021-01-26 22:30:06 +0000994unsigned int IDeserializer::DeserializerImpl::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100995{
996 for (unsigned int i = 0; i < graph->layers()->size(); i++)
997 {
998 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
999 if (layer->index() == targetIndex)
1000 {
1001 return i;
1002 }
1003 }
1004 throw ParseException("Layer with given index not found");
1005}
1006
Finn Williams85d36712021-01-26 22:30:06 +00001007IDeserializer::DeserializerImpl::FeatureVersions IDeserializer::DeserializerImpl::GetFeatureVersions(GraphPtr graph)
Tee Jungaa920c52019-11-05 10:48:25 +00001008{
Finn Williams85d36712021-01-26 22:30:06 +00001009 IDeserializer::DeserializerImpl::FeatureVersions versions;
Tee Jungaa920c52019-11-05 10:48:25 +00001010
1011 if (graph->featureVersions())
1012 {
1013 versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
Jan Eilers53ef7952021-06-02 12:01:25 +01001014 versions.m_WeightsLayoutScheme = graph->featureVersions()->weightsLayoutScheme();
Matthew Sloyan81beae32021-07-13 19:46:11 +01001015 versions.m_ConstTensorsAsInputs = graph->featureVersions()->constantTensorsAsInputs();
Tee Jungaa920c52019-11-05 10:48:25 +00001016 }
1017
1018 return versions;
1019}
1020
// Create an armnn InputLayer for every input id recorded in the serialized
// graph, set its output tensor info, register its output slots for later
// connection, and record (name -> binding info) in m_InputBindings.
void IDeserializer::DeserializerImpl::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numInputs = graph->inputIds()->size();
    m_InputBindings.clear();
    m_InputBindings.reserve(numInputs);

    for (unsigned int i = 0; i < numInputs; i++)
    {
        unsigned int inputLayerIndex = 0xFFFFFFFF;
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            // Legacy scheme: the recorded id is the layer's index property.
            const unsigned int inputId = armnn::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
            inputLayerIndex = GetLayerIndexInVector(graph, inputId);
        }
        else
        {
            // Newer scheme: the recorded id is the layer binding id.
            const int inputId = graph->inputIds()->Get(i);
            inputLayerIndex = GetInputLayerInVector(graph, inputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");

        IConnectableLayer* inputLayer =
            m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());

        // Input layers have exactly one output slot carrying the input tensor.
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
        inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
        RegisterOutputSlots(graph, inputLayerIndex, inputLayer);

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
1059
// Create an armnn OutputLayer for every output id recorded in the serialized
// graph, register its input slot for later connection, and record
// (name -> binding info) in m_OutputBindings. The binding's tensor info is
// taken from the producing layer's output slot.
void IDeserializer::DeserializerImpl::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numOutputs = graph->outputIds()->size();
    m_OutputBindings.clear();
    m_OutputBindings.reserve(numOutputs);

    for (unsigned int i = 0; i < numOutputs; i++)
    {
        unsigned int outputLayerIndex = 0xFFFFFFFF;
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            // Legacy scheme: the recorded id is the layer's index property.
            const unsigned int outputId = armnn::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
            outputLayerIndex = GetLayerIndexInVector(graph, outputId);
        }
        else
        {
            // Newer scheme: the recorded id is the layer binding id.
            const int outputId = graph->outputIds()->Get(i);
            outputLayerIndex = GetOutputLayerInVector(graph, outputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Output has no name.");

        IConnectableLayer* outputLayer =
            m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());

        RegisterInputSlots(graph, outputLayerIndex, outputLayer);
        unsigned int sourceLayerIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
        // NOTE(review): outputSlotIndex here is a slot index, yet it is passed
        // through GetLayerIndexInVector, which maps *layer* index properties to
        // vector positions — confirm this is intentional and not a latent bug.
        unsigned int outputSlotIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->outputSlotIndex());
        LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(
            sourceBaseLayer->outputSlots()->Get(outputSlotIndex)->tensorInfo());
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
1102
// Record each of an armnn layer's output slots in m_GraphConnections, keyed
// by the serialized layer's index property and slot index, so the final
// connection pass can wire them to consumer input slots.
// Throws if the serialized slot count disagrees with the armnn layer's.
void IDeserializer::DeserializerImpl::RegisterOutputSlots(GraphPtr graph,
                                                          uint32_t layerIndex,
                                                          IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    ARMNN_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
    if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(fmt::format("The number of outputslots ({0}) does not match the number expected ({1})"
                                         " for layer index: {2} {3}",
                                         baseLayer->outputSlots()->size(),
                                         layer->GetNumOutputSlots(),
                                         layerIndex,
                                         CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
    {
        // Use the slot's serialized index property, not its vector position.
        const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
        armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
        // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
        RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
    }
}
1128
// Record each of an armnn layer's input slots in m_GraphConnections against
// the producing layer/slot recorded on the serialized connection, so the
// final connection pass can wire them up.
// ignoreSlots lists armnn-side slot indices with no serialized counterpart
// (e.g. constant-tensor inputs attached separately); they are skipped and
// excluded from the slot-count consistency check.
void IDeserializer::DeserializerImpl::RegisterInputSlots(GraphPtr graph,
                                                         uint32_t layerIndex,
                                                         armnn::IConnectableLayer* layer,
                                                         std::vector<unsigned int> ignoreSlots)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    ARMNN_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);

    if (baseLayer->inputSlots()->size() != (layer->GetNumInputSlots() - ignoreSlots.size()))
    {
        throw ParseException(fmt::format("The number of inputslots ({0}) does not match the number expected ({1})"
                                         " for layer index:{2} {3}",
                                         baseLayer->inputSlots()->size(),
                                         layer->GetNumInputSlots(),
                                         layerIndex,
                                         CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
    {
        // Check if slot should be ignored.
        if (std::find(ignoreSlots.begin(), ignoreSlots.end(), i) == ignoreSlots.end())
        {
            auto fbInputSlot = baseLayer->inputSlots()->Get(i);
            auto fbConnection = fbInputSlot->connection();
            armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
            RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
        }
    }
}
1160
Finn Williams85d36712021-01-26 22:30:06 +00001161void IDeserializer::DeserializerImpl::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001162 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001163 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +00001164{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001165 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +00001166 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001167 m_GraphConnections[sourceLayerIndex] = Connections();
1168 }
1169
1170 Connections& connections = m_GraphConnections[sourceLayerIndex];
1171 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
1172 {
1173 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +00001174 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001175 else
1176 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001177 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001178 }
1179}
Kevin May43a799c2019-02-08 16:31:42 +00001180
Finn Williams85d36712021-01-26 22:30:06 +00001181void IDeserializer::DeserializerImpl::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001182 uint32_t outputSlotIndex,
1183 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001184{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001185 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1186 {
1187 m_GraphConnections[sourceLayerIndex] = Connections();
1188 }
1189
1190 Connections& connections = m_GraphConnections[sourceLayerIndex];
1191 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1192 {
1193 throw ParseException("Same output slot index processed twice");
1194 }
1195
1196 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +00001197}
1198
Finn Williams85d36712021-01-26 22:30:06 +00001199void IDeserializer::DeserializerImpl::ParseAbs(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001200{
1201 CHECK_LAYERS(graph, 0, layerIndex);
1202 auto inputs = GetInputs(graph, layerIndex);
1203 CHECK_LOCATION();
1204 CHECK_VALID_SIZE(inputs.size(), 1);
1205
1206 auto outputs = GetOutputs(graph, layerIndex);
1207 CHECK_VALID_SIZE(outputs.size(), 1);
1208
1209 auto layerName = GetLayerName(graph, layerIndex);
1210
josh minor4a3c6102020-01-06 16:40:46 -06001211 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
1212 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001213 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1214 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1215
1216 RegisterInputSlots(graph, layerIndex, layer);
1217 RegisterOutputSlots(graph, layerIndex, layer);
1218}
1219
Finn Williams85d36712021-01-26 22:30:06 +00001220void IDeserializer::DeserializerImpl::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +00001221{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001222 CHECK_LAYERS(graph, 0, layerIndex);
1223 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001224 CHECK_LOCATION();
1225 CHECK_VALID_SIZE(inputs.size(), 1);
1226
Derek Lamberti8ddae332019-02-21 16:29:43 +00001227 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001228 CHECK_VALID_SIZE(outputs.size(), 1);
1229
Derek Lamberti8ddae332019-02-21 16:29:43 +00001230 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001231 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001232 auto serializerDescriptor = serializerLayer->descriptor();
1233
1234 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001235 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +00001236 descriptor.m_A = serializerDescriptor->a();
1237 descriptor.m_B = serializerDescriptor->b();
1238
1239 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
1240 layerName.c_str());
1241 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1242 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1243
Derek Lamberti8ddae332019-02-21 16:29:43 +00001244 RegisterInputSlots(graph, layerIndex, layer);
1245 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +00001246}
1247
Finn Williams85d36712021-01-26 22:30:06 +00001248void IDeserializer::DeserializerImpl::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001249{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001250 CHECK_LAYERS(graph, 0, layerIndex);
1251 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001252 CHECK_LOCATION();
1253 CHECK_VALID_SIZE(inputs.size(), 2);
1254
Derek Lamberti8ddae332019-02-21 16:29:43 +00001255 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001256 CHECK_VALID_SIZE(outputs.size(), 1);
1257
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001258 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00001259 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Add);
1260 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001261
1262 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1263 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1264
Derek Lamberti8ddae332019-02-21 16:29:43 +00001265 RegisterInputSlots(graph, layerIndex, layer);
1266 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001267}
1268
Finn Williams85d36712021-01-26 22:30:06 +00001269void IDeserializer::DeserializerImpl::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001270{
1271 CHECK_LAYERS(graph, 0, layerIndex);
1272 auto inputs = GetInputs(graph, layerIndex);
1273 CHECK_LOCATION();
1274 CHECK_VALID_SIZE(inputs.size(), 1);
1275
1276 auto outputs = GetOutputs(graph, layerIndex);
1277 CHECK_VALID_SIZE(outputs.size(), 1);
1278
1279 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1280 auto serializerDescriptor = serializerLayer->descriptor();
1281
1282 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001283 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001284 descriptor.m_Axis = serializerDescriptor->axis();
1285 auto layerName = GetLayerName(graph, layerIndex);
1286 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1287
1288 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1289 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1290
1291 RegisterInputSlots(graph, layerIndex, layer);
1292 RegisterOutputSlots(graph, layerIndex, layer);
1293}
1294
Samuel Yapa04f4a12022-08-19 11:14:38 +01001295void IDeserializer::DeserializerImpl::ParseBatchMatMul(GraphPtr graph, unsigned int layerIndex)
1296{
1297 CHECK_LAYERS(graph, 0, layerIndex);
1298
1299 auto inputs = GetInputs(graph, layerIndex);
1300 CHECK_LOCATION();
1301 CHECK_VALID_SIZE(inputs.size(), 2);
1302
1303 auto outputs = GetOutputs(graph, layerIndex);
1304 CHECK_VALID_SIZE(outputs.size(), 1);
1305
1306 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchMatMulLayer();
1307 auto serializerDescriptor = serializerLayer->descriptor();
1308
1309 armnn::BatchMatMulDescriptor descriptor(serializerDescriptor->transposeX(),
1310 serializerDescriptor->transposeY(),
1311 serializerDescriptor->adjointX(),
1312 serializerDescriptor->adjointY(),
1313 ToDataLayout(serializerDescriptor->dataLayoutX()),
1314 ToDataLayout(serializerDescriptor->dataLayoutY()));
1315
1316 auto layerName = GetLayerName(graph, layerIndex);
1317 IConnectableLayer* layer = m_Network->AddBatchMatMulLayer(descriptor, layerName.c_str());
1318
1319 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1320 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1321
1322 RegisterInputSlots(graph, layerIndex, layer);
1323 RegisterOutputSlots(graph, layerIndex, layer);
1324}
1325
Finn Williams85d36712021-01-26 22:30:06 +00001326void IDeserializer::DeserializerImpl::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001327{
1328 CHECK_LAYERS(graph, 0, layerIndex);
1329
Finn Williams85d36712021-01-26 22:30:06 +00001330 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001331 CHECK_VALID_SIZE(inputs.size(), 1);
1332
Finn Williams85d36712021-01-26 22:30:06 +00001333 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001334 CHECK_VALID_SIZE(outputs.size(), 1);
1335
1336 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1337 auto flatBufferCrops = flatBufferDescriptor->crops();
1338 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1339
Mike Kelly51b8c312022-05-24 11:34:02 +01001340 if (flatBufferCrops->size() % 2 != 0)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001341 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001342 throw ParseException(fmt::format("The size of crops must be divisible by 2 {}", CHECK_LOCATION().AsString()));
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001343 }
1344
1345 std::vector<std::pair<unsigned int, unsigned int>> crops;
Mike Kelly51b8c312022-05-24 11:34:02 +01001346 crops.reserve(flatBufferCrops->size() / 2);
1347 for (unsigned int i = 0; i < flatBufferCrops->size() - 1; i += 2)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001348 {
1349 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1350 }
1351
1352 armnn::BatchToSpaceNdDescriptor descriptor;
1353 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1354 descriptor.m_BlockShape =
1355 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1356 descriptor.m_Crops = crops;
1357
1358 auto layerName = GetLayerName(graph, layerIndex);
1359 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1360
1361 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1362 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1363
1364 RegisterInputSlots(graph, layerIndex, layer);
1365 RegisterOutputSlots(graph, layerIndex, layer);
1366}
1367
Finn Williams85d36712021-01-26 22:30:06 +00001368void IDeserializer::DeserializerImpl::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
ruoyan018e7fa232019-02-28 15:09:07 +00001369{
1370 CHECK_LAYERS(graph, 0, layerIndex);
1371
1372 auto inputs = GetInputs(graph, layerIndex);
1373 CHECK_VALID_SIZE(inputs.size(), 1);
1374
1375 auto outputs = GetOutputs(graph, layerIndex);
1376 CHECK_VALID_SIZE(outputs.size(), 1);
1377 auto outputInfo = ToTensorInfo(outputs[0]);
1378
ruoyan015c7ab052019-03-04 14:48:02 +00001379 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001380
1381 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1382 auto serializerDescriptor = serializerLayer->descriptor();
1383
1384 armnn::BatchNormalizationDescriptor descriptor;
1385 descriptor.m_Eps = serializerDescriptor->eps();
1386 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1387
1388 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1389 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1390 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1391 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1392
1393 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1394 mean,
1395 variance,
1396 beta,
1397 gamma,
1398 layerName.c_str());
1399 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1400
1401 RegisterInputSlots(graph, layerIndex, layer);
1402 RegisterOutputSlots(graph, layerIndex, layer);
1403}
1404
mathad01b392e982021-04-07 12:07:30 +01001405void IDeserializer::DeserializerImpl::ParseCast(GraphPtr graph, unsigned int layerIndex)
1406{
1407 CHECK_LAYERS(graph, 0, layerIndex);
1408 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1409 CHECK_LOCATION();
1410 CHECK_VALID_SIZE(inputs.size(), 1);
1411
1412 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1413 CHECK_VALID_SIZE(outputs.size(), 1);
1414
1415 auto layerName = GetLayerName(graph, layerIndex);
1416
1417 IConnectableLayer* layer = m_Network->AddCastLayer(layerName.c_str());
1418
1419 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1420 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1421
1422 RegisterInputSlots(graph, layerIndex, layer);
1423 RegisterOutputSlots(graph, layerIndex, layer);
1424}
1425
// Deserializes a Constant layer holding an inline tensor.
//
// For models serialized before the weights-layout-scheme feature version
// (m_WeightsLayoutScheme <= 0) the constant data is permuted and reshaped from the
// legacy [M,I,H,W] depthwise-weights layout to [1,H,W,I*M] before being added to the
// network; otherwise the tensor is added unchanged.
// NOTE(review): the legacy branch applies the permutation to every constant tensor in
// such models — presumably old files only used constants as depthwise weights; confirm
// against the serializer's history.
void IDeserializer::DeserializerImpl::ParseConstant(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    // A constant layer has no inputs, only a single output.
    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
    auto serializerInput = serializerLayer->input();

    armnn::ConstTensor input = ToConstTensor(serializerInput);
    IConnectableLayer* layer;

    // Required for when Constant Layer is used as an inputs to DepthwiseConvolution2d Layer.
    // Running a model that was created before weights layout scheme version was added to our flatbuffers
    // file ensuring older models can still be read and executed. featureVersion weights layout scheme 1
    // indicates a change in the depthwise weights layout within ArmNN from [M,I,H,W] --> [1,H,W,I*M]
    if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
    {
        // Permute weights [ H, W, M, I ] --> [ 1, H, W, I*M ]
        // Step1: [ M, I, H, W ] --> [ H, W, I, M]
        PermutationVector permutationVector = { 3, 2, 0, 1 };
        armnn::TensorInfo weightsInfo = input.GetInfo();
        // Scratch buffer that receives the permuted data; it must stay alive until
        // AddConstantLayer below has copied the tensor into the network.
        std::unique_ptr<unsigned char[]> permuteBuffer(new unsigned char[weightsInfo.GetNumBytes()]);
        weightsInfo = armnnUtils::Permuted(weightsInfo, permutationVector);
        armnnUtils::Permute(weightsInfo.GetShape(), permutationVector,
                            input.GetMemoryArea(), permuteBuffer.get(),
                            GetDataTypeSize(weightsInfo.GetDataType()));

        // Step2: Reshape [ H, W, I, M] --> [ 1, H, W, I*M ]
        auto weightsShape = weightsInfo.GetShape();
        weightsInfo.SetShape({1,
                              weightsShape[0],
                              weightsShape[1],
                              weightsShape[2]*weightsShape[3]});
        // Mark as constant so downstream validation accepts the tensor.
        weightsInfo.SetConstant(true);

        armnn::ConstTensor weightsPermuted(weightsInfo, permuteBuffer.get());

        layer = m_Network->AddConstantLayer(weightsPermuted, layerName.c_str());

        // Use the permuted info rather than the serialized output info, since the
        // shape was rewritten above.
        layer->GetOutputSlot(0).SetTensorInfo(weightsPermuted.GetInfo());

        RegisterOutputSlots(graph, layerIndex, layer);

        // Early return: output slots are already registered for the legacy path.
        return;
    }
    else
    {
        layer = m_Network->AddConstantLayer(input, layerName.c_str());

        armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
        outputTensorInfo.SetConstant(true);
        layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
    }

    RegisterOutputSlots(graph, layerIndex, layer);
}
1487
// Deserializes a Convolution2d layer.
//
// Two serialization schemes are supported:
//  - Legacy (m_ConstTensorsAsInputs <= 0): weights/biases are stored as members of
//    the flatbuffer layer. They are re-created here as ConstantLayers connected to
//    input slots 1 (weights) and 2 (bias), and those slots are added to 'ignoreSlots'
//    so RegisterInputSlots does not expect serialized connections for them.
//  - Current: weights/biases arrive as ordinary graph inputs; the expected input
//    count comes from the descriptor.
void IDeserializer::DeserializerImpl::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();

    auto layerName = GetLayerName(graph, layerIndex);
    auto flatbufferDescriptor = flatBufferLayer->descriptor();

    armnn::Convolution2dDescriptor descriptor;
    descriptor.m_PadLeft = flatbufferDescriptor->padLeft();
    descriptor.m_PadRight = flatbufferDescriptor->padRight();
    descriptor.m_PadTop = flatbufferDescriptor->padTop();
    descriptor.m_PadBottom = flatbufferDescriptor->padBottom();
    descriptor.m_StrideX = flatbufferDescriptor->strideX();
    descriptor.m_StrideY = flatbufferDescriptor->strideY();;
    descriptor.m_DilationX = flatbufferDescriptor->dilationX();
    descriptor.m_DilationY = flatbufferDescriptor->dilationY();;
    descriptor.m_BiasEnabled = flatbufferDescriptor->biasEnabled();;
    descriptor.m_DataLayout = ToDataLayout(flatbufferDescriptor->dataLayout());

    armnn::IConnectableLayer* layer;
    // Input slots wired to ConstantLayers below; excluded from RegisterInputSlots.
    std::vector<unsigned int> ignoreSlots {};

    armnn::ConstTensor biasTensor;
    // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
    // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
    if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
    {
        // If the model stores weights and biases as members of the layer we have to read them from there
        // but add them to their own ConstantLayer for compatibility
        CHECK_VALID_SIZE(inputs.size(), 1);

        layer = m_Network->AddConvolution2dLayer(descriptor,
                                                 layerName.c_str());

        // Weights -> ConstantLayer -> input slot 1 of the convolution.
        armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
        auto weightsLayer = m_Network->AddConstantLayer(weightsTensor);
        weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
        weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsTensor.GetInfo());
        ignoreSlots.emplace_back(1u);

        if (descriptor.m_BiasEnabled)
        {
            // Bias -> ConstantLayer -> input slot 2 of the convolution.
            biasTensor = ToConstTensor(flatBufferLayer->biases());
            auto biasLayer = m_Network->AddConstantLayer(biasTensor);
            biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
            biasLayer->GetOutputSlot(0).SetTensorInfo(biasTensor.GetInfo());
            ignoreSlots.emplace_back(2u);
        }
    }
    else
    {
        layer = m_Network->AddConvolution2dLayer(descriptor,
                                                 layerName.c_str());
        // Descriptor knows whether 2 or 3 inputs (input, weights[, bias]) are expected.
        uint32_t numInputs = descriptor.GetNumInputs();
        CHECK_VALID_SIZE(inputs.size(), numInputs);
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1558
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001559void IDeserializer::DeserializerImpl::ParseConvolution3d(GraphPtr graph, unsigned int layerIndex)
1560{
1561 CHECK_LAYERS(graph, 0, layerIndex);
1562 auto inputs = GetInputs(graph, layerIndex);
1563 CHECK_LOCATION();
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001564
1565 auto outputs = GetOutputs(graph, layerIndex);
1566 CHECK_VALID_SIZE(outputs.size(), 1);
1567
1568 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution3dLayer();
1569 auto layerName = GetLayerName(graph, layerIndex);
1570 auto serializerDescriptor = serializerLayer->descriptor();
1571
1572 armnn::Convolution3dDescriptor descriptor;
1573 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1574 descriptor.m_PadRight = serializerDescriptor->padRight();
1575 descriptor.m_PadTop = serializerDescriptor->padTop();
1576 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1577 descriptor.m_PadFront = serializerDescriptor->padFront();
1578 descriptor.m_PadBack = serializerDescriptor->padBack();
1579 descriptor.m_StrideX = serializerDescriptor->strideX();
1580 descriptor.m_StrideY = serializerDescriptor->strideY();
1581 descriptor.m_StrideZ = serializerDescriptor->strideZ();
1582 descriptor.m_DilationX = serializerDescriptor->dilationX();
1583 descriptor.m_DilationY = serializerDescriptor->dilationY();
1584 descriptor.m_DilationZ = serializerDescriptor->dilationZ();
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001585 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001586 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1587
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001588 uint32_t numInputs = descriptor.GetNumInputs();
1589 CHECK_VALID_SIZE(inputs.size(), numInputs);
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001590
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001591 IConnectableLayer* layer = m_Network->AddConvolution3dLayer(descriptor, layerName.c_str());
1592
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001593 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1594 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1595
1596 RegisterInputSlots(graph, layerIndex, layer);
1597 RegisterOutputSlots(graph, layerIndex, layer);
1598}
1599
Finn Williams85d36712021-01-26 22:30:06 +00001600void IDeserializer::DeserializerImpl::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001601{
1602 CHECK_LAYERS(graph, 0, layerIndex);
1603
1604 auto inputs = GetInputs(graph, layerIndex);
1605 CHECK_VALID_SIZE(inputs.size(), 1);
1606
1607 auto outputs = GetOutputs(graph, layerIndex);
1608 CHECK_VALID_SIZE(outputs.size(), 1);
1609
1610 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1611
1612 armnn::DepthToSpaceDescriptor descriptor;
1613 descriptor.m_BlockSize = fbDescriptor->blockSize();
1614 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1615
1616 auto layerName = GetLayerName(graph, layerIndex);
1617 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1618
1619 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1620 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1621
1622 RegisterInputSlots(graph, layerIndex, layer);
1623 RegisterOutputSlots(graph, layerIndex, layer);
1624}
1625
Finn Williams85d36712021-01-26 22:30:06 +00001626void IDeserializer::DeserializerImpl::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001627{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001628 CHECK_LAYERS(graph, 0, layerIndex);
1629 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001630 CHECK_LOCATION();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001631
Derek Lamberti8ddae332019-02-21 16:29:43 +00001632 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001633 CHECK_VALID_SIZE(outputs.size(), 1);
1634
Derek Lamberti8ddae332019-02-21 16:29:43 +00001635 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001636 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001637 auto serializerDescriptor = serializerLayer->descriptor();
1638
1639 armnn::DepthwiseConvolution2dDescriptor descriptor;
Cathal Corbett06902652022-04-14 17:55:11 +01001640 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1641 descriptor.m_PadRight = serializerDescriptor->padRight();
1642 descriptor.m_PadTop = serializerDescriptor->padTop();
1643 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1644 descriptor.m_StrideX = serializerDescriptor->strideX();
1645 descriptor.m_StrideY = serializerDescriptor->strideY();
1646 descriptor.m_DilationX = serializerDescriptor->dilationX();
1647 descriptor.m_DilationY = serializerDescriptor->dilationY();
1648 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
1649 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001650
Jan Eilers53ef7952021-06-02 12:01:25 +01001651 IConnectableLayer* layer;
Cathal Corbett06902652022-04-14 17:55:11 +01001652 std::vector<unsigned int> ignoreSlots {};
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001653
Cathal Corbett06902652022-04-14 17:55:11 +01001654 // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
1655 // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
1656 if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001657 {
Cathal Corbett06902652022-04-14 17:55:11 +01001658 CHECK_VALID_SIZE(inputs.size(), 1);
Jan Eilers53ef7952021-06-02 12:01:25 +01001659
Cathal Corbett06902652022-04-14 17:55:11 +01001660 // If the model stores weights and biases as members of the layer we have to read them from there
1661 // but add them to their own ConstantLayer for compatibility
1662 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1663 ignoreSlots.emplace_back(1u);
Jan Eilers53ef7952021-06-02 12:01:25 +01001664
1665 layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
Jan Eilers53ef7952021-06-02 12:01:25 +01001666 layerName.c_str());
Cathal Corbett06902652022-04-14 17:55:11 +01001667
1668 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
1669 if (descriptor.m_BiasEnabled)
1670 {
1671 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
1672 ignoreSlots.emplace_back(2u);
1673
1674 auto biasLayer = m_Network->AddConstantLayer(biases);
1675 biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
1676 biasLayer->GetOutputSlot(0).SetTensorInfo(biases.GetInfo());
1677 }
1678
1679 if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
1680 {
1681 // Permute weights [ H, W, M, I ] --> [ 1, H, W, I*M ]
1682 // Step1: [ M, I, H, W ] --> [ H, W, I, M]
1683 PermutationVector permutationVector = { 3, 2, 0, 1 };
1684 armnn::TensorInfo weightsInfo = weights.GetInfo();
1685 std::unique_ptr<unsigned char[]> permuteBuffer(new unsigned char[weightsInfo.GetNumBytes()]);
1686 weightsInfo = armnnUtils::Permuted(weightsInfo, permutationVector);
1687 armnnUtils::Permute(weightsInfo.GetShape(), permutationVector,
1688 weights.GetMemoryArea(), permuteBuffer.get(),
1689 GetDataTypeSize(weightsInfo.GetDataType()));
1690
1691 // Step2: Reshape [ H, W, I, M] --> [ 1, H, W, I*M ]
1692 auto weightsShape = weightsInfo.GetShape();
1693 weightsInfo.SetShape({1,
1694 weightsShape[0],
1695 weightsShape[1],
1696 weightsShape[2]*weightsShape[3]});
1697
1698 armnn::ConstTensor weightsPermuted(weightsInfo, permuteBuffer.get());
1699
1700 auto weightsLayer = m_Network->AddConstantLayer(weightsPermuted);
1701 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
1702 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsPermuted.GetInfo());
1703 }
1704 else
1705 {
1706 auto weightsLayer = m_Network->AddConstantLayer(weights);
1707 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
1708 weightsLayer->GetOutputSlot(0).SetTensorInfo(weights.GetInfo());
1709 }
Jan Eilers53ef7952021-06-02 12:01:25 +01001710 }
1711 else
1712 {
1713 layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
Jan Eilers53ef7952021-06-02 12:01:25 +01001714 layerName.c_str());
Cathal Corbett06902652022-04-14 17:55:11 +01001715 uint32_t numInputs = descriptor.GetNumInputs();
1716 CHECK_VALID_SIZE(inputs.size(), numInputs);
Jan Eilers53ef7952021-06-02 12:01:25 +01001717 }
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001718
1719 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1720 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1721
Cathal Corbett06902652022-04-14 17:55:11 +01001722 RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
Derek Lamberti8ddae332019-02-21 16:29:43 +00001723 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001724}
1725
Finn Williams85d36712021-01-26 22:30:06 +00001726void IDeserializer::DeserializerImpl::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001727{
1728 CHECK_LAYERS(graph, 0, layerIndex);
1729 auto inputs = GetInputs(graph, layerIndex);
1730 CHECK_LOCATION();
1731 CHECK_VALID_SIZE(inputs.size(), 2);
1732
1733 auto outputs = GetOutputs(graph, layerIndex);
1734 CHECK_VALID_SIZE(outputs.size(), 4);
1735
1736 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1737 auto layerName = GetLayerName(graph, layerIndex);
1738 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1739
1740 armnn::DetectionPostProcessDescriptor descriptor;
1741 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1742 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1743 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1744 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1745 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1746 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1747 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1748 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1749 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1750 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1751 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1752
1753 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1754
1755 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1756 anchors,
1757 layerName.c_str());
1758
1759 for (unsigned int i = 0; i < 4; i++)
1760 {
1761 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1762 }
1763
1764 RegisterInputSlots(graph, layerIndex, layer);
1765 RegisterOutputSlots(graph, layerIndex, layer);
1766}
1767
Finn Williams85d36712021-01-26 22:30:06 +00001768void IDeserializer::DeserializerImpl::ParseDivision(GraphPtr graph, unsigned int layerIndex)
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001769{
1770 CHECK_LAYERS(graph, 0, layerIndex);
1771 auto inputs = GetInputs(graph, layerIndex);
1772 CHECK_LOCATION();
1773 CHECK_VALID_SIZE(inputs.size(), 2);
1774
1775 auto outputs = GetOutputs(graph, layerIndex);
1776 CHECK_VALID_SIZE(outputs.size(), 1);
1777
1778 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00001779 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Div);
1780 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001781
1782 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1783 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1784
1785 RegisterInputSlots(graph, layerIndex, layer);
1786 RegisterOutputSlots(graph, layerIndex, layer);
1787}
1788
Finn Williams85d36712021-01-26 22:30:06 +00001789void IDeserializer::DeserializerImpl::ParseEqual(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001790{
1791 CHECK_LAYERS(graph, 0, layerIndex);
1792 auto inputs = GetInputs(graph, layerIndex);
1793 CHECK_LOCATION();
1794 CHECK_VALID_SIZE(inputs.size(), 2);
1795
1796 auto outputs = GetOutputs(graph, layerIndex);
1797 CHECK_VALID_SIZE(outputs.size(), 1);
1798
1799 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001800 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1801 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001802
1803 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1804 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1805
1806 RegisterInputSlots(graph, layerIndex, layer);
1807 RegisterOutputSlots(graph, layerIndex, layer);
1808}
1809
Finn Williams85d36712021-01-26 22:30:06 +00001810void IDeserializer::DeserializerImpl::ParseFill(GraphPtr graph, unsigned int layerIndex)
Keith Davis300ad562020-06-04 16:34:23 +01001811{
1812 CHECK_LAYERS(graph, 0, layerIndex);
1813 auto inputs = GetInputs(graph, layerIndex);
1814 CHECK_LOCATION();
1815 CHECK_VALID_SIZE(inputs.size(), 1);
1816
1817 auto outputs = GetOutputs(graph, layerIndex);
1818 CHECK_VALID_SIZE(outputs.size(), 1);
1819
1820 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armaganfd0cae32021-11-08 17:18:31 +00001821 armnn::FillDescriptor descriptor;
1822 descriptor.m_Value = graph->layers()->Get(layerIndex)->layer_as_FillLayer()->descriptor()->value();
Keith Davis300ad562020-06-04 16:34:23 +01001823 IConnectableLayer* layer = m_Network->AddFillLayer(descriptor, layerName.c_str());
1824
1825 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1826 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1827
1828 RegisterInputSlots(graph, layerIndex, layer);
1829 RegisterOutputSlots(graph, layerIndex, layer);
1830}
1831
Finn Williams85d36712021-01-26 22:30:06 +00001832void IDeserializer::DeserializerImpl::ParseGreater(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001833{
1834 CHECK_LAYERS(graph, 0, layerIndex);
1835 auto inputs = GetInputs(graph, layerIndex);
1836 CHECK_LOCATION();
1837 CHECK_VALID_SIZE(inputs.size(), 2);
1838
1839 auto outputs = GetOutputs(graph, layerIndex);
1840 CHECK_VALID_SIZE(outputs.size(), 1);
1841
1842 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001843 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1844 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001845
1846 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1847 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1848
1849 RegisterInputSlots(graph, layerIndex, layer);
1850 RegisterOutputSlots(graph, layerIndex, layer);
1851}
1852
Finn Williams85d36712021-01-26 22:30:06 +00001853void IDeserializer::DeserializerImpl::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001854{
1855 CHECK_LAYERS(graph, 0, layerIndex);
1856
1857 auto inputs = GetInputs(graph, layerIndex);
1858 CHECK_VALID_SIZE(inputs.size(), 1);
1859
1860 auto outputs = GetOutputs(graph, layerIndex);
1861 CHECK_VALID_SIZE(outputs.size(), 1);
1862
1863 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1864 auto fbDescriptor = fbLayer->descriptor();
1865
1866 armnn::InstanceNormalizationDescriptor descriptor;
1867 descriptor.m_Gamma = fbDescriptor->gamma();
1868 descriptor.m_Beta = fbDescriptor->beta();
1869 descriptor.m_Eps = fbDescriptor->eps();
1870 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1871
1872 const std::string layerName = GetLayerName(graph, layerIndex);
1873 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1874
1875 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1876 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1877
1878 RegisterInputSlots(graph, layerIndex, layer);
1879 RegisterOutputSlots(graph, layerIndex, layer);
1880}
1881
Finn Williams85d36712021-01-26 22:30:06 +00001882void IDeserializer::DeserializerImpl::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001883{
1884 CHECK_LAYERS(graph, 0, layerIndex);
1885
1886 auto inputs = GetInputs(graph, layerIndex);
1887 CHECK_VALID_SIZE(inputs.size(), 1);
1888
1889 auto outputs = GetOutputs(graph, layerIndex);
1890 CHECK_VALID_SIZE(outputs.size(), 1);
1891 auto outputInfo = ToTensorInfo(outputs[0]);
1892
1893 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1894 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1895
1896 auto layerName = GetLayerName(graph, layerIndex);
1897 armnn::L2NormalizationDescriptor descriptor;
1898 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001899 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001900
1901 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1902 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1903
1904 RegisterInputSlots(graph, layerIndex, layer);
1905 RegisterOutputSlots(graph, layerIndex, layer);
1906}
1907
Finn Williams85d36712021-01-26 22:30:06 +00001908void IDeserializer::DeserializerImpl::ParseLogicalBinary(GraphPtr graph, unsigned int layerIndex)
James Conroyaba90cd2020-11-06 16:28:18 +00001909{
1910 CHECK_LAYERS(graph, 0, layerIndex);
1911 CHECK_LOCATION();
1912
1913 auto inputs = GetInputs(graph, layerIndex);
1914 CHECK_VALID_SIZE(inputs.size(), 2);
1915
1916 auto outputs = GetOutputs(graph, layerIndex);
1917 CHECK_VALID_SIZE(outputs.size(), 1);
1918
1919 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer();
1920 auto fbDescriptor = fbLayer->descriptor();
1921
1922 armnn::LogicalBinaryDescriptor descriptor;
1923 descriptor.m_Operation = ToLogicalBinaryOperation(fbDescriptor->operation());
1924
1925 const std::string& layerName = GetLayerName(graph, layerIndex);
1926 IConnectableLayer* layer = m_Network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1927
1928 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1929 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1930
1931 RegisterInputSlots(graph, layerIndex, layer);
1932 RegisterOutputSlots(graph, layerIndex, layer);
1933}
1934
Finn Williams85d36712021-01-26 22:30:06 +00001935void IDeserializer::DeserializerImpl::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan26257852019-10-14 13:00:47 +01001936{
1937 CHECK_LAYERS(graph, 0, layerIndex);
1938
Finn Williams85d36712021-01-26 22:30:06 +00001939 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001940 CHECK_VALID_SIZE(inputs.size(), 1);
1941
Finn Williams85d36712021-01-26 22:30:06 +00001942 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001943 CHECK_VALID_SIZE(outputs.size(), 1);
1944
1945 armnn::LogSoftmaxDescriptor descriptor;
1946 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1947 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1948 auto layerName = GetLayerName(graph, layerIndex);
1949
1950 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1951
1952 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1953 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1954
1955 RegisterInputSlots(graph, layerIndex, layer);
1956 RegisterOutputSlots(graph, layerIndex, layer);
1957}
1958
Finn Williams85d36712021-01-26 22:30:06 +00001959void IDeserializer::DeserializerImpl::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001960{
1961 CHECK_LAYERS(graph, 0, layerIndex);
1962 auto inputs = GetInputs(graph, layerIndex);
1963 CHECK_LOCATION();
1964 CHECK_VALID_SIZE(inputs.size(), 2);
1965
1966 auto outputs = GetOutputs(graph, layerIndex);
1967 CHECK_VALID_SIZE(outputs.size(), 1);
1968
1969 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00001970 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Minimum);
1971 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001972
1973 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1974 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1975
1976 RegisterInputSlots(graph, layerIndex, layer);
1977 RegisterOutputSlots(graph, layerIndex, layer);
1978}
1979
Finn Williams85d36712021-01-26 22:30:06 +00001980void IDeserializer::DeserializerImpl::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001981{
1982 CHECK_LAYERS(graph, 0, layerIndex);
1983 auto inputs = GetInputs(graph, layerIndex);
1984 CHECK_LOCATION();
1985 CHECK_VALID_SIZE(inputs.size(), 2);
1986
1987 auto outputs = GetOutputs(graph, layerIndex);
1988 CHECK_VALID_SIZE(outputs.size(), 1);
1989
1990 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00001991 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Maximum);
1992 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001993
1994 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1995 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1996
1997 RegisterInputSlots(graph, layerIndex, layer);
1998 RegisterOutputSlots(graph, layerIndex, layer);
1999}
2000
Jim Flynne242f2d2019-05-22 14:24:13 +01002001const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
2002 unsigned int layerIndex)
2003{
2004 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
2005
2006 switch (layerType)
2007 {
2008 case Layer::Layer_ConcatLayer:
2009 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
2010 case Layer::Layer_MergerLayer:
2011 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
2012 default:
2013 throw armnn::Exception("unknown layer type, should be concat or merger");
2014 }
2015}
Simon Obute51f67772021-09-03 15:50:13 +01002016void IDeserializer::DeserializerImpl::ParseChannelShuffle(GraphPtr graph, unsigned int layerIndex)
2017{
2018 CHECK_LAYERS(graph, 0, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01002019
Simon Obute51f67772021-09-03 15:50:13 +01002020 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2021 CHECK_VALID_SIZE(inputs.size(), 1);
2022
2023 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2024 CHECK_VALID_SIZE(outputs.size(), 1);
2025
2026 armnn::ChannelShuffleDescriptor descriptor;
2027 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->axis();
2028 descriptor.m_NumGroups =
2029 graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->numGroups();
2030
2031 auto layerName = GetLayerName(graph, layerIndex);
2032 IConnectableLayer* layer = m_Network->AddChannelShuffleLayer(descriptor, layerName.c_str());
2033
2034 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2035 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2036
2037 RegisterInputSlots(graph, layerIndex, layer);
2038 RegisterOutputSlots(graph, layerIndex, layer);
2039}
Finn Williams85d36712021-01-26 22:30:06 +00002040void IDeserializer::DeserializerImpl::ParseComparison(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01002041{
2042 CHECK_LAYERS(graph, 0, layerIndex);
2043 CHECK_LOCATION();
2044
2045 auto inputs = GetInputs(graph, layerIndex);
2046 CHECK_VALID_SIZE(inputs.size(), 2);
2047
2048 auto outputs = GetOutputs(graph, layerIndex);
2049 CHECK_VALID_SIZE(outputs.size(), 1);
2050
2051 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
2052 auto fbDescriptor = fbLayer->descriptor();
2053
2054 armnn::ComparisonDescriptor descriptor;
2055 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
2056
2057 const std::string& layerName = GetLayerName(graph, layerIndex);
2058 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
2059
2060 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2061 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2062
2063 RegisterInputSlots(graph, layerIndex, layer);
2064 RegisterOutputSlots(graph, layerIndex, layer);
2065}
2066
Mike Kelly3ec30772023-03-08 13:47:17 +00002067void IDeserializer::DeserializerImpl::ParseElementwiseBinary(GraphPtr graph, unsigned int layerIndex)
2068{
2069 CHECK_LAYERS(graph, 0, layerIndex);
2070 CHECK_LOCATION();
2071
2072 auto inputs = GetInputs(graph, layerIndex);
2073 CHECK_VALID_SIZE(inputs.size(), 2);
2074
2075 auto outputs = GetOutputs(graph, layerIndex);
2076 CHECK_VALID_SIZE(outputs.size(), 1);
2077
2078 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseBinaryLayer();
2079 auto fbDescriptor = fbLayer->descriptor();
2080
2081 armnn::ElementwiseBinaryDescriptor descriptor;
2082 descriptor.m_Operation = ToElementwiseBinaryOperation(fbDescriptor->operation());
2083
2084 const std::string& layerName = GetLayerName(graph, layerIndex);
2085 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
2086
2087 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2088 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2089
2090 RegisterInputSlots(graph, layerIndex, layer);
2091 RegisterOutputSlots(graph, layerIndex, layer);
2092}
2093
Finn Williams85d36712021-01-26 22:30:06 +00002094void IDeserializer::DeserializerImpl::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
josh minor4a3c6102020-01-06 16:40:46 -06002095{
2096 CHECK_LAYERS(graph, 0, layerIndex);
2097 CHECK_LOCATION();
2098
2099 auto inputs = GetInputs(graph, layerIndex);
2100 CHECK_VALID_SIZE(inputs.size(), 1);
2101
2102 auto outputs = GetOutputs(graph, layerIndex);
2103 CHECK_VALID_SIZE(outputs.size(), 1);
2104
2105 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
2106 auto fbDescriptor = fbLayer->descriptor();
2107
2108 armnn::ElementwiseUnaryDescriptor descriptor;
Mike Kelly3ec30772023-03-08 13:47:17 +00002109 descriptor.m_Operation = ToElementwiseUnaryOperation(fbDescriptor->operation());
josh minor4a3c6102020-01-06 16:40:46 -06002110
2111 const std::string& layerName = GetLayerName(graph, layerIndex);
2112 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
2113
2114 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2115 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2116
2117 RegisterInputSlots(graph, layerIndex, layer);
2118 RegisterOutputSlots(graph, layerIndex, layer);
2119}
2120
Finn Williams85d36712021-01-26 22:30:06 +00002121void IDeserializer::DeserializerImpl::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00002122{
2123 CHECK_LAYERS(graph, 0, layerIndex);
2124 CHECK_LOCATION();
2125
2126 auto outputs = GetOutputs(graph, layerIndex);
2127 CHECK_VALID_SIZE(outputs.size(), 1);
2128
Jim Flynnac25a1b2019-02-28 10:40:49 +00002129 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01002130 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
2131 unsigned int numViews = originsDescriptor->numViews();
2132 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00002133
2134 // can now check the number of inputs == number of views
2135 auto inputs = GetInputs(graph, layerIndex);
2136 CHECK_VALID_SIZE(inputs.size(), numViews);
2137
2138 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01002139 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00002140 for (unsigned int v = 0; v < numViews; ++v)
2141 {
2142 auto originPtr = originsPtr->Get(v);
2143 for (unsigned int d = 0; d < numDimensions; ++d)
2144 {
2145 uint32_t value = originPtr->data()->Get(d);
2146 descriptor.SetViewOriginCoord(v, d, value);
2147 }
2148 }
Jim Flynne242f2d2019-05-22 14:24:13 +01002149 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00002150
Jim Flynn906f9462019-05-10 13:55:21 +01002151 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00002152 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2153 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2154
2155 RegisterInputSlots(graph, layerIndex, layer);
2156 RegisterOutputSlots(graph, layerIndex, layer);
2157}
2158
Finn Williams85d36712021-01-26 22:30:06 +00002159void IDeserializer::DeserializerImpl::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00002160{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002161 CHECK_LAYERS(graph, 0, layerIndex);
2162 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00002163 CHECK_LOCATION();
2164 CHECK_VALID_SIZE(inputs.size(), 2);
2165
Derek Lamberti8ddae332019-02-21 16:29:43 +00002166 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00002167 CHECK_VALID_SIZE(outputs.size(), 1);
2168
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002169 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00002170 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Mul);
2171 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00002172
2173 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2174 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2175
Derek Lamberti8ddae332019-02-21 16:29:43 +00002176 RegisterInputSlots(graph, layerIndex, layer);
2177 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00002178}
2179
Finn Williams85d36712021-01-26 22:30:06 +00002180void IDeserializer::DeserializerImpl::ParseFloor(GraphPtr graph, unsigned int layerIndex)
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00002181{
2182 CHECK_LAYERS(graph, 0, layerIndex);
2183 CHECK_LOCATION();
2184
2185 auto inputs = GetInputs(graph, layerIndex);
2186 CHECK_VALID_SIZE(inputs.size(), 1);
2187
2188 auto outputs = GetOutputs(graph, layerIndex);
2189 CHECK_VALID_SIZE(outputs.size(), 1);
2190
2191 auto layerName = GetLayerName(graph, layerIndex);
2192
2193 armnn::IConnectableLayer* layer;
2194
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00002195 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00002196
2197 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2198 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2199
2200 RegisterInputSlots(graph, layerIndex, layer);
2201 RegisterOutputSlots(graph, layerIndex, layer);
2202}
2203
Finn Williams85d36712021-01-26 22:30:06 +00002204void IDeserializer::DeserializerImpl::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00002205{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002206 CHECK_LAYERS(graph, 0, layerIndex);
2207 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00002208 CHECK_LOCATION();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00002209
Derek Lamberti8ddae332019-02-21 16:29:43 +00002210 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00002211 CHECK_VALID_SIZE(outputs.size(), 1);
2212
Derek Lamberti8ddae332019-02-21 16:29:43 +00002213 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002214 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00002215 auto flatBufferDescriptor = flatBufferLayer->descriptor();
2216
2217 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
2218 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
2219 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00002220 fullyConnectedDescriptor.m_ConstantWeights = flatBufferDescriptor->constantWeights();
Matthew Sloyan81beae32021-07-13 19:46:11 +01002221
2222 armnn::IConnectableLayer* layer;
2223 std::vector<unsigned int> ignoreSlots {};
2224
2225 // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
2226 // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
2227 if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00002228 {
Matthew Sloyan81beae32021-07-13 19:46:11 +01002229 // If the model stores weights and biases as members of the layer we have to read them from there
2230 // but add them to their own ConstantLayer for compatibility
2231 CHECK_VALID_SIZE(inputs.size(), 1);
2232 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
2233 layerName.c_str());
2234
2235 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
2236 auto weightsLayer = m_Network->AddConstantLayer(weightsTensor);
2237 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
2238 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsTensor.GetInfo());
2239 ignoreSlots.emplace_back(1u);
2240
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00002241 if (fullyConnectedDescriptor.m_BiasEnabled)
2242 {
Matthew Sloyan81beae32021-07-13 19:46:11 +01002243 armnn::ConstTensor biasTensor = ToConstTensor(flatBufferLayer->biases());
2244 auto biasLayer = m_Network->AddConstantLayer(biasTensor);
2245 biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
2246 biasLayer->GetOutputSlot(0).SetTensorInfo(biasTensor.GetInfo());
2247 ignoreSlots.emplace_back(2u);
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00002248 }
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00002249 }
Matthew Sloyan81beae32021-07-13 19:46:11 +01002250 else
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00002251 {
Matthew Sloyan81beae32021-07-13 19:46:11 +01002252 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
2253 layerName.c_str());
2254 uint32_t numInputs = fullyConnectedDescriptor.GetNumInputs();
2255 CHECK_VALID_SIZE(inputs.size(), numInputs);
Sadik Armaganf0a6dec2021-03-25 07:46:55 +00002256 }
2257
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00002258 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2259 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2260
Matthew Sloyan81beae32021-07-13 19:46:11 +01002261 RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
Derek Lamberti8ddae332019-02-21 16:29:43 +00002262 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00002263}
2264
Finn Williams85d36712021-01-26 22:30:06 +00002265void IDeserializer::DeserializerImpl::ParsePad(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002266{
2267 CHECK_LAYERS(graph, 0, layerIndex);
2268
Finn Williams85d36712021-01-26 22:30:06 +00002269 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002270 CHECK_VALID_SIZE(inputs.size(), 1);
2271
Finn Williams85d36712021-01-26 22:30:06 +00002272 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002273 CHECK_VALID_SIZE(outputs.size(), 1);
2274
2275 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
2276 auto flatBufferPadList = flatBufferDescriptor->padList();
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +01002277 auto paddingMode = flatBufferDescriptor->paddingMode();
David Monahan34757812019-06-19 11:47:21 +01002278 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002279
Mike Kelly51b8c312022-05-24 11:34:02 +01002280 if (flatBufferPadList->size() % 2 != 0)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002281 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002282 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2283 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002284 }
2285
2286 std::vector<std::pair<unsigned int, unsigned int>> padList;
Mike Kelly51b8c312022-05-24 11:34:02 +01002287 padList.reserve(flatBufferPadList->size() / 2);
2288 for (unsigned int i = 0; i < flatBufferPadList->size() - 1; i += 2)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002289 {
2290 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2291 }
2292
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +01002293 armnn::PadDescriptor descriptor(padList, padValue, ToPaddingMode(paddingMode));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002294
2295 auto layerName = GetLayerName(graph, layerIndex);
2296 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
2297
2298 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2299 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2300
2301 RegisterInputSlots(graph, layerIndex, layer);
2302 RegisterOutputSlots(graph, layerIndex, layer);
2303}
2304
Finn Williams85d36712021-01-26 22:30:06 +00002305void IDeserializer::DeserializerImpl::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002306{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002307 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002308
2309 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00002310 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002311
Derek Lamberti8ddae332019-02-21 16:29:43 +00002312 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002313 CHECK_VALID_SIZE(inputs.size(), 1);
2314
Derek Lamberti8ddae332019-02-21 16:29:43 +00002315 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002316 CHECK_VALID_SIZE(outputs.size(), 1);
2317 auto outputInfo = ToTensorInfo(outputs[0]);
2318
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002319 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly51b8c312022-05-24 11:34:02 +01002320 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->size()));
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002321
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002322 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002323 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2324
Derek Lamberti8ddae332019-02-21 16:29:43 +00002325 RegisterInputSlots(graph, layerIndex, layer);
2326 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002327}
2328
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002329armnn::Pooling2dDescriptor IDeserializer::DeserializerImpl::GetPooling2dDescriptor(Pooling2dDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002330 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002331{
Jan Eilers8eb25602020-03-09 12:13:48 +00002332 IgnoreUnused(layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002333 armnn::Pooling2dDescriptor desc;
2334
2335 switch (pooling2dDesc->poolType())
2336 {
2337 case PoolingAlgorithm_Average:
2338 {
2339 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002340 break;
2341 }
2342 case PoolingAlgorithm_Max:
2343 {
2344 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002345 break;
2346 }
Teresa Charlin4c600de2021-03-11 21:59:43 +00002347 case PoolingAlgorithm_L2:
2348 {
2349 desc.m_PoolType = armnn::PoolingAlgorithm::L2;
2350 break;
2351 }
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002352 default:
2353 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002354 ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002355 }
2356 }
2357
2358 switch (pooling2dDesc->outputShapeRounding())
2359 {
2360 case OutputShapeRounding_Floor:
2361 {
2362 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2363 break;
2364 }
2365 case OutputShapeRounding_Ceiling:
2366 {
2367 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
2368 break;
2369 }
2370 default:
2371 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002372 ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002373 }
2374 }
2375
2376 switch (pooling2dDesc->paddingMethod())
2377 {
2378 case PaddingMethod_Exclude:
2379 {
2380 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2381 break;
2382 }
2383 case PaddingMethod_IgnoreValue:
2384 {
2385 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
2386 break;
2387 }
2388 default:
2389 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002390 ARMNN_ASSERT_MSG(false, "Unsupported padding method");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002391 }
2392 }
2393
2394 switch (pooling2dDesc->dataLayout())
2395 {
2396 case DataLayout_NCHW:
2397 {
2398 desc.m_DataLayout = armnn::DataLayout::NCHW;
2399 break;
2400 }
2401 case DataLayout_NHWC:
2402 {
2403 desc.m_DataLayout = armnn::DataLayout::NHWC;
2404 break;
2405 }
2406 default:
2407 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002408 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002409 }
2410 }
2411
2412 desc.m_PadRight = pooling2dDesc->padRight();
2413 desc.m_PadLeft = pooling2dDesc->padLeft();
2414 desc.m_PadBottom = pooling2dDesc->padBottom();
2415 desc.m_PadTop = pooling2dDesc->padTop();
2416 desc.m_StrideX = pooling2dDesc->strideX();
2417 desc.m_StrideY = pooling2dDesc->strideY();
2418 desc.m_PoolWidth = pooling2dDesc->poolWidth();
2419 desc.m_PoolHeight = pooling2dDesc->poolHeight();
2420
2421 return desc;
2422}
2423
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002424armnn::Pooling3dDescriptor IDeserializer::DeserializerImpl::GetPooling3dDescriptor(Pooling3dDescriptor pooling3dDesc,
2425 unsigned int layerIndex)
2426{
2427 IgnoreUnused(layerIndex);
2428 armnn::Pooling3dDescriptor desc;
Finn Williams85d36712021-01-26 22:30:06 +00002429
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002430 switch (pooling3dDesc->poolType())
2431 {
2432 case PoolingAlgorithm_Average:
2433 {
2434 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
2435 break;
2436 }
2437 case PoolingAlgorithm_Max:
2438 {
2439 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
2440 break;
2441 }
2442 case PoolingAlgorithm_L2:
2443 {
2444 desc.m_PoolType = armnn::PoolingAlgorithm::L2;
2445 break;
2446 }
2447 default:
2448 {
2449 ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
2450 }
2451 }
2452
2453 switch (pooling3dDesc->outputShapeRounding())
2454 {
2455 case OutputShapeRounding_Floor:
2456 {
2457 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2458 break;
2459 }
2460 case OutputShapeRounding_Ceiling:
2461 {
2462 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
2463 break;
2464 }
2465 default:
2466 {
2467 ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
2468 }
2469 }
2470
2471 switch (pooling3dDesc->paddingMethod())
2472 {
2473 case PaddingMethod_Exclude:
2474 {
2475 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2476 break;
2477 }
2478 case PaddingMethod_IgnoreValue:
2479 {
2480 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
2481 break;
2482 }
2483 default:
2484 {
2485 ARMNN_ASSERT_MSG(false, "Unsupported padding method");
2486 }
2487 }
2488
2489 switch (pooling3dDesc->dataLayout())
2490 {
2491 case DataLayout_NCDHW:
2492 {
2493 desc.m_DataLayout = armnn::DataLayout::NCDHW;
2494 break;
2495 }
2496 case DataLayout_NDHWC:
2497 {
2498 desc.m_DataLayout = armnn::DataLayout::NDHWC;
2499 break;
2500 }
2501 default:
2502 {
2503 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
2504 }
2505 }
2506
2507 desc.m_PadRight = pooling3dDesc->padRight();
2508 desc.m_PadLeft = pooling3dDesc->padLeft();
2509 desc.m_PadBottom = pooling3dDesc->padBottom();
2510 desc.m_PadTop = pooling3dDesc->padTop();
2511 desc.m_PadFront = pooling3dDesc->padFront();
2512 desc.m_PadBack = pooling3dDesc->padBack();
2513 desc.m_StrideX = pooling3dDesc->strideX();
2514 desc.m_StrideY = pooling3dDesc->strideY();
2515 desc.m_StrideZ = pooling3dDesc->strideZ();
2516 desc.m_PoolWidth = pooling3dDesc->poolWidth();
2517 desc.m_PoolHeight = pooling3dDesc->poolHeight();
2518 desc.m_PoolDepth = pooling3dDesc->poolDepth();
2519
2520 return desc;
2521}
Finn Williams85d36712021-01-26 22:30:06 +00002522
2523void IDeserializer::DeserializerImpl::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002524{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002525 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002526
Derek Lamberti8ddae332019-02-21 16:29:43 +00002527 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00002528 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002529 CHECK_VALID_SIZE(inputs.size(), 1);
2530
Derek Lamberti8ddae332019-02-21 16:29:43 +00002531 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002532 CHECK_VALID_SIZE(outputs.size(), 1);
2533 auto outputInfo = ToTensorInfo(outputs[0]);
2534
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002535 auto pooling2dDescriptor = GetPooling2dDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002536 auto layerName = GetLayerName(graph, layerIndex);
2537 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002538 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2539
Derek Lamberti8ddae332019-02-21 16:29:43 +00002540 RegisterInputSlots(graph, layerIndex, layer);
2541 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002542}
2543
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002544void IDeserializer::DeserializerImpl::ParsePooling3d(GraphPtr graph, unsigned int layerIndex)
2545{
2546 CHECK_LAYERS(graph, 0, layerIndex);
2547
2548 auto pooling3dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->descriptor();
2549 auto inputs = GetInputs(graph, layerIndex);
2550 CHECK_VALID_SIZE(inputs.size(), 1);
2551
2552 auto outputs = GetOutputs(graph, layerIndex);
2553 CHECK_VALID_SIZE(outputs.size(), 1);
2554 auto outputInfo = ToTensorInfo(outputs[0]);
2555
2556 auto pooling3dDescriptor = GetPooling3dDescriptor(pooling3dDes, layerIndex);
2557 auto layerName = GetLayerName(graph, layerIndex);
2558 IConnectableLayer* layer = m_Network->AddPooling3dLayer(pooling3dDescriptor, layerName.c_str());
2559 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2560
2561 RegisterInputSlots(graph, layerIndex, layer);
2562 RegisterOutputSlots(graph, layerIndex, layer);
2563}
2564
Finn Williams85d36712021-01-26 22:30:06 +00002565void IDeserializer::DeserializerImpl::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
Derek Lamberti87acb272019-03-27 16:51:31 +00002566{
2567 CHECK_LAYERS(graph, 0, layerIndex);
2568
2569 auto inputs = GetInputs(graph, layerIndex);
2570 CHECK_VALID_SIZE(inputs.size(), 1);
2571
2572 auto outputs = GetOutputs(graph, layerIndex);
2573 CHECK_VALID_SIZE(outputs.size(), 1);
2574 auto outputInfo = ToTensorInfo(outputs[0]);
2575
2576 auto layerName = GetLayerName(graph, layerIndex);
2577 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
2578 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2579
2580 RegisterInputSlots(graph, layerIndex, layer);
2581 RegisterOutputSlots(graph, layerIndex, layer);
2582}
2583
/// Computes the concrete output TensorInfo for a Reshape given the requested target dimensions.
/// At most one target dimension may be -1 (the "stretch" dimension): its extent is inferred so
/// that the total element count matches the input tensor.
/// @param inputTensorInfo Info of the tensor being reshaped (supplies element count, data type
///                        and quantization info, which are all preserved).
/// @param targetDimsIn    Requested dimensions; at most one entry may be -1.
/// @return A TensorInfo identical to the input's apart from the new shape.
/// @throws ParseException if more than one target dimension is -1.
armnn::TensorInfo IDeserializer::DeserializerImpl::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    // -1 stored in a uint32_t vector is 0xFFFFFFFF; the comparison in std::find still matches it.
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        // A second -1 makes the shape ambiguous — reject it.
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(fmt::format("At most one component of shape can be -1 {}",
                                             CHECK_LOCATION().AsString()));
        }

        // Product of the non-stretch dimensions: seeding the accumulation with -1 cancels the
        // single -1 entry in the list ((-1) * (-1) == 1), leaving a positive product.
        auto targetNumElements =
            armnn::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        // Infer the stretch dimension so the total element count is preserved.
        // NOTE(review): assumes the element count divides evenly; not validated here — confirm upstream checks.
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    // Copy everything else (data type, quantization) from the input; only the shape changes.
    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}
2613
Finn Williams85d36712021-01-26 22:30:06 +00002614void IDeserializer::DeserializerImpl::ParseRank(GraphPtr graph, unsigned int layerIndex)
Finn Williams2605b232020-06-10 15:53:46 +01002615{
2616 CHECK_LAYERS(graph, 0, layerIndex);
2617
Finn Williams85d36712021-01-26 22:30:06 +00002618 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002619 CHECK_VALID_SIZE(inputs.size(), 1);
2620
Finn Williams85d36712021-01-26 22:30:06 +00002621 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002622 CHECK_VALID_SIZE(outputs.size(), 1);
2623
2624 auto layerName = GetLayerName(graph, layerIndex);
2625 IConnectableLayer* layer = m_Network->AddRankLayer( layerName.c_str());
2626
2627 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2628 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2629
2630 RegisterInputSlots(graph, layerIndex, layer);
2631 RegisterOutputSlots(graph, layerIndex, layer);
2632}
2633
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002634void IDeserializer::DeserializerImpl::ParseReduce(GraphPtr graph, unsigned int layerIndex)
2635{
2636 CHECK_LAYERS(graph, 0, layerIndex);
2637 CHECK_LOCATION();
2638
2639 auto inputs = GetInputs(graph, layerIndex);
2640 CHECK_VALID_SIZE(inputs.size(), 1);
2641
2642 auto outputs = GetOutputs(graph, layerIndex);
2643 CHECK_VALID_SIZE(outputs.size(), 1);
2644
2645 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ReduceLayer();
2646 auto fbDescriptor = fbLayer->descriptor();
2647 auto flatBufferAxis = fbDescriptor->axis();
2648
2649 armnn::ReduceDescriptor descriptor;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002650 descriptor.m_KeepDims = fbDescriptor->keepDims();
2651 descriptor.m_vAxis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2652 descriptor.m_ReduceOperation = ToReduceOperation(fbDescriptor->reduceOperation());
2653
2654 const std::string& layerName = GetLayerName(graph, layerIndex);
2655 IConnectableLayer* layer = m_Network->AddReduceLayer(descriptor, layerName.c_str());
2656
2657 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2658 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2659
2660 RegisterInputSlots(graph, layerIndex, layer);
2661 RegisterOutputSlots(graph, layerIndex, layer);
2662}
2663
/// Deserializes a Reshape layer. Resolves any -1 "stretch" dimension via OutputShapeOfReshape
/// and validates the resolved shape against the serialized output dimensions.
void IDeserializer::DeserializerImpl::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    // Target shape as recorded in the serialized descriptor (may contain -1 as 0xFFFFFFFF).
    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    // Resolve the stretch dimension (if any) into a fully concrete output shape.
    armnn::TensorInfo reshapeOutputTensorInfo = DeserializerImpl::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    // NOTE(review): the shape consistency check only runs when the layer has more than one
    // input — presumably to cover the variant where the shape arrives as a second input;
    // confirm single-input reshapes are intentionally exempt.
    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    // Use the resolved shape, not the serialized one, so a -1 target is fully expanded.
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2706
Finn Williams85d36712021-01-26 22:30:06 +00002707void IDeserializer::DeserializerImpl::ParseResize(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002708{
2709 CHECK_LAYERS(graph, 0, layerIndex);
2710
Finn Williams85d36712021-01-26 22:30:06 +00002711 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002712 CHECK_VALID_SIZE(inputs.size(), 1);
2713
Finn Williams85d36712021-01-26 22:30:06 +00002714 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002715 CHECK_VALID_SIZE(outputs.size(), 1);
2716
2717 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2718
2719 armnn::ResizeDescriptor descriptor;
2720 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
2721 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
2722 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
2723 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002724 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2725 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002726
2727 auto layerName = GetLayerName(graph, layerIndex);
2728 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2729
2730 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2731 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2732
2733 RegisterInputSlots(graph, layerIndex, layer);
2734 RegisterOutputSlots(graph, layerIndex, layer);
2735}
2736
Jan Eilers1b2654f2021-09-24 15:45:46 +01002737
2738/// @Note The ResizeBiliniar operation was deprecated and removed in favor of the Resize operation.
2739/// This function is kept for backwards compatibility.
Finn Williams85d36712021-01-26 22:30:06 +00002740void IDeserializer::DeserializerImpl::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002741{
2742 CHECK_LAYERS(graph, 0, layerIndex);
2743
Finn Williams85d36712021-01-26 22:30:06 +00002744 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002745 CHECK_VALID_SIZE(inputs.size(), 1);
2746
Finn Williams85d36712021-01-26 22:30:06 +00002747 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002748 CHECK_VALID_SIZE(outputs.size(), 1);
2749
2750 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2751
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002752 armnn::ResizeDescriptor descriptor;
2753 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002754 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002755 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
2756 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002757 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2758 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002759
2760 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002761 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002762
2763 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2764 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2765
2766 RegisterInputSlots(graph, layerIndex, layer);
2767 RegisterOutputSlots(graph, layerIndex, layer);
2768}
2769
Keith Davis3ae3f972021-05-21 16:33:48 +01002770void IDeserializer::DeserializerImpl::ParseShape(GraphPtr graph, unsigned int layerIndex)
2771{
2772 CHECK_LAYERS(graph, 0, layerIndex);
2773
2774 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2775 CHECK_VALID_SIZE(inputs.size(), 1);
2776
2777 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2778 CHECK_VALID_SIZE(outputs.size(), 1);
2779
2780 auto layerName = GetLayerName(graph, layerIndex);
2781 IConnectableLayer* layer = m_Network->AddShapeLayer( layerName.c_str());
2782
2783 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2784 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2785
2786 RegisterInputSlots(graph, layerIndex, layer);
2787 RegisterOutputSlots(graph, layerIndex, layer);
2788}
2789
Finn Williams85d36712021-01-26 22:30:06 +00002790void IDeserializer::DeserializerImpl::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002791{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002792 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002793
Finn Williams85d36712021-01-26 22:30:06 +00002794 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002795 CHECK_VALID_SIZE(inputs.size(), 1);
2796
Finn Williams85d36712021-01-26 22:30:06 +00002797 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002798 CHECK_VALID_SIZE(outputs.size(), 1);
2799
2800 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00002801 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Sadik Armaganfd0cae32021-11-08 17:18:31 +00002802 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->axis();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002803 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002804
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002805 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2806
2807 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2808 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2809
Derek Lamberti8ddae332019-02-21 16:29:43 +00002810 RegisterInputSlots(graph, layerIndex, layer);
2811 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00002812}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002813
Finn Williams85d36712021-01-26 22:30:06 +00002814void IDeserializer::DeserializerImpl::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002815{
2816 CHECK_LAYERS(graph, 0, layerIndex);
2817
Finn Williams85d36712021-01-26 22:30:06 +00002818 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002819 CHECK_VALID_SIZE(inputs.size(), 1);
2820
Finn Williams85d36712021-01-26 22:30:06 +00002821 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002822 CHECK_VALID_SIZE(outputs.size(), 1);
2823
2824 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2825 auto flatBufferPadList = flatBufferDescriptor->padList();
2826 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
2827
Mike Kelly51b8c312022-05-24 11:34:02 +01002828 if (flatBufferPadList->size() % 2 != 0)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002829 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002830 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2831 CHECK_LOCATION().AsString()));
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002832 }
2833
2834 std::vector<std::pair<unsigned int, unsigned int>> padList;
Mike Kelly51b8c312022-05-24 11:34:02 +01002835 padList.reserve(flatBufferPadList->size() / 2);
2836 for (unsigned int i = 0; i < flatBufferPadList->size() - 1; i += 2)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002837 {
2838 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2839 }
2840
2841 armnn::SpaceToBatchNdDescriptor descriptor;
2842 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2843 descriptor.m_BlockShape =
2844 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2845 descriptor.m_PadList = padList;
2846
2847 auto layerName = GetLayerName(graph, layerIndex);
2848 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2849
2850 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2851 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2852
2853 RegisterInputSlots(graph, layerIndex, layer);
2854 RegisterOutputSlots(graph, layerIndex, layer);
2855}
2856
Finn Williams85d36712021-01-26 22:30:06 +00002857void IDeserializer::DeserializerImpl::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002858{
2859 CHECK_LAYERS(graph, 0, layerIndex);
2860
Finn Williams85d36712021-01-26 22:30:06 +00002861 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002862 CHECK_VALID_SIZE(inputs.size(), 1);
2863
Finn Williams85d36712021-01-26 22:30:06 +00002864 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002865 CHECK_VALID_SIZE(outputs.size(), 1);
2866
2867 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2868
2869 armnn::SpaceToDepthDescriptor descriptor;
2870 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2871 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2872
2873 auto layerName = GetLayerName(graph, layerIndex);
2874 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2875
2876 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2877 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2878
2879 RegisterInputSlots(graph, layerIndex, layer);
2880 RegisterOutputSlots(graph, layerIndex, layer);
2881}
2882
Finn Williams85d36712021-01-26 22:30:06 +00002883armnn::NormalizationDescriptor IDeserializer::DeserializerImpl::GetNormalizationDescriptor(
2884 NormalizationDescriptorPtr normalizationDescriptor,
Nina Drozd57728782019-02-27 10:53:27 +00002885 unsigned int layerIndex)
2886{
Jan Eilers8eb25602020-03-09 12:13:48 +00002887 IgnoreUnused(layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002888 armnn::NormalizationDescriptor desc;
2889
2890 switch (normalizationDescriptor->normChannelType())
2891 {
2892 case NormalizationAlgorithmChannel_Across:
2893 {
2894 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
2895 break;
2896 }
2897 case NormalizationAlgorithmChannel_Within:
2898 {
2899 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
2900 break;
2901 }
2902 default:
2903 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002904 ARMNN_ASSERT_MSG(false, "Unsupported normalization channel type");
Nina Drozd57728782019-02-27 10:53:27 +00002905 }
2906 }
2907
2908 switch (normalizationDescriptor->normMethodType())
2909 {
2910 case NormalizationAlgorithmMethod_LocalBrightness:
2911 {
2912 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
2913 break;
2914 }
2915 case NormalizationAlgorithmMethod_LocalContrast:
2916 {
2917 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
2918 break;
2919 }
2920 default:
2921 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002922 ARMNN_ASSERT_MSG(false, "Unsupported normalization method type");
Nina Drozd57728782019-02-27 10:53:27 +00002923 }
2924 }
2925
2926 switch (normalizationDescriptor->dataLayout())
2927 {
2928 case DataLayout_NCHW:
2929 {
2930 desc.m_DataLayout = armnn::DataLayout::NCHW;
2931 break;
2932 }
2933 case DataLayout_NHWC:
2934 {
2935 desc.m_DataLayout = armnn::DataLayout::NHWC;
2936 break;
2937 }
2938 default:
2939 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002940 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Nina Drozd57728782019-02-27 10:53:27 +00002941 }
2942 }
2943
2944 desc.m_Alpha = normalizationDescriptor->alpha();
2945 desc.m_Beta = normalizationDescriptor->beta();
2946 desc.m_K = normalizationDescriptor->k();
2947 desc.m_NormSize = normalizationDescriptor->normSize();
2948
2949 return desc;
2950}
2951
Finn Williams85d36712021-01-26 22:30:06 +00002952void IDeserializer::DeserializerImpl::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
Nina Drozd57728782019-02-27 10:53:27 +00002953{
2954 CHECK_LAYERS(graph, 0, layerIndex);
2955
2956 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2957
Finn Williams85d36712021-01-26 22:30:06 +00002958 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002959 CHECK_VALID_SIZE(inputs.size(), 1);
2960
Finn Williams85d36712021-01-26 22:30:06 +00002961 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002962 CHECK_VALID_SIZE(outputs.size(), 1);
2963
2964 auto outputInfo = ToTensorInfo(outputs[0]);
2965
2966 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2967 auto layerName = GetLayerName(graph, layerIndex);
2968
2969 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2970 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2971
2972 RegisterInputSlots(graph, layerIndex, layer);
2973 RegisterOutputSlots(graph, layerIndex, layer);
2974}
2975
Finn Williams85d36712021-01-26 22:30:06 +00002976void IDeserializer::DeserializerImpl::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan8b42a382019-03-01 14:24:49 +00002977{
2978 CHECK_LAYERS(graph, 0, layerIndex);
2979 auto inputs = GetInputs(graph, layerIndex);
2980 CHECK_LOCATION();
2981 CHECK_VALID_SIZE(inputs.size(), 1);
2982
2983 auto outputs = GetOutputs(graph, layerIndex);
2984 CHECK_VALID_SIZE(outputs.size(), 1);
2985
2986 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagan8b42a382019-03-01 14:24:49 +00002987
josh minor4a3c6102020-01-06 16:40:46 -06002988 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
2989 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
Sadik Armagan8b42a382019-03-01 14:24:49 +00002990 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2991 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2992
2993 RegisterInputSlots(graph, layerIndex, layer);
2994 RegisterOutputSlots(graph, layerIndex, layer);
2995}
2996
Finn Williams85d36712021-01-26 22:30:06 +00002997void IDeserializer::DeserializerImpl::ParseSlice(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002998{
2999 CHECK_LAYERS(graph, 0, layerIndex);
3000
3001 auto inputs = GetInputs(graph, layerIndex);
3002 CHECK_VALID_SIZE(inputs.size(), 1);
3003
3004 auto outputs = GetOutputs(graph, layerIndex);
3005 CHECK_VALID_SIZE(outputs.size(), 1);
3006
3007 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
3008
3009 auto fbBegin = fbDescriptor->begin();
3010 auto fbSize = fbDescriptor->size();
3011
Mike Kelly51b8c312022-05-24 11:34:02 +01003012 if (fbBegin->size() != fbSize->size())
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01003013 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01003014 throw ParseException(fmt::format("Begin and size descriptors must have the same length {}",
3015 CHECK_LOCATION().AsString()));
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01003016 }
3017
3018 armnn::SliceDescriptor descriptor;
3019 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
3020 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
3021
3022 auto layerName = GetLayerName(graph, layerIndex);
3023 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
3024
3025 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3026 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3027
3028 RegisterInputSlots(graph, layerIndex, layer);
3029 RegisterOutputSlots(graph, layerIndex, layer);
3030}
3031
Finn Williams85d36712021-01-26 22:30:06 +00003032void IDeserializer::DeserializerImpl::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003033{
3034 CHECK_LAYERS(graph, 0, layerIndex);
3035
Finn Williams85d36712021-01-26 22:30:06 +00003036 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003037 CHECK_VALID_SIZE(inputs.size(), 1);
3038
Finn Williams85d36712021-01-26 22:30:06 +00003039 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003040 CHECK_VALID_SIZE(outputs.size(), 1);
3041
3042 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
3043
3044 auto flatBufferBegin = flatBufferDescriptor->begin();
3045 auto flatBufferEnd = flatBufferDescriptor->end();
3046 auto flatBufferStride = flatBufferDescriptor->stride();
3047
Mike Kelly51b8c312022-05-24 11:34:02 +01003048 if (!(flatBufferBegin->size() == flatBufferEnd->size() &&
3049 flatBufferBegin->size() == flatBufferStride->size()))
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003050 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01003051 throw ParseException(fmt::format("The size of the begin, end, and stride must be equal {}",
3052 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003053 }
3054
3055 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
3056 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
3057 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
3058
3059 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
3060 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
3061 descriptor.m_EndMask = flatBufferDescriptor->endMask();
3062 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
3063 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
3064 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
3065 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
3066
3067 auto layerName = GetLayerName(graph, layerIndex);
3068 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
3069
3070 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3071 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3072
3073 RegisterInputSlots(graph, layerIndex, layer);
3074 RegisterOutputSlots(graph, layerIndex, layer);
3075}
3076
Finn Williams85d36712021-01-26 22:30:06 +00003077void IDeserializer::DeserializerImpl::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
Conor Kennedyda1f9752019-03-01 14:37:12 +00003078{
3079 CHECK_LAYERS(graph, 0, layerIndex);
3080 auto inputs = GetInputs(graph, layerIndex);
3081 CHECK_LOCATION();
3082 CHECK_VALID_SIZE(inputs.size(), 2);
3083
3084 auto outputs = GetOutputs(graph, layerIndex);
3085 CHECK_VALID_SIZE(outputs.size(), 1);
3086
3087 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00003088 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Sub);
3089 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Conor Kennedyda1f9752019-03-01 14:37:12 +00003090
3091 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3092 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3093
3094 RegisterInputSlots(graph, layerIndex, layer);
3095 RegisterOutputSlots(graph, layerIndex, layer);
3096}
3097
Finn Williams85d36712021-01-26 22:30:06 +00003098void IDeserializer::DeserializerImpl::ParseGather(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003099{
3100 CHECK_LAYERS(graph, 0, layerIndex);
3101
Finn Williams85d36712021-01-26 22:30:06 +00003102 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003103 CHECK_VALID_SIZE(inputs.size(), 2);
3104
Finn Williams85d36712021-01-26 22:30:06 +00003105 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003106 CHECK_VALID_SIZE(outputs.size(), 1);
3107
Teresa Charlin52664732020-06-29 16:27:03 +01003108 armnn::GatherDescriptor descriptor;
3109 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_GatherLayer()->descriptor()->axis();
3110
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003111 auto layerName = GetLayerName(graph, layerIndex);
Teresa Charlin52664732020-06-29 16:27:03 +01003112 IConnectableLayer* layer = m_Network->AddGatherLayer(descriptor, layerName.c_str());
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003113
3114 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003115 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3116
3117 RegisterInputSlots(graph, layerIndex, layer);
3118 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003119}
3120
Teresa Charlin6966bfa2022-04-25 17:14:50 +01003121void IDeserializer::DeserializerImpl::ParseGatherNd(GraphPtr graph, unsigned int layerIndex)
3122{
3123 CHECK_LAYERS(graph, 0, layerIndex);
3124
3125 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
3126 CHECK_VALID_SIZE(inputs.size(), 2);
3127
3128 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
3129 CHECK_VALID_SIZE(outputs.size(), 1);
3130
3131 auto layerName = GetLayerName(graph, layerIndex);
3132 IConnectableLayer* layer = m_Network->AddGatherNdLayer(layerName.c_str());
3133
3134 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3135 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3136
3137 RegisterInputSlots(graph, layerIndex, layer);
3138 RegisterOutputSlots(graph, layerIndex, layer);
3139}
3140
Finn Williams85d36712021-01-26 22:30:06 +00003141void IDeserializer::DeserializerImpl::ParseMean(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003142{
3143 CHECK_LAYERS(graph, 0, layerIndex);
3144
Finn Williams85d36712021-01-26 22:30:06 +00003145 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003146 CHECK_VALID_SIZE(inputs.size(), 1);
3147
Finn Williams85d36712021-01-26 22:30:06 +00003148 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003149 CHECK_VALID_SIZE(outputs.size(), 1);
3150
3151 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
3152 auto flatBufferAxis = flatBufferDescriptor->axis();
3153 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
3154
3155 armnn::MeanDescriptor descriptor;
3156 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
3157 descriptor.m_KeepDims = flatBufferKeepDims;
3158
3159 auto layerName = GetLayerName(graph, layerIndex);
3160 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
3161
3162 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3163 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3164
3165 RegisterInputSlots(graph, layerIndex, layer);
3166 RegisterOutputSlots(graph, layerIndex, layer);
3167}
3168
Finn Williams85d36712021-01-26 22:30:06 +00003169void IDeserializer::DeserializerImpl::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
Jim Flynn18ce3382019-03-08 11:08:30 +00003170{
3171 CHECK_LAYERS(graph, 0, layerIndex);
3172
Finn Williams85d36712021-01-26 22:30:06 +00003173 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00003174 CHECK_VALID_SIZE(inputs.size(), 1);
3175
Finn Williams85d36712021-01-26 22:30:06 +00003176 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00003177
3178 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
3179 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
3180 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
3181 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
3182 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
3183 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
3184
3185 // Check numViews and numDimensions corresponds to the ones already serialized ...
3186 // numViews == flatBufferViewSizes.size();
3187 // foreach: numDimensions == flatBufferViewSizes[x].size();
3188
3189 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
3190 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
3191 {
3192 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
3193 {
3194 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
3195 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
3196 }
3197 }
3198
3199 auto layerName = GetLayerName(graph, layerIndex);
3200 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
3201
3202 // I could have as many outputs as views ...
3203 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
3204 {
3205 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
3206 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
3207 }
3208
3209 RegisterInputSlots(graph, layerIndex, layer);
3210 RegisterOutputSlots(graph, layerIndex, layer);
3211}
3212
Finn Williams85d36712021-01-26 22:30:06 +00003213armnn::LstmDescriptor IDeserializer::DeserializerImpl::GetLstmDescriptor(LstmDescriptorPtr lstmDescriptor)
Jim Flynn11af3752019-03-19 17:22:29 +00003214{
3215 armnn::LstmDescriptor desc;
3216
3217 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
3218 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
3219 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
3220 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
3221 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
3222 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01003223 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00003224
3225 return desc;
3226}
3227
// Deserializes an LSTM layer.
//
// The mandatory weights/biases are always read; the CIFG, projection,
// peephole and layer-norm parameter groups are read only when the
// corresponding descriptor flag enables them.
//
// NOTE: LstmInputParams stores raw pointers into the local ConstTensor
// variables below, so every ConstTensor must remain alive until
// AddLstmLayer() has been called — do not reorder or scope-reduce them.
void IDeserializer::DeserializerImpl::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    // Inputs: input, outputStateIn, cellStateIn.
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    // Outputs: scratchBuffer, outputStateOut, cellStateOut, output.
    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters (present regardless of descriptor flags).
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Optional input-gate parameters: only present when CIFG is disabled
    // (CIFG couples the input and forget gates, removing the input gate).
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Optional projection parameters.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Optional peephole parameters.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Optional layer-normalization parameters; the input layer-norm weights
    // additionally require CIFG to be disabled (no input gate otherwise).
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (lstmDescriptor.m_LayerNormEnabled)
    {
        if (!lstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3343
Finn Williams85d36712021-01-26 22:30:06 +00003344armnn::QLstmDescriptor IDeserializer::DeserializerImpl::GetQLstmDescriptor(QLstmDescriptorPtr qLstmDescriptor)
James Conroy8d333182020-05-13 10:27:58 +01003345{
3346 armnn::QLstmDescriptor desc;
3347
3348 desc.m_CifgEnabled = qLstmDescriptor->cifgEnabled();
3349 desc.m_PeepholeEnabled = qLstmDescriptor->peepholeEnabled();
3350 desc.m_ProjectionEnabled = qLstmDescriptor->projectionEnabled();
3351 desc.m_LayerNormEnabled = qLstmDescriptor->layerNormEnabled();
3352
3353 desc.m_CellClip = qLstmDescriptor->cellClip();
3354 desc.m_ProjectionClip = qLstmDescriptor->projectionClip();
3355
3356 desc.m_InputIntermediateScale = qLstmDescriptor->inputIntermediateScale();
3357 desc.m_ForgetIntermediateScale = qLstmDescriptor->forgetIntermediateScale();
3358 desc.m_CellIntermediateScale = qLstmDescriptor->cellIntermediateScale();
3359 desc.m_OutputIntermediateScale = qLstmDescriptor->outputIntermediateScale();
3360
3361 desc.m_HiddenStateScale = qLstmDescriptor->hiddenStateScale();
3362 desc.m_HiddenStateZeroPoint = qLstmDescriptor->hiddenStateZeroPoint();
3363
3364 return desc;
3365}
3366
// Deserializes a QLstm (quantized LSTM with configurable features) layer.
//
// NOTE: LstmInputParams stores raw pointers into the local ConstTensor
// variables below, so every ConstTensor must remain alive until
// AddQLstmLayer() has been called — do not reorder or scope-reduce them.
void IDeserializer::DeserializerImpl::ParseQLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    // Inputs: input, outputStateIn, cellStateIn.
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    // Outputs: outputStateOut, cellStateOut, output.
    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 3);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto qLstmDescriptor = GetQLstmDescriptor(flatBufferDescriptor);
    armnn::LstmInputParams qLstmInputParams;

    // Mandatory params
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    qLstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    qLstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    qLstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    qLstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    qLstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    qLstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    qLstmInputParams.m_ForgetGateBias = &forgetGateBias;
    qLstmInputParams.m_CellBias = &cellBias;
    qLstmInputParams.m_OutputGateBias = &outputGateBias;

    // Optional CIFG params (input gate only exists when CIFG is disabled)
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor inputGateBias;

    if (!qLstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        qLstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        qLstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        qLstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Optional projection params
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;

    if (qLstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        qLstmInputParams.m_ProjectionWeights = &projectionWeights;
        qLstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Optional peephole params (cellToInput additionally requires no CIFG)
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;

    if (qLstmDescriptor.m_PeepholeEnabled)
    {
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
            qLstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        }

        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        qLstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        qLstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Optional layer norm params (input layer-norm additionally requires no CIFG)
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;

    if (qLstmDescriptor.m_LayerNormEnabled)
    {
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            qLstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }

        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        qLstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        qLstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        qLstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());

    armnn::TensorInfo outputStateOutInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputStateOutInfo);

    armnn::TensorInfo cellStateOutInfo = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(cellStateOutInfo);

    armnn::TensorInfo outputInfo = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3492
// Deserializes a QuantizedLstm layer. Unlike Lstm/QLstm, all twelve
// weight/bias tensors are mandatory — there are no optional feature groups.
//
// NOTE: QuantizedLstmInputParams stores raw pointers into the local
// ConstTensor variables below, so they must remain alive until
// AddQuantizedLstmLayer() has been called — do not reorder or scope-reduce.
void IDeserializer::DeserializerImpl::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    // Inputs: input, cellStateIn, outputStateIn.
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    // Outputs: cellStateOut, output.
    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    armnn::QuantizedLstmInputParams lstmInputParams;

    armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_InputGateBias = &inputGateBias;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3546
Finn Williams85d36712021-01-26 22:30:06 +00003547void IDeserializer::DeserializerImpl::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003548{
3549 CHECK_LAYERS(graph, 0, layerIndex);
3550
Finn Williams85d36712021-01-26 22:30:06 +00003551 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003552 CHECK_VALID_SIZE(inputs.size(), 1);
3553
Finn Williams85d36712021-01-26 22:30:06 +00003554 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003555 CHECK_VALID_SIZE(outputs.size(), 1);
3556
3557 const std::string layerName = GetLayerName(graph, layerIndex);
3558 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
3559
3560 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3561 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3562
3563 RegisterInputSlots(graph, layerIndex, layer);
3564 RegisterOutputSlots(graph, layerIndex, layer);
3565}
3566
Finn Williams85d36712021-01-26 22:30:06 +00003567void IDeserializer::DeserializerImpl::ParseMerge(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003568{
3569 CHECK_LAYERS(graph, 0, layerIndex);
3570
Finn Williams85d36712021-01-26 22:30:06 +00003571 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003572 CHECK_VALID_SIZE(inputs.size(), 2);
3573
Finn Williams85d36712021-01-26 22:30:06 +00003574 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003575 CHECK_VALID_SIZE(outputs.size(), 1);
3576
3577 const std::string layerName = GetLayerName(graph, layerIndex);
3578 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
3579
3580 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3581 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3582
3583 RegisterInputSlots(graph, layerIndex, layer);
3584 RegisterOutputSlots(graph, layerIndex, layer);
3585}
3586
Finn Williams85d36712021-01-26 22:30:06 +00003587void IDeserializer::DeserializerImpl::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganeff363d2019-04-05 15:25:46 +01003588{
3589 CHECK_LAYERS(graph, 0, layerIndex);
3590 auto inputs = GetInputs(graph, layerIndex);
3591 CHECK_LOCATION();
3592 CHECK_VALID_SIZE(inputs.size(), 2);
3593
3594 auto outputs = GetOutputs(graph, layerIndex);
3595 CHECK_VALID_SIZE(outputs.size(), 2);
3596
3597 auto layerName = GetLayerName(graph, layerIndex);
3598 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
3599
3600 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
3601 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
3602
3603 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
3604 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
3605
3606 RegisterInputSlots(graph, layerIndex, layer);
3607 RegisterOutputSlots(graph, layerIndex, layer);
3608}
3609
Finn Williams85d36712021-01-26 22:30:06 +00003610void IDeserializer::DeserializerImpl::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01003611{
3612 CHECK_LAYERS(graph, 0, layerIndex);
3613 auto inputs = GetInputs(graph, layerIndex);
3614 CHECK_LOCATION();
3615 CHECK_VALID_SIZE(inputs.size(), 2);
3616
3617 auto outputs = GetOutputs(graph, layerIndex);
3618 CHECK_VALID_SIZE(outputs.size(), 1);
3619
3620 auto layerName = GetLayerName(graph, layerIndex);
3621 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
3622
3623 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3624 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3625
3626 RegisterInputSlots(graph, layerIndex, layer);
3627 RegisterOutputSlots(graph, layerIndex, layer);
3628}
3629
Finn Williams85d36712021-01-26 22:30:06 +00003630void IDeserializer::DeserializerImpl::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
Mike Kellyc9ea45a2020-02-28 18:11:58 +00003631{
3632 CHECK_LAYERS(graph, 0, layerIndex);
3633
3634 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
3635
3636 auto inputs = GetInputs(graph, layerIndex);
3637 CHECK_VALID_SIZE(inputs.size(), 1);
3638
3639 auto outputs = GetOutputs(graph, layerIndex);
3640 CHECK_VALID_SIZE(outputs.size(), 1);
3641 auto outputInfo = ToTensorInfo(outputs[0]);
3642
3643 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly51b8c312022-05-24 11:34:02 +01003644 const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->size()));
Mike Kellyc9ea45a2020-02-28 18:11:58 +00003645
3646 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
3647 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
3648
3649 RegisterInputSlots(graph, layerIndex, layer);
3650 RegisterOutputSlots(graph, layerIndex, layer);
3651}
3652
Finn Williams85d36712021-01-26 22:30:06 +00003653void IDeserializer::DeserializerImpl::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01003654{
3655 CHECK_LAYERS(graph, 0, layerIndex);
3656
3657 auto inputs = GetInputs(graph, layerIndex);
3658 CHECK_VALID_SIZE(inputs.size(), 1);
3659
3660 auto outputs = GetOutputs(graph, layerIndex);
3661 CHECK_VALID_SIZE(outputs.size(), 1);
3662
3663 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
3664 auto layerName = GetLayerName(graph, layerIndex);
3665 auto serializerDescriptor = serializerLayer->descriptor();
3666
3667 armnn::TransposeConvolution2dDescriptor descriptor;
3668 descriptor.m_PadLeft = serializerDescriptor->padLeft();
3669 descriptor.m_PadRight = serializerDescriptor->padRight();
3670 descriptor.m_PadTop = serializerDescriptor->padTop();
3671 descriptor.m_PadBottom = serializerDescriptor->padBottom();
3672 descriptor.m_StrideX = serializerDescriptor->strideX();
3673 descriptor.m_StrideY = serializerDescriptor->strideY();;
3674 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
3675 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
3676
3677 // weights & biases
3678 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
3679 armnn::Optional<armnn::ConstTensor> optionalBiases;
3680 if (descriptor.m_BiasEnabled)
3681 {
3682 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
3683 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
3684 }
3685
3686 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
3687 weights,
3688 optionalBiases,
3689 layerName.c_str());
3690
3691 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3692 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3693
3694 RegisterInputSlots(graph, layerIndex, layer);
3695 RegisterOutputSlots(graph, layerIndex, layer);
3696}
3697
Finn Williams85d36712021-01-26 22:30:06 +00003698void IDeserializer::DeserializerImpl::ParseStack(GraphPtr graph, unsigned int layerIndex)
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003699{
3700 CHECK_LAYERS(graph, 0, layerIndex);
3701 auto inputs = GetInputs(graph, layerIndex);
3702
3703 auto outputs = GetOutputs(graph, layerIndex);
3704 CHECK_VALID_SIZE(outputs.size(), 1);
3705
3706 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
3707 unsigned int axis = flatBufferDescriptor->axis();
3708 unsigned int numInputs = flatBufferDescriptor->numInputs();
3709 CHECK_VALID_SIZE(inputs.size(), numInputs);
3710
3711 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
3712 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
3713 flatBufferInputShape->begin() + flatBufferInputShape->size());
3714
3715 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
3716 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
3717
3718 for (unsigned int i=0; i<inputs.size(); ++i)
3719 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01003720 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003721 if (descriptor.m_InputShape != inputShape)
3722 {
3723 std::stringstream ss;
3724 ss << "Shape of input "
3725 << i
3726 << " "
3727 << inputShape
3728 << " does not equal defined input shape "
3729 << descriptor.m_InputShape
3730 << ": "
3731 << CHECK_LOCATION().AsString();
3732 throw ParseException(ss.str());
3733 }
3734 }
3735
3736 auto layerName = GetLayerName(graph, layerIndex);
3737 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
3738
3739 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3740 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3741
3742 RegisterInputSlots(graph, layerIndex, layer);
3743 RegisterOutputSlots(graph, layerIndex, layer);
3744}
3745
Finn Williams85d36712021-01-26 22:30:06 +00003746void IDeserializer::DeserializerImpl::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar85121a22019-10-23 10:41:35 +01003747{
3748 CHECK_LAYERS(graph, 0, layerIndex);
3749
3750 auto inputs = GetInputs(graph, layerIndex);
3751 auto outputs = GetOutputs(graph, layerIndex);
3752
3753 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
3754 auto fbDescriptor = fbLayer->descriptor();
3755
3756 armnn::StandInDescriptor descriptor;
3757 descriptor.m_NumInputs = fbDescriptor->numInputs();
3758 descriptor.m_NumOutputs = fbDescriptor->numOutputs();
3759
3760 CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
3761 CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);
3762
3763 const std::string layerName = GetLayerName(graph, layerIndex);
3764 armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());
3765
3766 for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
3767 {
3768 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
3769 layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
3770 }
3771
3772 RegisterInputSlots(graph, layerIndex, layer);
3773 RegisterOutputSlots(graph, layerIndex, layer);
3774}
3775
Narumol Prangnawarata0162e12021-07-23 14:47:49 +01003776armnn::UnidirectionalSequenceLstmDescriptor IDeserializer::DeserializerImpl::GetUnidirectionalSequenceLstmDescriptor(
3777 UnidirectionalSequenceLstmDescriptorPtr descriptor)
3778{
3779 armnn::UnidirectionalSequenceLstmDescriptor desc;
3780
3781 desc.m_ActivationFunc = descriptor->activationFunc();
3782 desc.m_ClippingThresCell = descriptor->clippingThresCell();
3783 desc.m_ClippingThresProj = descriptor->clippingThresProj();
3784 desc.m_CifgEnabled = descriptor->cifgEnabled();
3785 desc.m_PeepholeEnabled = descriptor->peepholeEnabled();
3786 desc.m_ProjectionEnabled = descriptor->projectionEnabled();
3787 desc.m_LayerNormEnabled = descriptor->layerNormEnabled();
3788 desc.m_TimeMajor = descriptor->timeMajor();
3789
3790 return desc;
3791}
3792
// Deserializes a UnidirectionalSequenceLstm layer: reads the descriptor and all
// weight/bias tensors from the flatbuffer and adds the layer to the network.
// Optional parameter sets (CIFG, peephole, projection, layer normalization) are
// read only when the corresponding descriptor flag enables them.
//
// NOTE: lstmInputParams stores raw pointers to the local ConstTensor objects
// declared below; they must all remain alive until
// AddUnidirectionalSequenceLstmLayer() has been called.
void IDeserializer::DeserializerImpl::ParseUnidirectionalSequenceLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    // 3 inputs expected — presumably the data input plus the two state tensors; TODO confirm against the serializer.
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    // 3 outputs expected; each gets its TensorInfo set below.
    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 3);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto descriptor = GetUnidirectionalSequenceLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters — always present regardless of the descriptor flags.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate parameters — only serialized when CIFG (coupled input/forget gate) is disabled.
    // Declared at function scope so the pointers stored below stay valid.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!descriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;

        // The input-gate peephole weight additionally requires peephole to be enabled.
        if (descriptor.m_PeepholeEnabled)
        {
            cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
            lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        }
    }

    // Projection parameters — only present when projection is enabled.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (descriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Peephole weights for the forget/output gates — only present when peephole is enabled.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (descriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Layer-normalization weights — only present when layer norm is enabled;
    // the input-gate norm weights additionally require CIFG to be disabled.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (descriptor.m_LayerNormEnabled)
    {
        if (!descriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddUnidirectionalSequenceLstmLayer(descriptor,
                                                                             lstmInputParams,
                                                                             layerName.c_str());

    armnn::TensorInfo outputTensorInfo0 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo0);

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3911
Derek Lamberti0028d1b2019-02-20 13:57:42 +00003912} // namespace armnnDeserializer