blob: ed921880e0840c93ce07ff257f7797995e8baeac [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
Mike Kelly3ec30772023-03-08 13:47:17 +00002// Copyright © 2017,2019-2023 Arm Ltd and Contributors. All rights reserved.
Kevin May43a799c2019-02-08 16:31:42 +00003// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
Matthew Benthamff130e22020-01-17 11:47:42 +00008#include <armnn/Descriptors.hpp>
Kevin May43a799c2019-02-08 16:31:42 +00009#include <armnn/Exceptions.hpp>
Matthew Benthamff130e22020-01-17 11:47:42 +000010#include <armnn/TypesUtils.hpp>
11#include <armnn/LstmParams.hpp>
12#include <armnn/QuantizedLstmParams.hpp>
Simon Obutedb5804e2022-04-14 15:49:52 +010013#include <armnn/Logging.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000014
Matteo Martincighe011d202019-11-28 11:35:47 +000015#include <armnnUtils/Permute.hpp>
Mike Kellyc9ea45a2020-02-28 18:11:58 +000016#include <armnnUtils/Transpose.hpp>
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +010017#include <armnn/utility/Assert.hpp>
Jan Eilers8eb25602020-03-09 12:13:48 +000018#include <armnn/utility/IgnoreUnused.hpp>
Matthew Sloyan0663d662020-09-14 11:47:26 +010019#include <armnn/utility/NumericCast.hpp>
Matteo Martincighe011d202019-11-28 11:35:47 +000020
Kevin May43a799c2019-02-08 16:31:42 +000021#include <ParserHelper.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000022#include <VerificationHelpers.hpp>
23
Colm Donelan5b5c2222020-09-09 12:48:16 +010024#include <fmt/format.h>
Kevin May43a799c2019-02-08 16:31:42 +000025
Kevin May43a799c2019-02-08 16:31:42 +000026#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000027#include <algorithm>
28#include <limits>
29#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000030
31using armnn::ParseException;
32using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000033using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000034
Derek Lamberti0028d1b2019-02-20 13:57:42 +000035namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000036{
Kevin May43a799c2019-02-08 16:31:42 +000037
Finn Williams85d36712021-01-26 22:30:06 +000038IDeserializer::IDeserializer() : pDeserializerImpl(new DeserializerImpl()){}
39
40IDeserializer::~IDeserializer() = default;
41
42IDeserializer *IDeserializer::CreateRaw()
43{
44 return new IDeserializer();
45}
46
47IDeserializerPtr IDeserializer::Create()
48{
49 return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
50}
51
52void IDeserializer::Destroy(IDeserializer *parser)
53{
54 delete parser;
55}
56
57armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(const std::vector<uint8_t> &binaryContent)
58{
59 return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
60}
61
62armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(std::istream &binaryContent)
63{
64 return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
65}
66
67BindingPointInfo IDeserializer::GetNetworkInputBindingInfo(unsigned int layerId, const std::string &name) const
68{
69 return pDeserializerImpl->GetNetworkInputBindingInfo(layerId, name);
70}
71
72BindingPointInfo IDeserializer::GetNetworkOutputBindingInfo(unsigned int layerId, const std::string &name) const
73{
74 return pDeserializerImpl->GetNetworkOutputBindingInfo(layerId, name);
75}
76
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000077namespace
78{
Kevin May43a799c2019-02-08 16:31:42 +000079
80const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
81
Finn Williams85d36712021-01-26 22:30:06 +000082 void CheckGraph(const GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000083 unsigned int layersIndex,
84 const CheckLocation& location)
85{
86 if (graph->layers() == nullptr)
87 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010088 throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
89 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
90 "layers:{1} at {2}",
91 location.m_Function,
92 layersIndex,
93 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +000094 }
95 else if (layersIndex >= graph->layers()->size())
96 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010097 throw ParseException(fmt::format("{0} was called with an invalid layers index. layers:{1} at {2}",
98 location.m_Function,
99 layersIndex,
100 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000101 }
102}
103
Finn Williams85d36712021-01-26 22:30:06 +0000104void CheckLayers(const GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +0000105 unsigned int layersIndex,
106 unsigned int layerIndex,
107 const CheckLocation& location)
108{
109 if (graph->layers() == nullptr)
110 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100111 throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
112 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
113 "layers:{1} at {2}",
114 location.m_Function,
115 layersIndex,
116 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000117 }
118 else if (layersIndex >= graph->layers()->size())
119 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100120 throw ParseException(fmt::format("{0} was called with an invalid layers index. "
121 "layers:{1} at {2}",
122 location.m_Function,
123 layersIndex,
124 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000125 }
126 else if (layerIndex >= graph->layers()[layersIndex].size()
127 && layerIndex != VIRTUAL_LAYER_ID)
128 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100129 throw ParseException(fmt::format("{0} was called with an invalid layer index. "
130 "layers:{1} layer:{2} at {3}",
131 location.m_Function,
132 layersIndex,
133 layerIndex,
134 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000135 }
136}
137
Finn Williams85d36712021-01-26 22:30:06 +0000138void CheckTensorPtr(TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000139 const CheckLocation& location)
140{
141 if (rawPtr == nullptr)
142 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100143 throw ParseException(fmt::format("{0} was called with a null tensor pointer. at {1}",
144 location.m_Function,
145 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000146 }
147}
148
Finn Williams85d36712021-01-26 22:30:06 +0000149void CheckConstTensorPtr(ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000150 const CheckLocation& location)
151{
152 if (rawPtr == nullptr)
153 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100154 throw ParseException(fmt::format("{0} was called with a null const tensor pointer. at {1}",
155 location.m_Function,
156 location.FileLine()));
Mike Kellya0766c32019-02-19 17:22:07 +0000157 }
158}
159
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000160void CheckConstTensorSize(const unsigned int constTensorSize,
161 const unsigned int tensorSize,
162 const CheckLocation& location)
163{
164 if (constTensorSize != tensorSize)
165 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100166 throw ParseException(fmt::format("{0} wrong number of components supplied to tensor. at:{1}",
167 location.m_Function,
168 location.FileLine()));
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000169 }
170}
171
// Convenience wrappers around the Check* helpers above: each captures the
// call site via CHECK_LOCATION() so a thrown ParseException reports where the
// validation was requested, not where the helper lives.
#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
186}
187
Saoirse Stewart263829c2019-02-19 15:54:14 +0000188bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
189{
190 const unsigned int actualSize = actual.GetNumDimensions();
191 if (actualSize != expected.size())
192 {
193 return false;
194 }
195
196 for (unsigned int i = 0u; i < actualSize; i++)
197 {
198 if (actual[i] != static_cast<unsigned int>(expected[i]))
199 {
200 return false;
201 }
202 }
203
204 return true;
205}
206
Finn Williams85d36712021-01-26 22:30:06 +0000207IDeserializer::DeserializerImpl::DeserializerImpl()
Kevin May43a799c2019-02-08 16:31:42 +0000208: m_Network(nullptr, nullptr),
209//May require LayerType_Max to be included
Finn Williams85d36712021-01-26 22:30:06 +0000210m_ParserFunctions(Layer_MAX+1, &IDeserializer::DeserializerImpl::ParseUnsupportedLayer)
Kevin May43a799c2019-02-08 16:31:42 +0000211{
212 // register supported layers
Finn Williams85d36712021-01-26 22:30:06 +0000213 m_ParserFunctions[Layer_AbsLayer] = &DeserializerImpl::ParseAbs;
214 m_ParserFunctions[Layer_ActivationLayer] = &DeserializerImpl::ParseActivation;
215 m_ParserFunctions[Layer_AdditionLayer] = &DeserializerImpl::ParseAdd;
216 m_ParserFunctions[Layer_ArgMinMaxLayer] = &DeserializerImpl::ParseArgMinMax;
Samuel Yapa04f4a12022-08-19 11:14:38 +0100217 m_ParserFunctions[Layer_BatchMatMulLayer] = &DeserializerImpl::ParseBatchMatMul;
Finn Williams85d36712021-01-26 22:30:06 +0000218 m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &DeserializerImpl::ParseBatchToSpaceNd;
219 m_ParserFunctions[Layer_BatchNormalizationLayer] = &DeserializerImpl::ParseBatchNormalization;
mathad01b392e982021-04-07 12:07:30 +0100220 m_ParserFunctions[Layer_CastLayer] = &DeserializerImpl::ParseCast;
Simon Obute51f67772021-09-03 15:50:13 +0100221 m_ParserFunctions[Layer_ChannelShuffleLayer] = &DeserializerImpl::ParseChannelShuffle;
222 m_ParserFunctions[Layer_ComparisonLayer] = &DeserializerImpl::ParseComparison;
Finn Williams85d36712021-01-26 22:30:06 +0000223 m_ParserFunctions[Layer_ConcatLayer] = &DeserializerImpl::ParseConcat;
224 m_ParserFunctions[Layer_ConstantLayer] = &DeserializerImpl::ParseConstant;
225 m_ParserFunctions[Layer_Convolution2dLayer] = &DeserializerImpl::ParseConvolution2d;
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100226 m_ParserFunctions[Layer_Convolution3dLayer] = &DeserializerImpl::ParseConvolution3d;
Finn Williams85d36712021-01-26 22:30:06 +0000227 m_ParserFunctions[Layer_DepthToSpaceLayer] = &DeserializerImpl::ParseDepthToSpace;
228 m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &DeserializerImpl::ParseDepthwiseConvolution2d;
229 m_ParserFunctions[Layer_DequantizeLayer] = &DeserializerImpl::ParseDequantize;
230 m_ParserFunctions[Layer_DetectionPostProcessLayer] = &DeserializerImpl::ParseDetectionPostProcess;
231 m_ParserFunctions[Layer_DivisionLayer] = &DeserializerImpl::ParseDivision;
Mike Kelly3ec30772023-03-08 13:47:17 +0000232 m_ParserFunctions[Layer_ElementwiseBinaryLayer] = &DeserializerImpl::ParseElementwiseBinary;
Finn Williams85d36712021-01-26 22:30:06 +0000233 m_ParserFunctions[Layer_ElementwiseUnaryLayer] = &DeserializerImpl::ParseElementwiseUnary;
234 m_ParserFunctions[Layer_EqualLayer] = &DeserializerImpl::ParseEqual;
235 m_ParserFunctions[Layer_FullyConnectedLayer] = &DeserializerImpl::ParseFullyConnected;
236 m_ParserFunctions[Layer_FillLayer] = &DeserializerImpl::ParseFill;
237 m_ParserFunctions[Layer_FloorLayer] = &DeserializerImpl::ParseFloor;
238 m_ParserFunctions[Layer_GatherLayer] = &DeserializerImpl::ParseGather;
Teresa Charlin6966bfa2022-04-25 17:14:50 +0100239 m_ParserFunctions[Layer_GatherNdLayer] = &DeserializerImpl::ParseGatherNd;
Finn Williams85d36712021-01-26 22:30:06 +0000240 m_ParserFunctions[Layer_GreaterLayer] = &DeserializerImpl::ParseGreater;
241 m_ParserFunctions[Layer_InstanceNormalizationLayer] = &DeserializerImpl::ParseInstanceNormalization;
242 m_ParserFunctions[Layer_L2NormalizationLayer] = &DeserializerImpl::ParseL2Normalization;
243 m_ParserFunctions[Layer_LogicalBinaryLayer] = &DeserializerImpl::ParseLogicalBinary;
244 m_ParserFunctions[Layer_LogSoftmaxLayer] = &DeserializerImpl::ParseLogSoftmax;
245 m_ParserFunctions[Layer_LstmLayer] = &DeserializerImpl::ParseLstm;
246 m_ParserFunctions[Layer_MaximumLayer] = &DeserializerImpl::ParseMaximum;
247 m_ParserFunctions[Layer_MeanLayer] = &DeserializerImpl::ParseMean;
248 m_ParserFunctions[Layer_MinimumLayer] = &DeserializerImpl::ParseMinimum;
249 m_ParserFunctions[Layer_MergeLayer] = &DeserializerImpl::ParseMerge;
250 m_ParserFunctions[Layer_MergerLayer] = &DeserializerImpl::ParseConcat;
251 m_ParserFunctions[Layer_MultiplicationLayer] = &DeserializerImpl::ParseMultiplication;
252 m_ParserFunctions[Layer_NormalizationLayer] = &DeserializerImpl::ParseNormalization;
253 m_ParserFunctions[Layer_PadLayer] = &DeserializerImpl::ParsePad;
254 m_ParserFunctions[Layer_PermuteLayer] = &DeserializerImpl::ParsePermute;
255 m_ParserFunctions[Layer_Pooling2dLayer] = &DeserializerImpl::ParsePooling2d;
Tamas Nyirid998a1c2021-11-05 14:55:33 +0000256 m_ParserFunctions[Layer_Pooling3dLayer] = &DeserializerImpl::ParsePooling3d;
Finn Williams85d36712021-01-26 22:30:06 +0000257 m_ParserFunctions[Layer_PreluLayer] = &DeserializerImpl::ParsePrelu;
258 m_ParserFunctions[Layer_QLstmLayer] = &DeserializerImpl::ParseQLstm;
259 m_ParserFunctions[Layer_QuantizeLayer] = &DeserializerImpl::ParseQuantize;
260 m_ParserFunctions[Layer_QuantizedLstmLayer] = &DeserializerImpl::ParseQuantizedLstm;
261 m_ParserFunctions[Layer_RankLayer] = &DeserializerImpl::ParseRank;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +0000262 m_ParserFunctions[Layer_ReduceLayer] = &DeserializerImpl::ParseReduce;
Finn Williams85d36712021-01-26 22:30:06 +0000263 m_ParserFunctions[Layer_ReshapeLayer] = &DeserializerImpl::ParseReshape;
264 m_ParserFunctions[Layer_ResizeBilinearLayer] = &DeserializerImpl::ParseResizeBilinear;
265 m_ParserFunctions[Layer_ResizeLayer] = &DeserializerImpl::ParseResize;
266 m_ParserFunctions[Layer_RsqrtLayer] = &DeserializerImpl::ParseRsqrt;
Keith Davis3ae3f972021-05-21 16:33:48 +0100267 m_ParserFunctions[Layer_ShapeLayer] = &DeserializerImpl::ParseShape;
Finn Williams85d36712021-01-26 22:30:06 +0000268 m_ParserFunctions[Layer_SliceLayer] = &DeserializerImpl::ParseSlice;
269 m_ParserFunctions[Layer_SoftmaxLayer] = &DeserializerImpl::ParseSoftmax;
270 m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &DeserializerImpl::ParseSpaceToBatchNd;
271 m_ParserFunctions[Layer_SpaceToDepthLayer] = &DeserializerImpl::ParseSpaceToDepth;
272 m_ParserFunctions[Layer_SplitterLayer] = &DeserializerImpl::ParseSplitter;
273 m_ParserFunctions[Layer_StackLayer] = &DeserializerImpl::ParseStack;
274 m_ParserFunctions[Layer_StandInLayer] = &DeserializerImpl::ParseStandIn;
275 m_ParserFunctions[Layer_StridedSliceLayer] = &DeserializerImpl::ParseStridedSlice;
276 m_ParserFunctions[Layer_SubtractionLayer] = &DeserializerImpl::ParseSubtraction;
277 m_ParserFunctions[Layer_SwitchLayer] = &DeserializerImpl::ParseSwitch;
278 m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &DeserializerImpl::ParseTransposeConvolution2d;
279 m_ParserFunctions[Layer_TransposeLayer] = &DeserializerImpl::ParseTranspose;
Narumol Prangnawarata0162e12021-07-23 14:47:49 +0100280 m_ParserFunctions[Layer_UnidirectionalSequenceLstmLayer] = &DeserializerImpl::ParseUnidirectionalSequenceLstm;
Kevin May43a799c2019-02-08 16:31:42 +0000281}
282
Finn Williams85d36712021-01-26 22:30:06 +0000283LayerBaseRawPtr IDeserializer::DeserializerImpl::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000284{
285 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
286
287 switch(layerType)
288 {
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +0100289 case Layer::Layer_AbsLayer:
290 return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
Mike Kellyaf484012019-02-20 16:53:11 +0000291 case Layer::Layer_ActivationLayer:
292 return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000293 case Layer::Layer_AdditionLayer:
294 return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100295 case Layer::Layer_ArgMinMaxLayer:
296 return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
Samuel Yapa04f4a12022-08-19 11:14:38 +0100297 case Layer::Layer_BatchMatMulLayer:
298 return graphPtr->layers()->Get(layerIndex)->layer_as_BatchMatMulLayer()->base();
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +0000299 case Layer::Layer_BatchToSpaceNdLayer:
300 return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
ruoyan018e7fa232019-02-28 15:09:07 +0000301 case Layer::Layer_BatchNormalizationLayer:
302 return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
mathad01b392e982021-04-07 12:07:30 +0100303 case Layer::Layer_CastLayer:
304 return graphPtr->layers()->Get(layerIndex)->layer_as_CastLayer()->base();
Simon Obute51f67772021-09-03 15:50:13 +0100305 case Layer::Layer_ChannelShuffleLayer:
306 return graphPtr->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->base();
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +0100307 case Layer::Layer_ComparisonLayer:
308 return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
Jim Flynne242f2d2019-05-22 14:24:13 +0100309 case Layer::Layer_ConcatLayer:
310 return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
Conor Kennedy76277882019-02-26 08:29:54 +0000311 case Layer::Layer_ConstantLayer:
312 return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
Mike Kellya0766c32019-02-19 17:22:07 +0000313 case Layer::Layer_Convolution2dLayer:
314 return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100315 case Layer::Layer_Convolution3dLayer:
316 return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution3dLayer()->base();
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +0100317 case Layer::Layer_DepthToSpaceLayer:
318 return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +0000319 case Layer::Layer_DepthwiseConvolution2dLayer:
320 return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +0000321 case Layer::Layer_DequantizeLayer:
322 return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +0000323 case Layer::Layer_DetectionPostProcessLayer:
324 return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
Éanna Ó Catháin58885892019-02-27 16:16:39 +0000325 case Layer::Layer_DivisionLayer:
326 return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +0000327 case Layer::Layer_EqualLayer:
328 return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
Mike Kelly3ec30772023-03-08 13:47:17 +0000329 case Layer::Layer_ElementwiseBinaryLayer:
330 return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseBinaryLayer()->base();
James Conroyaba90cd2020-11-06 16:28:18 +0000331 case Layer::Layer_ElementwiseUnaryLayer:
332 return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer()->base();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +0000333 case Layer::Layer_FullyConnectedLayer:
334 return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
Keith Davis300ad562020-06-04 16:34:23 +0100335 case Layer::Layer_FillLayer:
336 return graphPtr->layers()->Get(layerIndex)->layer_as_FillLayer()->base();
Finn Williamsdd2ba7e2019-03-01 11:51:52 +0000337 case Layer::Layer_FloorLayer:
338 return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000339 case Layer::Layer_GatherLayer:
340 return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
Teresa Charlin6966bfa2022-04-25 17:14:50 +0100341 case Layer::Layer_GatherNdLayer:
342 return graphPtr->layers()->Get(layerIndex)->layer_as_GatherNdLayer()->base();
Conor Kennedy79ffdf52019-03-01 14:24:54 +0000343 case Layer::Layer_GreaterLayer:
344 return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000345 case Layer::Layer_InputLayer:
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +0000346 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
Aron Virginas-Tar781ced92019-10-03 11:15:39 +0100347 case Layer::Layer_InstanceNormalizationLayer:
348 return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +0000349 case Layer::Layer_L2NormalizationLayer:
350 return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
James Conroyaba90cd2020-11-06 16:28:18 +0000351 case Layer::Layer_LogicalBinaryLayer:
352 return graphPtr->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer()->base();
Sadik Armagan26257852019-10-14 13:00:47 +0100353 case Layer::Layer_LogSoftmaxLayer:
354 return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
Jim Flynn11af3752019-03-19 17:22:29 +0000355 case Layer::Layer_LstmLayer:
356 return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
Sadik Armaganac97c8c2019-03-04 17:44:21 +0000357 case Layer::Layer_MeanLayer:
358 return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +0000359 case Layer::Layer_MinimumLayer:
360 return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
Aron Virginas-Tar377351e2019-02-27 14:42:31 +0000361 case Layer::Layer_MaximumLayer:
362 return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +0100363 case Layer::Layer_MergeLayer:
364 return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
Jim Flynnac25a1b2019-02-28 10:40:49 +0000365 case Layer::Layer_MergerLayer:
366 return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
Sadik Armagan5f450272019-02-12 14:31:45 +0000367 case Layer::Layer_MultiplicationLayer:
368 return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
Nina Drozd57728782019-02-27 10:53:27 +0000369 case Layer::Layer_NormalizationLayer:
370 return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000371 case Layer::Layer_OutputLayer:
372 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +0000373 case Layer::Layer_PadLayer:
374 return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +0000375 case Layer::Layer_PermuteLayer:
376 return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
Saoirse Stewart3166c3e2019-02-18 15:24:53 +0000377 case Layer::Layer_Pooling2dLayer:
378 return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
Tamas Nyirid998a1c2021-11-05 14:55:33 +0000379 case Layer::Layer_Pooling3dLayer:
380 return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->base();
Ellen Norris-Thompson51982472019-06-19 11:46:21 +0100381 case Layer::Layer_PreluLayer:
382 return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
James Conroy8d333182020-05-13 10:27:58 +0100383 case Layer::Layer_QLstmLayer:
384 return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
Derek Lamberti87acb272019-03-27 16:51:31 +0000385 case Layer::Layer_QuantizeLayer:
386 return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
Jan Eilers5b01a892019-07-23 09:47:43 +0100387 case Layer::Layer_QuantizedLstmLayer:
388 return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
Finn Williams2605b232020-06-10 15:53:46 +0100389 case Layer::Layer_RankLayer:
390 return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +0000391 case Layer::Layer_ReduceLayer:
392 return graphPtr->layers()->Get(layerIndex)->layer_as_ReduceLayer()->base();
Saoirse Stewart263829c2019-02-19 15:54:14 +0000393 case Layer::Layer_ReshapeLayer:
394 return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +0000395 case Layer::Layer_ResizeBilinearLayer:
396 return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100397 case Layer::Layer_ResizeLayer:
398 return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
Sadik Armagan8b42a382019-03-01 14:24:49 +0000399 case Layer::Layer_RsqrtLayer:
400 return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
Keith Davis3ae3f972021-05-21 16:33:48 +0100401 case Layer::Layer_ShapeLayer:
402 return graphPtr->layers()->Get(layerIndex)->layer_as_ShapeLayer()->base();
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +0100403 case Layer::Layer_SliceLayer:
404 return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +0000405 case Layer::Layer_SoftmaxLayer:
406 return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
Nattapat Chaimanowong45286992019-02-26 15:53:02 +0000407 case Layer::Layer_SpaceToBatchNdLayer:
408 return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
Aron Virginas-Taraa067142019-06-11 16:01:44 +0100409 case Layer::Layer_SpaceToDepthLayer:
410 return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
Jim Flynn18ce3382019-03-08 11:08:30 +0000411 case Layer::Layer_SplitterLayer:
412 return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +0100413 case Layer::Layer_StackLayer:
414 return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
Aron Virginas-Tar85121a22019-10-23 10:41:35 +0100415 case Layer::Layer_StandInLayer:
416 return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +0000417 case Layer::Layer_StridedSliceLayer:
418 return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
Conor Kennedyda1f9752019-03-01 14:37:12 +0000419 case Layer::Layer_SubtractionLayer:
420 return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
Sadik Armaganeff363d2019-04-05 15:25:46 +0100421 case Layer::Layer_SwitchLayer:
422 return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
Aron Virginas-Tarcb549302019-06-21 13:53:38 +0100423 case Layer::Layer_TransposeConvolution2dLayer:
424 return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
Mike Kellyc9ea45a2020-02-28 18:11:58 +0000425 case Layer::Layer_TransposeLayer:
426 return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
Narumol Prangnawarata0162e12021-07-23 14:47:49 +0100427 case Layer::Layer_UnidirectionalSequenceLstmLayer:
428 return graphPtr->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer()->base();
Kevin May43a799c2019-02-08 16:31:42 +0000429 case Layer::Layer_NONE:
430 default:
Colm Donelan5b5c2222020-09-09 12:48:16 +0100431 throw ParseException(fmt::format("Layer type {} not recognized", layerType));
Kevin May43a799c2019-02-08 16:31:42 +0000432 }
433}
434
Finn Williams85d36712021-01-26 22:30:06 +0000435std::string IDeserializer::DeserializerImpl::GetLayerName(const GraphPtr& graph, unsigned int index)
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000436{
437 auto layer = GetBaseLayer(graph, index);
438 assert(layer);
439 return layer->layerName()->str();
440}
441
Finn Williams85d36712021-01-26 22:30:06 +0000442int32_t IDeserializer::DeserializerImpl::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000443{
444 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
445
446 if (layerType == Layer::Layer_InputLayer)
447 {
448 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
449 }
450 else if ( layerType == Layer::Layer_OutputLayer )
451 {
452 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
453 }
454 return 0;
455}
456
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000457armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000458{
459 switch (dataLayout)
460 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000461 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000462 return armnn::DataLayout::NHWC;
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100463 case armnnSerializer::DataLayout::DataLayout_NDHWC:
464 return armnn::DataLayout::NDHWC;
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +0100465 case armnnSerializer::DataLayout::DataLayout_NCDHW:
466 return armnn::DataLayout::NCDHW;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000467 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000468 default:
469 return armnn::DataLayout::NCHW;
470 }
471}
472
Mike Kellyaf484012019-02-20 16:53:11 +0000473armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
474{
475 switch (function)
476 {
477 case armnnSerializer::ActivationFunction_Sigmoid:
478 return armnn::ActivationFunction::Sigmoid;
479 case armnnSerializer::ActivationFunction_TanH:
480 return armnn::ActivationFunction::TanH;
481 case armnnSerializer::ActivationFunction_Linear:
482 return armnn::ActivationFunction::Linear;
483 case armnnSerializer::ActivationFunction_ReLu:
484 return armnn::ActivationFunction::ReLu;
485 case armnnSerializer::ActivationFunction_BoundedReLu:
486 return armnn::ActivationFunction::BoundedReLu;
487 case armnnSerializer::ActivationFunction_LeakyReLu:
488 return armnn::ActivationFunction::LeakyReLu;
489 case armnnSerializer::ActivationFunction_Abs:
490 return armnn::ActivationFunction::Abs;
491 case armnnSerializer::ActivationFunction_Sqrt:
492 return armnn::ActivationFunction::Sqrt;
493 case armnnSerializer::ActivationFunction_Square:
494 return armnn::ActivationFunction::Square;
David Monahan3b3c3812020-02-25 09:03:29 +0000495 case armnnSerializer::ActivationFunction_Elu:
496 return armnn::ActivationFunction::Elu;
Colm Donelan03fbeaf2020-02-26 15:39:23 +0000497 case armnnSerializer::ActivationFunction_HardSwish:
498 return armnn::ActivationFunction::HardSwish;
Mike Kellyaf484012019-02-20 16:53:11 +0000499 default:
500 return armnn::ActivationFunction::Sigmoid;
501 }
502}
503
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100504armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
505{
506 switch (function)
507 {
508 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
509 return armnn::ArgMinMaxFunction::Max;
510 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
511 default:
512 return armnn::ArgMinMaxFunction::Min;
513 }
514}
515
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +0100516armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
517{
518 switch (operation)
519 {
520 case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
521 return armnn::ComparisonOperation::Equal;
522 case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
523 return armnn::ComparisonOperation::Greater;
524 case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
525 return armnn::ComparisonOperation::GreaterOrEqual;
526 case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
527 return armnn::ComparisonOperation::Less;
528 case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
529 return armnn::ComparisonOperation::LessOrEqual;
530 case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
531 default:
532 return armnn::ComparisonOperation::NotEqual;
533 }
534}
535
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +0000536armnn::ReduceOperation ToReduceOperation(armnnSerializer::ReduceOperation operation)
537{
538 switch (operation)
539 {
540 case armnnSerializer::ReduceOperation::ReduceOperation_Sum:
541 return armnn::ReduceOperation::Sum;
542 case armnnSerializer::ReduceOperation::ReduceOperation_Max:
543 return armnn::ReduceOperation::Max;
544 case armnnSerializer::ReduceOperation::ReduceOperation_Mean:
545 return armnn::ReduceOperation::Mean;
546 case armnnSerializer::ReduceOperation::ReduceOperation_Min:
547 return armnn::ReduceOperation::Min;
Teresa Charlin4e3e8312021-08-05 12:34:37 +0100548 case armnnSerializer::ReduceOperation::ReduceOperation_Prod:
549 return armnn::ReduceOperation::Prod;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +0000550 default:
551 return armnn::ReduceOperation::Sum;
552 }
553}
554
James Conroyaba90cd2020-11-06 16:28:18 +0000555armnn::LogicalBinaryOperation ToLogicalBinaryOperation(armnnSerializer::LogicalBinaryOperation operation)
556{
557 switch (operation)
558 {
559 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalAnd:
560 return armnn::LogicalBinaryOperation::LogicalAnd;
561 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalOr:
562 return armnn::LogicalBinaryOperation::LogicalOr;
563 default:
564 throw armnn::InvalidArgumentException("Logical Binary operation unknown");
565 }
566}
567
Mike Kelly3ec30772023-03-08 13:47:17 +0000568armnn::BinaryOperation ToElementwiseBinaryOperation(armnnSerializer::BinaryOperation operation)
569{
570 switch (operation)
571 {
572 case armnnSerializer::BinaryOperation::BinaryOperation_Add:
573 return armnn::BinaryOperation::Add;
574 case armnnSerializer::BinaryOperation::BinaryOperation_Div:
575 return armnn::BinaryOperation::Div;
576 case armnnSerializer::BinaryOperation::BinaryOperation_Maximum:
577 return armnn::BinaryOperation::Maximum;
578 case armnnSerializer::BinaryOperation::BinaryOperation_Minimum:
579 return armnn::BinaryOperation::Minimum;
580 case armnnSerializer::BinaryOperation::BinaryOperation_Mul:
581 return armnn::BinaryOperation::Mul;
582 case armnnSerializer::BinaryOperation::BinaryOperation_Sub:
583 return armnn::BinaryOperation::Sub;
584 default:
585 throw armnn::InvalidArgumentException("Binary operation unknown");
586 }
587}
588
589armnn::UnaryOperation ToElementwiseUnaryOperation(armnnSerializer::UnaryOperation operation)
josh minor4a3c6102020-01-06 16:40:46 -0600590{
591 switch (operation)
592 {
593 case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
594 return armnn::UnaryOperation::Abs;
595 case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
596 return armnn::UnaryOperation::Rsqrt;
597 case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
598 return armnn::UnaryOperation::Sqrt;
599 case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
600 return armnn::UnaryOperation::Exp;
601 case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
602 return armnn::UnaryOperation::Neg;
James Conroyaba90cd2020-11-06 16:28:18 +0000603 case armnnSerializer::UnaryOperation::UnaryOperation_LogicalNot:
604 return armnn::UnaryOperation::LogicalNot;
Teresa Charlin50de4fa2021-05-31 18:47:33 +0100605 case armnnSerializer::UnaryOperation::UnaryOperation_Log:
606 return armnn::UnaryOperation::Log;
607 case armnnSerializer::UnaryOperation::UnaryOperation_Sin:
608 return armnn::UnaryOperation::Sin;
josh minor4a3c6102020-01-06 16:40:46 -0600609 default:
610 throw armnn::InvalidArgumentException("Unary operation unknown");
611 }
612}
613
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +0100614armnn::PaddingMode ToPaddingMode(armnnSerializer::PaddingMode paddingMode)
615{
616 switch (paddingMode)
617 {
618 case armnnSerializer::PaddingMode::PaddingMode_Reflect:
619 return armnn::PaddingMode::Reflect;
620 case armnnSerializer::PaddingMode::PaddingMode_Symmetric:
621 return armnn::PaddingMode::Symmetric;
622 default:
623 return armnn::PaddingMode::Constant;
624 }
625}
626
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100627armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
628{
629 switch (method)
630 {
631 case armnnSerializer::ResizeMethod_NearestNeighbor:
632 return armnn::ResizeMethod::NearestNeighbor;
633 case armnnSerializer::ResizeMethod_Bilinear:
Aron Virginas-Tar3c9b2702019-10-31 13:45:16 +0000634 return armnn::ResizeMethod::Bilinear;
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100635 default:
636 return armnn::ResizeMethod::NearestNeighbor;
637 }
638}
639
Finn Williams85d36712021-01-26 22:30:06 +0000640armnn::TensorInfo ToTensorInfo(TensorRawPtr tensorPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000641{
642 armnn::DataType type;
643 CHECK_TENSOR_PTR(tensorPtr);
644
645 switch (tensorPtr->dataType())
646 {
Francis Murtaghddb1d062020-03-10 13:51:45 +0000647 case DataType_QAsymmS8:
648 type = armnn::DataType::QAsymmS8;
649 break;
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000650 case DataType_QSymmS8:
651 type = armnn::DataType::QSymmS8;
652 break;
Kevin May43a799c2019-02-08 16:31:42 +0000653 case DataType_QuantisedAsymm8:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000654 case DataType_QAsymmU8:
655 type = armnn::DataType::QAsymmU8;
Kevin May43a799c2019-02-08 16:31:42 +0000656 break;
Derek Lambertif90c56d2020-01-10 17:14:08 +0000657 case DataType_QSymmS16:
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000658 case DataType_QuantisedSymm16:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000659 type = armnn::DataType::QSymmS16;
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000660 break;
Mike Kellya0766c32019-02-19 17:22:07 +0000661 case DataType_Signed32:
662 type = armnn::DataType::Signed32;
663 break;
Mike Kelly1f140f72021-04-06 12:25:55 +0100664 case DataType_Signed64:
665 type = armnn::DataType::Signed64;
666 break;
Kevin May43a799c2019-02-08 16:31:42 +0000667 case DataType_Float32:
668 type = armnn::DataType::Float32;
669 break;
670 case DataType_Float16:
671 type = armnn::DataType::Float16;
672 break;
673 case DataType_Boolean:
674 type = armnn::DataType::Boolean;
675 break;
676 default:
677 {
678 CheckLocation location = CHECK_LOCATION();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100679 throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
680 tensorPtr->dataType(),
681 EnumNameDataType(tensorPtr->dataType()),
682 location.AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000683 }
684 }
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000685
Colm Donelan800b2812021-02-12 12:43:35 +0000686 float quantizationScale = tensorPtr->quantizationScale();
687 int32_t quantizationOffset = tensorPtr->quantizationOffset();
688
Finn Williams2605b232020-06-10 15:53:46 +0100689 if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::Scalar))
690 {
Colm Donelan800b2812021-02-12 12:43:35 +0000691 return armnn::TensorInfo(TensorShape{armnn::Dimensionality::Scalar},
Finn Williams2605b232020-06-10 15:53:46 +0100692 type,
693 quantizationScale,
694 quantizationOffset);
695 }
Colm Donelan800b2812021-02-12 12:43:35 +0000696 else if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::NotSpecified))
697 {
698 armnn::TensorInfo result(TensorShape{Dimensionality::NotSpecified},
699 type,
700 quantizationScale,
701 quantizationOffset);
702 return result;
703 }
Kevin May43a799c2019-02-08 16:31:42 +0000704
705 auto dimensions = tensorPtr->dimensions();
706 unsigned int size = dimensions->size();
707 std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
Colm Donelan800b2812021-02-12 12:43:35 +0000708 bool dimensionsSpecificity[armnn::MaxNumOfTensorDimensions];
709 std::fill_n(dimensionsSpecificity, armnn::MaxNumOfTensorDimensions, true);
710 // For backwards compatibility check if the dimensionSpecificity vector is present first.
711 // The default is to have dimensionSpecificity set to all true's anyway.
712 if (tensorPtr->dimensionSpecificity() != nullptr)
713 {
714 auto dimensionSpecificity = tensorPtr->dimensionSpecificity();
715 size = dimensionSpecificity->size();
716 for (unsigned int i = 0; i < size; ++i)
717 {
718 dimensionsSpecificity[i] = dimensionSpecificity->Get(i);
719 }
720 }
721 // Construct a TensorShape
722 TensorShape shape(size, outputDims.data(), dimensionsSpecificity);
Kevin May43a799c2019-02-08 16:31:42 +0000723
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000724 auto quantizationScales = tensorPtr->quantizationScales();
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000725 if (quantizationScales)
726 {
727 unsigned int quantizationScalesSize = quantizationScales->size();
728 std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
729 unsigned int quantizationDim = tensorPtr->quantizationDim();
Colm Donelan800b2812021-02-12 12:43:35 +0000730 armnn::TensorInfo result(shape,
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000731 type,
732 scales,
733 quantizationDim);
734 return result;
735 }
736
Kevin May43a799c2019-02-08 16:31:42 +0000737 // two statements (on purpose) for easier debugging:
Colm Donelan800b2812021-02-12 12:43:35 +0000738 armnn::TensorInfo result(shape,
Kevin May43a799c2019-02-08 16:31:42 +0000739 type,
740 quantizationScale,
741 quantizationOffset);
Colm Donelan800b2812021-02-12 12:43:35 +0000742
Kevin May43a799c2019-02-08 16:31:42 +0000743 return result;
744}
745
Finn Williams85d36712021-01-26 22:30:06 +0000746armnn::ConstTensor ToConstTensor(ConstTensorRawPtr constTensorPtr)
Mike Kellya0766c32019-02-19 17:22:07 +0000747{
748 CHECK_CONST_TENSOR_PTR(constTensorPtr);
749 armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());
Matthew Sloyan81beae32021-07-13 19:46:11 +0100750 tensorInfo.SetConstant();
Mike Kellya0766c32019-02-19 17:22:07 +0000751
752 switch (constTensorPtr->data_type())
753 {
754 case ConstTensorData_ByteData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000755 {
756 auto byteData = constTensorPtr->data_as_ByteData()->data();
757 CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
758 return armnn::ConstTensor(tensorInfo, byteData->data());
759 }
Mike Kellya0766c32019-02-19 17:22:07 +0000760 case ConstTensorData_ShortData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000761 {
762 auto shortData = constTensorPtr->data_as_ShortData()->data();
763 CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
764 return armnn::ConstTensor(tensorInfo, shortData->data());
765 }
Mike Kellya0766c32019-02-19 17:22:07 +0000766 case ConstTensorData_IntData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000767 {
768 auto intData = constTensorPtr->data_as_IntData()->data();
769 CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
770 return armnn::ConstTensor(tensorInfo, intData->data());
771 }
Mike Kellya0766c32019-02-19 17:22:07 +0000772 case ConstTensorData_LongData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000773 {
774 auto longData = constTensorPtr->data_as_LongData()->data();
775 CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
776 return armnn::ConstTensor(tensorInfo, longData->data());
777 }
Mike Kellya0766c32019-02-19 17:22:07 +0000778 default:
779 {
780 CheckLocation location = CHECK_LOCATION();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100781 throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
782 constTensorPtr->data_type(),
783 EnumNameConstTensorData(constTensorPtr->data_type()),
784 location.AsString()));
Mike Kellya0766c32019-02-19 17:22:07 +0000785 }
786 }
787}
788
Finn Williams85d36712021-01-26 22:30:06 +0000789TensorRawPtrVector IDeserializer::DeserializerImpl::GetInputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000790{
791 CHECK_LAYERS(graphPtr, 0, layerIndex);
792 auto layer = GetBaseLayer(graphPtr, layerIndex);
793 const auto& numInputs = layer->inputSlots()->size();
794
795 TensorRawPtrVector result(numInputs);
796
797 for (unsigned int i=0; i<numInputs; ++i)
798 {
799 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
800 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
801 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
802 }
803 return result;
804}
805
Finn Williams85d36712021-01-26 22:30:06 +0000806TensorRawPtrVector IDeserializer::DeserializerImpl::GetOutputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000807{
808 CHECK_LAYERS(graphPtr, 0, layerIndex);
809 auto layer = GetBaseLayer(graphPtr, layerIndex);
810 const auto& numOutputs = layer->outputSlots()->size();
811
812 TensorRawPtrVector result(numOutputs);
813
814 for (unsigned int i=0; i<numOutputs; ++i)
815 {
816 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
817 }
818 return result;
819}
820
Finn Williams85d36712021-01-26 22:30:06 +0000821void IDeserializer::DeserializerImpl::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000822{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000823 CHECK_LAYERS(graph, 0, layerIndex);
824 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100825 throw ParseException(fmt::format("Layer not supported. layerIndex: {0} "
826 "layerName: {1} / {2}",
827 layerIndex,
828 layerName,
829 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000830}
831
Finn Williams85d36712021-01-26 22:30:06 +0000832void IDeserializer::DeserializerImpl::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000833{
834 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000835 m_InputBindings.clear();
836 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000837}
838
Kevin May43a799c2019-02-08 16:31:42 +0000839
Finn Williams85d36712021-01-26 22:30:06 +0000840INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000841{
842 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000843 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
844 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000845}
846
Finn Williams85d36712021-01-26 22:30:06 +0000847armnn::INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000848{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000849 ResetParser();
Simon Obutedb5804e2022-04-14 15:49:52 +0100850 if (binaryContent.fail()) {
851 ARMNN_LOG(error) << (std::string("Cannot read input"));
852 throw ParseException("Unable to read Input stream data");
853 }
854 binaryContent.seekg(0, std::ios::end);
855 const std::streamoff size = binaryContent.tellg();
856 std::vector<char> content(static_cast<size_t>(size));
857 binaryContent.seekg(0);
858 binaryContent.read(content.data(), static_cast<std::streamsize>(size));
859 GraphPtr graph = LoadGraphFromBinary(reinterpret_cast<uint8_t*>(content.data()), static_cast<size_t>(size));
Derek Lamberti8ddae332019-02-21 16:29:43 +0000860 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000861}
862
Finn Williams85d36712021-01-26 22:30:06 +0000863GraphPtr IDeserializer::DeserializerImpl::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000864{
865 if (binaryContent == nullptr)
866 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100867 throw InvalidArgumentException(fmt::format("Invalid (null) binary content {}",
868 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000869 }
870 flatbuffers::Verifier verifier(binaryContent, len);
871 if (verifier.VerifyBuffer<SerializedGraph>() == false)
872 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100873 throw ParseException(fmt::format("Buffer doesn't conform to the expected Armnn "
874 "flatbuffers format. size:{0} {1}",
875 len,
876 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000877 }
878 return GetSerializedGraph(binaryContent);
879}
880
// Converts a verified SerializedGraph into an armnn INetwork.
//
// Phases (order matters):
//   1. Dispatch each non-Input/Output layer to its registered parser, which
//      adds the armnn layer and records its slots in m_GraphConnections.
//   2. SetupInputLayers/SetupOutputLayers add the graph's Input/Output
//      layers and populate the binding tables.
//   3. Wire up every recorded output slot to its consumers' input slots.
//
// Returns ownership of the built network (m_Network is moved out).
INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    ARMNN_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        // Input/Output layers are handled separately in phase 2.
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            // An output slot may legitimately have no consumers; only
            // connect when input slots were registered against it.
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    return std::move(m_Network);
}
921
Finn Williams85d36712021-01-26 22:30:06 +0000922BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000923 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000924{
Jan Eilers8eb25602020-03-09 12:13:48 +0000925 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000926 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000927 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000928 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000929 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000930 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000931 }
932 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100933 throw ParseException(fmt::format("No input binding found for layer:{0} / {1}",
934 name,
935 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000936}
937
Finn Williams85d36712021-01-26 22:30:06 +0000938BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000939 const std::string& name) const
940{
Jan Eilers8eb25602020-03-09 12:13:48 +0000941 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000942 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000943 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000944 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000945 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000946 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000947 }
948 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100949 throw ParseException(fmt::format("No output binding found for layer:{0} / {1}",
950 name,
951 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000952}
953
Finn Williams85d36712021-01-26 22:30:06 +0000954unsigned int IDeserializer::DeserializerImpl::GetInputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000955{
956 for (unsigned int i = 0; i < graph->layers()->size(); i++)
957 {
958 auto layer = graph->layers()->Get(i);
959 if (layer->layer_type() == Layer::Layer_InputLayer)
960 {
961 auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
962 if (layerBindingId == targetId)
963 {
964 return i;
965 }
966 }
967 }
968 throw ParseException("Input layer with given layerBindingId not found");
969}
970
Finn Williams85d36712021-01-26 22:30:06 +0000971unsigned int IDeserializer::DeserializerImpl::GetOutputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000972{
973 for (unsigned int i = 0; i < graph->layers()->size(); i++)
974 {
975 auto layer = graph->layers()->Get(i);
976 if (layer->layer_type() == Layer::Layer_OutputLayer)
977 {
978 auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
979 if (layerBindingId == targetId)
980 {
981 return i;
982 }
983 }
984 }
985 throw ParseException("Output layer with given layerBindingId not found");
986}
987
Finn Williams85d36712021-01-26 22:30:06 +0000988unsigned int IDeserializer::DeserializerImpl::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100989{
990 for (unsigned int i = 0; i < graph->layers()->size(); i++)
991 {
992 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
993 if (layer->index() == targetIndex)
994 {
995 return i;
996 }
997 }
998 throw ParseException("Layer with given index not found");
999}
1000
Finn Williams85d36712021-01-26 22:30:06 +00001001IDeserializer::DeserializerImpl::FeatureVersions IDeserializer::DeserializerImpl::GetFeatureVersions(GraphPtr graph)
Tee Jungaa920c52019-11-05 10:48:25 +00001002{
Finn Williams85d36712021-01-26 22:30:06 +00001003 IDeserializer::DeserializerImpl::FeatureVersions versions;
Tee Jungaa920c52019-11-05 10:48:25 +00001004
1005 if (graph->featureVersions())
1006 {
1007 versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
Jan Eilers53ef7952021-06-02 12:01:25 +01001008 versions.m_WeightsLayoutScheme = graph->featureVersions()->weightsLayoutScheme();
Matthew Sloyan81beae32021-07-13 19:46:11 +01001009 versions.m_ConstTensorsAsInputs = graph->featureVersions()->constantTensorsAsInputs();
Tee Jungaa920c52019-11-05 10:48:25 +00001010 }
1011
1012 return versions;
1013}
1014
// Adds an armnn InputLayer for every serialized input id and records a
// (layerName -> BindingPointInfo) entry in m_InputBindings.
//
// Two binding-id schemes exist: scheme 0 (legacy) treats the serialized id
// as the layer's index property; later schemes treat it as the layer's
// layerBindingId, which must be searched for among InputLayers.
void IDeserializer::DeserializerImpl::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numInputs = graph->inputIds()->size();
    m_InputBindings.clear();
    m_InputBindings.reserve(numInputs);

    for (unsigned int i = 0; i < numInputs; i++)
    {
        unsigned int inputLayerIndex = 0xFFFFFFFF;
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            // Legacy scheme: the serialized id is the layer index property.
            const unsigned int inputId = armnn::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
            inputLayerIndex = GetLayerIndexInVector(graph, inputId);
        }
        else
        {
            // Newer scheme: the serialized id is the layerBindingId.
            const int inputId = graph->inputIds()->Get(i);
            inputLayerIndex = GetInputLayerInVector(graph, inputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");

        IConnectableLayer* inputLayer =
            m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());

        // Propagate the serialized tensor info and register the output slot
        // so later layers can connect to this input.
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
        inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
        RegisterOutputSlots(graph, inputLayerIndex, inputLayer);

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
1053
// Adds an armnn OutputLayer for every serialized output id and records a
// (layerName -> BindingPointInfo) entry in m_OutputBindings.
//
// Mirrors SetupInputLayers: scheme 0 (legacy) treats the serialized id as
// the layer's index property; later schemes treat it as the layerBindingId.
// The binding's TensorInfo is taken from the producing layer's output slot.
void IDeserializer::DeserializerImpl::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numOutputs = graph->outputIds()->size();
    m_OutputBindings.clear();
    m_OutputBindings.reserve(numOutputs);

    for (unsigned int i = 0; i < numOutputs; i++)
    {
        unsigned int outputLayerIndex = 0xFFFFFFFF;
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            // Legacy scheme: the serialized id is the layer index property.
            const unsigned int outputId = armnn::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
            outputLayerIndex = GetLayerIndexInVector(graph, outputId);
        }
        else
        {
            // Newer scheme: the serialized id is the layerBindingId.
            const int outputId = graph->outputIds()->Get(i);
            outputLayerIndex = GetOutputLayerInVector(graph, outputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Output has no name.");

        IConnectableLayer* outputLayer =
            m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());

        RegisterInputSlots(graph, outputLayerIndex, outputLayer);
        // The output's tensor info lives on the slot of the layer feeding it,
        // so follow the single input connection back to its source.
        unsigned int sourceLayerIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
        unsigned int outputSlotIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->outputSlotIndex());
        LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(
            sourceBaseLayer->outputSlots()->Get(outputSlotIndex)->tensorInfo());
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
1096
Finn Williams85d36712021-01-26 22:30:06 +00001097void IDeserializer::DeserializerImpl::RegisterOutputSlots(GraphPtr graph,
Derek Lamberti8ddae332019-02-21 16:29:43 +00001098 uint32_t layerIndex,
1099 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +00001100{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001101 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001102 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001103 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
1104 if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
Kevin May43a799c2019-02-08 16:31:42 +00001105 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001106 throw ParseException(fmt::format("The number of outputslots ({0}) does not match the number expected ({1})"
1107 " for layer index: {2} {3}",
1108 baseLayer->outputSlots()->size(),
1109 layer->GetNumOutputSlots(),
1110 layerIndex,
1111 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +00001112 }
1113
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001114 for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +00001115 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001116 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
1117 armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
1118 // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
1119 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
Kevin May43a799c2019-02-08 16:31:42 +00001120 }
1121}
1122
// Records every input slot of the freshly created armnn layer in
// m_GraphConnections so the connection phase can wire it to its producer.
//
// ignoreSlots lists slot positions to skip — used when some inputs are
// provided by other means (e.g. constant tensors attached directly), so
// the serialized slot count is expected to be smaller than the created
// layer's by exactly ignoreSlots.size(). Throws ParseException when the
// counts do not line up.
void IDeserializer::DeserializerImpl::RegisterInputSlots(GraphPtr graph,
                                                         uint32_t layerIndex,
                                                         armnn::IConnectableLayer* layer,
                                                         std::vector<unsigned int> ignoreSlots)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    ARMNN_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);

    if (baseLayer->inputSlots()->size() != (layer->GetNumInputSlots() - ignoreSlots.size()))
    {
        throw ParseException(fmt::format("The number of inputslots ({0}) does not match the number expected ({1})"
                                         " for layer index:{2} {3}",
                                         baseLayer->inputSlots()->size(),
                                         layer->GetNumInputSlots(),
                                         layerIndex,
                                         CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
    {
        // Check if slot should be ignored.
        if (std::find(ignoreSlots.begin(), ignoreSlots.end(), i) == ignoreSlots.end())
        {
            // Register the slot against its producer (source layer index +
            // output slot index) recorded in the serialized connection.
            auto fbInputSlot = baseLayer->inputSlots()->Get(i);
            auto fbConnection = fbInputSlot->connection();
            armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
            RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
        }
    }
}
1154
Finn Williams85d36712021-01-26 22:30:06 +00001155void IDeserializer::DeserializerImpl::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001156 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001157 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +00001158{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001159 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +00001160 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001161 m_GraphConnections[sourceLayerIndex] = Connections();
1162 }
1163
1164 Connections& connections = m_GraphConnections[sourceLayerIndex];
1165 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
1166 {
1167 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +00001168 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001169 else
1170 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001171 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001172 }
1173}
Kevin May43a799c2019-02-08 16:31:42 +00001174
Finn Williams85d36712021-01-26 22:30:06 +00001175void IDeserializer::DeserializerImpl::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001176 uint32_t outputSlotIndex,
1177 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001178{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001179 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1180 {
1181 m_GraphConnections[sourceLayerIndex] = Connections();
1182 }
1183
1184 Connections& connections = m_GraphConnections[sourceLayerIndex];
1185 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1186 {
1187 throw ParseException("Same output slot index processed twice");
1188 }
1189
1190 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +00001191}
1192
Finn Williams85d36712021-01-26 22:30:06 +00001193void IDeserializer::DeserializerImpl::ParseAbs(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001194{
1195 CHECK_LAYERS(graph, 0, layerIndex);
1196 auto inputs = GetInputs(graph, layerIndex);
1197 CHECK_LOCATION();
1198 CHECK_VALID_SIZE(inputs.size(), 1);
1199
1200 auto outputs = GetOutputs(graph, layerIndex);
1201 CHECK_VALID_SIZE(outputs.size(), 1);
1202
1203 auto layerName = GetLayerName(graph, layerIndex);
1204
josh minor4a3c6102020-01-06 16:40:46 -06001205 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
1206 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001207 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1208 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1209
1210 RegisterInputSlots(graph, layerIndex, layer);
1211 RegisterOutputSlots(graph, layerIndex, layer);
1212}
1213
Finn Williams85d36712021-01-26 22:30:06 +00001214void IDeserializer::DeserializerImpl::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +00001215{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001216 CHECK_LAYERS(graph, 0, layerIndex);
1217 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001218 CHECK_LOCATION();
1219 CHECK_VALID_SIZE(inputs.size(), 1);
1220
Derek Lamberti8ddae332019-02-21 16:29:43 +00001221 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001222 CHECK_VALID_SIZE(outputs.size(), 1);
1223
Derek Lamberti8ddae332019-02-21 16:29:43 +00001224 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001225 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001226 auto serializerDescriptor = serializerLayer->descriptor();
1227
1228 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001229 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +00001230 descriptor.m_A = serializerDescriptor->a();
1231 descriptor.m_B = serializerDescriptor->b();
1232
1233 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
1234 layerName.c_str());
1235 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1236 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1237
Derek Lamberti8ddae332019-02-21 16:29:43 +00001238 RegisterInputSlots(graph, layerIndex, layer);
1239 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +00001240}
1241
Finn Williams85d36712021-01-26 22:30:06 +00001242void IDeserializer::DeserializerImpl::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001243{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001244 CHECK_LAYERS(graph, 0, layerIndex);
1245 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001246 CHECK_LOCATION();
1247 CHECK_VALID_SIZE(inputs.size(), 2);
1248
Derek Lamberti8ddae332019-02-21 16:29:43 +00001249 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001250 CHECK_VALID_SIZE(outputs.size(), 1);
1251
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001252 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00001253 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Add);
1254 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001255
1256 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1257 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1258
Derek Lamberti8ddae332019-02-21 16:29:43 +00001259 RegisterInputSlots(graph, layerIndex, layer);
1260 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001261}
1262
Finn Williams85d36712021-01-26 22:30:06 +00001263void IDeserializer::DeserializerImpl::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001264{
1265 CHECK_LAYERS(graph, 0, layerIndex);
1266 auto inputs = GetInputs(graph, layerIndex);
1267 CHECK_LOCATION();
1268 CHECK_VALID_SIZE(inputs.size(), 1);
1269
1270 auto outputs = GetOutputs(graph, layerIndex);
1271 CHECK_VALID_SIZE(outputs.size(), 1);
1272
1273 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1274 auto serializerDescriptor = serializerLayer->descriptor();
1275
1276 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001277 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001278 descriptor.m_Axis = serializerDescriptor->axis();
1279 auto layerName = GetLayerName(graph, layerIndex);
1280 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1281
1282 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1283 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1284
1285 RegisterInputSlots(graph, layerIndex, layer);
1286 RegisterOutputSlots(graph, layerIndex, layer);
1287}
1288
Samuel Yapa04f4a12022-08-19 11:14:38 +01001289void IDeserializer::DeserializerImpl::ParseBatchMatMul(GraphPtr graph, unsigned int layerIndex)
1290{
1291 CHECK_LAYERS(graph, 0, layerIndex);
1292
1293 auto inputs = GetInputs(graph, layerIndex);
1294 CHECK_LOCATION();
1295 CHECK_VALID_SIZE(inputs.size(), 2);
1296
1297 auto outputs = GetOutputs(graph, layerIndex);
1298 CHECK_VALID_SIZE(outputs.size(), 1);
1299
1300 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchMatMulLayer();
1301 auto serializerDescriptor = serializerLayer->descriptor();
1302
1303 armnn::BatchMatMulDescriptor descriptor(serializerDescriptor->transposeX(),
1304 serializerDescriptor->transposeY(),
1305 serializerDescriptor->adjointX(),
1306 serializerDescriptor->adjointY(),
1307 ToDataLayout(serializerDescriptor->dataLayoutX()),
1308 ToDataLayout(serializerDescriptor->dataLayoutY()));
1309
1310 auto layerName = GetLayerName(graph, layerIndex);
1311 IConnectableLayer* layer = m_Network->AddBatchMatMulLayer(descriptor, layerName.c_str());
1312
1313 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1314 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1315
1316 RegisterInputSlots(graph, layerIndex, layer);
1317 RegisterOutputSlots(graph, layerIndex, layer);
1318}
1319
Finn Williams85d36712021-01-26 22:30:06 +00001320void IDeserializer::DeserializerImpl::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001321{
1322 CHECK_LAYERS(graph, 0, layerIndex);
1323
Finn Williams85d36712021-01-26 22:30:06 +00001324 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001325 CHECK_VALID_SIZE(inputs.size(), 1);
1326
Finn Williams85d36712021-01-26 22:30:06 +00001327 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001328 CHECK_VALID_SIZE(outputs.size(), 1);
1329
1330 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1331 auto flatBufferCrops = flatBufferDescriptor->crops();
1332 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1333
Mike Kelly51b8c312022-05-24 11:34:02 +01001334 if (flatBufferCrops->size() % 2 != 0)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001335 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001336 throw ParseException(fmt::format("The size of crops must be divisible by 2 {}", CHECK_LOCATION().AsString()));
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001337 }
1338
1339 std::vector<std::pair<unsigned int, unsigned int>> crops;
Mike Kelly51b8c312022-05-24 11:34:02 +01001340 crops.reserve(flatBufferCrops->size() / 2);
1341 for (unsigned int i = 0; i < flatBufferCrops->size() - 1; i += 2)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001342 {
1343 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1344 }
1345
1346 armnn::BatchToSpaceNdDescriptor descriptor;
1347 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1348 descriptor.m_BlockShape =
1349 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1350 descriptor.m_Crops = crops;
1351
1352 auto layerName = GetLayerName(graph, layerIndex);
1353 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1354
1355 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1356 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1357
1358 RegisterInputSlots(graph, layerIndex, layer);
1359 RegisterOutputSlots(graph, layerIndex, layer);
1360}
1361
Finn Williams85d36712021-01-26 22:30:06 +00001362void IDeserializer::DeserializerImpl::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
ruoyan018e7fa232019-02-28 15:09:07 +00001363{
1364 CHECK_LAYERS(graph, 0, layerIndex);
1365
1366 auto inputs = GetInputs(graph, layerIndex);
1367 CHECK_VALID_SIZE(inputs.size(), 1);
1368
1369 auto outputs = GetOutputs(graph, layerIndex);
1370 CHECK_VALID_SIZE(outputs.size(), 1);
1371 auto outputInfo = ToTensorInfo(outputs[0]);
1372
ruoyan015c7ab052019-03-04 14:48:02 +00001373 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001374
1375 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1376 auto serializerDescriptor = serializerLayer->descriptor();
1377
1378 armnn::BatchNormalizationDescriptor descriptor;
1379 descriptor.m_Eps = serializerDescriptor->eps();
1380 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1381
1382 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1383 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1384 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1385 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1386
1387 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1388 mean,
1389 variance,
1390 beta,
1391 gamma,
1392 layerName.c_str());
1393 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1394
1395 RegisterInputSlots(graph, layerIndex, layer);
1396 RegisterOutputSlots(graph, layerIndex, layer);
1397}
1398
mathad01b392e982021-04-07 12:07:30 +01001399void IDeserializer::DeserializerImpl::ParseCast(GraphPtr graph, unsigned int layerIndex)
1400{
1401 CHECK_LAYERS(graph, 0, layerIndex);
1402 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1403 CHECK_LOCATION();
1404 CHECK_VALID_SIZE(inputs.size(), 1);
1405
1406 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1407 CHECK_VALID_SIZE(outputs.size(), 1);
1408
1409 auto layerName = GetLayerName(graph, layerIndex);
1410
1411 IConnectableLayer* layer = m_Network->AddCastLayer(layerName.c_str());
1412
1413 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1414 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1415
1416 RegisterInputSlots(graph, layerIndex, layer);
1417 RegisterOutputSlots(graph, layerIndex, layer);
1418}
1419
/// Deserializes a Constant layer (no inputs, one output carrying the tensor data).
/// For models written before weights-layout scheme 1 the constant is assumed to
/// hold depthwise-convolution weights and is converted from the legacy
/// [M,I,H,W] layout to the current [1,H,W,I*M] layout before being added.
void IDeserializer::DeserializerImpl::ParseConstant(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
    auto serializerInput = serializerLayer->input();

    armnn::ConstTensor input = ToConstTensor(serializerInput);
    IConnectableLayer* layer;

    // Required for when Constant Layer is used as an inputs to DepthwiseConvolution2d Layer.
    // Running a model that was created before weights layout scheme version was added to our flatbuffers
    // file ensuring older models can still be read and executed. featureVersion weights layout scheme 1
    // indicates a change in the depthwise weights layout within ArmNN from [M,I,H,W] --> [1,H,W,I*M]
    if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
    {
        // Permute weights [ H, W, M, I ] --> [ 1, H, W, I*M ]
        // Step1: [ M, I, H, W ] --> [ H, W, I, M]
        PermutationVector permutationVector = { 3, 2, 0, 1 };
        armnn::TensorInfo weightsInfo = input.GetInfo();
        // Scratch buffer that receives the permuted copy of the raw tensor bytes.
        std::unique_ptr<unsigned char[]> permuteBuffer(new unsigned char[weightsInfo.GetNumBytes()]);
        weightsInfo = armnnUtils::Permuted(weightsInfo, permutationVector);
        armnnUtils::Permute(weightsInfo.GetShape(), permutationVector,
                            input.GetMemoryArea(), permuteBuffer.get(),
                            GetDataTypeSize(weightsInfo.GetDataType()));

        // Step2: Reshape [ H, W, I, M] --> [ 1, H, W, I*M ]
        auto weightsShape = weightsInfo.GetShape();
        weightsInfo.SetShape({1,
                              weightsShape[0],
                              weightsShape[1],
                              weightsShape[2]*weightsShape[3]});
        weightsInfo.SetConstant(true);

        armnn::ConstTensor weightsPermuted(weightsInfo, permuteBuffer.get());

        layer = m_Network->AddConstantLayer(weightsPermuted, layerName.c_str());

        // The output carries the converted (permuted + reshaped) tensor info,
        // not the serialized output info.
        layer->GetOutputSlot(0).SetTensorInfo(weightsPermuted.GetInfo());

        RegisterOutputSlots(graph, layerIndex, layer);

        // Early return: this path has already registered its output slots above.
        return;
    }
    else
    {
        // Current layout scheme: add the constant as-is and use the serialized
        // output tensor info, marked constant.
        layer = m_Network->AddConstantLayer(input, layerName.c_str());

        armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
        outputTensorInfo.SetConstant(true);
        layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
    }

    RegisterOutputSlots(graph, layerIndex, layer);
}
1481
Finn Williams85d36712021-01-26 22:30:06 +00001482void IDeserializer::DeserializerImpl::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001483{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001484 CHECK_LAYERS(graph, 0, layerIndex);
1485 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001486 CHECK_LOCATION();
Mike Kellya0766c32019-02-19 17:22:07 +00001487
Derek Lamberti8ddae332019-02-21 16:29:43 +00001488 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001489 CHECK_VALID_SIZE(outputs.size(), 1);
1490
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001491 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
1492
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001493 auto layerName = GetLayerName(graph, layerIndex);
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001494 auto flatbufferDescriptor = flatBufferLayer->descriptor();
Mike Kellya0766c32019-02-19 17:22:07 +00001495
1496 armnn::Convolution2dDescriptor descriptor;
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001497 descriptor.m_PadLeft = flatbufferDescriptor->padLeft();
1498 descriptor.m_PadRight = flatbufferDescriptor->padRight();
1499 descriptor.m_PadTop = flatbufferDescriptor->padTop();
1500 descriptor.m_PadBottom = flatbufferDescriptor->padBottom();
1501 descriptor.m_StrideX = flatbufferDescriptor->strideX();
1502 descriptor.m_StrideY = flatbufferDescriptor->strideY();;
1503 descriptor.m_DilationX = flatbufferDescriptor->dilationX();
1504 descriptor.m_DilationY = flatbufferDescriptor->dilationY();;
1505 descriptor.m_BiasEnabled = flatbufferDescriptor->biasEnabled();;
1506 descriptor.m_DataLayout = ToDataLayout(flatbufferDescriptor->dataLayout());
Mike Kellya0766c32019-02-19 17:22:07 +00001507
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001508 armnn::IConnectableLayer* layer;
1509 std::vector<unsigned int> ignoreSlots {};
Mike Kellya0766c32019-02-19 17:22:07 +00001510
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001511 armnn::ConstTensor biasTensor;
1512 // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
1513 // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
1514 if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
Mike Kellya0766c32019-02-19 17:22:07 +00001515 {
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001516 // If the model stores weights and biases as members of the layer we have to read them from there
1517 // but add them to their own ConstantLayer for compatibility
1518 CHECK_VALID_SIZE(inputs.size(), 1);
1519
1520 layer = m_Network->AddConvolution2dLayer(descriptor,
1521 layerName.c_str());
1522
1523 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1524 auto weightsLayer = m_Network->AddConstantLayer(weightsTensor);
1525 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
1526 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsTensor.GetInfo());
1527 ignoreSlots.emplace_back(1u);
1528
1529 if (descriptor.m_BiasEnabled)
1530 {
1531 biasTensor = ToConstTensor(flatBufferLayer->biases());
1532 auto biasLayer = m_Network->AddConstantLayer(biasTensor);
1533 biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
1534 biasLayer->GetOutputSlot(0).SetTensorInfo(biasTensor.GetInfo());
1535 ignoreSlots.emplace_back(2u);
1536 }
Mike Kellya0766c32019-02-19 17:22:07 +00001537 }
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001538 else
1539 {
1540 layer = m_Network->AddConvolution2dLayer(descriptor,
1541 layerName.c_str());
1542 uint32_t numInputs = descriptor.GetNumInputs();
1543 CHECK_VALID_SIZE(inputs.size(), numInputs);
1544 }
1545
Mike Kellya0766c32019-02-19 17:22:07 +00001546 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1547 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1548
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001549 RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
Derek Lamberti8ddae332019-02-21 16:29:43 +00001550 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001551}
1552
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001553void IDeserializer::DeserializerImpl::ParseConvolution3d(GraphPtr graph, unsigned int layerIndex)
1554{
1555 CHECK_LAYERS(graph, 0, layerIndex);
1556 auto inputs = GetInputs(graph, layerIndex);
1557 CHECK_LOCATION();
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001558
1559 auto outputs = GetOutputs(graph, layerIndex);
1560 CHECK_VALID_SIZE(outputs.size(), 1);
1561
1562 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution3dLayer();
1563 auto layerName = GetLayerName(graph, layerIndex);
1564 auto serializerDescriptor = serializerLayer->descriptor();
1565
1566 armnn::Convolution3dDescriptor descriptor;
1567 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1568 descriptor.m_PadRight = serializerDescriptor->padRight();
1569 descriptor.m_PadTop = serializerDescriptor->padTop();
1570 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1571 descriptor.m_PadFront = serializerDescriptor->padFront();
1572 descriptor.m_PadBack = serializerDescriptor->padBack();
1573 descriptor.m_StrideX = serializerDescriptor->strideX();
1574 descriptor.m_StrideY = serializerDescriptor->strideY();
1575 descriptor.m_StrideZ = serializerDescriptor->strideZ();
1576 descriptor.m_DilationX = serializerDescriptor->dilationX();
1577 descriptor.m_DilationY = serializerDescriptor->dilationY();
1578 descriptor.m_DilationZ = serializerDescriptor->dilationZ();
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001579 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001580 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1581
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001582 uint32_t numInputs = descriptor.GetNumInputs();
1583 CHECK_VALID_SIZE(inputs.size(), numInputs);
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001584
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001585 IConnectableLayer* layer = m_Network->AddConvolution3dLayer(descriptor, layerName.c_str());
1586
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001587 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1588 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1589
1590 RegisterInputSlots(graph, layerIndex, layer);
1591 RegisterOutputSlots(graph, layerIndex, layer);
1592}
1593
Finn Williams85d36712021-01-26 22:30:06 +00001594void IDeserializer::DeserializerImpl::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001595{
1596 CHECK_LAYERS(graph, 0, layerIndex);
1597
1598 auto inputs = GetInputs(graph, layerIndex);
1599 CHECK_VALID_SIZE(inputs.size(), 1);
1600
1601 auto outputs = GetOutputs(graph, layerIndex);
1602 CHECK_VALID_SIZE(outputs.size(), 1);
1603
1604 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1605
1606 armnn::DepthToSpaceDescriptor descriptor;
1607 descriptor.m_BlockSize = fbDescriptor->blockSize();
1608 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1609
1610 auto layerName = GetLayerName(graph, layerIndex);
1611 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1612
1613 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1614 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1615
1616 RegisterInputSlots(graph, layerIndex, layer);
1617 RegisterOutputSlots(graph, layerIndex, layer);
1618}
1619
/// Deserializes a DepthwiseConvolution2d layer.
///
/// Handles two backward-compatibility dimensions:
///  - ConstTensorsAsInputs: older models embed weights/biases in the layer;
///    they are re-exposed here via ConstantLayers wired to slots 1 and 2.
///  - WeightsLayoutScheme: older models store depthwise weights as [M,I,H,W];
///    they are permuted/reshaped to the current [1,H,W,I*M] layout.
void IDeserializer::DeserializerImpl::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    // Rebuild the depthwise descriptor (padding, strides, dilation, bias flag, layout).
    armnn::DepthwiseConvolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_DilationX = serializerDescriptor->dilationX();
    descriptor.m_DilationY = serializerDescriptor->dilationY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    IConnectableLayer* layer;
    std::vector<unsigned int> ignoreSlots {};

    // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
    // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
    if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
    {
        CHECK_VALID_SIZE(inputs.size(), 1);

        // If the model stores weights and biases as members of the layer we have to read them from there
        // but add them to their own ConstantLayer for compatibility
        armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
        // Slot 1 (weights) is fed by a ConstantLayer created below, so the
        // serialized connection info must not be registered for it.
        ignoreSlots.emplace_back(1u);

        layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                          layerName.c_str());

        // NOTE(review): optionalBiases appears unused in this function — confirm
        // it is a leftover from the pre-ConstTensorsAsInputs API.
        armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
        if (descriptor.m_BiasEnabled)
        {
            // Bias is routed through its own ConstantLayer into input slot 2.
            armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
            ignoreSlots.emplace_back(2u);

            auto biasLayer = m_Network->AddConstantLayer(biases);
            biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
            biasLayer->GetOutputSlot(0).SetTensorInfo(biases.GetInfo());
        }

        if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
        {
            // Permute weights [ H, W, M, I ] --> [ 1, H, W, I*M ]
            // Step1: [ M, I, H, W ] --> [ H, W, I, M]
            PermutationVector permutationVector = { 3, 2, 0, 1 };
            armnn::TensorInfo weightsInfo = weights.GetInfo();
            // Scratch buffer that receives the permuted copy of the raw weight bytes.
            std::unique_ptr<unsigned char[]> permuteBuffer(new unsigned char[weightsInfo.GetNumBytes()]);
            weightsInfo = armnnUtils::Permuted(weightsInfo, permutationVector);
            armnnUtils::Permute(weightsInfo.GetShape(), permutationVector,
                                weights.GetMemoryArea(), permuteBuffer.get(),
                                GetDataTypeSize(weightsInfo.GetDataType()));

            // Step2: Reshape [ H, W, I, M] --> [ 1, H, W, I*M ]
            auto weightsShape = weightsInfo.GetShape();
            weightsInfo.SetShape({1,
                                  weightsShape[0],
                                  weightsShape[1],
                                  weightsShape[2]*weightsShape[3]});

            armnn::ConstTensor weightsPermuted(weightsInfo, permuteBuffer.get());

            auto weightsLayer = m_Network->AddConstantLayer(weightsPermuted);
            weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
            weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsPermuted.GetInfo());
        }
        else
        {
            // Weights already use the current layout; wire them straight through.
            auto weightsLayer = m_Network->AddConstantLayer(weights);
            weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
            weightsLayer->GetOutputSlot(0).SetTensorInfo(weights.GetInfo());
        }
    }
    else
    {
        // Newer models serialize weights/biases as regular layer inputs.
        layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                          layerName.c_str());
        uint32_t numInputs = descriptor.GetNumInputs();
        CHECK_VALID_SIZE(inputs.size(), numInputs);
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1719
Finn Williams85d36712021-01-26 22:30:06 +00001720void IDeserializer::DeserializerImpl::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001721{
1722 CHECK_LAYERS(graph, 0, layerIndex);
1723 auto inputs = GetInputs(graph, layerIndex);
1724 CHECK_LOCATION();
1725 CHECK_VALID_SIZE(inputs.size(), 2);
1726
1727 auto outputs = GetOutputs(graph, layerIndex);
1728 CHECK_VALID_SIZE(outputs.size(), 4);
1729
1730 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1731 auto layerName = GetLayerName(graph, layerIndex);
1732 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1733
1734 armnn::DetectionPostProcessDescriptor descriptor;
1735 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1736 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1737 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1738 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1739 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1740 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1741 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1742 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1743 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1744 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1745 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1746
1747 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1748
1749 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1750 anchors,
1751 layerName.c_str());
1752
1753 for (unsigned int i = 0; i < 4; i++)
1754 {
1755 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1756 }
1757
1758 RegisterInputSlots(graph, layerIndex, layer);
1759 RegisterOutputSlots(graph, layerIndex, layer);
1760}
1761
Finn Williams85d36712021-01-26 22:30:06 +00001762void IDeserializer::DeserializerImpl::ParseDivision(GraphPtr graph, unsigned int layerIndex)
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001763{
1764 CHECK_LAYERS(graph, 0, layerIndex);
1765 auto inputs = GetInputs(graph, layerIndex);
1766 CHECK_LOCATION();
1767 CHECK_VALID_SIZE(inputs.size(), 2);
1768
1769 auto outputs = GetOutputs(graph, layerIndex);
1770 CHECK_VALID_SIZE(outputs.size(), 1);
1771
1772 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00001773 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Div);
1774 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001775
1776 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1777 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1778
1779 RegisterInputSlots(graph, layerIndex, layer);
1780 RegisterOutputSlots(graph, layerIndex, layer);
1781}
1782
Finn Williams85d36712021-01-26 22:30:06 +00001783void IDeserializer::DeserializerImpl::ParseEqual(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001784{
1785 CHECK_LAYERS(graph, 0, layerIndex);
1786 auto inputs = GetInputs(graph, layerIndex);
1787 CHECK_LOCATION();
1788 CHECK_VALID_SIZE(inputs.size(), 2);
1789
1790 auto outputs = GetOutputs(graph, layerIndex);
1791 CHECK_VALID_SIZE(outputs.size(), 1);
1792
1793 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001794 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1795 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001796
1797 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1798 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1799
1800 RegisterInputSlots(graph, layerIndex, layer);
1801 RegisterOutputSlots(graph, layerIndex, layer);
1802}
1803
Finn Williams85d36712021-01-26 22:30:06 +00001804void IDeserializer::DeserializerImpl::ParseFill(GraphPtr graph, unsigned int layerIndex)
Keith Davis300ad562020-06-04 16:34:23 +01001805{
1806 CHECK_LAYERS(graph, 0, layerIndex);
1807 auto inputs = GetInputs(graph, layerIndex);
1808 CHECK_LOCATION();
1809 CHECK_VALID_SIZE(inputs.size(), 1);
1810
1811 auto outputs = GetOutputs(graph, layerIndex);
1812 CHECK_VALID_SIZE(outputs.size(), 1);
1813
1814 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armaganfd0cae32021-11-08 17:18:31 +00001815 armnn::FillDescriptor descriptor;
1816 descriptor.m_Value = graph->layers()->Get(layerIndex)->layer_as_FillLayer()->descriptor()->value();
Keith Davis300ad562020-06-04 16:34:23 +01001817 IConnectableLayer* layer = m_Network->AddFillLayer(descriptor, layerName.c_str());
1818
1819 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1820 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1821
1822 RegisterInputSlots(graph, layerIndex, layer);
1823 RegisterOutputSlots(graph, layerIndex, layer);
1824}
1825
Finn Williams85d36712021-01-26 22:30:06 +00001826void IDeserializer::DeserializerImpl::ParseGreater(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001827{
1828 CHECK_LAYERS(graph, 0, layerIndex);
1829 auto inputs = GetInputs(graph, layerIndex);
1830 CHECK_LOCATION();
1831 CHECK_VALID_SIZE(inputs.size(), 2);
1832
1833 auto outputs = GetOutputs(graph, layerIndex);
1834 CHECK_VALID_SIZE(outputs.size(), 1);
1835
1836 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001837 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1838 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001839
1840 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1841 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1842
1843 RegisterInputSlots(graph, layerIndex, layer);
1844 RegisterOutputSlots(graph, layerIndex, layer);
1845}
1846
Finn Williams85d36712021-01-26 22:30:06 +00001847void IDeserializer::DeserializerImpl::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001848{
1849 CHECK_LAYERS(graph, 0, layerIndex);
1850
1851 auto inputs = GetInputs(graph, layerIndex);
1852 CHECK_VALID_SIZE(inputs.size(), 1);
1853
1854 auto outputs = GetOutputs(graph, layerIndex);
1855 CHECK_VALID_SIZE(outputs.size(), 1);
1856
1857 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1858 auto fbDescriptor = fbLayer->descriptor();
1859
1860 armnn::InstanceNormalizationDescriptor descriptor;
1861 descriptor.m_Gamma = fbDescriptor->gamma();
1862 descriptor.m_Beta = fbDescriptor->beta();
1863 descriptor.m_Eps = fbDescriptor->eps();
1864 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1865
1866 const std::string layerName = GetLayerName(graph, layerIndex);
1867 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1868
1869 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1870 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1871
1872 RegisterInputSlots(graph, layerIndex, layer);
1873 RegisterOutputSlots(graph, layerIndex, layer);
1874}
1875
Finn Williams85d36712021-01-26 22:30:06 +00001876void IDeserializer::DeserializerImpl::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001877{
1878 CHECK_LAYERS(graph, 0, layerIndex);
1879
1880 auto inputs = GetInputs(graph, layerIndex);
1881 CHECK_VALID_SIZE(inputs.size(), 1);
1882
1883 auto outputs = GetOutputs(graph, layerIndex);
1884 CHECK_VALID_SIZE(outputs.size(), 1);
1885 auto outputInfo = ToTensorInfo(outputs[0]);
1886
1887 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1888 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1889
1890 auto layerName = GetLayerName(graph, layerIndex);
1891 armnn::L2NormalizationDescriptor descriptor;
1892 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001893 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001894
1895 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1896 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1897
1898 RegisterInputSlots(graph, layerIndex, layer);
1899 RegisterOutputSlots(graph, layerIndex, layer);
1900}
1901
Finn Williams85d36712021-01-26 22:30:06 +00001902void IDeserializer::DeserializerImpl::ParseLogicalBinary(GraphPtr graph, unsigned int layerIndex)
James Conroyaba90cd2020-11-06 16:28:18 +00001903{
1904 CHECK_LAYERS(graph, 0, layerIndex);
1905 CHECK_LOCATION();
1906
1907 auto inputs = GetInputs(graph, layerIndex);
1908 CHECK_VALID_SIZE(inputs.size(), 2);
1909
1910 auto outputs = GetOutputs(graph, layerIndex);
1911 CHECK_VALID_SIZE(outputs.size(), 1);
1912
1913 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer();
1914 auto fbDescriptor = fbLayer->descriptor();
1915
1916 armnn::LogicalBinaryDescriptor descriptor;
1917 descriptor.m_Operation = ToLogicalBinaryOperation(fbDescriptor->operation());
1918
1919 const std::string& layerName = GetLayerName(graph, layerIndex);
1920 IConnectableLayer* layer = m_Network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1921
1922 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1923 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1924
1925 RegisterInputSlots(graph, layerIndex, layer);
1926 RegisterOutputSlots(graph, layerIndex, layer);
1927}
1928
Finn Williams85d36712021-01-26 22:30:06 +00001929void IDeserializer::DeserializerImpl::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan26257852019-10-14 13:00:47 +01001930{
1931 CHECK_LAYERS(graph, 0, layerIndex);
1932
Finn Williams85d36712021-01-26 22:30:06 +00001933 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001934 CHECK_VALID_SIZE(inputs.size(), 1);
1935
Finn Williams85d36712021-01-26 22:30:06 +00001936 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001937 CHECK_VALID_SIZE(outputs.size(), 1);
1938
1939 armnn::LogSoftmaxDescriptor descriptor;
1940 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1941 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1942 auto layerName = GetLayerName(graph, layerIndex);
1943
1944 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1945
1946 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1947 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1948
1949 RegisterInputSlots(graph, layerIndex, layer);
1950 RegisterOutputSlots(graph, layerIndex, layer);
1951}
1952
Finn Williams85d36712021-01-26 22:30:06 +00001953void IDeserializer::DeserializerImpl::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001954{
1955 CHECK_LAYERS(graph, 0, layerIndex);
1956 auto inputs = GetInputs(graph, layerIndex);
1957 CHECK_LOCATION();
1958 CHECK_VALID_SIZE(inputs.size(), 2);
1959
1960 auto outputs = GetOutputs(graph, layerIndex);
1961 CHECK_VALID_SIZE(outputs.size(), 1);
1962
1963 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00001964 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Minimum);
1965 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001966
1967 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1968 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1969
1970 RegisterInputSlots(graph, layerIndex, layer);
1971 RegisterOutputSlots(graph, layerIndex, layer);
1972}
1973
Finn Williams85d36712021-01-26 22:30:06 +00001974void IDeserializer::DeserializerImpl::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001975{
1976 CHECK_LAYERS(graph, 0, layerIndex);
1977 auto inputs = GetInputs(graph, layerIndex);
1978 CHECK_LOCATION();
1979 CHECK_VALID_SIZE(inputs.size(), 2);
1980
1981 auto outputs = GetOutputs(graph, layerIndex);
1982 CHECK_VALID_SIZE(outputs.size(), 1);
1983
1984 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00001985 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Maximum);
1986 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001987
1988 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1989 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1990
1991 RegisterInputSlots(graph, layerIndex, layer);
1992 RegisterOutputSlots(graph, layerIndex, layer);
1993}
1994
Jim Flynne242f2d2019-05-22 14:24:13 +01001995const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1996 unsigned int layerIndex)
1997{
1998 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1999
2000 switch (layerType)
2001 {
2002 case Layer::Layer_ConcatLayer:
2003 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
2004 case Layer::Layer_MergerLayer:
2005 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
2006 default:
2007 throw armnn::Exception("unknown layer type, should be concat or merger");
2008 }
2009}
Simon Obute51f67772021-09-03 15:50:13 +01002010void IDeserializer::DeserializerImpl::ParseChannelShuffle(GraphPtr graph, unsigned int layerIndex)
2011{
2012 CHECK_LAYERS(graph, 0, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01002013
Simon Obute51f67772021-09-03 15:50:13 +01002014 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2015 CHECK_VALID_SIZE(inputs.size(), 1);
2016
2017 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2018 CHECK_VALID_SIZE(outputs.size(), 1);
2019
2020 armnn::ChannelShuffleDescriptor descriptor;
2021 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->axis();
2022 descriptor.m_NumGroups =
2023 graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->numGroups();
2024
2025 auto layerName = GetLayerName(graph, layerIndex);
2026 IConnectableLayer* layer = m_Network->AddChannelShuffleLayer(descriptor, layerName.c_str());
2027
2028 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2029 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2030
2031 RegisterInputSlots(graph, layerIndex, layer);
2032 RegisterOutputSlots(graph, layerIndex, layer);
2033}
Finn Williams85d36712021-01-26 22:30:06 +00002034void IDeserializer::DeserializerImpl::ParseComparison(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01002035{
2036 CHECK_LAYERS(graph, 0, layerIndex);
2037 CHECK_LOCATION();
2038
2039 auto inputs = GetInputs(graph, layerIndex);
2040 CHECK_VALID_SIZE(inputs.size(), 2);
2041
2042 auto outputs = GetOutputs(graph, layerIndex);
2043 CHECK_VALID_SIZE(outputs.size(), 1);
2044
2045 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
2046 auto fbDescriptor = fbLayer->descriptor();
2047
2048 armnn::ComparisonDescriptor descriptor;
2049 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
2050
2051 const std::string& layerName = GetLayerName(graph, layerIndex);
2052 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
2053
2054 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2055 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2056
2057 RegisterInputSlots(graph, layerIndex, layer);
2058 RegisterOutputSlots(graph, layerIndex, layer);
2059}
2060
Mike Kelly3ec30772023-03-08 13:47:17 +00002061void IDeserializer::DeserializerImpl::ParseElementwiseBinary(GraphPtr graph, unsigned int layerIndex)
2062{
2063 CHECK_LAYERS(graph, 0, layerIndex);
2064 CHECK_LOCATION();
2065
2066 auto inputs = GetInputs(graph, layerIndex);
2067 CHECK_VALID_SIZE(inputs.size(), 2);
2068
2069 auto outputs = GetOutputs(graph, layerIndex);
2070 CHECK_VALID_SIZE(outputs.size(), 1);
2071
2072 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseBinaryLayer();
2073 auto fbDescriptor = fbLayer->descriptor();
2074
2075 armnn::ElementwiseBinaryDescriptor descriptor;
2076 descriptor.m_Operation = ToElementwiseBinaryOperation(fbDescriptor->operation());
2077
2078 const std::string& layerName = GetLayerName(graph, layerIndex);
2079 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
2080
2081 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2082 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2083
2084 RegisterInputSlots(graph, layerIndex, layer);
2085 RegisterOutputSlots(graph, layerIndex, layer);
2086}
2087
Finn Williams85d36712021-01-26 22:30:06 +00002088void IDeserializer::DeserializerImpl::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
josh minor4a3c6102020-01-06 16:40:46 -06002089{
2090 CHECK_LAYERS(graph, 0, layerIndex);
2091 CHECK_LOCATION();
2092
2093 auto inputs = GetInputs(graph, layerIndex);
2094 CHECK_VALID_SIZE(inputs.size(), 1);
2095
2096 auto outputs = GetOutputs(graph, layerIndex);
2097 CHECK_VALID_SIZE(outputs.size(), 1);
2098
2099 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
2100 auto fbDescriptor = fbLayer->descriptor();
2101
2102 armnn::ElementwiseUnaryDescriptor descriptor;
Mike Kelly3ec30772023-03-08 13:47:17 +00002103 descriptor.m_Operation = ToElementwiseUnaryOperation(fbDescriptor->operation());
josh minor4a3c6102020-01-06 16:40:46 -06002104
2105 const std::string& layerName = GetLayerName(graph, layerIndex);
2106 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
2107
2108 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2109 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2110
2111 RegisterInputSlots(graph, layerIndex, layer);
2112 RegisterOutputSlots(graph, layerIndex, layer);
2113}
2114
// Deserializes a Concat layer (also accepting the deprecated Merger layer
// type via GetOriginsDescriptor) and rebuilds its OriginsDescriptor: one view
// per input, each view carrying per-dimension origin coordinates.
void IDeserializer::DeserializerImpl::ParseConcat(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    // Works for both Concat and the deprecated Merger serialized forms.
    auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
    unsigned int numViews = originsDescriptor->numViews();
    unsigned int numDimensions = originsDescriptor->numDimensions();

    // can now check the number of inputs == number of views
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), numViews);

    armnn::OriginsDescriptor descriptor(numViews, numDimensions);
    auto originsPtr = originsDescriptor->viewOrigins();
    // Copy every origin coordinate (view v, dimension d) from the flatbuffer
    // into the runtime descriptor.
    for (unsigned int v = 0; v < numViews; ++v)
    {
        auto originPtr = originsPtr->Get(v);
        for (unsigned int d = 0; d < numDimensions; ++d)
        {
            uint32_t value = originPtr->data()->Get(d);
            descriptor.SetViewOriginCoord(v, d, value);
        }
    }
    descriptor.SetConcatAxis(originsDescriptor->concatAxis());

    IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2152
Finn Williams85d36712021-01-26 22:30:06 +00002153void IDeserializer::DeserializerImpl::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00002154{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002155 CHECK_LAYERS(graph, 0, layerIndex);
2156 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00002157 CHECK_LOCATION();
2158 CHECK_VALID_SIZE(inputs.size(), 2);
2159
Derek Lamberti8ddae332019-02-21 16:29:43 +00002160 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00002161 CHECK_VALID_SIZE(outputs.size(), 1);
2162
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002163 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00002164 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Mul);
2165 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00002166
2167 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2168 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2169
Derek Lamberti8ddae332019-02-21 16:29:43 +00002170 RegisterInputSlots(graph, layerIndex, layer);
2171 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00002172}
2173
Finn Williams85d36712021-01-26 22:30:06 +00002174void IDeserializer::DeserializerImpl::ParseFloor(GraphPtr graph, unsigned int layerIndex)
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00002175{
2176 CHECK_LAYERS(graph, 0, layerIndex);
2177 CHECK_LOCATION();
2178
2179 auto inputs = GetInputs(graph, layerIndex);
2180 CHECK_VALID_SIZE(inputs.size(), 1);
2181
2182 auto outputs = GetOutputs(graph, layerIndex);
2183 CHECK_VALID_SIZE(outputs.size(), 1);
2184
2185 auto layerName = GetLayerName(graph, layerIndex);
2186
2187 armnn::IConnectableLayer* layer;
2188
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00002189 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00002190
2191 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2192 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2193
2194 RegisterInputSlots(graph, layerIndex, layer);
2195 RegisterOutputSlots(graph, layerIndex, layer);
2196}
2197
// Deserializes a FullyConnected layer. Two serialization formats exist:
// older models store weights/biases as members of the layer itself, newer
// models (feature version m_ConstTensorsAsInputs > 0) pass them as extra
// inputs. For older models the tensors are re-created as ConstantLayers and
// connected to input slots 1 (weights) and 2 (bias) for compatibility.
void IDeserializer::DeserializerImpl::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
    fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
    fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
    fullyConnectedDescriptor.m_ConstantWeights = flatBufferDescriptor->constantWeights();

    armnn::IConnectableLayer* layer;
    // Slots fed by the generated ConstantLayers; RegisterInputSlots must not
    // try to wire these from the serialized input connections.
    std::vector<unsigned int> ignoreSlots {};

    // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
    // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
    if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
    {
        // If the model stores weights and biases as members of the layer we have to read them from there
        // but add them to their own ConstantLayer for compatibility
        CHECK_VALID_SIZE(inputs.size(), 1);
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  layerName.c_str());

        // Weights always occupy input slot 1.
        armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
        auto weightsLayer = m_Network->AddConstantLayer(weightsTensor);
        weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
        weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsTensor.GetInfo());
        ignoreSlots.emplace_back(1u);

        // Bias (slot 2) only exists when the descriptor enables it.
        if (fullyConnectedDescriptor.m_BiasEnabled)
        {
            armnn::ConstTensor biasTensor = ToConstTensor(flatBufferLayer->biases());
            auto biasLayer = m_Network->AddConstantLayer(biasTensor);
            biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
            biasLayer->GetOutputSlot(0).SetTensorInfo(biasTensor.GetInfo());
            ignoreSlots.emplace_back(2u);
        }
    }
    else
    {
        // Newer format: weights/bias arrive as regular inputs, so the input
        // count must match what the descriptor says the layer expects.
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  layerName.c_str());
        uint32_t numInputs = fullyConnectedDescriptor.GetNumInputs();
        CHECK_VALID_SIZE(inputs.size(), numInputs);
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2258
Finn Williams85d36712021-01-26 22:30:06 +00002259void IDeserializer::DeserializerImpl::ParsePad(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002260{
2261 CHECK_LAYERS(graph, 0, layerIndex);
2262
Finn Williams85d36712021-01-26 22:30:06 +00002263 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002264 CHECK_VALID_SIZE(inputs.size(), 1);
2265
Finn Williams85d36712021-01-26 22:30:06 +00002266 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002267 CHECK_VALID_SIZE(outputs.size(), 1);
2268
2269 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
2270 auto flatBufferPadList = flatBufferDescriptor->padList();
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +01002271 auto paddingMode = flatBufferDescriptor->paddingMode();
David Monahan34757812019-06-19 11:47:21 +01002272 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002273
Mike Kelly51b8c312022-05-24 11:34:02 +01002274 if (flatBufferPadList->size() % 2 != 0)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002275 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002276 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2277 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002278 }
2279
2280 std::vector<std::pair<unsigned int, unsigned int>> padList;
Mike Kelly51b8c312022-05-24 11:34:02 +01002281 padList.reserve(flatBufferPadList->size() / 2);
2282 for (unsigned int i = 0; i < flatBufferPadList->size() - 1; i += 2)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002283 {
2284 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2285 }
2286
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +01002287 armnn::PadDescriptor descriptor(padList, padValue, ToPaddingMode(paddingMode));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002288
2289 auto layerName = GetLayerName(graph, layerIndex);
2290 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
2291
2292 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2293 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2294
2295 RegisterInputSlots(graph, layerIndex, layer);
2296 RegisterOutputSlots(graph, layerIndex, layer);
2297}
2298
Finn Williams85d36712021-01-26 22:30:06 +00002299void IDeserializer::DeserializerImpl::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002300{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002301 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002302
2303 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00002304 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002305
Derek Lamberti8ddae332019-02-21 16:29:43 +00002306 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002307 CHECK_VALID_SIZE(inputs.size(), 1);
2308
Derek Lamberti8ddae332019-02-21 16:29:43 +00002309 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002310 CHECK_VALID_SIZE(outputs.size(), 1);
2311 auto outputInfo = ToTensorInfo(outputs[0]);
2312
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002313 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly51b8c312022-05-24 11:34:02 +01002314 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->size()));
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002315
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002316 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002317 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2318
Derek Lamberti8ddae332019-02-21 16:29:43 +00002319 RegisterInputSlots(graph, layerIndex, layer);
2320 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002321}
2322
// Converts a serialized (flatbuffer) Pooling2dDescriptor into the equivalent
// armnn::Pooling2dDescriptor used when building the runtime network.
// @param pooling2dDesc Flatbuffer-generated descriptor pointer.
// @param layerIndex    Unused; kept for interface symmetry.
armnn::Pooling2dDescriptor IDeserializer::DeserializerImpl::GetPooling2dDescriptor(Pooling2dDescriptor pooling2dDesc,
                                                                                   unsigned int layerIndex)
{
    IgnoreUnused(layerIndex);
    armnn::Pooling2dDescriptor desc;

    // Map the serialized pooling algorithm onto the armnn enum.
    switch (pooling2dDesc->poolType())
    {
        case PoolingAlgorithm_Average:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Average;
            break;
        }
        case PoolingAlgorithm_Max:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Max;
            break;
        }
        case PoolingAlgorithm_L2:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::L2;
            break;
        }
        default:
        {
            // NOTE(review): ARMNN_ASSERT_MSG may compile out in release
            // builds, leaving desc.m_PoolType at its default — confirm that
            // this fall-through is intended.
            ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
        }
    }

    // Map the serialized output-shape rounding policy.
    switch (pooling2dDesc->outputShapeRounding())
    {
        case OutputShapeRounding_Floor:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
            break;
        }
        case OutputShapeRounding_Ceiling:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
        }
    }

    // Map the serialized padding method.
    switch (pooling2dDesc->paddingMethod())
    {
        case PaddingMethod_Exclude:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
            break;
        }
        case PaddingMethod_IgnoreValue:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported padding method");
        }
    }

    // Map the serialized data layout (NCHW / NHWC).
    switch (pooling2dDesc->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    // Scalar fields copy across directly.
    desc.m_PadRight   = pooling2dDesc->padRight();
    desc.m_PadLeft    = pooling2dDesc->padLeft();
    desc.m_PadBottom  = pooling2dDesc->padBottom();
    desc.m_PadTop     = pooling2dDesc->padTop();
    desc.m_StrideX    = pooling2dDesc->strideX();
    desc.m_StrideY    = pooling2dDesc->strideY();
    desc.m_PoolWidth  = pooling2dDesc->poolWidth();
    desc.m_PoolHeight = pooling2dDesc->poolHeight();

    return desc;
}
2417
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002418armnn::Pooling3dDescriptor IDeserializer::DeserializerImpl::GetPooling3dDescriptor(Pooling3dDescriptor pooling3dDesc,
2419 unsigned int layerIndex)
2420{
2421 IgnoreUnused(layerIndex);
2422 armnn::Pooling3dDescriptor desc;
Finn Williams85d36712021-01-26 22:30:06 +00002423
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002424 switch (pooling3dDesc->poolType())
2425 {
2426 case PoolingAlgorithm_Average:
2427 {
2428 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
2429 break;
2430 }
2431 case PoolingAlgorithm_Max:
2432 {
2433 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
2434 break;
2435 }
2436 case PoolingAlgorithm_L2:
2437 {
2438 desc.m_PoolType = armnn::PoolingAlgorithm::L2;
2439 break;
2440 }
2441 default:
2442 {
2443 ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
2444 }
2445 }
2446
2447 switch (pooling3dDesc->outputShapeRounding())
2448 {
2449 case OutputShapeRounding_Floor:
2450 {
2451 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2452 break;
2453 }
2454 case OutputShapeRounding_Ceiling:
2455 {
2456 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
2457 break;
2458 }
2459 default:
2460 {
2461 ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
2462 }
2463 }
2464
2465 switch (pooling3dDesc->paddingMethod())
2466 {
2467 case PaddingMethod_Exclude:
2468 {
2469 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2470 break;
2471 }
2472 case PaddingMethod_IgnoreValue:
2473 {
2474 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
2475 break;
2476 }
2477 default:
2478 {
2479 ARMNN_ASSERT_MSG(false, "Unsupported padding method");
2480 }
2481 }
2482
2483 switch (pooling3dDesc->dataLayout())
2484 {
2485 case DataLayout_NCDHW:
2486 {
2487 desc.m_DataLayout = armnn::DataLayout::NCDHW;
2488 break;
2489 }
2490 case DataLayout_NDHWC:
2491 {
2492 desc.m_DataLayout = armnn::DataLayout::NDHWC;
2493 break;
2494 }
2495 default:
2496 {
2497 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
2498 }
2499 }
2500
2501 desc.m_PadRight = pooling3dDesc->padRight();
2502 desc.m_PadLeft = pooling3dDesc->padLeft();
2503 desc.m_PadBottom = pooling3dDesc->padBottom();
2504 desc.m_PadTop = pooling3dDesc->padTop();
2505 desc.m_PadFront = pooling3dDesc->padFront();
2506 desc.m_PadBack = pooling3dDesc->padBack();
2507 desc.m_StrideX = pooling3dDesc->strideX();
2508 desc.m_StrideY = pooling3dDesc->strideY();
2509 desc.m_StrideZ = pooling3dDesc->strideZ();
2510 desc.m_PoolWidth = pooling3dDesc->poolWidth();
2511 desc.m_PoolHeight = pooling3dDesc->poolHeight();
2512 desc.m_PoolDepth = pooling3dDesc->poolDepth();
2513
2514 return desc;
2515}
Finn Williams85d36712021-01-26 22:30:06 +00002516
2517void IDeserializer::DeserializerImpl::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002518{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002519 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002520
Derek Lamberti8ddae332019-02-21 16:29:43 +00002521 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00002522 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002523 CHECK_VALID_SIZE(inputs.size(), 1);
2524
Derek Lamberti8ddae332019-02-21 16:29:43 +00002525 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002526 CHECK_VALID_SIZE(outputs.size(), 1);
2527 auto outputInfo = ToTensorInfo(outputs[0]);
2528
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002529 auto pooling2dDescriptor = GetPooling2dDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002530 auto layerName = GetLayerName(graph, layerIndex);
2531 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002532 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2533
Derek Lamberti8ddae332019-02-21 16:29:43 +00002534 RegisterInputSlots(graph, layerIndex, layer);
2535 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002536}
2537
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002538void IDeserializer::DeserializerImpl::ParsePooling3d(GraphPtr graph, unsigned int layerIndex)
2539{
2540 CHECK_LAYERS(graph, 0, layerIndex);
2541
2542 auto pooling3dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->descriptor();
2543 auto inputs = GetInputs(graph, layerIndex);
2544 CHECK_VALID_SIZE(inputs.size(), 1);
2545
2546 auto outputs = GetOutputs(graph, layerIndex);
2547 CHECK_VALID_SIZE(outputs.size(), 1);
2548 auto outputInfo = ToTensorInfo(outputs[0]);
2549
2550 auto pooling3dDescriptor = GetPooling3dDescriptor(pooling3dDes, layerIndex);
2551 auto layerName = GetLayerName(graph, layerIndex);
2552 IConnectableLayer* layer = m_Network->AddPooling3dLayer(pooling3dDescriptor, layerName.c_str());
2553 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2554
2555 RegisterInputSlots(graph, layerIndex, layer);
2556 RegisterOutputSlots(graph, layerIndex, layer);
2557}
2558
Finn Williams85d36712021-01-26 22:30:06 +00002559void IDeserializer::DeserializerImpl::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
Derek Lamberti87acb272019-03-27 16:51:31 +00002560{
2561 CHECK_LAYERS(graph, 0, layerIndex);
2562
2563 auto inputs = GetInputs(graph, layerIndex);
2564 CHECK_VALID_SIZE(inputs.size(), 1);
2565
2566 auto outputs = GetOutputs(graph, layerIndex);
2567 CHECK_VALID_SIZE(outputs.size(), 1);
2568 auto outputInfo = ToTensorInfo(outputs[0]);
2569
2570 auto layerName = GetLayerName(graph, layerIndex);
2571 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
2572 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2573
2574 RegisterInputSlots(graph, layerIndex, layer);
2575 RegisterOutputSlots(graph, layerIndex, layer);
2576}
2577
/// Computes the output TensorInfo of a Reshape from the requested target dimensions.
/// A single -1 entry in targetDimsIn acts as a wildcard: that dimension is inferred
/// from the input's total element count divided by the product of the explicit dims.
///
/// @param inputTensorInfo TensorInfo of the reshape input (supplies element count
///                        and the non-shape fields of the result).
/// @param targetDimsIn    Requested output dimensions; at most one -1 wildcard.
/// @return A copy of inputTensorInfo with its shape replaced by the resolved shape.
/// @throws ParseException if more than one wildcard (-1) dimension is present.
armnn::TensorInfo IDeserializer::DeserializerImpl::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                                        const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    // targetDimsIn holds uint32_t, so the -1 wildcard is matched via its
    // unsigned wrap-around representation (0xFFFFFFFF).
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        // Only one wildcard dimension is allowed.
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(fmt::format("At most one component of shape can be -1 {}",
                                             CHECK_LOCATION().AsString()));
        }

        // Product over int32_t: the initial -1 cancels the wildcard's -1, so
        // targetNumElements ends up as the product of the explicit dimensions.
        auto targetNumElements =
            armnn::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        // The wildcard dimension is whatever remains of the input element count.
        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    // Copy the input info so data type / quantization parameters are preserved;
    // only the shape changes.
    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}
2607
Finn Williams85d36712021-01-26 22:30:06 +00002608void IDeserializer::DeserializerImpl::ParseRank(GraphPtr graph, unsigned int layerIndex)
Finn Williams2605b232020-06-10 15:53:46 +01002609{
2610 CHECK_LAYERS(graph, 0, layerIndex);
2611
Finn Williams85d36712021-01-26 22:30:06 +00002612 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002613 CHECK_VALID_SIZE(inputs.size(), 1);
2614
Finn Williams85d36712021-01-26 22:30:06 +00002615 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002616 CHECK_VALID_SIZE(outputs.size(), 1);
2617
2618 auto layerName = GetLayerName(graph, layerIndex);
2619 IConnectableLayer* layer = m_Network->AddRankLayer( layerName.c_str());
2620
2621 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2622 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2623
2624 RegisterInputSlots(graph, layerIndex, layer);
2625 RegisterOutputSlots(graph, layerIndex, layer);
2626}
2627
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002628void IDeserializer::DeserializerImpl::ParseReduce(GraphPtr graph, unsigned int layerIndex)
2629{
2630 CHECK_LAYERS(graph, 0, layerIndex);
2631 CHECK_LOCATION();
2632
2633 auto inputs = GetInputs(graph, layerIndex);
2634 CHECK_VALID_SIZE(inputs.size(), 1);
2635
2636 auto outputs = GetOutputs(graph, layerIndex);
2637 CHECK_VALID_SIZE(outputs.size(), 1);
2638
2639 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ReduceLayer();
2640 auto fbDescriptor = fbLayer->descriptor();
2641 auto flatBufferAxis = fbDescriptor->axis();
2642
2643 armnn::ReduceDescriptor descriptor;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002644 descriptor.m_KeepDims = fbDescriptor->keepDims();
2645 descriptor.m_vAxis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2646 descriptor.m_ReduceOperation = ToReduceOperation(fbDescriptor->reduceOperation());
2647
2648 const std::string& layerName = GetLayerName(graph, layerIndex);
2649 IConnectableLayer* layer = m_Network->AddReduceLayer(descriptor, layerName.c_str());
2650
2651 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2652 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2653
2654 RegisterInputSlots(graph, layerIndex, layer);
2655 RegisterOutputSlots(graph, layerIndex, layer);
2656}
2657
/// Deserializes a Reshape layer: resolves the target shape (including a possible
/// -1 wildcard via OutputShapeOfReshape), validates it against the serialized
/// output shape, and adds the layer to the network.
void IDeserializer::DeserializerImpl::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    // Resolve any -1 wildcard in the target shape against the input element count.
    armnn::TensorInfo reshapeOutputTensorInfo = DeserializerImpl::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    // NOTE(review): the shape consistency check only fires when more than one
    // input is present — presumably a second input carries an explicit shape;
    // confirm against the serializer before tightening this condition.
    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    // The layer's output info is the resolved shape, not the serialized one.
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2700
Finn Williams85d36712021-01-26 22:30:06 +00002701void IDeserializer::DeserializerImpl::ParseResize(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002702{
2703 CHECK_LAYERS(graph, 0, layerIndex);
2704
Finn Williams85d36712021-01-26 22:30:06 +00002705 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002706 CHECK_VALID_SIZE(inputs.size(), 1);
2707
Finn Williams85d36712021-01-26 22:30:06 +00002708 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002709 CHECK_VALID_SIZE(outputs.size(), 1);
2710
2711 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2712
2713 armnn::ResizeDescriptor descriptor;
2714 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
2715 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
2716 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
2717 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002718 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2719 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002720
2721 auto layerName = GetLayerName(graph, layerIndex);
2722 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2723
2724 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2725 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2726
2727 RegisterInputSlots(graph, layerIndex, layer);
2728 RegisterOutputSlots(graph, layerIndex, layer);
2729}
2730
Jan Eilers1b2654f2021-09-24 15:45:46 +01002731
2732/// @Note The ResizeBiliniar operation was deprecated and removed in favor of the Resize operation.
2733/// This function is kept for backwards compatibility.
Finn Williams85d36712021-01-26 22:30:06 +00002734void IDeserializer::DeserializerImpl::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002735{
2736 CHECK_LAYERS(graph, 0, layerIndex);
2737
Finn Williams85d36712021-01-26 22:30:06 +00002738 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002739 CHECK_VALID_SIZE(inputs.size(), 1);
2740
Finn Williams85d36712021-01-26 22:30:06 +00002741 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002742 CHECK_VALID_SIZE(outputs.size(), 1);
2743
2744 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2745
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002746 armnn::ResizeDescriptor descriptor;
2747 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002748 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002749 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
2750 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002751 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2752 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002753
2754 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002755 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002756
2757 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2758 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2759
2760 RegisterInputSlots(graph, layerIndex, layer);
2761 RegisterOutputSlots(graph, layerIndex, layer);
2762}
2763
Keith Davis3ae3f972021-05-21 16:33:48 +01002764void IDeserializer::DeserializerImpl::ParseShape(GraphPtr graph, unsigned int layerIndex)
2765{
2766 CHECK_LAYERS(graph, 0, layerIndex);
2767
2768 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2769 CHECK_VALID_SIZE(inputs.size(), 1);
2770
2771 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2772 CHECK_VALID_SIZE(outputs.size(), 1);
2773
2774 auto layerName = GetLayerName(graph, layerIndex);
2775 IConnectableLayer* layer = m_Network->AddShapeLayer( layerName.c_str());
2776
2777 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2778 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2779
2780 RegisterInputSlots(graph, layerIndex, layer);
2781 RegisterOutputSlots(graph, layerIndex, layer);
2782}
2783
Finn Williams85d36712021-01-26 22:30:06 +00002784void IDeserializer::DeserializerImpl::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002785{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002786 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002787
Finn Williams85d36712021-01-26 22:30:06 +00002788 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002789 CHECK_VALID_SIZE(inputs.size(), 1);
2790
Finn Williams85d36712021-01-26 22:30:06 +00002791 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002792 CHECK_VALID_SIZE(outputs.size(), 1);
2793
2794 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00002795 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Sadik Armaganfd0cae32021-11-08 17:18:31 +00002796 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->axis();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002797 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002798
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002799 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2800
2801 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2802 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2803
Derek Lamberti8ddae332019-02-21 16:29:43 +00002804 RegisterInputSlots(graph, layerIndex, layer);
2805 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00002806}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002807
Finn Williams85d36712021-01-26 22:30:06 +00002808void IDeserializer::DeserializerImpl::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002809{
2810 CHECK_LAYERS(graph, 0, layerIndex);
2811
Finn Williams85d36712021-01-26 22:30:06 +00002812 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002813 CHECK_VALID_SIZE(inputs.size(), 1);
2814
Finn Williams85d36712021-01-26 22:30:06 +00002815 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002816 CHECK_VALID_SIZE(outputs.size(), 1);
2817
2818 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2819 auto flatBufferPadList = flatBufferDescriptor->padList();
2820 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
2821
Mike Kelly51b8c312022-05-24 11:34:02 +01002822 if (flatBufferPadList->size() % 2 != 0)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002823 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002824 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2825 CHECK_LOCATION().AsString()));
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002826 }
2827
2828 std::vector<std::pair<unsigned int, unsigned int>> padList;
Mike Kelly51b8c312022-05-24 11:34:02 +01002829 padList.reserve(flatBufferPadList->size() / 2);
2830 for (unsigned int i = 0; i < flatBufferPadList->size() - 1; i += 2)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002831 {
2832 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2833 }
2834
2835 armnn::SpaceToBatchNdDescriptor descriptor;
2836 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2837 descriptor.m_BlockShape =
2838 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2839 descriptor.m_PadList = padList;
2840
2841 auto layerName = GetLayerName(graph, layerIndex);
2842 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2843
2844 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2845 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2846
2847 RegisterInputSlots(graph, layerIndex, layer);
2848 RegisterOutputSlots(graph, layerIndex, layer);
2849}
2850
Finn Williams85d36712021-01-26 22:30:06 +00002851void IDeserializer::DeserializerImpl::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002852{
2853 CHECK_LAYERS(graph, 0, layerIndex);
2854
Finn Williams85d36712021-01-26 22:30:06 +00002855 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002856 CHECK_VALID_SIZE(inputs.size(), 1);
2857
Finn Williams85d36712021-01-26 22:30:06 +00002858 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002859 CHECK_VALID_SIZE(outputs.size(), 1);
2860
2861 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2862
2863 armnn::SpaceToDepthDescriptor descriptor;
2864 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2865 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2866
2867 auto layerName = GetLayerName(graph, layerIndex);
2868 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2869
2870 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2871 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2872
2873 RegisterInputSlots(graph, layerIndex, layer);
2874 RegisterOutputSlots(graph, layerIndex, layer);
2875}
2876
Finn Williams85d36712021-01-26 22:30:06 +00002877armnn::NormalizationDescriptor IDeserializer::DeserializerImpl::GetNormalizationDescriptor(
2878 NormalizationDescriptorPtr normalizationDescriptor,
Nina Drozd57728782019-02-27 10:53:27 +00002879 unsigned int layerIndex)
2880{
Jan Eilers8eb25602020-03-09 12:13:48 +00002881 IgnoreUnused(layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002882 armnn::NormalizationDescriptor desc;
2883
2884 switch (normalizationDescriptor->normChannelType())
2885 {
2886 case NormalizationAlgorithmChannel_Across:
2887 {
2888 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
2889 break;
2890 }
2891 case NormalizationAlgorithmChannel_Within:
2892 {
2893 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
2894 break;
2895 }
2896 default:
2897 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002898 ARMNN_ASSERT_MSG(false, "Unsupported normalization channel type");
Nina Drozd57728782019-02-27 10:53:27 +00002899 }
2900 }
2901
2902 switch (normalizationDescriptor->normMethodType())
2903 {
2904 case NormalizationAlgorithmMethod_LocalBrightness:
2905 {
2906 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
2907 break;
2908 }
2909 case NormalizationAlgorithmMethod_LocalContrast:
2910 {
2911 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
2912 break;
2913 }
2914 default:
2915 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002916 ARMNN_ASSERT_MSG(false, "Unsupported normalization method type");
Nina Drozd57728782019-02-27 10:53:27 +00002917 }
2918 }
2919
2920 switch (normalizationDescriptor->dataLayout())
2921 {
2922 case DataLayout_NCHW:
2923 {
2924 desc.m_DataLayout = armnn::DataLayout::NCHW;
2925 break;
2926 }
2927 case DataLayout_NHWC:
2928 {
2929 desc.m_DataLayout = armnn::DataLayout::NHWC;
2930 break;
2931 }
2932 default:
2933 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002934 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Nina Drozd57728782019-02-27 10:53:27 +00002935 }
2936 }
2937
2938 desc.m_Alpha = normalizationDescriptor->alpha();
2939 desc.m_Beta = normalizationDescriptor->beta();
2940 desc.m_K = normalizationDescriptor->k();
2941 desc.m_NormSize = normalizationDescriptor->normSize();
2942
2943 return desc;
2944}
2945
Finn Williams85d36712021-01-26 22:30:06 +00002946void IDeserializer::DeserializerImpl::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
Nina Drozd57728782019-02-27 10:53:27 +00002947{
2948 CHECK_LAYERS(graph, 0, layerIndex);
2949
2950 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2951
Finn Williams85d36712021-01-26 22:30:06 +00002952 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002953 CHECK_VALID_SIZE(inputs.size(), 1);
2954
Finn Williams85d36712021-01-26 22:30:06 +00002955 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002956 CHECK_VALID_SIZE(outputs.size(), 1);
2957
2958 auto outputInfo = ToTensorInfo(outputs[0]);
2959
2960 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2961 auto layerName = GetLayerName(graph, layerIndex);
2962
2963 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2964 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2965
2966 RegisterInputSlots(graph, layerIndex, layer);
2967 RegisterOutputSlots(graph, layerIndex, layer);
2968}
2969
Finn Williams85d36712021-01-26 22:30:06 +00002970void IDeserializer::DeserializerImpl::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan8b42a382019-03-01 14:24:49 +00002971{
2972 CHECK_LAYERS(graph, 0, layerIndex);
2973 auto inputs = GetInputs(graph, layerIndex);
2974 CHECK_LOCATION();
2975 CHECK_VALID_SIZE(inputs.size(), 1);
2976
2977 auto outputs = GetOutputs(graph, layerIndex);
2978 CHECK_VALID_SIZE(outputs.size(), 1);
2979
2980 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagan8b42a382019-03-01 14:24:49 +00002981
josh minor4a3c6102020-01-06 16:40:46 -06002982 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
2983 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
Sadik Armagan8b42a382019-03-01 14:24:49 +00002984 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2985 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2986
2987 RegisterInputSlots(graph, layerIndex, layer);
2988 RegisterOutputSlots(graph, layerIndex, layer);
2989}
2990
Finn Williams85d36712021-01-26 22:30:06 +00002991void IDeserializer::DeserializerImpl::ParseSlice(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002992{
2993 CHECK_LAYERS(graph, 0, layerIndex);
2994
2995 auto inputs = GetInputs(graph, layerIndex);
2996 CHECK_VALID_SIZE(inputs.size(), 1);
2997
2998 auto outputs = GetOutputs(graph, layerIndex);
2999 CHECK_VALID_SIZE(outputs.size(), 1);
3000
3001 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
3002
3003 auto fbBegin = fbDescriptor->begin();
3004 auto fbSize = fbDescriptor->size();
3005
Mike Kelly51b8c312022-05-24 11:34:02 +01003006 if (fbBegin->size() != fbSize->size())
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01003007 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01003008 throw ParseException(fmt::format("Begin and size descriptors must have the same length {}",
3009 CHECK_LOCATION().AsString()));
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01003010 }
3011
3012 armnn::SliceDescriptor descriptor;
3013 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
3014 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
3015
3016 auto layerName = GetLayerName(graph, layerIndex);
3017 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
3018
3019 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3020 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3021
3022 RegisterInputSlots(graph, layerIndex, layer);
3023 RegisterOutputSlots(graph, layerIndex, layer);
3024}
3025
Finn Williams85d36712021-01-26 22:30:06 +00003026void IDeserializer::DeserializerImpl::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003027{
3028 CHECK_LAYERS(graph, 0, layerIndex);
3029
Finn Williams85d36712021-01-26 22:30:06 +00003030 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003031 CHECK_VALID_SIZE(inputs.size(), 1);
3032
Finn Williams85d36712021-01-26 22:30:06 +00003033 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003034 CHECK_VALID_SIZE(outputs.size(), 1);
3035
3036 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
3037
3038 auto flatBufferBegin = flatBufferDescriptor->begin();
3039 auto flatBufferEnd = flatBufferDescriptor->end();
3040 auto flatBufferStride = flatBufferDescriptor->stride();
3041
Mike Kelly51b8c312022-05-24 11:34:02 +01003042 if (!(flatBufferBegin->size() == flatBufferEnd->size() &&
3043 flatBufferBegin->size() == flatBufferStride->size()))
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003044 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01003045 throw ParseException(fmt::format("The size of the begin, end, and stride must be equal {}",
3046 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003047 }
3048
3049 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
3050 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
3051 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
3052
3053 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
3054 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
3055 descriptor.m_EndMask = flatBufferDescriptor->endMask();
3056 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
3057 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
3058 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
3059 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
3060
3061 auto layerName = GetLayerName(graph, layerIndex);
3062 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
3063
3064 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3065 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3066
3067 RegisterInputSlots(graph, layerIndex, layer);
3068 RegisterOutputSlots(graph, layerIndex, layer);
3069}
3070
Finn Williams85d36712021-01-26 22:30:06 +00003071void IDeserializer::DeserializerImpl::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
Conor Kennedyda1f9752019-03-01 14:37:12 +00003072{
3073 CHECK_LAYERS(graph, 0, layerIndex);
3074 auto inputs = GetInputs(graph, layerIndex);
3075 CHECK_LOCATION();
3076 CHECK_VALID_SIZE(inputs.size(), 2);
3077
3078 auto outputs = GetOutputs(graph, layerIndex);
3079 CHECK_VALID_SIZE(outputs.size(), 1);
3080
3081 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00003082 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Sub);
3083 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Conor Kennedyda1f9752019-03-01 14:37:12 +00003084
3085 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3086 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3087
3088 RegisterInputSlots(graph, layerIndex, layer);
3089 RegisterOutputSlots(graph, layerIndex, layer);
3090}
3091
Finn Williams85d36712021-01-26 22:30:06 +00003092void IDeserializer::DeserializerImpl::ParseGather(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003093{
3094 CHECK_LAYERS(graph, 0, layerIndex);
3095
Finn Williams85d36712021-01-26 22:30:06 +00003096 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003097 CHECK_VALID_SIZE(inputs.size(), 2);
3098
Finn Williams85d36712021-01-26 22:30:06 +00003099 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003100 CHECK_VALID_SIZE(outputs.size(), 1);
3101
Teresa Charlin52664732020-06-29 16:27:03 +01003102 armnn::GatherDescriptor descriptor;
3103 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_GatherLayer()->descriptor()->axis();
3104
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003105 auto layerName = GetLayerName(graph, layerIndex);
Teresa Charlin52664732020-06-29 16:27:03 +01003106 IConnectableLayer* layer = m_Network->AddGatherLayer(descriptor, layerName.c_str());
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003107
3108 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003109 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3110
3111 RegisterInputSlots(graph, layerIndex, layer);
3112 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003113}
3114
Teresa Charlin6966bfa2022-04-25 17:14:50 +01003115void IDeserializer::DeserializerImpl::ParseGatherNd(GraphPtr graph, unsigned int layerIndex)
3116{
3117 CHECK_LAYERS(graph, 0, layerIndex);
3118
3119 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
3120 CHECK_VALID_SIZE(inputs.size(), 2);
3121
3122 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
3123 CHECK_VALID_SIZE(outputs.size(), 1);
3124
3125 auto layerName = GetLayerName(graph, layerIndex);
3126 IConnectableLayer* layer = m_Network->AddGatherNdLayer(layerName.c_str());
3127
3128 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3129 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3130
3131 RegisterInputSlots(graph, layerIndex, layer);
3132 RegisterOutputSlots(graph, layerIndex, layer);
3133}
3134
Finn Williams85d36712021-01-26 22:30:06 +00003135void IDeserializer::DeserializerImpl::ParseMean(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003136{
3137 CHECK_LAYERS(graph, 0, layerIndex);
3138
Finn Williams85d36712021-01-26 22:30:06 +00003139 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003140 CHECK_VALID_SIZE(inputs.size(), 1);
3141
Finn Williams85d36712021-01-26 22:30:06 +00003142 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003143 CHECK_VALID_SIZE(outputs.size(), 1);
3144
3145 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
3146 auto flatBufferAxis = flatBufferDescriptor->axis();
3147 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
3148
3149 armnn::MeanDescriptor descriptor;
3150 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
3151 descriptor.m_KeepDims = flatBufferKeepDims;
3152
3153 auto layerName = GetLayerName(graph, layerIndex);
3154 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
3155
3156 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3157 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3158
3159 RegisterInputSlots(graph, layerIndex, layer);
3160 RegisterOutputSlots(graph, layerIndex, layer);
3161}
3162
Finn Williams85d36712021-01-26 22:30:06 +00003163void IDeserializer::DeserializerImpl::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
Jim Flynn18ce3382019-03-08 11:08:30 +00003164{
3165 CHECK_LAYERS(graph, 0, layerIndex);
3166
Finn Williams85d36712021-01-26 22:30:06 +00003167 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00003168 CHECK_VALID_SIZE(inputs.size(), 1);
3169
Finn Williams85d36712021-01-26 22:30:06 +00003170 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00003171
3172 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
3173 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
3174 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
3175 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
3176 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
3177 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
3178
3179 // Check numViews and numDimensions corresponds to the ones already serialized ...
3180 // numViews == flatBufferViewSizes.size();
3181 // foreach: numDimensions == flatBufferViewSizes[x].size();
3182
3183 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
3184 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
3185 {
3186 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
3187 {
3188 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
3189 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
3190 }
3191 }
3192
3193 auto layerName = GetLayerName(graph, layerIndex);
3194 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
3195
3196 // I could have as many outputs as views ...
3197 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
3198 {
3199 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
3200 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
3201 }
3202
3203 RegisterInputSlots(graph, layerIndex, layer);
3204 RegisterOutputSlots(graph, layerIndex, layer);
3205}
3206
Finn Williams85d36712021-01-26 22:30:06 +00003207armnn::LstmDescriptor IDeserializer::DeserializerImpl::GetLstmDescriptor(LstmDescriptorPtr lstmDescriptor)
Jim Flynn11af3752019-03-19 17:22:29 +00003208{
3209 armnn::LstmDescriptor desc;
3210
3211 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
3212 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
3213 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
3214 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
3215 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
3216 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01003217 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00003218
3219 return desc;
3220}
3221
// Deserializes an LSTM layer: rebuilds the LstmDescriptor, gathers the constant
// weight/bias tensors into an LstmInputParams (only the groups enabled by the
// descriptor flags), and adds the layer with its four outputs to the network.
//
// NOTE: LstmInputParams stores raw pointers into the local ConstTensor objects
// below, so every ConstTensor must stay alive until AddLstmLayer() has been
// called. Do not move these declarations into narrower scopes.
void IDeserializer::DeserializerImpl::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    // 3 inputs / 4 outputs — presumably input + the two state tensors in, and
    // the gate/state/output tensors out; confirm ordering against the serializer.
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters — always present regardless of descriptor flags.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate parameters — only present when CIFG is disabled.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Projection parameters — only when projection is enabled.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Peephole parameters — only when peephole connections are enabled.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Layer normalisation weights — only when layer norm is enabled; the
    // input-gate norm weights are additionally skipped when CIFG is enabled.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (lstmDescriptor.m_LayerNormEnabled)
    {
        if (!lstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    // Attach the tensor info to each of the four output slots.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3337
Finn Williams85d36712021-01-26 22:30:06 +00003338armnn::QLstmDescriptor IDeserializer::DeserializerImpl::GetQLstmDescriptor(QLstmDescriptorPtr qLstmDescriptor)
James Conroy8d333182020-05-13 10:27:58 +01003339{
3340 armnn::QLstmDescriptor desc;
3341
3342 desc.m_CifgEnabled = qLstmDescriptor->cifgEnabled();
3343 desc.m_PeepholeEnabled = qLstmDescriptor->peepholeEnabled();
3344 desc.m_ProjectionEnabled = qLstmDescriptor->projectionEnabled();
3345 desc.m_LayerNormEnabled = qLstmDescriptor->layerNormEnabled();
3346
3347 desc.m_CellClip = qLstmDescriptor->cellClip();
3348 desc.m_ProjectionClip = qLstmDescriptor->projectionClip();
3349
3350 desc.m_InputIntermediateScale = qLstmDescriptor->inputIntermediateScale();
3351 desc.m_ForgetIntermediateScale = qLstmDescriptor->forgetIntermediateScale();
3352 desc.m_CellIntermediateScale = qLstmDescriptor->cellIntermediateScale();
3353 desc.m_OutputIntermediateScale = qLstmDescriptor->outputIntermediateScale();
3354
3355 desc.m_HiddenStateScale = qLstmDescriptor->hiddenStateScale();
3356 desc.m_HiddenStateZeroPoint = qLstmDescriptor->hiddenStateZeroPoint();
3357
3358 return desc;
3359}
3360
// Deserializes a QLstm (quantized LSTM with descriptor) layer: rebuilds the
// QLstmDescriptor, gathers the constant tensors for each optional feature
// group, and adds the layer with its three outputs to the network.
//
// NOTE: LstmInputParams stores raw pointers into the local ConstTensor objects
// below, so every ConstTensor must stay alive until AddQLstmLayer() has been
// called. Do not move these declarations into narrower scopes.
void IDeserializer::DeserializerImpl::ParseQLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 3);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto qLstmDescriptor = GetQLstmDescriptor(flatBufferDescriptor);
    armnn::LstmInputParams qLstmInputParams;

    // Mandatory params
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    qLstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    qLstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    qLstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    qLstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    qLstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    qLstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    qLstmInputParams.m_ForgetGateBias = &forgetGateBias;
    qLstmInputParams.m_CellBias = &cellBias;
    qLstmInputParams.m_OutputGateBias = &outputGateBias;

    // Optional CIFG params — input-gate tensors only exist when CIFG is disabled.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor inputGateBias;

    if (!qLstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        qLstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        qLstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        qLstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Optional projection params
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;

    if (qLstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        qLstmInputParams.m_ProjectionWeights = &projectionWeights;
        qLstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Optional peephole params — the cell-to-input weights additionally
    // require CIFG to be disabled.
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;

    if (qLstmDescriptor.m_PeepholeEnabled)
    {
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
            qLstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        }

        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        qLstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        qLstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Optional layer norm params — input-gate norm weights require CIFG disabled.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;

    if (qLstmDescriptor.m_LayerNormEnabled)
    {
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            qLstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }

        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        qLstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        qLstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        qLstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());

    // Outputs: output state, cell state, and the layer output.
    armnn::TensorInfo outputStateOutInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputStateOutInfo);

    armnn::TensorInfo cellStateOutInfo = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(cellStateOutInfo);

    armnn::TensorInfo outputInfo = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3486
// Deserializes a QuantizedLstm layer. Unlike Lstm/QLstm there is no
// descriptor: all twelve weight/bias tensors are mandatory.
//
// NOTE: QuantizedLstmInputParams stores raw pointers into the local
// ConstTensor objects below, so they must stay alive until
// AddQuantizedLstmLayer() has been called.
void IDeserializer::DeserializerImpl::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    armnn::QuantizedLstmInputParams lstmInputParams;

    // All parameters are mandatory for this layer type.
    armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_InputGateBias = &inputGateBias;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3540
Finn Williams85d36712021-01-26 22:30:06 +00003541void IDeserializer::DeserializerImpl::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003542{
3543 CHECK_LAYERS(graph, 0, layerIndex);
3544
Finn Williams85d36712021-01-26 22:30:06 +00003545 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003546 CHECK_VALID_SIZE(inputs.size(), 1);
3547
Finn Williams85d36712021-01-26 22:30:06 +00003548 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003549 CHECK_VALID_SIZE(outputs.size(), 1);
3550
3551 const std::string layerName = GetLayerName(graph, layerIndex);
3552 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
3553
3554 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3555 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3556
3557 RegisterInputSlots(graph, layerIndex, layer);
3558 RegisterOutputSlots(graph, layerIndex, layer);
3559}
3560
Finn Williams85d36712021-01-26 22:30:06 +00003561void IDeserializer::DeserializerImpl::ParseMerge(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003562{
3563 CHECK_LAYERS(graph, 0, layerIndex);
3564
Finn Williams85d36712021-01-26 22:30:06 +00003565 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003566 CHECK_VALID_SIZE(inputs.size(), 2);
3567
Finn Williams85d36712021-01-26 22:30:06 +00003568 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003569 CHECK_VALID_SIZE(outputs.size(), 1);
3570
3571 const std::string layerName = GetLayerName(graph, layerIndex);
3572 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
3573
3574 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3575 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3576
3577 RegisterInputSlots(graph, layerIndex, layer);
3578 RegisterOutputSlots(graph, layerIndex, layer);
3579}
3580
Finn Williams85d36712021-01-26 22:30:06 +00003581void IDeserializer::DeserializerImpl::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganeff363d2019-04-05 15:25:46 +01003582{
3583 CHECK_LAYERS(graph, 0, layerIndex);
3584 auto inputs = GetInputs(graph, layerIndex);
3585 CHECK_LOCATION();
3586 CHECK_VALID_SIZE(inputs.size(), 2);
3587
3588 auto outputs = GetOutputs(graph, layerIndex);
3589 CHECK_VALID_SIZE(outputs.size(), 2);
3590
3591 auto layerName = GetLayerName(graph, layerIndex);
3592 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
3593
3594 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
3595 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
3596
3597 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
3598 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
3599
3600 RegisterInputSlots(graph, layerIndex, layer);
3601 RegisterOutputSlots(graph, layerIndex, layer);
3602}
3603
Finn Williams85d36712021-01-26 22:30:06 +00003604void IDeserializer::DeserializerImpl::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01003605{
3606 CHECK_LAYERS(graph, 0, layerIndex);
3607 auto inputs = GetInputs(graph, layerIndex);
3608 CHECK_LOCATION();
3609 CHECK_VALID_SIZE(inputs.size(), 2);
3610
3611 auto outputs = GetOutputs(graph, layerIndex);
3612 CHECK_VALID_SIZE(outputs.size(), 1);
3613
3614 auto layerName = GetLayerName(graph, layerIndex);
3615 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
3616
3617 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3618 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3619
3620 RegisterInputSlots(graph, layerIndex, layer);
3621 RegisterOutputSlots(graph, layerIndex, layer);
3622}
3623
Finn Williams85d36712021-01-26 22:30:06 +00003624void IDeserializer::DeserializerImpl::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
Mike Kellyc9ea45a2020-02-28 18:11:58 +00003625{
3626 CHECK_LAYERS(graph, 0, layerIndex);
3627
3628 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
3629
3630 auto inputs = GetInputs(graph, layerIndex);
3631 CHECK_VALID_SIZE(inputs.size(), 1);
3632
3633 auto outputs = GetOutputs(graph, layerIndex);
3634 CHECK_VALID_SIZE(outputs.size(), 1);
3635 auto outputInfo = ToTensorInfo(outputs[0]);
3636
3637 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly51b8c312022-05-24 11:34:02 +01003638 const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->size()));
Mike Kellyc9ea45a2020-02-28 18:11:58 +00003639
3640 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
3641 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
3642
3643 RegisterInputSlots(graph, layerIndex, layer);
3644 RegisterOutputSlots(graph, layerIndex, layer);
3645}
3646
Finn Williams85d36712021-01-26 22:30:06 +00003647void IDeserializer::DeserializerImpl::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01003648{
3649 CHECK_LAYERS(graph, 0, layerIndex);
3650
3651 auto inputs = GetInputs(graph, layerIndex);
3652 CHECK_VALID_SIZE(inputs.size(), 1);
3653
3654 auto outputs = GetOutputs(graph, layerIndex);
3655 CHECK_VALID_SIZE(outputs.size(), 1);
3656
3657 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
3658 auto layerName = GetLayerName(graph, layerIndex);
3659 auto serializerDescriptor = serializerLayer->descriptor();
3660
3661 armnn::TransposeConvolution2dDescriptor descriptor;
3662 descriptor.m_PadLeft = serializerDescriptor->padLeft();
3663 descriptor.m_PadRight = serializerDescriptor->padRight();
3664 descriptor.m_PadTop = serializerDescriptor->padTop();
3665 descriptor.m_PadBottom = serializerDescriptor->padBottom();
3666 descriptor.m_StrideX = serializerDescriptor->strideX();
3667 descriptor.m_StrideY = serializerDescriptor->strideY();;
3668 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
3669 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
3670
3671 // weights & biases
3672 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
3673 armnn::Optional<armnn::ConstTensor> optionalBiases;
3674 if (descriptor.m_BiasEnabled)
3675 {
3676 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
3677 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
3678 }
3679
3680 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
3681 weights,
3682 optionalBiases,
3683 layerName.c_str());
3684
3685 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3686 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3687
3688 RegisterInputSlots(graph, layerIndex, layer);
3689 RegisterOutputSlots(graph, layerIndex, layer);
3690}
3691
Finn Williams85d36712021-01-26 22:30:06 +00003692void IDeserializer::DeserializerImpl::ParseStack(GraphPtr graph, unsigned int layerIndex)
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003693{
3694 CHECK_LAYERS(graph, 0, layerIndex);
3695 auto inputs = GetInputs(graph, layerIndex);
3696
3697 auto outputs = GetOutputs(graph, layerIndex);
3698 CHECK_VALID_SIZE(outputs.size(), 1);
3699
3700 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
3701 unsigned int axis = flatBufferDescriptor->axis();
3702 unsigned int numInputs = flatBufferDescriptor->numInputs();
3703 CHECK_VALID_SIZE(inputs.size(), numInputs);
3704
3705 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
3706 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
3707 flatBufferInputShape->begin() + flatBufferInputShape->size());
3708
3709 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
3710 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
3711
3712 for (unsigned int i=0; i<inputs.size(); ++i)
3713 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01003714 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003715 if (descriptor.m_InputShape != inputShape)
3716 {
3717 std::stringstream ss;
3718 ss << "Shape of input "
3719 << i
3720 << " "
3721 << inputShape
3722 << " does not equal defined input shape "
3723 << descriptor.m_InputShape
3724 << ": "
3725 << CHECK_LOCATION().AsString();
3726 throw ParseException(ss.str());
3727 }
3728 }
3729
3730 auto layerName = GetLayerName(graph, layerIndex);
3731 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
3732
3733 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3734 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3735
3736 RegisterInputSlots(graph, layerIndex, layer);
3737 RegisterOutputSlots(graph, layerIndex, layer);
3738}
3739
Finn Williams85d36712021-01-26 22:30:06 +00003740void IDeserializer::DeserializerImpl::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar85121a22019-10-23 10:41:35 +01003741{
3742 CHECK_LAYERS(graph, 0, layerIndex);
3743
3744 auto inputs = GetInputs(graph, layerIndex);
3745 auto outputs = GetOutputs(graph, layerIndex);
3746
3747 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
3748 auto fbDescriptor = fbLayer->descriptor();
3749
3750 armnn::StandInDescriptor descriptor;
3751 descriptor.m_NumInputs = fbDescriptor->numInputs();
3752 descriptor.m_NumOutputs = fbDescriptor->numOutputs();
3753
3754 CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
3755 CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);
3756
3757 const std::string layerName = GetLayerName(graph, layerIndex);
3758 armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());
3759
3760 for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
3761 {
3762 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
3763 layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
3764 }
3765
3766 RegisterInputSlots(graph, layerIndex, layer);
3767 RegisterOutputSlots(graph, layerIndex, layer);
3768}
3769
Narumol Prangnawarata0162e12021-07-23 14:47:49 +01003770armnn::UnidirectionalSequenceLstmDescriptor IDeserializer::DeserializerImpl::GetUnidirectionalSequenceLstmDescriptor(
3771 UnidirectionalSequenceLstmDescriptorPtr descriptor)
3772{
3773 armnn::UnidirectionalSequenceLstmDescriptor desc;
3774
3775 desc.m_ActivationFunc = descriptor->activationFunc();
3776 desc.m_ClippingThresCell = descriptor->clippingThresCell();
3777 desc.m_ClippingThresProj = descriptor->clippingThresProj();
3778 desc.m_CifgEnabled = descriptor->cifgEnabled();
3779 desc.m_PeepholeEnabled = descriptor->peepholeEnabled();
3780 desc.m_ProjectionEnabled = descriptor->projectionEnabled();
3781 desc.m_LayerNormEnabled = descriptor->layerNormEnabled();
3782 desc.m_TimeMajor = descriptor->timeMajor();
3783
3784 return desc;
3785}
3786
// Deserializes a UnidirectionalSequenceLstm layer: rebuilds its descriptor and
// all weight/bias ConstTensors from the flatbuffer, assembles them into an
// LstmInputParams, and adds the layer to the network.
//
// NOTE: lstmInputParams stores raw pointers into the ConstTensor locals below,
// so every such local must stay alive (and is therefore declared at function
// scope, outside the feature-flag 'if' blocks) until
// AddUnidirectionalSequenceLstmLayer has been called.
void IDeserializer::DeserializerImpl::ParseUnidirectionalSequenceLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    // Exactly 3 inputs and 3 outputs are serialized for this layer type.
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 3);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto descriptor = GetUnidirectionalSequenceLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters - always present regardless of the feature flags.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate parameters - only serialized when CIFG (coupled input-forget
    // gate) is disabled. Declared here so they outlive the 'if' block (see
    // lifetime note above).
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!descriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;

        // Input-gate peephole weights exist only when both CIFG is off and
        // peephole connections are on.
        if (descriptor.m_PeepholeEnabled)
        {
            cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
            lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        }
    }

    // Projection parameters - present only when the projection layer is enabled.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (descriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Forget/output-gate peephole weights - present only with peephole enabled.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (descriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Layer-normalization weights - present only with layer norm enabled; the
    // input-gate norm weights additionally require CIFG to be disabled.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (descriptor.m_LayerNormEnabled)
    {
        if (!descriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddUnidirectionalSequenceLstmLayer(descriptor,
                                                                             lstmInputParams,
                                                                             layerName.c_str());

    // Set tensor info on all three output slots.
    armnn::TensorInfo outputTensorInfo0 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo0);

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3905
Derek Lamberti0028d1b2019-02-20 13:57:42 +00003906} // namespace armnnDeserializer