blob: 3bd24bd9ddf8b179242dd9589f3fc9ba94f7f17f [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
Mike Kelly3ec30772023-03-08 13:47:17 +00002// Copyright © 2017,2019-2023 Arm Ltd and Contributors. All rights reserved.
Kevin May43a799c2019-02-08 16:31:42 +00003// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
Matthew Benthamff130e22020-01-17 11:47:42 +00008#include <armnn/Descriptors.hpp>
Kevin May43a799c2019-02-08 16:31:42 +00009#include <armnn/Exceptions.hpp>
Matthew Benthamff130e22020-01-17 11:47:42 +000010#include <armnn/TypesUtils.hpp>
11#include <armnn/LstmParams.hpp>
12#include <armnn/QuantizedLstmParams.hpp>
Simon Obutedb5804e2022-04-14 15:49:52 +010013#include <armnn/Logging.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000014
Matteo Martincighe011d202019-11-28 11:35:47 +000015#include <armnnUtils/Permute.hpp>
Mike Kellyc9ea45a2020-02-28 18:11:58 +000016#include <armnnUtils/Transpose.hpp>
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +010017#include <armnn/utility/Assert.hpp>
Jan Eilers8eb25602020-03-09 12:13:48 +000018#include <armnn/utility/IgnoreUnused.hpp>
Matthew Sloyan0663d662020-09-14 11:47:26 +010019#include <armnn/utility/NumericCast.hpp>
Matteo Martincighe011d202019-11-28 11:35:47 +000020
Kevin May43a799c2019-02-08 16:31:42 +000021#include <ParserHelper.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000022#include <VerificationHelpers.hpp>
23
Colm Donelan5b5c2222020-09-09 12:48:16 +010024#include <fmt/format.h>
Kevin May43a799c2019-02-08 16:31:42 +000025
Kevin May43a799c2019-02-08 16:31:42 +000026#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000027#include <algorithm>
28#include <limits>
29#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000030
31using armnn::ParseException;
32using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000033using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000034
Derek Lamberti0028d1b2019-02-20 13:57:42 +000035namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000036{
Kevin May43a799c2019-02-08 16:31:42 +000037
// Public-facade constructor: all real work lives in DeserializerImpl (pimpl),
// which keeps the ABI of IDeserializer stable.
IDeserializer::IDeserializer() : pDeserializerImpl(new DeserializerImpl()){}

// Defaulted here (not in the header) so the unique_ptr to the incomplete
// DeserializerImpl type can be destroyed where the type is complete.
IDeserializer::~IDeserializer() = default;
41
// Creates a heap-allocated deserializer. The caller owns the returned pointer
// and must release it via Destroy(); prefer Create(), which wraps this in a
// smart pointer with the correct deleter.
IDeserializer *IDeserializer::CreateRaw()
{
    return new IDeserializer();
}
46
47IDeserializerPtr IDeserializer::Create()
48{
49 return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
50}
51
// Deletes a deserializer previously obtained from CreateRaw()/Create().
// Accepting nullptr is safe (delete on null is a no-op).
void IDeserializer::Destroy(IDeserializer *parser)
{
    delete parser;
}
56
// Deserializes a flatbuffer-serialized network held fully in memory.
// Pure forwarder to the pimpl implementation.
armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(const std::vector<uint8_t> &binaryContent)
{
    return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
}
61
// Stream overload: deserializes a flatbuffer-serialized network read from
// 'binaryContent'. Pure forwarder to the pimpl implementation.
armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(std::istream &binaryContent)
{
    return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
}
66
// Returns the binding information for the named network input.
// NOTE(review): 'layerId' presumably selects the sub-graph (layers index)
// inside the serialized model — confirm against DeserializerImpl.
BindingPointInfo IDeserializer::GetNetworkInputBindingInfo(unsigned int layerId, const std::string &name) const
{
    return pDeserializerImpl->GetNetworkInputBindingInfo(layerId, name);
}
71
// Returns the binding information for the named network output.
// NOTE(review): 'layerId' presumably selects the sub-graph (layers index)
// inside the serialized model — confirm against DeserializerImpl.
BindingPointInfo IDeserializer::GetNetworkOutputBindingInfo(unsigned int layerId, const std::string &name) const
{
    return pDeserializerImpl->GetNetworkOutputBindingInfo(layerId, name);
}
76
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000077namespace
78{
Kevin May43a799c2019-02-08 16:31:42 +000079
80const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
81
Finn Williams85d36712021-01-26 22:30:06 +000082 void CheckGraph(const GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000083 unsigned int layersIndex,
84 const CheckLocation& location)
85{
86 if (graph->layers() == nullptr)
87 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010088 throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
89 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
90 "layers:{1} at {2}",
91 location.m_Function,
92 layersIndex,
93 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +000094 }
95 else if (layersIndex >= graph->layers()->size())
96 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010097 throw ParseException(fmt::format("{0} was called with an invalid layers index. layers:{1} at {2}",
98 location.m_Function,
99 layersIndex,
100 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000101 }
102}
103
Finn Williams85d36712021-01-26 22:30:06 +0000104void CheckLayers(const GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +0000105 unsigned int layersIndex,
106 unsigned int layerIndex,
107 const CheckLocation& location)
108{
109 if (graph->layers() == nullptr)
110 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100111 throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
112 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
113 "layers:{1} at {2}",
114 location.m_Function,
115 layersIndex,
116 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000117 }
118 else if (layersIndex >= graph->layers()->size())
119 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100120 throw ParseException(fmt::format("{0} was called with an invalid layers index. "
121 "layers:{1} at {2}",
122 location.m_Function,
123 layersIndex,
124 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000125 }
126 else if (layerIndex >= graph->layers()[layersIndex].size()
127 && layerIndex != VIRTUAL_LAYER_ID)
128 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100129 throw ParseException(fmt::format("{0} was called with an invalid layer index. "
130 "layers:{1} layer:{2} at {3}",
131 location.m_Function,
132 layersIndex,
133 layerIndex,
134 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000135 }
136}
137
Finn Williams85d36712021-01-26 22:30:06 +0000138void CheckTensorPtr(TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000139 const CheckLocation& location)
140{
141 if (rawPtr == nullptr)
142 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100143 throw ParseException(fmt::format("{0} was called with a null tensor pointer. at {1}",
144 location.m_Function,
145 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000146 }
147}
148
Finn Williams85d36712021-01-26 22:30:06 +0000149void CheckConstTensorPtr(ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000150 const CheckLocation& location)
151{
152 if (rawPtr == nullptr)
153 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100154 throw ParseException(fmt::format("{0} was called with a null const tensor pointer. at {1}",
155 location.m_Function,
156 location.FileLine()));
Mike Kellya0766c32019-02-19 17:22:07 +0000157 }
158}
159
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000160void CheckConstTensorSize(const unsigned int constTensorSize,
161 const unsigned int tensorSize,
162 const CheckLocation& location)
163{
164 if (constTensorSize != tensorSize)
165 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100166 throw ParseException(fmt::format("{0} wrong number of components supplied to tensor. at:{1}",
167 location.m_Function,
168 location.FileLine()));
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000169 }
170}
171
Kevin May43a799c2019-02-08 16:31:42 +0000172#define CHECK_TENSOR_PTR(TENSOR_PTR) \
173 CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())
174
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000175#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
176 CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())
177
Mike Kellya0766c32019-02-19 17:22:07 +0000178#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
179 CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())
180
Kevin May43a799c2019-02-08 16:31:42 +0000181#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
182 CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())
183
184#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
185 CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
186}
187
Saoirse Stewart263829c2019-02-19 15:54:14 +0000188bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
189{
190 const unsigned int actualSize = actual.GetNumDimensions();
191 if (actualSize != expected.size())
192 {
193 return false;
194 }
195
196 for (unsigned int i = 0u; i < actualSize; i++)
197 {
198 if (actual[i] != static_cast<unsigned int>(expected[i]))
199 {
200 return false;
201 }
202 }
203
204 return true;
205}
206
// Constructs the implementation: starts with an empty network handle and a
// dispatch table mapping every serialized layer enum value to its parse
// member function. Slots not explicitly registered below fall through to
// ParseUnsupportedLayer, which reports the unknown layer type.
IDeserializer::DeserializerImpl::DeserializerImpl()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &IDeserializer::DeserializerImpl::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer] = &DeserializerImpl::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer] = &DeserializerImpl::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &DeserializerImpl::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer] = &DeserializerImpl::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchMatMulLayer] = &DeserializerImpl::ParseBatchMatMul;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &DeserializerImpl::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &DeserializerImpl::ParseBatchNormalization;
    m_ParserFunctions[Layer_CastLayer] = &DeserializerImpl::ParseCast;
    m_ParserFunctions[Layer_ChannelShuffleLayer] = &DeserializerImpl::ParseChannelShuffle;
    m_ParserFunctions[Layer_ComparisonLayer] = &DeserializerImpl::ParseComparison;
    m_ParserFunctions[Layer_ConcatLayer] = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer] = &DeserializerImpl::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &DeserializerImpl::ParseConvolution2d;
    m_ParserFunctions[Layer_Convolution3dLayer] = &DeserializerImpl::ParseConvolution3d;
    m_ParserFunctions[Layer_DepthToSpaceLayer] = &DeserializerImpl::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &DeserializerImpl::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &DeserializerImpl::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &DeserializerImpl::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &DeserializerImpl::ParseDivision;
    m_ParserFunctions[Layer_ElementwiseBinaryLayer] = &DeserializerImpl::ParseElementwiseBinary;
    m_ParserFunctions[Layer_ElementwiseUnaryLayer] = &DeserializerImpl::ParseElementwiseUnary;
    m_ParserFunctions[Layer_EqualLayer] = &DeserializerImpl::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &DeserializerImpl::ParseFullyConnected;
    m_ParserFunctions[Layer_FillLayer] = &DeserializerImpl::ParseFill;
    m_ParserFunctions[Layer_FloorLayer] = &DeserializerImpl::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &DeserializerImpl::ParseGather;
    m_ParserFunctions[Layer_GatherNdLayer] = &DeserializerImpl::ParseGatherNd;
    m_ParserFunctions[Layer_GreaterLayer] = &DeserializerImpl::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer] = &DeserializerImpl::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &DeserializerImpl::ParseL2Normalization;
    m_ParserFunctions[Layer_LogicalBinaryLayer] = &DeserializerImpl::ParseLogicalBinary;
    m_ParserFunctions[Layer_LogSoftmaxLayer] = &DeserializerImpl::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer] = &DeserializerImpl::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &DeserializerImpl::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &DeserializerImpl::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &DeserializerImpl::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer] = &DeserializerImpl::ParseMerge;
    // Deprecated Merger layers are parsed as Concat.
    m_ParserFunctions[Layer_MergerLayer] = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer] = &DeserializerImpl::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &DeserializerImpl::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &DeserializerImpl::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &DeserializerImpl::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &DeserializerImpl::ParsePooling2d;
    m_ParserFunctions[Layer_Pooling3dLayer] = &DeserializerImpl::ParsePooling3d;
    m_ParserFunctions[Layer_PreluLayer] = &DeserializerImpl::ParsePrelu;
    m_ParserFunctions[Layer_QLstmLayer] = &DeserializerImpl::ParseQLstm;
    m_ParserFunctions[Layer_QuantizeLayer] = &DeserializerImpl::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer] = &DeserializerImpl::ParseQuantizedLstm;
    m_ParserFunctions[Layer_RankLayer] = &DeserializerImpl::ParseRank;
    m_ParserFunctions[Layer_ReduceLayer] = &DeserializerImpl::ParseReduce;
    m_ParserFunctions[Layer_ReshapeLayer] = &DeserializerImpl::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &DeserializerImpl::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer] = &DeserializerImpl::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer] = &DeserializerImpl::ParseRsqrt;
    m_ParserFunctions[Layer_ShapeLayer] = &DeserializerImpl::ParseShape;
    m_ParserFunctions[Layer_SliceLayer] = &DeserializerImpl::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer] = &DeserializerImpl::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &DeserializerImpl::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer] = &DeserializerImpl::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer] = &DeserializerImpl::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer] = &DeserializerImpl::ParseStack;
    m_ParserFunctions[Layer_StandInLayer] = &DeserializerImpl::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer] = &DeserializerImpl::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &DeserializerImpl::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer] = &DeserializerImpl::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &DeserializerImpl::ParseTransposeConvolution2d;
    m_ParserFunctions[Layer_TransposeLayer] = &DeserializerImpl::ParseTranspose;
    m_ParserFunctions[Layer_UnidirectionalSequenceLstmLayer] = &DeserializerImpl::ParseUnidirectionalSequenceLstm;
}
282
// Returns the common LayerBase descriptor of the layer at 'layerIndex' by
// down-casting the flatbuffer union to its concrete layer table and reading
// its 'base' field. Input/Output layers carry an extra BindableLayerBase
// wrapper, hence the additional ->base() hop. Throws ParseException for an
// unrecognized (or NONE) layer type.
LayerBaseRawPtr IDeserializer::DeserializerImpl::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchMatMulLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchMatMulLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_CastLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_CastLayer()->base();
        case Layer::Layer_ChannelShuffleLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->base();
        case Layer::Layer_ComparisonLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_Convolution3dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution3dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_ElementwiseBinaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseBinaryLayer()->base();
        case Layer::Layer_ElementwiseUnaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FillLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FillLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GatherNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherNdLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            // Bindable layer: unwrap BindableLayerBase first.
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_InstanceNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LogicalBinaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer()->base();
        case Layer::Layer_LogSoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            // Deprecated name for Concat; still accepted when deserializing.
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            // Bindable layer: unwrap BindableLayerBase first.
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_Pooling3dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_RankLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
        case Layer::Layer_ReduceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReduceLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_ShapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ShapeLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StandInLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_TransposeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
        case Layer::Layer_UnidirectionalSequenceLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(fmt::format("Layer type {} not recognized", layerType));
    }
}
434
Finn Williams85d36712021-01-26 22:30:06 +0000435std::string IDeserializer::DeserializerImpl::GetLayerName(const GraphPtr& graph, unsigned int index)
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000436{
437 auto layer = GetBaseLayer(graph, index);
438 assert(layer);
439 return layer->layerName()->str();
440}
441
Finn Williams85d36712021-01-26 22:30:06 +0000442int32_t IDeserializer::DeserializerImpl::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000443{
444 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
445
446 if (layerType == Layer::Layer_InputLayer)
447 {
448 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
449 }
450 else if ( layerType == Layer::Layer_OutputLayer )
451 {
452 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
453 }
454 return 0;
455}
456
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000457armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000458{
459 switch (dataLayout)
460 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000461 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000462 return armnn::DataLayout::NHWC;
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100463 case armnnSerializer::DataLayout::DataLayout_NDHWC:
464 return armnn::DataLayout::NDHWC;
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +0100465 case armnnSerializer::DataLayout::DataLayout_NCDHW:
466 return armnn::DataLayout::NCDHW;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000467 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000468 default:
469 return armnn::DataLayout::NCHW;
470 }
471}
472
// Converts the serialized activation-function enum to the runtime enum.
// NOTE(review): unrecognized values silently fall back to Sigmoid rather than
// throwing — confirm this lenient behavior is intentional.
armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        case armnnSerializer::ActivationFunction_Elu:
            return armnn::ActivationFunction::Elu;
        case armnnSerializer::ActivationFunction_HardSwish:
            return armnn::ActivationFunction::HardSwish;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}
503
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100504armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
505{
506 switch (function)
507 {
508 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
509 return armnn::ArgMinMaxFunction::Max;
510 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
511 default:
512 return armnn::ArgMinMaxFunction::Min;
513 }
514}
515
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +0100516armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
517{
518 switch (operation)
519 {
520 case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
521 return armnn::ComparisonOperation::Equal;
522 case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
523 return armnn::ComparisonOperation::Greater;
524 case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
525 return armnn::ComparisonOperation::GreaterOrEqual;
526 case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
527 return armnn::ComparisonOperation::Less;
528 case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
529 return armnn::ComparisonOperation::LessOrEqual;
530 case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
531 default:
532 return armnn::ComparisonOperation::NotEqual;
533 }
534}
535
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +0000536armnn::ReduceOperation ToReduceOperation(armnnSerializer::ReduceOperation operation)
537{
538 switch (operation)
539 {
540 case armnnSerializer::ReduceOperation::ReduceOperation_Sum:
541 return armnn::ReduceOperation::Sum;
542 case armnnSerializer::ReduceOperation::ReduceOperation_Max:
543 return armnn::ReduceOperation::Max;
544 case armnnSerializer::ReduceOperation::ReduceOperation_Mean:
545 return armnn::ReduceOperation::Mean;
546 case armnnSerializer::ReduceOperation::ReduceOperation_Min:
547 return armnn::ReduceOperation::Min;
Teresa Charlin4e3e8312021-08-05 12:34:37 +0100548 case armnnSerializer::ReduceOperation::ReduceOperation_Prod:
549 return armnn::ReduceOperation::Prod;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +0000550 default:
551 return armnn::ReduceOperation::Sum;
552 }
553}
554
James Conroyaba90cd2020-11-06 16:28:18 +0000555armnn::LogicalBinaryOperation ToLogicalBinaryOperation(armnnSerializer::LogicalBinaryOperation operation)
556{
557 switch (operation)
558 {
559 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalAnd:
560 return armnn::LogicalBinaryOperation::LogicalAnd;
561 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalOr:
562 return armnn::LogicalBinaryOperation::LogicalOr;
563 default:
564 throw armnn::InvalidArgumentException("Logical Binary operation unknown");
565 }
566}
567
Mike Kelly3ec30772023-03-08 13:47:17 +0000568armnn::BinaryOperation ToElementwiseBinaryOperation(armnnSerializer::BinaryOperation operation)
569{
570 switch (operation)
571 {
572 case armnnSerializer::BinaryOperation::BinaryOperation_Add:
573 return armnn::BinaryOperation::Add;
574 case armnnSerializer::BinaryOperation::BinaryOperation_Div:
575 return armnn::BinaryOperation::Div;
576 case armnnSerializer::BinaryOperation::BinaryOperation_Maximum:
577 return armnn::BinaryOperation::Maximum;
578 case armnnSerializer::BinaryOperation::BinaryOperation_Minimum:
579 return armnn::BinaryOperation::Minimum;
580 case armnnSerializer::BinaryOperation::BinaryOperation_Mul:
581 return armnn::BinaryOperation::Mul;
582 case armnnSerializer::BinaryOperation::BinaryOperation_Sub:
583 return armnn::BinaryOperation::Sub;
584 default:
585 throw armnn::InvalidArgumentException("Binary operation unknown");
586 }
587}
588
589armnn::UnaryOperation ToElementwiseUnaryOperation(armnnSerializer::UnaryOperation operation)
josh minor4a3c6102020-01-06 16:40:46 -0600590{
591 switch (operation)
592 {
593 case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
594 return armnn::UnaryOperation::Abs;
Teresa Charlin93f0ad02023-03-23 15:28:02 +0000595 case armnnSerializer::UnaryOperation::UnaryOperation_Ceil:
596 return armnn::UnaryOperation::Ceil;
josh minor4a3c6102020-01-06 16:40:46 -0600597 case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
598 return armnn::UnaryOperation::Rsqrt;
599 case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
600 return armnn::UnaryOperation::Sqrt;
601 case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
602 return armnn::UnaryOperation::Exp;
603 case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
604 return armnn::UnaryOperation::Neg;
James Conroyaba90cd2020-11-06 16:28:18 +0000605 case armnnSerializer::UnaryOperation::UnaryOperation_LogicalNot:
606 return armnn::UnaryOperation::LogicalNot;
Teresa Charlin50de4fa2021-05-31 18:47:33 +0100607 case armnnSerializer::UnaryOperation::UnaryOperation_Log:
608 return armnn::UnaryOperation::Log;
609 case armnnSerializer::UnaryOperation::UnaryOperation_Sin:
610 return armnn::UnaryOperation::Sin;
josh minor4a3c6102020-01-06 16:40:46 -0600611 default:
612 throw armnn::InvalidArgumentException("Unary operation unknown");
613 }
614}
615
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +0100616armnn::PaddingMode ToPaddingMode(armnnSerializer::PaddingMode paddingMode)
617{
618 switch (paddingMode)
619 {
620 case armnnSerializer::PaddingMode::PaddingMode_Reflect:
621 return armnn::PaddingMode::Reflect;
622 case armnnSerializer::PaddingMode::PaddingMode_Symmetric:
623 return armnn::PaddingMode::Symmetric;
624 default:
625 return armnn::PaddingMode::Constant;
626 }
627}
628
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100629armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
630{
631 switch (method)
632 {
633 case armnnSerializer::ResizeMethod_NearestNeighbor:
634 return armnn::ResizeMethod::NearestNeighbor;
635 case armnnSerializer::ResizeMethod_Bilinear:
Aron Virginas-Tar3c9b2702019-10-31 13:45:16 +0000636 return armnn::ResizeMethod::Bilinear;
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100637 default:
638 return armnn::ResizeMethod::NearestNeighbor;
639 }
640}
641
Finn Williams85d36712021-01-26 22:30:06 +0000642armnn::TensorInfo ToTensorInfo(TensorRawPtr tensorPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000643{
644 armnn::DataType type;
645 CHECK_TENSOR_PTR(tensorPtr);
646
647 switch (tensorPtr->dataType())
648 {
Francis Murtaghddb1d062020-03-10 13:51:45 +0000649 case DataType_QAsymmS8:
650 type = armnn::DataType::QAsymmS8;
651 break;
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000652 case DataType_QSymmS8:
653 type = armnn::DataType::QSymmS8;
654 break;
Kevin May43a799c2019-02-08 16:31:42 +0000655 case DataType_QuantisedAsymm8:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000656 case DataType_QAsymmU8:
657 type = armnn::DataType::QAsymmU8;
Kevin May43a799c2019-02-08 16:31:42 +0000658 break;
Derek Lambertif90c56d2020-01-10 17:14:08 +0000659 case DataType_QSymmS16:
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000660 case DataType_QuantisedSymm16:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000661 type = armnn::DataType::QSymmS16;
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000662 break;
Mike Kellya0766c32019-02-19 17:22:07 +0000663 case DataType_Signed32:
664 type = armnn::DataType::Signed32;
665 break;
Mike Kelly1f140f72021-04-06 12:25:55 +0100666 case DataType_Signed64:
667 type = armnn::DataType::Signed64;
668 break;
Kevin May43a799c2019-02-08 16:31:42 +0000669 case DataType_Float32:
670 type = armnn::DataType::Float32;
671 break;
672 case DataType_Float16:
673 type = armnn::DataType::Float16;
674 break;
675 case DataType_Boolean:
676 type = armnn::DataType::Boolean;
677 break;
678 default:
679 {
680 CheckLocation location = CHECK_LOCATION();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100681 throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
682 tensorPtr->dataType(),
683 EnumNameDataType(tensorPtr->dataType()),
684 location.AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000685 }
686 }
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000687
Colm Donelan800b2812021-02-12 12:43:35 +0000688 float quantizationScale = tensorPtr->quantizationScale();
689 int32_t quantizationOffset = tensorPtr->quantizationOffset();
690
Finn Williams2605b232020-06-10 15:53:46 +0100691 if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::Scalar))
692 {
Colm Donelan800b2812021-02-12 12:43:35 +0000693 return armnn::TensorInfo(TensorShape{armnn::Dimensionality::Scalar},
Finn Williams2605b232020-06-10 15:53:46 +0100694 type,
695 quantizationScale,
696 quantizationOffset);
697 }
Colm Donelan800b2812021-02-12 12:43:35 +0000698 else if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::NotSpecified))
699 {
700 armnn::TensorInfo result(TensorShape{Dimensionality::NotSpecified},
701 type,
702 quantizationScale,
703 quantizationOffset);
704 return result;
705 }
Kevin May43a799c2019-02-08 16:31:42 +0000706
707 auto dimensions = tensorPtr->dimensions();
708 unsigned int size = dimensions->size();
709 std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
Colm Donelan800b2812021-02-12 12:43:35 +0000710 bool dimensionsSpecificity[armnn::MaxNumOfTensorDimensions];
711 std::fill_n(dimensionsSpecificity, armnn::MaxNumOfTensorDimensions, true);
712 // For backwards compatibility check if the dimensionSpecificity vector is present first.
713 // The default is to have dimensionSpecificity set to all true's anyway.
714 if (tensorPtr->dimensionSpecificity() != nullptr)
715 {
716 auto dimensionSpecificity = tensorPtr->dimensionSpecificity();
717 size = dimensionSpecificity->size();
718 for (unsigned int i = 0; i < size; ++i)
719 {
720 dimensionsSpecificity[i] = dimensionSpecificity->Get(i);
721 }
722 }
723 // Construct a TensorShape
724 TensorShape shape(size, outputDims.data(), dimensionsSpecificity);
Kevin May43a799c2019-02-08 16:31:42 +0000725
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000726 auto quantizationScales = tensorPtr->quantizationScales();
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000727 if (quantizationScales)
728 {
729 unsigned int quantizationScalesSize = quantizationScales->size();
730 std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
731 unsigned int quantizationDim = tensorPtr->quantizationDim();
Colm Donelan800b2812021-02-12 12:43:35 +0000732 armnn::TensorInfo result(shape,
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000733 type,
734 scales,
735 quantizationDim);
736 return result;
737 }
738
Kevin May43a799c2019-02-08 16:31:42 +0000739 // two statements (on purpose) for easier debugging:
Colm Donelan800b2812021-02-12 12:43:35 +0000740 armnn::TensorInfo result(shape,
Kevin May43a799c2019-02-08 16:31:42 +0000741 type,
742 quantizationScale,
743 quantizationOffset);
Colm Donelan800b2812021-02-12 12:43:35 +0000744
Kevin May43a799c2019-02-08 16:31:42 +0000745 return result;
746}
747
Finn Williams85d36712021-01-26 22:30:06 +0000748armnn::ConstTensor ToConstTensor(ConstTensorRawPtr constTensorPtr)
Mike Kellya0766c32019-02-19 17:22:07 +0000749{
750 CHECK_CONST_TENSOR_PTR(constTensorPtr);
751 armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());
Matthew Sloyan81beae32021-07-13 19:46:11 +0100752 tensorInfo.SetConstant();
Mike Kellya0766c32019-02-19 17:22:07 +0000753
754 switch (constTensorPtr->data_type())
755 {
756 case ConstTensorData_ByteData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000757 {
758 auto byteData = constTensorPtr->data_as_ByteData()->data();
759 CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
760 return armnn::ConstTensor(tensorInfo, byteData->data());
761 }
Mike Kellya0766c32019-02-19 17:22:07 +0000762 case ConstTensorData_ShortData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000763 {
764 auto shortData = constTensorPtr->data_as_ShortData()->data();
765 CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
766 return armnn::ConstTensor(tensorInfo, shortData->data());
767 }
Mike Kellya0766c32019-02-19 17:22:07 +0000768 case ConstTensorData_IntData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000769 {
770 auto intData = constTensorPtr->data_as_IntData()->data();
771 CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
772 return armnn::ConstTensor(tensorInfo, intData->data());
773 }
Mike Kellya0766c32019-02-19 17:22:07 +0000774 case ConstTensorData_LongData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000775 {
776 auto longData = constTensorPtr->data_as_LongData()->data();
777 CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
778 return armnn::ConstTensor(tensorInfo, longData->data());
779 }
Mike Kellya0766c32019-02-19 17:22:07 +0000780 default:
781 {
782 CheckLocation location = CHECK_LOCATION();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100783 throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
784 constTensorPtr->data_type(),
785 EnumNameConstTensorData(constTensorPtr->data_type()),
786 location.AsString()));
Mike Kellya0766c32019-02-19 17:22:07 +0000787 }
788 }
789}
790
Finn Williams85d36712021-01-26 22:30:06 +0000791TensorRawPtrVector IDeserializer::DeserializerImpl::GetInputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000792{
793 CHECK_LAYERS(graphPtr, 0, layerIndex);
794 auto layer = GetBaseLayer(graphPtr, layerIndex);
795 const auto& numInputs = layer->inputSlots()->size();
796
797 TensorRawPtrVector result(numInputs);
798
799 for (unsigned int i=0; i<numInputs; ++i)
800 {
801 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
802 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
803 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
804 }
805 return result;
806}
807
Finn Williams85d36712021-01-26 22:30:06 +0000808TensorRawPtrVector IDeserializer::DeserializerImpl::GetOutputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000809{
810 CHECK_LAYERS(graphPtr, 0, layerIndex);
811 auto layer = GetBaseLayer(graphPtr, layerIndex);
812 const auto& numOutputs = layer->outputSlots()->size();
813
814 TensorRawPtrVector result(numOutputs);
815
816 for (unsigned int i=0; i<numOutputs; ++i)
817 {
818 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
819 }
820 return result;
821}
822
Finn Williams85d36712021-01-26 22:30:06 +0000823void IDeserializer::DeserializerImpl::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000824{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000825 CHECK_LAYERS(graph, 0, layerIndex);
826 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100827 throw ParseException(fmt::format("Layer not supported. layerIndex: {0} "
828 "layerName: {1} / {2}",
829 layerIndex,
830 layerName,
831 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000832}
833
Finn Williams85d36712021-01-26 22:30:06 +0000834void IDeserializer::DeserializerImpl::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000835{
836 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000837 m_InputBindings.clear();
838 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000839}
840
Kevin May43a799c2019-02-08 16:31:42 +0000841
Finn Williams85d36712021-01-26 22:30:06 +0000842INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000843{
844 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000845 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
846 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000847}
848
Finn Williams85d36712021-01-26 22:30:06 +0000849armnn::INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000850{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000851 ResetParser();
Simon Obutedb5804e2022-04-14 15:49:52 +0100852 if (binaryContent.fail()) {
853 ARMNN_LOG(error) << (std::string("Cannot read input"));
854 throw ParseException("Unable to read Input stream data");
855 }
856 binaryContent.seekg(0, std::ios::end);
857 const std::streamoff size = binaryContent.tellg();
858 std::vector<char> content(static_cast<size_t>(size));
859 binaryContent.seekg(0);
860 binaryContent.read(content.data(), static_cast<std::streamsize>(size));
861 GraphPtr graph = LoadGraphFromBinary(reinterpret_cast<uint8_t*>(content.data()), static_cast<size_t>(size));
Derek Lamberti8ddae332019-02-21 16:29:43 +0000862 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000863}
864
Finn Williams85d36712021-01-26 22:30:06 +0000865GraphPtr IDeserializer::DeserializerImpl::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000866{
867 if (binaryContent == nullptr)
868 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100869 throw InvalidArgumentException(fmt::format("Invalid (null) binary content {}",
870 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000871 }
872 flatbuffers::Verifier verifier(binaryContent, len);
873 if (verifier.VerifyBuffer<SerializedGraph>() == false)
874 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100875 throw ParseException(fmt::format("Buffer doesn't conform to the expected Armnn "
876 "flatbuffers format. size:{0} {1}",
877 len,
878 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000879 }
880 return GetSerializedGraph(binaryContent);
881}
882
// Builds an INetwork from the verified flatbuffers graph in three phases:
//  1) create an armnn layer for every serialized layer except inputs/outputs,
//     dispatching through the m_ParserFunctions member-pointer table;
//  2) create the bound input and output layers;
//  3) wire up every connection recorded in m_GraphConnections during parsing.
INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    ARMNN_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            // A producer slot may legitimately have no consumers; only connect
            // when input slots were registered for this output slot index.
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    // m_Network is a member; move it out to transfer ownership to the caller.
    return std::move(m_Network);
}
923
Finn Williams85d36712021-01-26 22:30:06 +0000924BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000925 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000926{
Jan Eilers8eb25602020-03-09 12:13:48 +0000927 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000928 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000929 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000930 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000931 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000932 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000933 }
934 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100935 throw ParseException(fmt::format("No input binding found for layer:{0} / {1}",
936 name,
937 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000938}
939
Finn Williams85d36712021-01-26 22:30:06 +0000940BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000941 const std::string& name) const
942{
Jan Eilers8eb25602020-03-09 12:13:48 +0000943 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000944 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000945 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000946 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000947 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000948 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000949 }
950 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100951 throw ParseException(fmt::format("No output binding found for layer:{0} / {1}",
952 name,
953 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000954}
955
Finn Williams85d36712021-01-26 22:30:06 +0000956unsigned int IDeserializer::DeserializerImpl::GetInputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000957{
958 for (unsigned int i = 0; i < graph->layers()->size(); i++)
959 {
960 auto layer = graph->layers()->Get(i);
961 if (layer->layer_type() == Layer::Layer_InputLayer)
962 {
963 auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
964 if (layerBindingId == targetId)
965 {
966 return i;
967 }
968 }
969 }
970 throw ParseException("Input layer with given layerBindingId not found");
971}
972
Finn Williams85d36712021-01-26 22:30:06 +0000973unsigned int IDeserializer::DeserializerImpl::GetOutputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000974{
975 for (unsigned int i = 0; i < graph->layers()->size(); i++)
976 {
977 auto layer = graph->layers()->Get(i);
978 if (layer->layer_type() == Layer::Layer_OutputLayer)
979 {
980 auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
981 if (layerBindingId == targetId)
982 {
983 return i;
984 }
985 }
986 }
987 throw ParseException("Output layer with given layerBindingId not found");
988}
989
Finn Williams85d36712021-01-26 22:30:06 +0000990unsigned int IDeserializer::DeserializerImpl::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100991{
992 for (unsigned int i = 0; i < graph->layers()->size(); i++)
993 {
994 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
995 if (layer->index() == targetIndex)
996 {
997 return i;
998 }
999 }
1000 throw ParseException("Layer with given index not found");
1001}
1002
Finn Williams85d36712021-01-26 22:30:06 +00001003IDeserializer::DeserializerImpl::FeatureVersions IDeserializer::DeserializerImpl::GetFeatureVersions(GraphPtr graph)
Tee Jungaa920c52019-11-05 10:48:25 +00001004{
Finn Williams85d36712021-01-26 22:30:06 +00001005 IDeserializer::DeserializerImpl::FeatureVersions versions;
Tee Jungaa920c52019-11-05 10:48:25 +00001006
1007 if (graph->featureVersions())
1008 {
1009 versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
Jan Eilers53ef7952021-06-02 12:01:25 +01001010 versions.m_WeightsLayoutScheme = graph->featureVersions()->weightsLayoutScheme();
Matthew Sloyan81beae32021-07-13 19:46:11 +01001011 versions.m_ConstTensorsAsInputs = graph->featureVersions()->constantTensorsAsInputs();
Tee Jungaa920c52019-11-05 10:48:25 +00001012 }
1013
1014 return versions;
1015}
1016
// Creates an armnn input layer (and an entry in m_InputBindings) for every
// input id recorded in the serialized graph, and registers its output slots
// for later connection wiring.
void IDeserializer::DeserializerImpl::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numInputs = graph->inputIds()->size();
    m_InputBindings.clear();
    m_InputBindings.reserve(numInputs);

    for (unsigned int i = 0; i < numInputs; i++)
    {
        unsigned int inputLayerIndex = 0xFFFFFFFF;
        // Binding-id scheme 0 (legacy files): the stored id is the layer's
        // index property. Newer schemes: the stored id is a LayerBindingId
        // that must be searched for among the InputLayers.
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            const unsigned int inputId = armnn::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
            inputLayerIndex = GetLayerIndexInVector(graph, inputId);
        }
        else
        {
            const int inputId = graph->inputIds()->Get(i);
            inputLayerIndex = GetInputLayerInVector(graph, inputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");

        IConnectableLayer* inputLayer =
            m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());

        // An input layer has exactly one output slot; its tensor info comes
        // straight from the serialized layer.
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
        inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
        RegisterOutputSlots(graph, inputLayerIndex, inputLayer);

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
1055
// Creates an armnn output layer (and an entry in m_OutputBindings) for every
// output id recorded in the serialized graph, and registers its input slot
// for later connection wiring.
void IDeserializer::DeserializerImpl::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numOutputs = graph->outputIds()->size();
    m_OutputBindings.clear();
    m_OutputBindings.reserve(numOutputs);

    for (unsigned int i = 0; i < numOutputs; i++)
    {
        unsigned int outputLayerIndex = 0xFFFFFFFF;
        // Binding-id scheme 0 (legacy files): the stored id is the layer's
        // index property. Newer schemes: the stored id is a LayerBindingId
        // that must be searched for among the OutputLayers.
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            const unsigned int outputId = armnn::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
            outputLayerIndex = GetLayerIndexInVector(graph, outputId);
        }
        else
        {
            const int outputId = graph->outputIds()->Get(i);
            outputLayerIndex = GetOutputLayerInVector(graph, outputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Output has no name.");

        IConnectableLayer* outputLayer =
            m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());

        RegisterInputSlots(graph, outputLayerIndex, outputLayer);
        // The binding's tensor info is read from the producing layer's output
        // slot, since the output layer itself carries none.
        unsigned int sourceLayerIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
        // NOTE(review): outputSlotIndex is also mapped through
        // GetLayerIndexInVector, i.e. a slot index is treated as a layer
        // index — looks suspicious but is preserved as-is; verify against the
        // serializer before changing.
        unsigned int outputSlotIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->outputSlotIndex());
        LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(
            sourceBaseLayer->outputSlots()->Get(outputSlotIndex)->tensorInfo());
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
1098
// Records each of the armnn layer's output slots in m_GraphConnections so
// they can be connected to consumer input slots after the whole graph has
// been parsed. Throws ParseException when the serialized slot count disagrees
// with the constructed layer's.
void IDeserializer::DeserializerImpl::RegisterOutputSlots(GraphPtr graph,
                                                          uint32_t layerIndex,
                                                          IConnectableLayer* layer)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    ARMNN_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
    if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(fmt::format("The number of outputslots ({0}) does not match the number expected ({1})"
                                         " for layer index: {2} {3}",
                                         baseLayer->outputSlots()->size(),
                                         layer->GetNumOutputSlots(),
                                         layerIndex,
                                         CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
    {
        const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
        armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
        // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
        RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
    }
}
1124
// Records each of the armnn layer's input slots in m_GraphConnections so they
// can be connected to producer output slots after the whole graph has been
// parsed. Slots listed in ignoreSlots are skipped (used when e.g. constant
// tensors are wired as inputs separately). Throws ParseException when the
// serialized slot count disagrees with the constructed layer's (after
// discounting the ignored slots).
void IDeserializer::DeserializerImpl::RegisterInputSlots(GraphPtr graph,
                                                         uint32_t layerIndex,
                                                         armnn::IConnectableLayer* layer,
                                                         std::vector<unsigned int> ignoreSlots)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    ARMNN_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);

    if (baseLayer->inputSlots()->size() != (layer->GetNumInputSlots() - ignoreSlots.size()))
    {
        throw ParseException(fmt::format("The number of inputslots ({0}) does not match the number expected ({1})"
                                         " for layer index:{2} {3}",
                                         baseLayer->inputSlots()->size(),
                                         layer->GetNumInputSlots(),
                                         layerIndex,
                                         CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
    {
        // Check if slot should be ignored.
        if (std::find(ignoreSlots.begin(), ignoreSlots.end(), i) == ignoreSlots.end())
        {
            auto fbInputSlot = baseLayer->inputSlots()->Get(i);
            auto fbConnection = fbInputSlot->connection();
            armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
            RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
        }
    }
}
1156
Finn Williams85d36712021-01-26 22:30:06 +00001157void IDeserializer::DeserializerImpl::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001158 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001159 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +00001160{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001161 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +00001162 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001163 m_GraphConnections[sourceLayerIndex] = Connections();
1164 }
1165
1166 Connections& connections = m_GraphConnections[sourceLayerIndex];
1167 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
1168 {
1169 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +00001170 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001171 else
1172 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001173 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001174 }
1175}
Kevin May43a799c2019-02-08 16:31:42 +00001176
Finn Williams85d36712021-01-26 22:30:06 +00001177void IDeserializer::DeserializerImpl::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001178 uint32_t outputSlotIndex,
1179 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001180{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001181 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1182 {
1183 m_GraphConnections[sourceLayerIndex] = Connections();
1184 }
1185
1186 Connections& connections = m_GraphConnections[sourceLayerIndex];
1187 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1188 {
1189 throw ParseException("Same output slot index processed twice");
1190 }
1191
1192 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +00001193}
1194
Finn Williams85d36712021-01-26 22:30:06 +00001195void IDeserializer::DeserializerImpl::ParseAbs(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001196{
1197 CHECK_LAYERS(graph, 0, layerIndex);
1198 auto inputs = GetInputs(graph, layerIndex);
1199 CHECK_LOCATION();
1200 CHECK_VALID_SIZE(inputs.size(), 1);
1201
1202 auto outputs = GetOutputs(graph, layerIndex);
1203 CHECK_VALID_SIZE(outputs.size(), 1);
1204
1205 auto layerName = GetLayerName(graph, layerIndex);
1206
josh minor4a3c6102020-01-06 16:40:46 -06001207 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
1208 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001209 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1210 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1211
1212 RegisterInputSlots(graph, layerIndex, layer);
1213 RegisterOutputSlots(graph, layerIndex, layer);
1214}
1215
Finn Williams85d36712021-01-26 22:30:06 +00001216void IDeserializer::DeserializerImpl::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +00001217{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001218 CHECK_LAYERS(graph, 0, layerIndex);
1219 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001220 CHECK_LOCATION();
1221 CHECK_VALID_SIZE(inputs.size(), 1);
1222
Derek Lamberti8ddae332019-02-21 16:29:43 +00001223 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001224 CHECK_VALID_SIZE(outputs.size(), 1);
1225
Derek Lamberti8ddae332019-02-21 16:29:43 +00001226 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001227 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001228 auto serializerDescriptor = serializerLayer->descriptor();
1229
1230 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001231 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +00001232 descriptor.m_A = serializerDescriptor->a();
1233 descriptor.m_B = serializerDescriptor->b();
1234
1235 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
1236 layerName.c_str());
1237 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1238 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1239
Derek Lamberti8ddae332019-02-21 16:29:43 +00001240 RegisterInputSlots(graph, layerIndex, layer);
1241 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +00001242}
1243
Finn Williams85d36712021-01-26 22:30:06 +00001244void IDeserializer::DeserializerImpl::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001245{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001246 CHECK_LAYERS(graph, 0, layerIndex);
1247 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001248 CHECK_LOCATION();
1249 CHECK_VALID_SIZE(inputs.size(), 2);
1250
Derek Lamberti8ddae332019-02-21 16:29:43 +00001251 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001252 CHECK_VALID_SIZE(outputs.size(), 1);
1253
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001254 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00001255 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Add);
1256 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001257
1258 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1259 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1260
Derek Lamberti8ddae332019-02-21 16:29:43 +00001261 RegisterInputSlots(graph, layerIndex, layer);
1262 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001263}
1264
Finn Williams85d36712021-01-26 22:30:06 +00001265void IDeserializer::DeserializerImpl::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001266{
1267 CHECK_LAYERS(graph, 0, layerIndex);
1268 auto inputs = GetInputs(graph, layerIndex);
1269 CHECK_LOCATION();
1270 CHECK_VALID_SIZE(inputs.size(), 1);
1271
1272 auto outputs = GetOutputs(graph, layerIndex);
1273 CHECK_VALID_SIZE(outputs.size(), 1);
1274
1275 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1276 auto serializerDescriptor = serializerLayer->descriptor();
1277
1278 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001279 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001280 descriptor.m_Axis = serializerDescriptor->axis();
1281 auto layerName = GetLayerName(graph, layerIndex);
1282 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1283
1284 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1285 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1286
1287 RegisterInputSlots(graph, layerIndex, layer);
1288 RegisterOutputSlots(graph, layerIndex, layer);
1289}
1290
Samuel Yapa04f4a12022-08-19 11:14:38 +01001291void IDeserializer::DeserializerImpl::ParseBatchMatMul(GraphPtr graph, unsigned int layerIndex)
1292{
1293 CHECK_LAYERS(graph, 0, layerIndex);
1294
1295 auto inputs = GetInputs(graph, layerIndex);
1296 CHECK_LOCATION();
1297 CHECK_VALID_SIZE(inputs.size(), 2);
1298
1299 auto outputs = GetOutputs(graph, layerIndex);
1300 CHECK_VALID_SIZE(outputs.size(), 1);
1301
1302 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchMatMulLayer();
1303 auto serializerDescriptor = serializerLayer->descriptor();
1304
1305 armnn::BatchMatMulDescriptor descriptor(serializerDescriptor->transposeX(),
1306 serializerDescriptor->transposeY(),
1307 serializerDescriptor->adjointX(),
1308 serializerDescriptor->adjointY(),
1309 ToDataLayout(serializerDescriptor->dataLayoutX()),
1310 ToDataLayout(serializerDescriptor->dataLayoutY()));
1311
1312 auto layerName = GetLayerName(graph, layerIndex);
1313 IConnectableLayer* layer = m_Network->AddBatchMatMulLayer(descriptor, layerName.c_str());
1314
1315 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1316 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1317
1318 RegisterInputSlots(graph, layerIndex, layer);
1319 RegisterOutputSlots(graph, layerIndex, layer);
1320}
1321
Finn Williams85d36712021-01-26 22:30:06 +00001322void IDeserializer::DeserializerImpl::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001323{
1324 CHECK_LAYERS(graph, 0, layerIndex);
1325
Finn Williams85d36712021-01-26 22:30:06 +00001326 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001327 CHECK_VALID_SIZE(inputs.size(), 1);
1328
Finn Williams85d36712021-01-26 22:30:06 +00001329 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001330 CHECK_VALID_SIZE(outputs.size(), 1);
1331
1332 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1333 auto flatBufferCrops = flatBufferDescriptor->crops();
1334 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1335
Mike Kelly51b8c312022-05-24 11:34:02 +01001336 if (flatBufferCrops->size() % 2 != 0)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001337 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001338 throw ParseException(fmt::format("The size of crops must be divisible by 2 {}", CHECK_LOCATION().AsString()));
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001339 }
1340
1341 std::vector<std::pair<unsigned int, unsigned int>> crops;
Mike Kelly51b8c312022-05-24 11:34:02 +01001342 crops.reserve(flatBufferCrops->size() / 2);
1343 for (unsigned int i = 0; i < flatBufferCrops->size() - 1; i += 2)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001344 {
1345 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1346 }
1347
1348 armnn::BatchToSpaceNdDescriptor descriptor;
1349 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1350 descriptor.m_BlockShape =
1351 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1352 descriptor.m_Crops = crops;
1353
1354 auto layerName = GetLayerName(graph, layerIndex);
1355 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1356
1357 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1358 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1359
1360 RegisterInputSlots(graph, layerIndex, layer);
1361 RegisterOutputSlots(graph, layerIndex, layer);
1362}
1363
Finn Williams85d36712021-01-26 22:30:06 +00001364void IDeserializer::DeserializerImpl::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
ruoyan018e7fa232019-02-28 15:09:07 +00001365{
1366 CHECK_LAYERS(graph, 0, layerIndex);
1367
1368 auto inputs = GetInputs(graph, layerIndex);
1369 CHECK_VALID_SIZE(inputs.size(), 1);
1370
1371 auto outputs = GetOutputs(graph, layerIndex);
1372 CHECK_VALID_SIZE(outputs.size(), 1);
1373 auto outputInfo = ToTensorInfo(outputs[0]);
1374
ruoyan015c7ab052019-03-04 14:48:02 +00001375 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001376
1377 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1378 auto serializerDescriptor = serializerLayer->descriptor();
1379
1380 armnn::BatchNormalizationDescriptor descriptor;
1381 descriptor.m_Eps = serializerDescriptor->eps();
1382 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1383
1384 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1385 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1386 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1387 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1388
1389 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1390 mean,
1391 variance,
1392 beta,
1393 gamma,
1394 layerName.c_str());
1395 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1396
1397 RegisterInputSlots(graph, layerIndex, layer);
1398 RegisterOutputSlots(graph, layerIndex, layer);
1399}
1400
mathad01b392e982021-04-07 12:07:30 +01001401void IDeserializer::DeserializerImpl::ParseCast(GraphPtr graph, unsigned int layerIndex)
1402{
1403 CHECK_LAYERS(graph, 0, layerIndex);
1404 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1405 CHECK_LOCATION();
1406 CHECK_VALID_SIZE(inputs.size(), 1);
1407
1408 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1409 CHECK_VALID_SIZE(outputs.size(), 1);
1410
1411 auto layerName = GetLayerName(graph, layerIndex);
1412
1413 IConnectableLayer* layer = m_Network->AddCastLayer(layerName.c_str());
1414
1415 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1416 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1417
1418 RegisterInputSlots(graph, layerIndex, layer);
1419 RegisterOutputSlots(graph, layerIndex, layer);
1420}
1421
// Deserializes a Constant layer.
//
// For models written before the weights-layout-scheme flatbuffer version
// (m_WeightsLayoutScheme <= 0) the stored tensor may be depthwise-convolution
// weights in the old [M,I,H,W] layout; such tensors are permuted and reshaped
// into the current [1,H,W,I*M] layout before the ConstantLayer is created, so
// older models keep working unchanged. Newer models take the tensor as-is.
void IDeserializer::DeserializerImpl::ParseConstant(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    // A constant has no inputs; only its single output is validated.
    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
    auto serializerInput = serializerLayer->input();

    // 'input' wraps the flatbuffer-owned payload; it is not copied yet.
    armnn::ConstTensor input = ToConstTensor(serializerInput);
    IConnectableLayer* layer;

    // Required for when Constant Layer is used as an inputs to DepthwiseConvolution2d Layer.
    // Running a model that was created before weights layout scheme version was added to our flatbuffers
    // file ensuring older models can still be read and executed. featureVersion weights layout scheme 1
    // indicates a change in the depthwise weights layout within ArmNN from [M,I,H,W] --> [1,H,W,I*M]
    if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
    {
        // Permute weights [ H, W, M, I ] --> [ 1, H, W, I*M ]
        // Step1: [ M, I, H, W ] --> [ H, W, I, M]
        PermutationVector permutationVector = { 3, 2, 0, 1 };
        armnn::TensorInfo weightsInfo = input.GetInfo();
        // Scratch buffer for the permuted payload; must outlive the
        // AddConstantLayer call below, which copies the data into the layer.
        std::unique_ptr<unsigned char[]> permuteBuffer(new unsigned char[weightsInfo.GetNumBytes()]);
        weightsInfo = armnnUtils::Permuted(weightsInfo, permutationVector);
        armnnUtils::Permute(weightsInfo.GetShape(), permutationVector,
                            input.GetMemoryArea(), permuteBuffer.get(),
                            GetDataTypeSize(weightsInfo.GetDataType()));

        // Step2: Reshape [ H, W, I, M] --> [ 1, H, W, I*M ]
        auto weightsShape = weightsInfo.GetShape();
        weightsInfo.SetShape({1,
                              weightsShape[0],
                              weightsShape[1],
                              weightsShape[2]*weightsShape[3]});
        weightsInfo.SetConstant(true);

        armnn::ConstTensor weightsPermuted(weightsInfo, permuteBuffer.get());

        layer = m_Network->AddConstantLayer(weightsPermuted, layerName.c_str());

        layer->GetOutputSlot(0).SetTensorInfo(weightsPermuted.GetInfo());

        RegisterOutputSlots(graph, layerIndex, layer);

        // Early return: the legacy path registers its own output slots above.
        return;
    }
    else
    {
        layer = m_Network->AddConstantLayer(input, layerName.c_str());

        armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
        outputTensorInfo.SetConstant(true);
        layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
    }

    RegisterOutputSlots(graph, layerIndex, layer);
}
1483
Finn Williams85d36712021-01-26 22:30:06 +00001484void IDeserializer::DeserializerImpl::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001485{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001486 CHECK_LAYERS(graph, 0, layerIndex);
1487 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001488 CHECK_LOCATION();
Mike Kellya0766c32019-02-19 17:22:07 +00001489
Derek Lamberti8ddae332019-02-21 16:29:43 +00001490 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001491 CHECK_VALID_SIZE(outputs.size(), 1);
1492
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001493 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
1494
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001495 auto layerName = GetLayerName(graph, layerIndex);
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001496 auto flatbufferDescriptor = flatBufferLayer->descriptor();
Mike Kellya0766c32019-02-19 17:22:07 +00001497
1498 armnn::Convolution2dDescriptor descriptor;
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001499 descriptor.m_PadLeft = flatbufferDescriptor->padLeft();
1500 descriptor.m_PadRight = flatbufferDescriptor->padRight();
1501 descriptor.m_PadTop = flatbufferDescriptor->padTop();
1502 descriptor.m_PadBottom = flatbufferDescriptor->padBottom();
1503 descriptor.m_StrideX = flatbufferDescriptor->strideX();
1504 descriptor.m_StrideY = flatbufferDescriptor->strideY();;
1505 descriptor.m_DilationX = flatbufferDescriptor->dilationX();
1506 descriptor.m_DilationY = flatbufferDescriptor->dilationY();;
1507 descriptor.m_BiasEnabled = flatbufferDescriptor->biasEnabled();;
1508 descriptor.m_DataLayout = ToDataLayout(flatbufferDescriptor->dataLayout());
Mike Kellya0766c32019-02-19 17:22:07 +00001509
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001510 armnn::IConnectableLayer* layer;
1511 std::vector<unsigned int> ignoreSlots {};
Mike Kellya0766c32019-02-19 17:22:07 +00001512
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001513 armnn::ConstTensor biasTensor;
1514 // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
1515 // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
1516 if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
Mike Kellya0766c32019-02-19 17:22:07 +00001517 {
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001518 // If the model stores weights and biases as members of the layer we have to read them from there
1519 // but add them to their own ConstantLayer for compatibility
1520 CHECK_VALID_SIZE(inputs.size(), 1);
1521
1522 layer = m_Network->AddConvolution2dLayer(descriptor,
1523 layerName.c_str());
1524
1525 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1526 auto weightsLayer = m_Network->AddConstantLayer(weightsTensor);
1527 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
1528 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsTensor.GetInfo());
1529 ignoreSlots.emplace_back(1u);
1530
1531 if (descriptor.m_BiasEnabled)
1532 {
1533 biasTensor = ToConstTensor(flatBufferLayer->biases());
1534 auto biasLayer = m_Network->AddConstantLayer(biasTensor);
1535 biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
1536 biasLayer->GetOutputSlot(0).SetTensorInfo(biasTensor.GetInfo());
1537 ignoreSlots.emplace_back(2u);
1538 }
Mike Kellya0766c32019-02-19 17:22:07 +00001539 }
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001540 else
1541 {
1542 layer = m_Network->AddConvolution2dLayer(descriptor,
1543 layerName.c_str());
1544 uint32_t numInputs = descriptor.GetNumInputs();
1545 CHECK_VALID_SIZE(inputs.size(), numInputs);
1546 }
1547
Mike Kellya0766c32019-02-19 17:22:07 +00001548 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1549 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1550
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001551 RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
Derek Lamberti8ddae332019-02-21 16:29:43 +00001552 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001553}
1554
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001555void IDeserializer::DeserializerImpl::ParseConvolution3d(GraphPtr graph, unsigned int layerIndex)
1556{
1557 CHECK_LAYERS(graph, 0, layerIndex);
1558 auto inputs = GetInputs(graph, layerIndex);
1559 CHECK_LOCATION();
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001560
1561 auto outputs = GetOutputs(graph, layerIndex);
1562 CHECK_VALID_SIZE(outputs.size(), 1);
1563
1564 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution3dLayer();
1565 auto layerName = GetLayerName(graph, layerIndex);
1566 auto serializerDescriptor = serializerLayer->descriptor();
1567
1568 armnn::Convolution3dDescriptor descriptor;
1569 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1570 descriptor.m_PadRight = serializerDescriptor->padRight();
1571 descriptor.m_PadTop = serializerDescriptor->padTop();
1572 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1573 descriptor.m_PadFront = serializerDescriptor->padFront();
1574 descriptor.m_PadBack = serializerDescriptor->padBack();
1575 descriptor.m_StrideX = serializerDescriptor->strideX();
1576 descriptor.m_StrideY = serializerDescriptor->strideY();
1577 descriptor.m_StrideZ = serializerDescriptor->strideZ();
1578 descriptor.m_DilationX = serializerDescriptor->dilationX();
1579 descriptor.m_DilationY = serializerDescriptor->dilationY();
1580 descriptor.m_DilationZ = serializerDescriptor->dilationZ();
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001581 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001582 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1583
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001584 uint32_t numInputs = descriptor.GetNumInputs();
1585 CHECK_VALID_SIZE(inputs.size(), numInputs);
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001586
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001587 IConnectableLayer* layer = m_Network->AddConvolution3dLayer(descriptor, layerName.c_str());
1588
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001589 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1590 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1591
1592 RegisterInputSlots(graph, layerIndex, layer);
1593 RegisterOutputSlots(graph, layerIndex, layer);
1594}
1595
Finn Williams85d36712021-01-26 22:30:06 +00001596void IDeserializer::DeserializerImpl::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001597{
1598 CHECK_LAYERS(graph, 0, layerIndex);
1599
1600 auto inputs = GetInputs(graph, layerIndex);
1601 CHECK_VALID_SIZE(inputs.size(), 1);
1602
1603 auto outputs = GetOutputs(graph, layerIndex);
1604 CHECK_VALID_SIZE(outputs.size(), 1);
1605
1606 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1607
1608 armnn::DepthToSpaceDescriptor descriptor;
1609 descriptor.m_BlockSize = fbDescriptor->blockSize();
1610 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1611
1612 auto layerName = GetLayerName(graph, layerIndex);
1613 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1614
1615 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1616 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1617
1618 RegisterInputSlots(graph, layerIndex, layer);
1619 RegisterOutputSlots(graph, layerIndex, layer);
1620}
1621
// Deserializes a DepthwiseConvolution2d layer.
//
// Two compatibility dimensions are handled:
//  * m_ConstTensorsAsInputs <= 0: legacy models store weights/biases as layer
//    members; they are re-emitted as ConstantLayers wired to input slots 1/2.
//  * m_WeightsLayoutScheme <= 0: legacy weights use the old [M,I,H,W] layout
//    and are permuted/reshaped into the current [1,H,W,I*M] layout.
void IDeserializer::DeserializerImpl::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::DepthwiseConvolution2dDescriptor descriptor;
    descriptor.m_PadLeft = serializerDescriptor->padLeft();
    descriptor.m_PadRight = serializerDescriptor->padRight();
    descriptor.m_PadTop = serializerDescriptor->padTop();
    descriptor.m_PadBottom = serializerDescriptor->padBottom();
    descriptor.m_StrideX = serializerDescriptor->strideX();
    descriptor.m_StrideY = serializerDescriptor->strideY();
    descriptor.m_DilationX = serializerDescriptor->dilationX();
    descriptor.m_DilationY = serializerDescriptor->dilationY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());

    IConnectableLayer* layer;
    std::vector<unsigned int> ignoreSlots {};

    // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
    // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
    if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
    {
        CHECK_VALID_SIZE(inputs.size(), 1);

        // If the model stores weights and biases as members of the layer we have to read them from there
        // but add them to their own ConstantLayer for compatibility
        armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
        // Slot 1 (weights) is connected manually below; keep RegisterInputSlots away from it.
        ignoreSlots.emplace_back(1u);

        layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                          layerName.c_str());

        // NOTE(review): 'optionalBiases' appears unused — the bias gets its own
        // ConstantLayer below instead. Kept for byte-identical behavior.
        armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
        if (descriptor.m_BiasEnabled)
        {
            armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
            // Slot 2 (bias) is also connected manually.
            ignoreSlots.emplace_back(2u);

            auto biasLayer = m_Network->AddConstantLayer(biases);
            biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
            biasLayer->GetOutputSlot(0).SetTensorInfo(biases.GetInfo());
        }

        if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
        {
            // Permute weights [ H, W, M, I ] --> [ 1, H, W, I*M ]
            // Step1: [ M, I, H, W ] --> [ H, W, I, M]
            PermutationVector permutationVector = { 3, 2, 0, 1 };
            armnn::TensorInfo weightsInfo = weights.GetInfo();
            // Scratch buffer holding the permuted payload until the
            // ConstantLayer (which copies it) has been created.
            std::unique_ptr<unsigned char[]> permuteBuffer(new unsigned char[weightsInfo.GetNumBytes()]);
            weightsInfo = armnnUtils::Permuted(weightsInfo, permutationVector);
            armnnUtils::Permute(weightsInfo.GetShape(), permutationVector,
                                weights.GetMemoryArea(), permuteBuffer.get(),
                                GetDataTypeSize(weightsInfo.GetDataType()));

            // Step2: Reshape [ H, W, I, M] --> [ 1, H, W, I*M ]
            auto weightsShape = weightsInfo.GetShape();
            weightsInfo.SetShape({1,
                                  weightsShape[0],
                                  weightsShape[1],
                                  weightsShape[2]*weightsShape[3]});

            armnn::ConstTensor weightsPermuted(weightsInfo, permuteBuffer.get());

            auto weightsLayer = m_Network->AddConstantLayer(weightsPermuted);
            weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
            weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsPermuted.GetInfo());
        }
        else
        {
            // Weights already in the current layout; wire them through unchanged.
            auto weightsLayer = m_Network->AddConstantLayer(weights);
            weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
            weightsLayer->GetOutputSlot(0).SetTensorInfo(weights.GetInfo());
        }
    }
    else
    {
        // Current format: weights/bias come in as ordinary input connections.
        layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                          layerName.c_str());
        uint32_t numInputs = descriptor.GetNumInputs();
        CHECK_VALID_SIZE(inputs.size(), numInputs);
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1721
// Deserializes a DetectionPostProcess layer: rebuilds the descriptor from the
// flatbuffer fields, restores the constant anchors tensor and wires the layer
// into the network being reconstructed.
void IDeserializer::DeserializerImpl::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();
    // Exactly two inputs and four outputs are serialized for this layer type.
    CHECK_VALID_SIZE(inputs.size(), 2);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    // Copy every serialized descriptor field one-to-one into the armnn descriptor.
    armnn::DetectionPostProcessDescriptor descriptor;
    descriptor.m_MaxDetections           = flatBufferDescriptor->maxDetections();
    descriptor.m_MaxClassesPerDetection  = flatBufferDescriptor->maxClassesPerDetection();
    descriptor.m_DetectionsPerClass      = flatBufferDescriptor->detectionsPerClass();
    descriptor.m_NmsScoreThreshold       = flatBufferDescriptor->nmsScoreThreshold();
    descriptor.m_NmsIouThreshold         = flatBufferDescriptor->nmsIouThreshold();
    descriptor.m_NumClasses              = flatBufferDescriptor->numClasses();
    descriptor.m_UseRegularNms           = flatBufferDescriptor->useRegularNms();
    descriptor.m_ScaleX                  = flatBufferDescriptor->scaleX();
    descriptor.m_ScaleY                  = flatBufferDescriptor->scaleY();
    descriptor.m_ScaleW                  = flatBufferDescriptor->scaleW();
    descriptor.m_ScaleH                  = flatBufferDescriptor->scaleH();

    // Anchors are stored as a constant tensor alongside the descriptor.
    armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());

    IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
                                                                       anchors,
                                                                       layerName.c_str());

    // Propagate the tensor info onto each of the four output slots.
    for (unsigned int i = 0; i < 4; i++)
    {
        layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
    }

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1763
Finn Williams85d36712021-01-26 22:30:06 +00001764void IDeserializer::DeserializerImpl::ParseDivision(GraphPtr graph, unsigned int layerIndex)
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001765{
1766 CHECK_LAYERS(graph, 0, layerIndex);
1767 auto inputs = GetInputs(graph, layerIndex);
1768 CHECK_LOCATION();
1769 CHECK_VALID_SIZE(inputs.size(), 2);
1770
1771 auto outputs = GetOutputs(graph, layerIndex);
1772 CHECK_VALID_SIZE(outputs.size(), 1);
1773
1774 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00001775 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Div);
1776 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001777
1778 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1779 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1780
1781 RegisterInputSlots(graph, layerIndex, layer);
1782 RegisterOutputSlots(graph, layerIndex, layer);
1783}
1784
Finn Williams85d36712021-01-26 22:30:06 +00001785void IDeserializer::DeserializerImpl::ParseEqual(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001786{
1787 CHECK_LAYERS(graph, 0, layerIndex);
1788 auto inputs = GetInputs(graph, layerIndex);
1789 CHECK_LOCATION();
1790 CHECK_VALID_SIZE(inputs.size(), 2);
1791
1792 auto outputs = GetOutputs(graph, layerIndex);
1793 CHECK_VALID_SIZE(outputs.size(), 1);
1794
1795 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001796 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1797 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001798
1799 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1800 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1801
1802 RegisterInputSlots(graph, layerIndex, layer);
1803 RegisterOutputSlots(graph, layerIndex, layer);
1804}
1805
Finn Williams85d36712021-01-26 22:30:06 +00001806void IDeserializer::DeserializerImpl::ParseFill(GraphPtr graph, unsigned int layerIndex)
Keith Davis300ad562020-06-04 16:34:23 +01001807{
1808 CHECK_LAYERS(graph, 0, layerIndex);
1809 auto inputs = GetInputs(graph, layerIndex);
1810 CHECK_LOCATION();
1811 CHECK_VALID_SIZE(inputs.size(), 1);
1812
1813 auto outputs = GetOutputs(graph, layerIndex);
1814 CHECK_VALID_SIZE(outputs.size(), 1);
1815
1816 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armaganfd0cae32021-11-08 17:18:31 +00001817 armnn::FillDescriptor descriptor;
1818 descriptor.m_Value = graph->layers()->Get(layerIndex)->layer_as_FillLayer()->descriptor()->value();
Keith Davis300ad562020-06-04 16:34:23 +01001819 IConnectableLayer* layer = m_Network->AddFillLayer(descriptor, layerName.c_str());
1820
1821 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1822 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1823
1824 RegisterInputSlots(graph, layerIndex, layer);
1825 RegisterOutputSlots(graph, layerIndex, layer);
1826}
1827
Finn Williams85d36712021-01-26 22:30:06 +00001828void IDeserializer::DeserializerImpl::ParseGreater(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001829{
1830 CHECK_LAYERS(graph, 0, layerIndex);
1831 auto inputs = GetInputs(graph, layerIndex);
1832 CHECK_LOCATION();
1833 CHECK_VALID_SIZE(inputs.size(), 2);
1834
1835 auto outputs = GetOutputs(graph, layerIndex);
1836 CHECK_VALID_SIZE(outputs.size(), 1);
1837
1838 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001839 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1840 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001841
1842 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1843 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1844
1845 RegisterInputSlots(graph, layerIndex, layer);
1846 RegisterOutputSlots(graph, layerIndex, layer);
1847}
1848
Finn Williams85d36712021-01-26 22:30:06 +00001849void IDeserializer::DeserializerImpl::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001850{
1851 CHECK_LAYERS(graph, 0, layerIndex);
1852
1853 auto inputs = GetInputs(graph, layerIndex);
1854 CHECK_VALID_SIZE(inputs.size(), 1);
1855
1856 auto outputs = GetOutputs(graph, layerIndex);
1857 CHECK_VALID_SIZE(outputs.size(), 1);
1858
1859 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1860 auto fbDescriptor = fbLayer->descriptor();
1861
1862 armnn::InstanceNormalizationDescriptor descriptor;
1863 descriptor.m_Gamma = fbDescriptor->gamma();
1864 descriptor.m_Beta = fbDescriptor->beta();
1865 descriptor.m_Eps = fbDescriptor->eps();
1866 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1867
1868 const std::string layerName = GetLayerName(graph, layerIndex);
1869 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1870
1871 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1872 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1873
1874 RegisterInputSlots(graph, layerIndex, layer);
1875 RegisterOutputSlots(graph, layerIndex, layer);
1876}
1877
Finn Williams85d36712021-01-26 22:30:06 +00001878void IDeserializer::DeserializerImpl::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001879{
1880 CHECK_LAYERS(graph, 0, layerIndex);
1881
1882 auto inputs = GetInputs(graph, layerIndex);
1883 CHECK_VALID_SIZE(inputs.size(), 1);
1884
1885 auto outputs = GetOutputs(graph, layerIndex);
1886 CHECK_VALID_SIZE(outputs.size(), 1);
1887 auto outputInfo = ToTensorInfo(outputs[0]);
1888
1889 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1890 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1891
1892 auto layerName = GetLayerName(graph, layerIndex);
1893 armnn::L2NormalizationDescriptor descriptor;
1894 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001895 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001896
1897 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1898 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1899
1900 RegisterInputSlots(graph, layerIndex, layer);
1901 RegisterOutputSlots(graph, layerIndex, layer);
1902}
1903
Finn Williams85d36712021-01-26 22:30:06 +00001904void IDeserializer::DeserializerImpl::ParseLogicalBinary(GraphPtr graph, unsigned int layerIndex)
James Conroyaba90cd2020-11-06 16:28:18 +00001905{
1906 CHECK_LAYERS(graph, 0, layerIndex);
1907 CHECK_LOCATION();
1908
1909 auto inputs = GetInputs(graph, layerIndex);
1910 CHECK_VALID_SIZE(inputs.size(), 2);
1911
1912 auto outputs = GetOutputs(graph, layerIndex);
1913 CHECK_VALID_SIZE(outputs.size(), 1);
1914
1915 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer();
1916 auto fbDescriptor = fbLayer->descriptor();
1917
1918 armnn::LogicalBinaryDescriptor descriptor;
1919 descriptor.m_Operation = ToLogicalBinaryOperation(fbDescriptor->operation());
1920
1921 const std::string& layerName = GetLayerName(graph, layerIndex);
1922 IConnectableLayer* layer = m_Network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1923
1924 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1925 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1926
1927 RegisterInputSlots(graph, layerIndex, layer);
1928 RegisterOutputSlots(graph, layerIndex, layer);
1929}
1930
Finn Williams85d36712021-01-26 22:30:06 +00001931void IDeserializer::DeserializerImpl::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan26257852019-10-14 13:00:47 +01001932{
1933 CHECK_LAYERS(graph, 0, layerIndex);
1934
Finn Williams85d36712021-01-26 22:30:06 +00001935 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001936 CHECK_VALID_SIZE(inputs.size(), 1);
1937
Finn Williams85d36712021-01-26 22:30:06 +00001938 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001939 CHECK_VALID_SIZE(outputs.size(), 1);
1940
1941 armnn::LogSoftmaxDescriptor descriptor;
1942 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1943 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1944 auto layerName = GetLayerName(graph, layerIndex);
1945
1946 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1947
1948 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1949 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1950
1951 RegisterInputSlots(graph, layerIndex, layer);
1952 RegisterOutputSlots(graph, layerIndex, layer);
1953}
1954
Finn Williams85d36712021-01-26 22:30:06 +00001955void IDeserializer::DeserializerImpl::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001956{
1957 CHECK_LAYERS(graph, 0, layerIndex);
1958 auto inputs = GetInputs(graph, layerIndex);
1959 CHECK_LOCATION();
1960 CHECK_VALID_SIZE(inputs.size(), 2);
1961
1962 auto outputs = GetOutputs(graph, layerIndex);
1963 CHECK_VALID_SIZE(outputs.size(), 1);
1964
1965 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00001966 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Minimum);
1967 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001968
1969 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1970 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1971
1972 RegisterInputSlots(graph, layerIndex, layer);
1973 RegisterOutputSlots(graph, layerIndex, layer);
1974}
1975
Finn Williams85d36712021-01-26 22:30:06 +00001976void IDeserializer::DeserializerImpl::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001977{
1978 CHECK_LAYERS(graph, 0, layerIndex);
1979 auto inputs = GetInputs(graph, layerIndex);
1980 CHECK_LOCATION();
1981 CHECK_VALID_SIZE(inputs.size(), 2);
1982
1983 auto outputs = GetOutputs(graph, layerIndex);
1984 CHECK_VALID_SIZE(outputs.size(), 1);
1985
1986 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00001987 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Maximum);
1988 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001989
1990 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1991 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1992
1993 RegisterInputSlots(graph, layerIndex, layer);
1994 RegisterOutputSlots(graph, layerIndex, layer);
1995}
1996
Jim Flynne242f2d2019-05-22 14:24:13 +01001997const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1998 unsigned int layerIndex)
1999{
2000 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
2001
2002 switch (layerType)
2003 {
2004 case Layer::Layer_ConcatLayer:
2005 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
2006 case Layer::Layer_MergerLayer:
2007 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
2008 default:
2009 throw armnn::Exception("unknown layer type, should be concat or merger");
2010 }
2011}
Simon Obute51f67772021-09-03 15:50:13 +01002012void IDeserializer::DeserializerImpl::ParseChannelShuffle(GraphPtr graph, unsigned int layerIndex)
2013{
2014 CHECK_LAYERS(graph, 0, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01002015
Simon Obute51f67772021-09-03 15:50:13 +01002016 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2017 CHECK_VALID_SIZE(inputs.size(), 1);
2018
2019 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2020 CHECK_VALID_SIZE(outputs.size(), 1);
2021
2022 armnn::ChannelShuffleDescriptor descriptor;
2023 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->axis();
2024 descriptor.m_NumGroups =
2025 graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->numGroups();
2026
2027 auto layerName = GetLayerName(graph, layerIndex);
2028 IConnectableLayer* layer = m_Network->AddChannelShuffleLayer(descriptor, layerName.c_str());
2029
2030 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2031 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2032
2033 RegisterInputSlots(graph, layerIndex, layer);
2034 RegisterOutputSlots(graph, layerIndex, layer);
2035}
Finn Williams85d36712021-01-26 22:30:06 +00002036void IDeserializer::DeserializerImpl::ParseComparison(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01002037{
2038 CHECK_LAYERS(graph, 0, layerIndex);
2039 CHECK_LOCATION();
2040
2041 auto inputs = GetInputs(graph, layerIndex);
2042 CHECK_VALID_SIZE(inputs.size(), 2);
2043
2044 auto outputs = GetOutputs(graph, layerIndex);
2045 CHECK_VALID_SIZE(outputs.size(), 1);
2046
2047 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
2048 auto fbDescriptor = fbLayer->descriptor();
2049
2050 armnn::ComparisonDescriptor descriptor;
2051 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
2052
2053 const std::string& layerName = GetLayerName(graph, layerIndex);
2054 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
2055
2056 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2057 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2058
2059 RegisterInputSlots(graph, layerIndex, layer);
2060 RegisterOutputSlots(graph, layerIndex, layer);
2061}
2062
Mike Kelly3ec30772023-03-08 13:47:17 +00002063void IDeserializer::DeserializerImpl::ParseElementwiseBinary(GraphPtr graph, unsigned int layerIndex)
2064{
2065 CHECK_LAYERS(graph, 0, layerIndex);
2066 CHECK_LOCATION();
2067
2068 auto inputs = GetInputs(graph, layerIndex);
2069 CHECK_VALID_SIZE(inputs.size(), 2);
2070
2071 auto outputs = GetOutputs(graph, layerIndex);
2072 CHECK_VALID_SIZE(outputs.size(), 1);
2073
2074 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseBinaryLayer();
2075 auto fbDescriptor = fbLayer->descriptor();
2076
2077 armnn::ElementwiseBinaryDescriptor descriptor;
2078 descriptor.m_Operation = ToElementwiseBinaryOperation(fbDescriptor->operation());
2079
2080 const std::string& layerName = GetLayerName(graph, layerIndex);
2081 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
2082
2083 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2084 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2085
2086 RegisterInputSlots(graph, layerIndex, layer);
2087 RegisterOutputSlots(graph, layerIndex, layer);
2088}
2089
Finn Williams85d36712021-01-26 22:30:06 +00002090void IDeserializer::DeserializerImpl::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
josh minor4a3c6102020-01-06 16:40:46 -06002091{
2092 CHECK_LAYERS(graph, 0, layerIndex);
2093 CHECK_LOCATION();
2094
2095 auto inputs = GetInputs(graph, layerIndex);
2096 CHECK_VALID_SIZE(inputs.size(), 1);
2097
2098 auto outputs = GetOutputs(graph, layerIndex);
2099 CHECK_VALID_SIZE(outputs.size(), 1);
2100
2101 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
2102 auto fbDescriptor = fbLayer->descriptor();
2103
2104 armnn::ElementwiseUnaryDescriptor descriptor;
Mike Kelly3ec30772023-03-08 13:47:17 +00002105 descriptor.m_Operation = ToElementwiseUnaryOperation(fbDescriptor->operation());
josh minor4a3c6102020-01-06 16:40:46 -06002106
2107 const std::string& layerName = GetLayerName(graph, layerIndex);
2108 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
2109
2110 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2111 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2112
2113 RegisterInputSlots(graph, layerIndex, layer);
2114 RegisterOutputSlots(graph, layerIndex, layer);
2115}
2116
// Deserializes a Concat layer (also covering the deprecated Merger layer type
// via GetOriginsDescriptor): rebuilds the OriginsDescriptor, including every
// per-view origin coordinate, and wires the layer into the network.
void IDeserializer::DeserializerImpl::ParseConcat(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);
    // The serialized descriptor is fetched first because its view count
    // determines how many inputs this layer must have.
    auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
    unsigned int numViews = originsDescriptor->numViews();
    unsigned int numDimensions = originsDescriptor->numDimensions();

    // can now check the number of inputs == number of views
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), numViews);

    armnn::OriginsDescriptor descriptor(numViews, numDimensions);
    auto originsPtr = originsDescriptor->viewOrigins();
    // Copy each view's origin coordinates, dimension by dimension, into the
    // armnn descriptor.
    for (unsigned int v = 0; v < numViews; ++v)
    {
        auto originPtr = originsPtr->Get(v);
        for (unsigned int d = 0; d < numDimensions; ++d)
        {
            uint32_t value = originPtr->data()->Get(d);
            descriptor.SetViewOriginCoord(v, d, value);
        }
    }
    descriptor.SetConcatAxis(originsDescriptor->concatAxis());

    IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2154
Finn Williams85d36712021-01-26 22:30:06 +00002155void IDeserializer::DeserializerImpl::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00002156{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002157 CHECK_LAYERS(graph, 0, layerIndex);
2158 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00002159 CHECK_LOCATION();
2160 CHECK_VALID_SIZE(inputs.size(), 2);
2161
Derek Lamberti8ddae332019-02-21 16:29:43 +00002162 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00002163 CHECK_VALID_SIZE(outputs.size(), 1);
2164
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002165 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00002166 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Mul);
2167 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00002168
2169 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2170 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2171
Derek Lamberti8ddae332019-02-21 16:29:43 +00002172 RegisterInputSlots(graph, layerIndex, layer);
2173 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00002174}
2175
Finn Williams85d36712021-01-26 22:30:06 +00002176void IDeserializer::DeserializerImpl::ParseFloor(GraphPtr graph, unsigned int layerIndex)
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00002177{
2178 CHECK_LAYERS(graph, 0, layerIndex);
2179 CHECK_LOCATION();
2180
2181 auto inputs = GetInputs(graph, layerIndex);
2182 CHECK_VALID_SIZE(inputs.size(), 1);
2183
2184 auto outputs = GetOutputs(graph, layerIndex);
2185 CHECK_VALID_SIZE(outputs.size(), 1);
2186
2187 auto layerName = GetLayerName(graph, layerIndex);
2188
2189 armnn::IConnectableLayer* layer;
2190
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00002191 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00002192
2193 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2194 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2195
2196 RegisterInputSlots(graph, layerIndex, layer);
2197 RegisterOutputSlots(graph, layerIndex, layer);
2198}
2199
// Deserializes a FullyConnected layer. Handles both serialization formats:
// older files store weights/biases inside the layer itself, newer files
// (m_ConstTensorsAsInputs > 0) pass them as extra inputs.
void IDeserializer::DeserializerImpl::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
    fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
    fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
    fullyConnectedDescriptor.m_ConstantWeights = flatBufferDescriptor->constantWeights();

    armnn::IConnectableLayer* layer;
    // Input slots fed from ConstantLayers created here must be skipped when
    // registering connections from the serialized graph.
    std::vector<unsigned int> ignoreSlots {};

    // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
    // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
    if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
    {
        // If the model stores weights and biases as members of the layer we have to read them from there
        // but add them to their own ConstantLayer for compatibility
        CHECK_VALID_SIZE(inputs.size(), 1);
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  layerName.c_str());

        // Weights always occupy input slot 1.
        armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
        auto weightsLayer = m_Network->AddConstantLayer(weightsTensor);
        weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
        weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsTensor.GetInfo());
        ignoreSlots.emplace_back(1u);

        // Biases, when enabled, occupy input slot 2.
        if (fullyConnectedDescriptor.m_BiasEnabled)
        {
            armnn::ConstTensor biasTensor = ToConstTensor(flatBufferLayer->biases());
            auto biasLayer = m_Network->AddConstantLayer(biasTensor);
            biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
            biasLayer->GetOutputSlot(0).SetTensorInfo(biasTensor.GetInfo());
            ignoreSlots.emplace_back(2u);
        }
    }
    else
    {
        // Newer format: weights/biases arrive as regular inputs, so the
        // descriptor dictates the expected input count.
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  layerName.c_str());
        uint32_t numInputs = fullyConnectedDescriptor.GetNumInputs();
        CHECK_VALID_SIZE(inputs.size(), numInputs);
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2260
Finn Williams85d36712021-01-26 22:30:06 +00002261void IDeserializer::DeserializerImpl::ParsePad(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002262{
2263 CHECK_LAYERS(graph, 0, layerIndex);
2264
Finn Williams85d36712021-01-26 22:30:06 +00002265 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002266 CHECK_VALID_SIZE(inputs.size(), 1);
2267
Finn Williams85d36712021-01-26 22:30:06 +00002268 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002269 CHECK_VALID_SIZE(outputs.size(), 1);
2270
2271 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
2272 auto flatBufferPadList = flatBufferDescriptor->padList();
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +01002273 auto paddingMode = flatBufferDescriptor->paddingMode();
David Monahan34757812019-06-19 11:47:21 +01002274 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002275
Mike Kelly51b8c312022-05-24 11:34:02 +01002276 if (flatBufferPadList->size() % 2 != 0)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002277 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002278 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2279 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002280 }
2281
2282 std::vector<std::pair<unsigned int, unsigned int>> padList;
Mike Kelly51b8c312022-05-24 11:34:02 +01002283 padList.reserve(flatBufferPadList->size() / 2);
2284 for (unsigned int i = 0; i < flatBufferPadList->size() - 1; i += 2)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002285 {
2286 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2287 }
2288
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +01002289 armnn::PadDescriptor descriptor(padList, padValue, ToPaddingMode(paddingMode));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002290
2291 auto layerName = GetLayerName(graph, layerIndex);
2292 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
2293
2294 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2295 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2296
2297 RegisterInputSlots(graph, layerIndex, layer);
2298 RegisterOutputSlots(graph, layerIndex, layer);
2299}
2300
Finn Williams85d36712021-01-26 22:30:06 +00002301void IDeserializer::DeserializerImpl::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002302{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002303 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002304
2305 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00002306 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002307
Derek Lamberti8ddae332019-02-21 16:29:43 +00002308 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002309 CHECK_VALID_SIZE(inputs.size(), 1);
2310
Derek Lamberti8ddae332019-02-21 16:29:43 +00002311 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002312 CHECK_VALID_SIZE(outputs.size(), 1);
2313 auto outputInfo = ToTensorInfo(outputs[0]);
2314
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002315 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly51b8c312022-05-24 11:34:02 +01002316 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->size()));
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002317
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002318 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002319 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2320
Derek Lamberti8ddae332019-02-21 16:29:43 +00002321 RegisterInputSlots(graph, layerIndex, layer);
2322 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002323}
2324
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002325armnn::Pooling2dDescriptor IDeserializer::DeserializerImpl::GetPooling2dDescriptor(Pooling2dDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002326 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002327{
Jan Eilers8eb25602020-03-09 12:13:48 +00002328 IgnoreUnused(layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002329 armnn::Pooling2dDescriptor desc;
2330
2331 switch (pooling2dDesc->poolType())
2332 {
2333 case PoolingAlgorithm_Average:
2334 {
2335 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002336 break;
2337 }
2338 case PoolingAlgorithm_Max:
2339 {
2340 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002341 break;
2342 }
Teresa Charlin4c600de2021-03-11 21:59:43 +00002343 case PoolingAlgorithm_L2:
2344 {
2345 desc.m_PoolType = armnn::PoolingAlgorithm::L2;
2346 break;
2347 }
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002348 default:
2349 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002350 ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002351 }
2352 }
2353
2354 switch (pooling2dDesc->outputShapeRounding())
2355 {
2356 case OutputShapeRounding_Floor:
2357 {
2358 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2359 break;
2360 }
2361 case OutputShapeRounding_Ceiling:
2362 {
2363 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
2364 break;
2365 }
2366 default:
2367 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002368 ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002369 }
2370 }
2371
2372 switch (pooling2dDesc->paddingMethod())
2373 {
2374 case PaddingMethod_Exclude:
2375 {
2376 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2377 break;
2378 }
2379 case PaddingMethod_IgnoreValue:
2380 {
2381 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
2382 break;
2383 }
2384 default:
2385 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002386 ARMNN_ASSERT_MSG(false, "Unsupported padding method");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002387 }
2388 }
2389
2390 switch (pooling2dDesc->dataLayout())
2391 {
2392 case DataLayout_NCHW:
2393 {
2394 desc.m_DataLayout = armnn::DataLayout::NCHW;
2395 break;
2396 }
2397 case DataLayout_NHWC:
2398 {
2399 desc.m_DataLayout = armnn::DataLayout::NHWC;
2400 break;
2401 }
2402 default:
2403 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002404 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002405 }
2406 }
2407
2408 desc.m_PadRight = pooling2dDesc->padRight();
2409 desc.m_PadLeft = pooling2dDesc->padLeft();
2410 desc.m_PadBottom = pooling2dDesc->padBottom();
2411 desc.m_PadTop = pooling2dDesc->padTop();
2412 desc.m_StrideX = pooling2dDesc->strideX();
2413 desc.m_StrideY = pooling2dDesc->strideY();
2414 desc.m_PoolWidth = pooling2dDesc->poolWidth();
2415 desc.m_PoolHeight = pooling2dDesc->poolHeight();
2416
2417 return desc;
2418}
2419
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002420armnn::Pooling3dDescriptor IDeserializer::DeserializerImpl::GetPooling3dDescriptor(Pooling3dDescriptor pooling3dDesc,
2421 unsigned int layerIndex)
2422{
2423 IgnoreUnused(layerIndex);
2424 armnn::Pooling3dDescriptor desc;
Finn Williams85d36712021-01-26 22:30:06 +00002425
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002426 switch (pooling3dDesc->poolType())
2427 {
2428 case PoolingAlgorithm_Average:
2429 {
2430 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
2431 break;
2432 }
2433 case PoolingAlgorithm_Max:
2434 {
2435 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
2436 break;
2437 }
2438 case PoolingAlgorithm_L2:
2439 {
2440 desc.m_PoolType = armnn::PoolingAlgorithm::L2;
2441 break;
2442 }
2443 default:
2444 {
2445 ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
2446 }
2447 }
2448
2449 switch (pooling3dDesc->outputShapeRounding())
2450 {
2451 case OutputShapeRounding_Floor:
2452 {
2453 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2454 break;
2455 }
2456 case OutputShapeRounding_Ceiling:
2457 {
2458 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
2459 break;
2460 }
2461 default:
2462 {
2463 ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
2464 }
2465 }
2466
2467 switch (pooling3dDesc->paddingMethod())
2468 {
2469 case PaddingMethod_Exclude:
2470 {
2471 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2472 break;
2473 }
2474 case PaddingMethod_IgnoreValue:
2475 {
2476 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
2477 break;
2478 }
2479 default:
2480 {
2481 ARMNN_ASSERT_MSG(false, "Unsupported padding method");
2482 }
2483 }
2484
2485 switch (pooling3dDesc->dataLayout())
2486 {
2487 case DataLayout_NCDHW:
2488 {
2489 desc.m_DataLayout = armnn::DataLayout::NCDHW;
2490 break;
2491 }
2492 case DataLayout_NDHWC:
2493 {
2494 desc.m_DataLayout = armnn::DataLayout::NDHWC;
2495 break;
2496 }
2497 default:
2498 {
2499 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
2500 }
2501 }
2502
2503 desc.m_PadRight = pooling3dDesc->padRight();
2504 desc.m_PadLeft = pooling3dDesc->padLeft();
2505 desc.m_PadBottom = pooling3dDesc->padBottom();
2506 desc.m_PadTop = pooling3dDesc->padTop();
2507 desc.m_PadFront = pooling3dDesc->padFront();
2508 desc.m_PadBack = pooling3dDesc->padBack();
2509 desc.m_StrideX = pooling3dDesc->strideX();
2510 desc.m_StrideY = pooling3dDesc->strideY();
2511 desc.m_StrideZ = pooling3dDesc->strideZ();
2512 desc.m_PoolWidth = pooling3dDesc->poolWidth();
2513 desc.m_PoolHeight = pooling3dDesc->poolHeight();
2514 desc.m_PoolDepth = pooling3dDesc->poolDepth();
2515
2516 return desc;
2517}
Finn Williams85d36712021-01-26 22:30:06 +00002518
2519void IDeserializer::DeserializerImpl::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002520{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002521 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002522
Derek Lamberti8ddae332019-02-21 16:29:43 +00002523 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00002524 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002525 CHECK_VALID_SIZE(inputs.size(), 1);
2526
Derek Lamberti8ddae332019-02-21 16:29:43 +00002527 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002528 CHECK_VALID_SIZE(outputs.size(), 1);
2529 auto outputInfo = ToTensorInfo(outputs[0]);
2530
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002531 auto pooling2dDescriptor = GetPooling2dDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002532 auto layerName = GetLayerName(graph, layerIndex);
2533 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002534 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2535
Derek Lamberti8ddae332019-02-21 16:29:43 +00002536 RegisterInputSlots(graph, layerIndex, layer);
2537 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002538}
2539
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002540void IDeserializer::DeserializerImpl::ParsePooling3d(GraphPtr graph, unsigned int layerIndex)
2541{
2542 CHECK_LAYERS(graph, 0, layerIndex);
2543
2544 auto pooling3dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->descriptor();
2545 auto inputs = GetInputs(graph, layerIndex);
2546 CHECK_VALID_SIZE(inputs.size(), 1);
2547
2548 auto outputs = GetOutputs(graph, layerIndex);
2549 CHECK_VALID_SIZE(outputs.size(), 1);
2550 auto outputInfo = ToTensorInfo(outputs[0]);
2551
2552 auto pooling3dDescriptor = GetPooling3dDescriptor(pooling3dDes, layerIndex);
2553 auto layerName = GetLayerName(graph, layerIndex);
2554 IConnectableLayer* layer = m_Network->AddPooling3dLayer(pooling3dDescriptor, layerName.c_str());
2555 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2556
2557 RegisterInputSlots(graph, layerIndex, layer);
2558 RegisterOutputSlots(graph, layerIndex, layer);
2559}
2560
Finn Williams85d36712021-01-26 22:30:06 +00002561void IDeserializer::DeserializerImpl::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
Derek Lamberti87acb272019-03-27 16:51:31 +00002562{
2563 CHECK_LAYERS(graph, 0, layerIndex);
2564
2565 auto inputs = GetInputs(graph, layerIndex);
2566 CHECK_VALID_SIZE(inputs.size(), 1);
2567
2568 auto outputs = GetOutputs(graph, layerIndex);
2569 CHECK_VALID_SIZE(outputs.size(), 1);
2570 auto outputInfo = ToTensorInfo(outputs[0]);
2571
2572 auto layerName = GetLayerName(graph, layerIndex);
2573 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
2574 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2575
2576 RegisterInputSlots(graph, layerIndex, layer);
2577 RegisterOutputSlots(graph, layerIndex, layer);
2578}
2579
/// Compute the output TensorInfo of a Reshape from the serialized target dimensions.
/// At most one component of @p targetDimsIn may be -1 (a "stretch" dimension); its extent
/// is inferred so the total element count matches @p inputTensorInfo.
/// @throws ParseException if more than one component is -1.
armnn::TensorInfo IDeserializer::DeserializerImpl::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                                        const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    // targetDimsIn holds uint32_t, so -1 is matched after implicit conversion
    // (i.e. the serialized sentinel value 0xFFFFFFFF marks the stretch dimension).
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        // Only a single stretch dimension is well-defined.
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(fmt::format("At most one component of shape can be -1 {}",
                                             CHECK_LOCATION().AsString()));
        }

        // Accumulate in int32 starting from -1: the initial -1 and the stretch
        // component's -1 cancel, leaving the product of the known dimensions.
        auto targetNumElements =
            armnn::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        // Replace the stretch dimension with the inferred extent.
        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    // Preserve data type and quantization parameters from the input; only the shape changes.
    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}
2609
Finn Williams85d36712021-01-26 22:30:06 +00002610void IDeserializer::DeserializerImpl::ParseRank(GraphPtr graph, unsigned int layerIndex)
Finn Williams2605b232020-06-10 15:53:46 +01002611{
2612 CHECK_LAYERS(graph, 0, layerIndex);
2613
Finn Williams85d36712021-01-26 22:30:06 +00002614 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002615 CHECK_VALID_SIZE(inputs.size(), 1);
2616
Finn Williams85d36712021-01-26 22:30:06 +00002617 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002618 CHECK_VALID_SIZE(outputs.size(), 1);
2619
2620 auto layerName = GetLayerName(graph, layerIndex);
2621 IConnectableLayer* layer = m_Network->AddRankLayer( layerName.c_str());
2622
2623 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2624 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2625
2626 RegisterInputSlots(graph, layerIndex, layer);
2627 RegisterOutputSlots(graph, layerIndex, layer);
2628}
2629
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002630void IDeserializer::DeserializerImpl::ParseReduce(GraphPtr graph, unsigned int layerIndex)
2631{
2632 CHECK_LAYERS(graph, 0, layerIndex);
2633 CHECK_LOCATION();
2634
2635 auto inputs = GetInputs(graph, layerIndex);
2636 CHECK_VALID_SIZE(inputs.size(), 1);
2637
2638 auto outputs = GetOutputs(graph, layerIndex);
2639 CHECK_VALID_SIZE(outputs.size(), 1);
2640
2641 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ReduceLayer();
2642 auto fbDescriptor = fbLayer->descriptor();
2643 auto flatBufferAxis = fbDescriptor->axis();
2644
2645 armnn::ReduceDescriptor descriptor;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002646 descriptor.m_KeepDims = fbDescriptor->keepDims();
2647 descriptor.m_vAxis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2648 descriptor.m_ReduceOperation = ToReduceOperation(fbDescriptor->reduceOperation());
2649
2650 const std::string& layerName = GetLayerName(graph, layerIndex);
2651 IConnectableLayer* layer = m_Network->AddReduceLayer(descriptor, layerName.c_str());
2652
2653 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2654 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2655
2656 RegisterInputSlots(graph, layerIndex, layer);
2657 RegisterOutputSlots(graph, layerIndex, layer);
2658}
2659
/// Deserialize a Reshape layer, resolving any -1 "stretch" component in the serialized
/// target shape and validating it against the serialized output tensor dimensions.
/// @throws ParseException if the resolved shape disagrees with the serialized output shape.
void IDeserializer::DeserializerImpl::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    // Resolve a possible -1 stretch component against the input's element count.
    armnn::TensorInfo reshapeOutputTensorInfo = DeserializerImpl::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    // NOTE(review): the shape consistency check only runs when more than one input is
    // present — presumably when the target shape is also supplied as a second (dynamic)
    // input tensor. TODO confirm against the serializer's Reshape layout.
    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    // Use the resolved shape (not the raw serialized one) for the output slot.
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2702
Finn Williams85d36712021-01-26 22:30:06 +00002703void IDeserializer::DeserializerImpl::ParseResize(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002704{
2705 CHECK_LAYERS(graph, 0, layerIndex);
2706
Finn Williams85d36712021-01-26 22:30:06 +00002707 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002708 CHECK_VALID_SIZE(inputs.size(), 1);
2709
Finn Williams85d36712021-01-26 22:30:06 +00002710 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002711 CHECK_VALID_SIZE(outputs.size(), 1);
2712
2713 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2714
2715 armnn::ResizeDescriptor descriptor;
2716 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
2717 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
2718 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
2719 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002720 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2721 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002722
2723 auto layerName = GetLayerName(graph, layerIndex);
2724 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2725
2726 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2727 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2728
2729 RegisterInputSlots(graph, layerIndex, layer);
2730 RegisterOutputSlots(graph, layerIndex, layer);
2731}
2732
Jan Eilers1b2654f2021-09-24 15:45:46 +01002733
/// @note The ResizeBilinear operation was deprecated and removed in favor of the Resize operation.
/// This function is kept for backwards compatibility.
Finn Williams85d36712021-01-26 22:30:06 +00002736void IDeserializer::DeserializerImpl::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002737{
2738 CHECK_LAYERS(graph, 0, layerIndex);
2739
Finn Williams85d36712021-01-26 22:30:06 +00002740 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002741 CHECK_VALID_SIZE(inputs.size(), 1);
2742
Finn Williams85d36712021-01-26 22:30:06 +00002743 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002744 CHECK_VALID_SIZE(outputs.size(), 1);
2745
2746 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2747
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002748 armnn::ResizeDescriptor descriptor;
2749 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002750 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002751 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
2752 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002753 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2754 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002755
2756 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002757 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002758
2759 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2760 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2761
2762 RegisterInputSlots(graph, layerIndex, layer);
2763 RegisterOutputSlots(graph, layerIndex, layer);
2764}
2765
Keith Davis3ae3f972021-05-21 16:33:48 +01002766void IDeserializer::DeserializerImpl::ParseShape(GraphPtr graph, unsigned int layerIndex)
2767{
2768 CHECK_LAYERS(graph, 0, layerIndex);
2769
2770 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2771 CHECK_VALID_SIZE(inputs.size(), 1);
2772
2773 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2774 CHECK_VALID_SIZE(outputs.size(), 1);
2775
2776 auto layerName = GetLayerName(graph, layerIndex);
2777 IConnectableLayer* layer = m_Network->AddShapeLayer( layerName.c_str());
2778
2779 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2780 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2781
2782 RegisterInputSlots(graph, layerIndex, layer);
2783 RegisterOutputSlots(graph, layerIndex, layer);
2784}
2785
Finn Williams85d36712021-01-26 22:30:06 +00002786void IDeserializer::DeserializerImpl::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002787{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002788 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002789
Finn Williams85d36712021-01-26 22:30:06 +00002790 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002791 CHECK_VALID_SIZE(inputs.size(), 1);
2792
Finn Williams85d36712021-01-26 22:30:06 +00002793 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002794 CHECK_VALID_SIZE(outputs.size(), 1);
2795
2796 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00002797 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Sadik Armaganfd0cae32021-11-08 17:18:31 +00002798 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->axis();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002799 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002800
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002801 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2802
2803 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2804 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2805
Derek Lamberti8ddae332019-02-21 16:29:43 +00002806 RegisterInputSlots(graph, layerIndex, layer);
2807 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00002808}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002809
Finn Williams85d36712021-01-26 22:30:06 +00002810void IDeserializer::DeserializerImpl::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002811{
2812 CHECK_LAYERS(graph, 0, layerIndex);
2813
Finn Williams85d36712021-01-26 22:30:06 +00002814 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002815 CHECK_VALID_SIZE(inputs.size(), 1);
2816
Finn Williams85d36712021-01-26 22:30:06 +00002817 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002818 CHECK_VALID_SIZE(outputs.size(), 1);
2819
2820 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2821 auto flatBufferPadList = flatBufferDescriptor->padList();
2822 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
2823
Mike Kelly51b8c312022-05-24 11:34:02 +01002824 if (flatBufferPadList->size() % 2 != 0)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002825 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002826 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2827 CHECK_LOCATION().AsString()));
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002828 }
2829
2830 std::vector<std::pair<unsigned int, unsigned int>> padList;
Mike Kelly51b8c312022-05-24 11:34:02 +01002831 padList.reserve(flatBufferPadList->size() / 2);
2832 for (unsigned int i = 0; i < flatBufferPadList->size() - 1; i += 2)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002833 {
2834 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2835 }
2836
2837 armnn::SpaceToBatchNdDescriptor descriptor;
2838 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2839 descriptor.m_BlockShape =
2840 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2841 descriptor.m_PadList = padList;
2842
2843 auto layerName = GetLayerName(graph, layerIndex);
2844 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2845
2846 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2847 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2848
2849 RegisterInputSlots(graph, layerIndex, layer);
2850 RegisterOutputSlots(graph, layerIndex, layer);
2851}
2852
Finn Williams85d36712021-01-26 22:30:06 +00002853void IDeserializer::DeserializerImpl::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002854{
2855 CHECK_LAYERS(graph, 0, layerIndex);
2856
Finn Williams85d36712021-01-26 22:30:06 +00002857 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002858 CHECK_VALID_SIZE(inputs.size(), 1);
2859
Finn Williams85d36712021-01-26 22:30:06 +00002860 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002861 CHECK_VALID_SIZE(outputs.size(), 1);
2862
2863 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2864
2865 armnn::SpaceToDepthDescriptor descriptor;
2866 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2867 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2868
2869 auto layerName = GetLayerName(graph, layerIndex);
2870 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2871
2872 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2873 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2874
2875 RegisterInputSlots(graph, layerIndex, layer);
2876 RegisterOutputSlots(graph, layerIndex, layer);
2877}
2878
Finn Williams85d36712021-01-26 22:30:06 +00002879armnn::NormalizationDescriptor IDeserializer::DeserializerImpl::GetNormalizationDescriptor(
2880 NormalizationDescriptorPtr normalizationDescriptor,
Nina Drozd57728782019-02-27 10:53:27 +00002881 unsigned int layerIndex)
2882{
Jan Eilers8eb25602020-03-09 12:13:48 +00002883 IgnoreUnused(layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002884 armnn::NormalizationDescriptor desc;
2885
2886 switch (normalizationDescriptor->normChannelType())
2887 {
2888 case NormalizationAlgorithmChannel_Across:
2889 {
2890 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
2891 break;
2892 }
2893 case NormalizationAlgorithmChannel_Within:
2894 {
2895 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
2896 break;
2897 }
2898 default:
2899 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002900 ARMNN_ASSERT_MSG(false, "Unsupported normalization channel type");
Nina Drozd57728782019-02-27 10:53:27 +00002901 }
2902 }
2903
2904 switch (normalizationDescriptor->normMethodType())
2905 {
2906 case NormalizationAlgorithmMethod_LocalBrightness:
2907 {
2908 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
2909 break;
2910 }
2911 case NormalizationAlgorithmMethod_LocalContrast:
2912 {
2913 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
2914 break;
2915 }
2916 default:
2917 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002918 ARMNN_ASSERT_MSG(false, "Unsupported normalization method type");
Nina Drozd57728782019-02-27 10:53:27 +00002919 }
2920 }
2921
2922 switch (normalizationDescriptor->dataLayout())
2923 {
2924 case DataLayout_NCHW:
2925 {
2926 desc.m_DataLayout = armnn::DataLayout::NCHW;
2927 break;
2928 }
2929 case DataLayout_NHWC:
2930 {
2931 desc.m_DataLayout = armnn::DataLayout::NHWC;
2932 break;
2933 }
2934 default:
2935 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002936 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Nina Drozd57728782019-02-27 10:53:27 +00002937 }
2938 }
2939
2940 desc.m_Alpha = normalizationDescriptor->alpha();
2941 desc.m_Beta = normalizationDescriptor->beta();
2942 desc.m_K = normalizationDescriptor->k();
2943 desc.m_NormSize = normalizationDescriptor->normSize();
2944
2945 return desc;
2946}
2947
Finn Williams85d36712021-01-26 22:30:06 +00002948void IDeserializer::DeserializerImpl::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
Nina Drozd57728782019-02-27 10:53:27 +00002949{
2950 CHECK_LAYERS(graph, 0, layerIndex);
2951
2952 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2953
Finn Williams85d36712021-01-26 22:30:06 +00002954 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002955 CHECK_VALID_SIZE(inputs.size(), 1);
2956
Finn Williams85d36712021-01-26 22:30:06 +00002957 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002958 CHECK_VALID_SIZE(outputs.size(), 1);
2959
2960 auto outputInfo = ToTensorInfo(outputs[0]);
2961
2962 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2963 auto layerName = GetLayerName(graph, layerIndex);
2964
2965 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2966 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2967
2968 RegisterInputSlots(graph, layerIndex, layer);
2969 RegisterOutputSlots(graph, layerIndex, layer);
2970}
2971
Finn Williams85d36712021-01-26 22:30:06 +00002972void IDeserializer::DeserializerImpl::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan8b42a382019-03-01 14:24:49 +00002973{
2974 CHECK_LAYERS(graph, 0, layerIndex);
2975 auto inputs = GetInputs(graph, layerIndex);
2976 CHECK_LOCATION();
2977 CHECK_VALID_SIZE(inputs.size(), 1);
2978
2979 auto outputs = GetOutputs(graph, layerIndex);
2980 CHECK_VALID_SIZE(outputs.size(), 1);
2981
2982 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagan8b42a382019-03-01 14:24:49 +00002983
josh minor4a3c6102020-01-06 16:40:46 -06002984 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
2985 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
Sadik Armagan8b42a382019-03-01 14:24:49 +00002986 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2987 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2988
2989 RegisterInputSlots(graph, layerIndex, layer);
2990 RegisterOutputSlots(graph, layerIndex, layer);
2991}
2992
Finn Williams85d36712021-01-26 22:30:06 +00002993void IDeserializer::DeserializerImpl::ParseSlice(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002994{
2995 CHECK_LAYERS(graph, 0, layerIndex);
2996
2997 auto inputs = GetInputs(graph, layerIndex);
2998 CHECK_VALID_SIZE(inputs.size(), 1);
2999
3000 auto outputs = GetOutputs(graph, layerIndex);
3001 CHECK_VALID_SIZE(outputs.size(), 1);
3002
3003 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
3004
3005 auto fbBegin = fbDescriptor->begin();
3006 auto fbSize = fbDescriptor->size();
3007
Mike Kelly51b8c312022-05-24 11:34:02 +01003008 if (fbBegin->size() != fbSize->size())
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01003009 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01003010 throw ParseException(fmt::format("Begin and size descriptors must have the same length {}",
3011 CHECK_LOCATION().AsString()));
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01003012 }
3013
3014 armnn::SliceDescriptor descriptor;
3015 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
3016 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
3017
3018 auto layerName = GetLayerName(graph, layerIndex);
3019 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
3020
3021 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3022 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3023
3024 RegisterInputSlots(graph, layerIndex, layer);
3025 RegisterOutputSlots(graph, layerIndex, layer);
3026}
3027
Finn Williams85d36712021-01-26 22:30:06 +00003028void IDeserializer::DeserializerImpl::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003029{
3030 CHECK_LAYERS(graph, 0, layerIndex);
3031
Finn Williams85d36712021-01-26 22:30:06 +00003032 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003033 CHECK_VALID_SIZE(inputs.size(), 1);
3034
Finn Williams85d36712021-01-26 22:30:06 +00003035 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003036 CHECK_VALID_SIZE(outputs.size(), 1);
3037
3038 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
3039
3040 auto flatBufferBegin = flatBufferDescriptor->begin();
3041 auto flatBufferEnd = flatBufferDescriptor->end();
3042 auto flatBufferStride = flatBufferDescriptor->stride();
3043
Mike Kelly51b8c312022-05-24 11:34:02 +01003044 if (!(flatBufferBegin->size() == flatBufferEnd->size() &&
3045 flatBufferBegin->size() == flatBufferStride->size()))
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003046 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01003047 throw ParseException(fmt::format("The size of the begin, end, and stride must be equal {}",
3048 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003049 }
3050
3051 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
3052 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
3053 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
3054
3055 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
3056 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
3057 descriptor.m_EndMask = flatBufferDescriptor->endMask();
3058 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
3059 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
3060 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
3061 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
3062
3063 auto layerName = GetLayerName(graph, layerIndex);
3064 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
3065
3066 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3067 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3068
3069 RegisterInputSlots(graph, layerIndex, layer);
3070 RegisterOutputSlots(graph, layerIndex, layer);
3071}
3072
Finn Williams85d36712021-01-26 22:30:06 +00003073void IDeserializer::DeserializerImpl::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
Conor Kennedyda1f9752019-03-01 14:37:12 +00003074{
3075 CHECK_LAYERS(graph, 0, layerIndex);
3076 auto inputs = GetInputs(graph, layerIndex);
3077 CHECK_LOCATION();
3078 CHECK_VALID_SIZE(inputs.size(), 2);
3079
3080 auto outputs = GetOutputs(graph, layerIndex);
3081 CHECK_VALID_SIZE(outputs.size(), 1);
3082
3083 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00003084 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Sub);
3085 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Conor Kennedyda1f9752019-03-01 14:37:12 +00003086
3087 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3088 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3089
3090 RegisterInputSlots(graph, layerIndex, layer);
3091 RegisterOutputSlots(graph, layerIndex, layer);
3092}
3093
Finn Williams85d36712021-01-26 22:30:06 +00003094void IDeserializer::DeserializerImpl::ParseGather(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003095{
3096 CHECK_LAYERS(graph, 0, layerIndex);
3097
Finn Williams85d36712021-01-26 22:30:06 +00003098 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003099 CHECK_VALID_SIZE(inputs.size(), 2);
3100
Finn Williams85d36712021-01-26 22:30:06 +00003101 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003102 CHECK_VALID_SIZE(outputs.size(), 1);
3103
Teresa Charlin52664732020-06-29 16:27:03 +01003104 armnn::GatherDescriptor descriptor;
3105 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_GatherLayer()->descriptor()->axis();
3106
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003107 auto layerName = GetLayerName(graph, layerIndex);
Teresa Charlin52664732020-06-29 16:27:03 +01003108 IConnectableLayer* layer = m_Network->AddGatherLayer(descriptor, layerName.c_str());
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003109
3110 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003111 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3112
3113 RegisterInputSlots(graph, layerIndex, layer);
3114 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003115}
3116
Teresa Charlin6966bfa2022-04-25 17:14:50 +01003117void IDeserializer::DeserializerImpl::ParseGatherNd(GraphPtr graph, unsigned int layerIndex)
3118{
3119 CHECK_LAYERS(graph, 0, layerIndex);
3120
3121 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
3122 CHECK_VALID_SIZE(inputs.size(), 2);
3123
3124 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
3125 CHECK_VALID_SIZE(outputs.size(), 1);
3126
3127 auto layerName = GetLayerName(graph, layerIndex);
3128 IConnectableLayer* layer = m_Network->AddGatherNdLayer(layerName.c_str());
3129
3130 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3131 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3132
3133 RegisterInputSlots(graph, layerIndex, layer);
3134 RegisterOutputSlots(graph, layerIndex, layer);
3135}
3136
Finn Williams85d36712021-01-26 22:30:06 +00003137void IDeserializer::DeserializerImpl::ParseMean(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003138{
3139 CHECK_LAYERS(graph, 0, layerIndex);
3140
Finn Williams85d36712021-01-26 22:30:06 +00003141 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003142 CHECK_VALID_SIZE(inputs.size(), 1);
3143
Finn Williams85d36712021-01-26 22:30:06 +00003144 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003145 CHECK_VALID_SIZE(outputs.size(), 1);
3146
3147 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
3148 auto flatBufferAxis = flatBufferDescriptor->axis();
3149 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
3150
3151 armnn::MeanDescriptor descriptor;
3152 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
3153 descriptor.m_KeepDims = flatBufferKeepDims;
3154
3155 auto layerName = GetLayerName(graph, layerIndex);
3156 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
3157
3158 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3159 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3160
3161 RegisterInputSlots(graph, layerIndex, layer);
3162 RegisterOutputSlots(graph, layerIndex, layer);
3163}
3164
Finn Williams85d36712021-01-26 22:30:06 +00003165void IDeserializer::DeserializerImpl::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
Jim Flynn18ce3382019-03-08 11:08:30 +00003166{
3167 CHECK_LAYERS(graph, 0, layerIndex);
3168
Finn Williams85d36712021-01-26 22:30:06 +00003169 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00003170 CHECK_VALID_SIZE(inputs.size(), 1);
3171
Finn Williams85d36712021-01-26 22:30:06 +00003172 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00003173
3174 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
3175 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
3176 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
3177 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
3178 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
3179 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
3180
3181 // Check numViews and numDimensions corresponds to the ones already serialized ...
3182 // numViews == flatBufferViewSizes.size();
3183 // foreach: numDimensions == flatBufferViewSizes[x].size();
3184
3185 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
3186 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
3187 {
3188 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
3189 {
3190 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
3191 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
3192 }
3193 }
3194
3195 auto layerName = GetLayerName(graph, layerIndex);
3196 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
3197
3198 // I could have as many outputs as views ...
3199 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
3200 {
3201 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
3202 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
3203 }
3204
3205 RegisterInputSlots(graph, layerIndex, layer);
3206 RegisterOutputSlots(graph, layerIndex, layer);
3207}
3208
Finn Williams85d36712021-01-26 22:30:06 +00003209armnn::LstmDescriptor IDeserializer::DeserializerImpl::GetLstmDescriptor(LstmDescriptorPtr lstmDescriptor)
Jim Flynn11af3752019-03-19 17:22:29 +00003210{
3211 armnn::LstmDescriptor desc;
3212
3213 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
3214 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
3215 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
3216 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
3217 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
3218 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01003219 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00003220
3221 return desc;
3222}
3223
// Deserializes an LSTM layer: rebuilds the LstmDescriptor via GetLstmDescriptor,
// reads the mandatory weight/bias tensors plus any optional groups enabled by the
// descriptor flags (CIFG, projection, peephole, layer norm), and adds the layer
// (3 inputs, 4 outputs) to the network.
void IDeserializer::DeserializerImpl::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    // NOTE: LstmInputParams holds raw pointers. All ConstTensors below are locals,
    // so every one of them must stay alive until AddLstmLayer() is called.
    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters - always present in the serialized layer.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate tensors are only serialized when CIFG is disabled.
    // These locals are declared at function scope so they outlive AddLstmLayer().
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Optional projection tensors.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Optional peephole tensors.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Optional layer-normalization tensors; the input-gate norm weights are
    // additionally gated on CIFG being disabled.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (lstmDescriptor.m_LayerNormEnabled)
    {
        if (!lstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    // Four output slots, each with its own serialized tensor info.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3339
Finn Williams85d36712021-01-26 22:30:06 +00003340armnn::QLstmDescriptor IDeserializer::DeserializerImpl::GetQLstmDescriptor(QLstmDescriptorPtr qLstmDescriptor)
James Conroy8d333182020-05-13 10:27:58 +01003341{
3342 armnn::QLstmDescriptor desc;
3343
3344 desc.m_CifgEnabled = qLstmDescriptor->cifgEnabled();
3345 desc.m_PeepholeEnabled = qLstmDescriptor->peepholeEnabled();
3346 desc.m_ProjectionEnabled = qLstmDescriptor->projectionEnabled();
3347 desc.m_LayerNormEnabled = qLstmDescriptor->layerNormEnabled();
3348
3349 desc.m_CellClip = qLstmDescriptor->cellClip();
3350 desc.m_ProjectionClip = qLstmDescriptor->projectionClip();
3351
3352 desc.m_InputIntermediateScale = qLstmDescriptor->inputIntermediateScale();
3353 desc.m_ForgetIntermediateScale = qLstmDescriptor->forgetIntermediateScale();
3354 desc.m_CellIntermediateScale = qLstmDescriptor->cellIntermediateScale();
3355 desc.m_OutputIntermediateScale = qLstmDescriptor->outputIntermediateScale();
3356
3357 desc.m_HiddenStateScale = qLstmDescriptor->hiddenStateScale();
3358 desc.m_HiddenStateZeroPoint = qLstmDescriptor->hiddenStateZeroPoint();
3359
3360 return desc;
3361}
3362
// Deserializes a QLSTM (quantized LSTM) layer: rebuilds the QLstmDescriptor via
// GetQLstmDescriptor, reads the mandatory tensors plus the optional groups enabled
// by the descriptor flags (CIFG, projection, peephole, layer norm), and adds the
// layer (3 inputs, 3 outputs) to the network.
void IDeserializer::DeserializerImpl::ParseQLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 3);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto qLstmDescriptor = GetQLstmDescriptor(flatBufferDescriptor);
    // NOTE: LstmInputParams holds raw pointers. All ConstTensors below are locals,
    // so every one of them must stay alive until AddQLstmLayer() is called.
    armnn::LstmInputParams qLstmInputParams;

    // Mandatory params
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    qLstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    qLstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    qLstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    qLstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    qLstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    qLstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    qLstmInputParams.m_ForgetGateBias = &forgetGateBias;
    qLstmInputParams.m_CellBias = &cellBias;
    qLstmInputParams.m_OutputGateBias = &outputGateBias;

    // Optional CIFG params - only serialized when CIFG is disabled.
    // Declared at function scope so they outlive AddQLstmLayer().
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor inputGateBias;

    if (!qLstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        qLstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        qLstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        qLstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Optional projection params
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;

    if (qLstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        qLstmInputParams.m_ProjectionWeights = &projectionWeights;
        qLstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Optional peephole params; the input-gate peephole weights are
    // additionally gated on CIFG being disabled.
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;

    if (qLstmDescriptor.m_PeepholeEnabled)
    {
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
            qLstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        }

        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        qLstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        qLstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Optional layer norm params; the input-gate norm weights are
    // additionally gated on CIFG being disabled.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;

    if (qLstmDescriptor.m_LayerNormEnabled)
    {
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            qLstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }

        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        qLstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        qLstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        qLstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());

    // Three output slots: output state, cell state, and the layer output.
    armnn::TensorInfo outputStateOutInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputStateOutInfo);

    armnn::TensorInfo cellStateOutInfo = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(cellStateOutInfo);

    armnn::TensorInfo outputInfo = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3488
// Deserializes a QuantizedLstm layer. Unlike Lstm/QLstm there is no descriptor
// and no optional tensor groups: all twelve weight/bias tensors are mandatory.
// The layer has 3 inputs and 2 outputs.
void IDeserializer::DeserializerImpl::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    // NOTE: QuantizedLstmInputParams holds raw pointers. All ConstTensors below
    // are locals, so they must stay alive until AddQuantizedLstmLayer() is called.
    armnn::QuantizedLstmInputParams lstmInputParams;

    armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_InputGateBias = &inputGateBias;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());

    // Two output slots, each with its own serialized tensor info.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3542
Finn Williams85d36712021-01-26 22:30:06 +00003543void IDeserializer::DeserializerImpl::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003544{
3545 CHECK_LAYERS(graph, 0, layerIndex);
3546
Finn Williams85d36712021-01-26 22:30:06 +00003547 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003548 CHECK_VALID_SIZE(inputs.size(), 1);
3549
Finn Williams85d36712021-01-26 22:30:06 +00003550 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003551 CHECK_VALID_SIZE(outputs.size(), 1);
3552
3553 const std::string layerName = GetLayerName(graph, layerIndex);
3554 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
3555
3556 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3557 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3558
3559 RegisterInputSlots(graph, layerIndex, layer);
3560 RegisterOutputSlots(graph, layerIndex, layer);
3561}
3562
Finn Williams85d36712021-01-26 22:30:06 +00003563void IDeserializer::DeserializerImpl::ParseMerge(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003564{
3565 CHECK_LAYERS(graph, 0, layerIndex);
3566
Finn Williams85d36712021-01-26 22:30:06 +00003567 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003568 CHECK_VALID_SIZE(inputs.size(), 2);
3569
Finn Williams85d36712021-01-26 22:30:06 +00003570 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003571 CHECK_VALID_SIZE(outputs.size(), 1);
3572
3573 const std::string layerName = GetLayerName(graph, layerIndex);
3574 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
3575
3576 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3577 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3578
3579 RegisterInputSlots(graph, layerIndex, layer);
3580 RegisterOutputSlots(graph, layerIndex, layer);
3581}
3582
Finn Williams85d36712021-01-26 22:30:06 +00003583void IDeserializer::DeserializerImpl::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganeff363d2019-04-05 15:25:46 +01003584{
3585 CHECK_LAYERS(graph, 0, layerIndex);
3586 auto inputs = GetInputs(graph, layerIndex);
3587 CHECK_LOCATION();
3588 CHECK_VALID_SIZE(inputs.size(), 2);
3589
3590 auto outputs = GetOutputs(graph, layerIndex);
3591 CHECK_VALID_SIZE(outputs.size(), 2);
3592
3593 auto layerName = GetLayerName(graph, layerIndex);
3594 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
3595
3596 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
3597 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
3598
3599 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
3600 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
3601
3602 RegisterInputSlots(graph, layerIndex, layer);
3603 RegisterOutputSlots(graph, layerIndex, layer);
3604}
3605
Finn Williams85d36712021-01-26 22:30:06 +00003606void IDeserializer::DeserializerImpl::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01003607{
3608 CHECK_LAYERS(graph, 0, layerIndex);
3609 auto inputs = GetInputs(graph, layerIndex);
3610 CHECK_LOCATION();
3611 CHECK_VALID_SIZE(inputs.size(), 2);
3612
3613 auto outputs = GetOutputs(graph, layerIndex);
3614 CHECK_VALID_SIZE(outputs.size(), 1);
3615
3616 auto layerName = GetLayerName(graph, layerIndex);
3617 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
3618
3619 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3620 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3621
3622 RegisterInputSlots(graph, layerIndex, layer);
3623 RegisterOutputSlots(graph, layerIndex, layer);
3624}
3625
Finn Williams85d36712021-01-26 22:30:06 +00003626void IDeserializer::DeserializerImpl::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
Mike Kellyc9ea45a2020-02-28 18:11:58 +00003627{
3628 CHECK_LAYERS(graph, 0, layerIndex);
3629
3630 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
3631
3632 auto inputs = GetInputs(graph, layerIndex);
3633 CHECK_VALID_SIZE(inputs.size(), 1);
3634
3635 auto outputs = GetOutputs(graph, layerIndex);
3636 CHECK_VALID_SIZE(outputs.size(), 1);
3637 auto outputInfo = ToTensorInfo(outputs[0]);
3638
3639 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly51b8c312022-05-24 11:34:02 +01003640 const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->size()));
Mike Kellyc9ea45a2020-02-28 18:11:58 +00003641
3642 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
3643 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
3644
3645 RegisterInputSlots(graph, layerIndex, layer);
3646 RegisterOutputSlots(graph, layerIndex, layer);
3647}
3648
Finn Williams85d36712021-01-26 22:30:06 +00003649void IDeserializer::DeserializerImpl::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01003650{
3651 CHECK_LAYERS(graph, 0, layerIndex);
3652
3653 auto inputs = GetInputs(graph, layerIndex);
3654 CHECK_VALID_SIZE(inputs.size(), 1);
3655
3656 auto outputs = GetOutputs(graph, layerIndex);
3657 CHECK_VALID_SIZE(outputs.size(), 1);
3658
3659 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
3660 auto layerName = GetLayerName(graph, layerIndex);
3661 auto serializerDescriptor = serializerLayer->descriptor();
3662
3663 armnn::TransposeConvolution2dDescriptor descriptor;
3664 descriptor.m_PadLeft = serializerDescriptor->padLeft();
3665 descriptor.m_PadRight = serializerDescriptor->padRight();
3666 descriptor.m_PadTop = serializerDescriptor->padTop();
3667 descriptor.m_PadBottom = serializerDescriptor->padBottom();
3668 descriptor.m_StrideX = serializerDescriptor->strideX();
3669 descriptor.m_StrideY = serializerDescriptor->strideY();;
3670 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
3671 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
3672
3673 // weights & biases
3674 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
3675 armnn::Optional<armnn::ConstTensor> optionalBiases;
3676 if (descriptor.m_BiasEnabled)
3677 {
3678 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
3679 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
3680 }
3681
3682 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
3683 weights,
3684 optionalBiases,
3685 layerName.c_str());
3686
3687 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3688 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3689
3690 RegisterInputSlots(graph, layerIndex, layer);
3691 RegisterOutputSlots(graph, layerIndex, layer);
3692}
3693
Finn Williams85d36712021-01-26 22:30:06 +00003694void IDeserializer::DeserializerImpl::ParseStack(GraphPtr graph, unsigned int layerIndex)
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003695{
3696 CHECK_LAYERS(graph, 0, layerIndex);
3697 auto inputs = GetInputs(graph, layerIndex);
3698
3699 auto outputs = GetOutputs(graph, layerIndex);
3700 CHECK_VALID_SIZE(outputs.size(), 1);
3701
3702 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
3703 unsigned int axis = flatBufferDescriptor->axis();
3704 unsigned int numInputs = flatBufferDescriptor->numInputs();
3705 CHECK_VALID_SIZE(inputs.size(), numInputs);
3706
3707 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
3708 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
3709 flatBufferInputShape->begin() + flatBufferInputShape->size());
3710
3711 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
3712 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
3713
3714 for (unsigned int i=0; i<inputs.size(); ++i)
3715 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01003716 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003717 if (descriptor.m_InputShape != inputShape)
3718 {
3719 std::stringstream ss;
3720 ss << "Shape of input "
3721 << i
3722 << " "
3723 << inputShape
3724 << " does not equal defined input shape "
3725 << descriptor.m_InputShape
3726 << ": "
3727 << CHECK_LOCATION().AsString();
3728 throw ParseException(ss.str());
3729 }
3730 }
3731
3732 auto layerName = GetLayerName(graph, layerIndex);
3733 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
3734
3735 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3736 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3737
3738 RegisterInputSlots(graph, layerIndex, layer);
3739 RegisterOutputSlots(graph, layerIndex, layer);
3740}
3741
Finn Williams85d36712021-01-26 22:30:06 +00003742void IDeserializer::DeserializerImpl::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar85121a22019-10-23 10:41:35 +01003743{
3744 CHECK_LAYERS(graph, 0, layerIndex);
3745
3746 auto inputs = GetInputs(graph, layerIndex);
3747 auto outputs = GetOutputs(graph, layerIndex);
3748
3749 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
3750 auto fbDescriptor = fbLayer->descriptor();
3751
3752 armnn::StandInDescriptor descriptor;
3753 descriptor.m_NumInputs = fbDescriptor->numInputs();
3754 descriptor.m_NumOutputs = fbDescriptor->numOutputs();
3755
3756 CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
3757 CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);
3758
3759 const std::string layerName = GetLayerName(graph, layerIndex);
3760 armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());
3761
3762 for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
3763 {
3764 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
3765 layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
3766 }
3767
3768 RegisterInputSlots(graph, layerIndex, layer);
3769 RegisterOutputSlots(graph, layerIndex, layer);
3770}
3771
Narumol Prangnawarata0162e12021-07-23 14:47:49 +01003772armnn::UnidirectionalSequenceLstmDescriptor IDeserializer::DeserializerImpl::GetUnidirectionalSequenceLstmDescriptor(
3773 UnidirectionalSequenceLstmDescriptorPtr descriptor)
3774{
3775 armnn::UnidirectionalSequenceLstmDescriptor desc;
3776
3777 desc.m_ActivationFunc = descriptor->activationFunc();
3778 desc.m_ClippingThresCell = descriptor->clippingThresCell();
3779 desc.m_ClippingThresProj = descriptor->clippingThresProj();
3780 desc.m_CifgEnabled = descriptor->cifgEnabled();
3781 desc.m_PeepholeEnabled = descriptor->peepholeEnabled();
3782 desc.m_ProjectionEnabled = descriptor->projectionEnabled();
3783 desc.m_LayerNormEnabled = descriptor->layerNormEnabled();
3784 desc.m_TimeMajor = descriptor->timeMajor();
3785
3786 return desc;
3787}
3788
// Deserializes a UnidirectionalSequenceLstmLayer: rebuilds the descriptor,
// gathers the mandatory weight/bias tensors plus whichever optional tensors
// the descriptor's feature flags require, adds the layer to the network and
// sets the three output tensor infos.
//
// NOTE: LstmInputParams stores raw pointers into the local ConstTensors below;
// they are all declared at function scope (not inside the conditional blocks)
// so the pointers stay valid for the AddUnidirectionalSequenceLstmLayer call.
void IDeserializer::DeserializerImpl::ParseUnidirectionalSequenceLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    // Exactly 3 inputs and 3 outputs are serialized for this layer
    // (presumably data + the two LSTM state tensors, and output + updated
    // states — confirm against the serializer side).
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 3);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto descriptor = GetUnidirectionalSequenceLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters: forget/cell/output gate weights and biases are
    // always present regardless of the feature flags.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate parameters are only serialized when CIFG (coupled
    // input-forget gate) is disabled.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!descriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;

        // Cell-to-input peephole weights exist only with both CIFG disabled
        // and peephole enabled.
        if (descriptor.m_PeepholeEnabled)
        {
            cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
            lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        }
    }

    // Projection layer weights (bias included) when projection is enabled.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (descriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Peephole connections for the forget and output gates.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (descriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Layer-normalization weights; the input-gate norm weights are skipped
    // when CIFG is enabled (no input gate in that configuration).
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (descriptor.m_LayerNormEnabled)
    {
        if (!descriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddUnidirectionalSequenceLstmLayer(descriptor,
                                                                             lstmInputParams,
                                                                             layerName.c_str());

    // Each of the three outputs carries its own serialized tensor info.
    armnn::TensorInfo outputTensorInfo0 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo0);

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3907
Derek Lamberti0028d1b2019-02-20 13:57:42 +00003908} // namespace armnnDeserializer