blob: f455b1af0a028695d23d9c245607c6294dfeeff2 [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
Mike Kelly3ec30772023-03-08 13:47:17 +00002// Copyright © 2017,2019-2023 Arm Ltd and Contributors. All rights reserved.
Kevin May43a799c2019-02-08 16:31:42 +00003// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
Matthew Benthamff130e22020-01-17 11:47:42 +00008#include <armnn/Descriptors.hpp>
Kevin May43a799c2019-02-08 16:31:42 +00009#include <armnn/Exceptions.hpp>
Matthew Benthamff130e22020-01-17 11:47:42 +000010#include <armnn/TypesUtils.hpp>
11#include <armnn/LstmParams.hpp>
12#include <armnn/QuantizedLstmParams.hpp>
Simon Obutedb5804e2022-04-14 15:49:52 +010013#include <armnn/Logging.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000014
Matteo Martincighe011d202019-11-28 11:35:47 +000015#include <armnnUtils/Permute.hpp>
Mike Kellyc9ea45a2020-02-28 18:11:58 +000016#include <armnnUtils/Transpose.hpp>
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +010017#include <armnn/utility/Assert.hpp>
Jan Eilers8eb25602020-03-09 12:13:48 +000018#include <armnn/utility/IgnoreUnused.hpp>
Matthew Sloyan0663d662020-09-14 11:47:26 +010019#include <armnn/utility/NumericCast.hpp>
Matteo Martincighe011d202019-11-28 11:35:47 +000020
Kevin May43a799c2019-02-08 16:31:42 +000021#include <ParserHelper.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000022#include <VerificationHelpers.hpp>
23
Colm Donelan5b5c2222020-09-09 12:48:16 +010024#include <fmt/format.h>
Kevin May43a799c2019-02-08 16:31:42 +000025
Kevin May43a799c2019-02-08 16:31:42 +000026#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000027#include <algorithm>
28#include <limits>
29#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000030
31using armnn::ParseException;
32using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000033using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000034
Derek Lamberti0028d1b2019-02-20 13:57:42 +000035namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000036{
Kevin May43a799c2019-02-08 16:31:42 +000037
// IDeserializer is a thin pimpl facade: every public entry point forwards to
// the DeserializerImpl instance owned by pDeserializerImpl, keeping the
// implementation details out of the public header.

IDeserializer::IDeserializer() : pDeserializerImpl(new DeserializerImpl()){}

IDeserializer::~IDeserializer() = default;

// Raw-pointer factory; the caller owns the result and must release it via
// Destroy() (or use Create(), which wraps it in a smart pointer).
IDeserializer *IDeserializer::CreateRaw()
{
    return new IDeserializer();
}

// Preferred factory: returns a unique-ptr-style handle whose custom deleter is
// Destroy(), so the parser is freed automatically.
IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

// Counterpart of CreateRaw(); used as the deleter for IDeserializerPtr.
void IDeserializer::Destroy(IDeserializer *parser)
{
    delete parser;
}

// Deserialize a network from an in-memory flatbuffer blob.
armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(const std::vector<uint8_t> &binaryContent)
{
    return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
}

// Deserialize a network from a stream (e.g. an open file).
armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(std::istream &binaryContent)
{
    return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
}

// Look up the binding info (binding id + tensor info) of a named input layer.
BindingPointInfo IDeserializer::GetNetworkInputBindingInfo(unsigned int layerId, const std::string &name) const
{
    return pDeserializerImpl->GetNetworkInputBindingInfo(layerId, name);
}

// Look up the binding info (binding id + tensor info) of a named output layer.
BindingPointInfo IDeserializer::GetNetworkOutputBindingInfo(unsigned int layerId, const std::string &name) const
{
    return pDeserializerImpl->GetNetworkOutputBindingInfo(layerId, name);
}
76
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000077namespace
78{
Kevin May43a799c2019-02-08 16:31:42 +000079
80const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
81
Finn Williams85d36712021-01-26 22:30:06 +000082 void CheckGraph(const GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000083 unsigned int layersIndex,
84 const CheckLocation& location)
85{
86 if (graph->layers() == nullptr)
87 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010088 throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
89 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
90 "layers:{1} at {2}",
91 location.m_Function,
92 layersIndex,
93 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +000094 }
95 else if (layersIndex >= graph->layers()->size())
96 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010097 throw ParseException(fmt::format("{0} was called with an invalid layers index. layers:{1} at {2}",
98 location.m_Function,
99 layersIndex,
100 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000101 }
102}
103
// Validate the graph, the layers-table index and a specific layer index before
// they are dereferenced; throws ParseException with caller location on failure.
// layerIndex == VIRTUAL_LAYER_ID is exempt from the range check (used for the
// implicit input/output binding layers).
void CheckLayers(const GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        // Null layers table: the flatbuffer was never loaded/unpacked.
        throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
                                         "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(fmt::format("{0} was called with an invalid layers index. "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    // NOTE(review): graph->layers() returns a flatbuffers Vector POINTER, so
    // graph->layers()[layersIndex] is pointer arithmetic on that pointer, not
    // element access. This is only well-defined when layersIndex == 0 (the way
    // CHECK_LAYERS is invoked today); presumably graph->layers()->size() was
    // intended — confirm before touching.
    else if (layerIndex >= graph->layers()[layersIndex].size()
            && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(fmt::format("{0} was called with an invalid layer index. "
                                         "layers:{1} layer:{2} at {3}",
                                         location.m_Function,
                                         layersIndex,
                                         layerIndex,
                                         location.FileLine()));
    }
}
137
Finn Williams85d36712021-01-26 22:30:06 +0000138void CheckTensorPtr(TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000139 const CheckLocation& location)
140{
141 if (rawPtr == nullptr)
142 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100143 throw ParseException(fmt::format("{0} was called with a null tensor pointer. at {1}",
144 location.m_Function,
145 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000146 }
147}
148
Finn Williams85d36712021-01-26 22:30:06 +0000149void CheckConstTensorPtr(ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000150 const CheckLocation& location)
151{
152 if (rawPtr == nullptr)
153 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100154 throw ParseException(fmt::format("{0} was called with a null const tensor pointer. at {1}",
155 location.m_Function,
156 location.FileLine()));
Mike Kellya0766c32019-02-19 17:22:07 +0000157 }
158}
159
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000160void CheckConstTensorSize(const unsigned int constTensorSize,
161 const unsigned int tensorSize,
162 const CheckLocation& location)
163{
164 if (constTensorSize != tensorSize)
165 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100166 throw ParseException(fmt::format("{0} wrong number of components supplied to tensor. at:{1}",
167 location.m_Function,
168 location.FileLine()));
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000169 }
170}
171
Kevin May43a799c2019-02-08 16:31:42 +0000172#define CHECK_TENSOR_PTR(TENSOR_PTR) \
173 CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())
174
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000175#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
176 CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())
177
Mike Kellya0766c32019-02-19 17:22:07 +0000178#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
179 CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())
180
Kevin May43a799c2019-02-08 16:31:42 +0000181#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
182 CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())
183
184#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
185 CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
186}
187
Saoirse Stewart263829c2019-02-19 15:54:14 +0000188bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
189{
190 const unsigned int actualSize = actual.GetNumDimensions();
191 if (actualSize != expected.size())
192 {
193 return false;
194 }
195
196 for (unsigned int i = 0u; i < actualSize; i++)
197 {
198 if (actual[i] != static_cast<unsigned int>(expected[i]))
199 {
200 return false;
201 }
202 }
203
204 return true;
205}
206
// Build the layer-type -> parse-member-function dispatch table. Every slot is
// first initialised to ParseUnsupportedLayer so that any layer enum value not
// explicitly registered below produces a controlled "unsupported" error rather
// than a null-pointer call.
IDeserializer::DeserializerImpl::DeserializerImpl()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &IDeserializer::DeserializerImpl::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer] = &DeserializerImpl::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer] = &DeserializerImpl::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &DeserializerImpl::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer] = &DeserializerImpl::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchMatMulLayer] = &DeserializerImpl::ParseBatchMatMul;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &DeserializerImpl::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &DeserializerImpl::ParseBatchNormalization;
    m_ParserFunctions[Layer_CastLayer] = &DeserializerImpl::ParseCast;
    m_ParserFunctions[Layer_ChannelShuffleLayer] = &DeserializerImpl::ParseChannelShuffle;
    m_ParserFunctions[Layer_ComparisonLayer] = &DeserializerImpl::ParseComparison;
    m_ParserFunctions[Layer_ConcatLayer] = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer] = &DeserializerImpl::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &DeserializerImpl::ParseConvolution2d;
    m_ParserFunctions[Layer_Convolution3dLayer] = &DeserializerImpl::ParseConvolution3d;
    m_ParserFunctions[Layer_DepthToSpaceLayer] = &DeserializerImpl::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &DeserializerImpl::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &DeserializerImpl::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &DeserializerImpl::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &DeserializerImpl::ParseDivision;
    m_ParserFunctions[Layer_ElementwiseBinaryLayer] = &DeserializerImpl::ParseElementwiseBinary;
    m_ParserFunctions[Layer_ElementwiseUnaryLayer] = &DeserializerImpl::ParseElementwiseUnary;
    m_ParserFunctions[Layer_EqualLayer] = &DeserializerImpl::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &DeserializerImpl::ParseFullyConnected;
    m_ParserFunctions[Layer_FillLayer] = &DeserializerImpl::ParseFill;
    m_ParserFunctions[Layer_FloorLayer] = &DeserializerImpl::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &DeserializerImpl::ParseGather;
    m_ParserFunctions[Layer_GatherNdLayer] = &DeserializerImpl::ParseGatherNd;
    m_ParserFunctions[Layer_GreaterLayer] = &DeserializerImpl::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer] = &DeserializerImpl::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &DeserializerImpl::ParseL2Normalization;
    m_ParserFunctions[Layer_LogicalBinaryLayer] = &DeserializerImpl::ParseLogicalBinary;
    m_ParserFunctions[Layer_LogSoftmaxLayer] = &DeserializerImpl::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer] = &DeserializerImpl::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &DeserializerImpl::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &DeserializerImpl::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &DeserializerImpl::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer] = &DeserializerImpl::ParseMerge;
    // Deprecated Merger maps onto the Concat parser.
    m_ParserFunctions[Layer_MergerLayer] = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer] = &DeserializerImpl::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &DeserializerImpl::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &DeserializerImpl::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &DeserializerImpl::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &DeserializerImpl::ParsePooling2d;
    m_ParserFunctions[Layer_Pooling3dLayer] = &DeserializerImpl::ParsePooling3d;
    m_ParserFunctions[Layer_PreluLayer] = &DeserializerImpl::ParsePrelu;
    m_ParserFunctions[Layer_QLstmLayer] = &DeserializerImpl::ParseQLstm;
    m_ParserFunctions[Layer_QuantizeLayer] = &DeserializerImpl::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer] = &DeserializerImpl::ParseQuantizedLstm;
    m_ParserFunctions[Layer_RankLayer] = &DeserializerImpl::ParseRank;
    m_ParserFunctions[Layer_ReduceLayer] = &DeserializerImpl::ParseReduce;
    m_ParserFunctions[Layer_ReshapeLayer] = &DeserializerImpl::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &DeserializerImpl::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer] = &DeserializerImpl::ParseResize;
    m_ParserFunctions[Layer_ReverseV2Layer] = &DeserializerImpl::ParseReverseV2;
    m_ParserFunctions[Layer_RsqrtLayer] = &DeserializerImpl::ParseRsqrt;
    m_ParserFunctions[Layer_ShapeLayer] = &DeserializerImpl::ParseShape;
    m_ParserFunctions[Layer_SliceLayer] = &DeserializerImpl::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer] = &DeserializerImpl::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &DeserializerImpl::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer] = &DeserializerImpl::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer] = &DeserializerImpl::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer] = &DeserializerImpl::ParseStack;
    m_ParserFunctions[Layer_StandInLayer] = &DeserializerImpl::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer] = &DeserializerImpl::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &DeserializerImpl::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer] = &DeserializerImpl::ParseSwitch;
    m_ParserFunctions[Layer_TileLayer] = &DeserializerImpl::ParseTile;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &DeserializerImpl::ParseTransposeConvolution2d;
    m_ParserFunctions[Layer_TransposeLayer] = &DeserializerImpl::ParseTranspose;
    m_ParserFunctions[Layer_UnidirectionalSequenceLstmLayer] = &DeserializerImpl::ParseUnidirectionalSequenceLstm;
}
284
// Return the common LayerBase of the layer at layerIndex, regardless of its
// concrete flatbuffer union type. Each case downcasts via the generated
// layer_as_*() accessor and returns its base(). Input/Output layers carry an
// extra Bindable wrapper, hence base()->base(). Unknown types throw.
LayerBaseRawPtr IDeserializer::DeserializerImpl::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchMatMulLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchMatMulLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_CastLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_CastLayer()->base();
        case Layer::Layer_ChannelShuffleLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->base();
        case Layer::Layer_ComparisonLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_Convolution3dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution3dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_ElementwiseBinaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseBinaryLayer()->base();
        case Layer::Layer_ElementwiseUnaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FillLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FillLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GatherNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherNdLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            // Bindable layer: unwrap the extra Bindable base.
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_InstanceNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LogicalBinaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer()->base();
        case Layer::Layer_LogSoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            // Bindable layer: unwrap the extra Bindable base.
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_Pooling3dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_RankLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
        case Layer::Layer_ReduceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReduceLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_ReverseV2Layer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReverseV2Layer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_ShapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ShapeLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StandInLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TileLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TileLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_TransposeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
        case Layer::Layer_UnidirectionalSequenceLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer()->base();
        case Layer::Layer_NONE:
        default:
            // Unregistered / NONE union value: the stream is malformed or from
            // a newer schema than this deserializer understands.
            throw ParseException(fmt::format("Layer type {} not recognized", layerType));
    }
}
440
Finn Williams85d36712021-01-26 22:30:06 +0000441std::string IDeserializer::DeserializerImpl::GetLayerName(const GraphPtr& graph, unsigned int index)
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000442{
443 auto layer = GetBaseLayer(graph, index);
444 assert(layer);
445 return layer->layerName()->str();
446}
447
Finn Williams85d36712021-01-26 22:30:06 +0000448int32_t IDeserializer::DeserializerImpl::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000449{
450 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
451
452 if (layerType == Layer::Layer_InputLayer)
453 {
454 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
455 }
456 else if ( layerType == Layer::Layer_OutputLayer )
457 {
458 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
459 }
460 return 0;
461}
462
// Map the serialized DataLayout enum onto the armnn runtime enum.
// NOTE(review): any unrecognized value silently falls through to NCHW rather
// than raising an error — presumably intentional for forward compatibility.
armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
{
    switch (dataLayout)
    {
        case armnnSerializer::DataLayout::DataLayout_NHWC:
            return armnn::DataLayout::NHWC;
        case armnnSerializer::DataLayout::DataLayout_NDHWC:
            return armnn::DataLayout::NDHWC;
        case armnnSerializer::DataLayout::DataLayout_NCDHW:
            return armnn::DataLayout::NCDHW;
        case armnnSerializer::DataLayout::DataLayout_NCHW:
        default:
            return armnn::DataLayout::NCHW;
    }
}
478
// Map the serialized ActivationFunction enum onto the armnn runtime enum.
// NOTE(review): unknown values silently map to Sigmoid (the default case)
// instead of failing — confirm this lenient fallback is intended.
armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        case armnnSerializer::ActivationFunction_Elu:
            return armnn::ActivationFunction::Elu;
        case armnnSerializer::ActivationFunction_HardSwish:
            return armnn::ActivationFunction::HardSwish;
        case armnnSerializer::ActivationFunction_Gelu:
            return armnn::ActivationFunction::Gelu;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}
511
// Map the serialized ArgMinMaxFunction enum onto the armnn runtime enum.
// Unknown values fall through to Min (the default case).
armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
{
    switch (function)
    {
        case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
            return armnn::ArgMinMaxFunction::Max;
        case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
        default:
            return armnn::ArgMinMaxFunction::Min;
    }
}
523
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +0100524armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
525{
526 switch (operation)
527 {
528 case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
529 return armnn::ComparisonOperation::Equal;
530 case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
531 return armnn::ComparisonOperation::Greater;
532 case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
533 return armnn::ComparisonOperation::GreaterOrEqual;
534 case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
535 return armnn::ComparisonOperation::Less;
536 case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
537 return armnn::ComparisonOperation::LessOrEqual;
538 case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
539 default:
540 return armnn::ComparisonOperation::NotEqual;
541 }
542}
543
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +0000544armnn::ReduceOperation ToReduceOperation(armnnSerializer::ReduceOperation operation)
545{
546 switch (operation)
547 {
548 case armnnSerializer::ReduceOperation::ReduceOperation_Sum:
549 return armnn::ReduceOperation::Sum;
550 case armnnSerializer::ReduceOperation::ReduceOperation_Max:
551 return armnn::ReduceOperation::Max;
552 case armnnSerializer::ReduceOperation::ReduceOperation_Mean:
553 return armnn::ReduceOperation::Mean;
554 case armnnSerializer::ReduceOperation::ReduceOperation_Min:
555 return armnn::ReduceOperation::Min;
Teresa Charlin4e3e8312021-08-05 12:34:37 +0100556 case armnnSerializer::ReduceOperation::ReduceOperation_Prod:
557 return armnn::ReduceOperation::Prod;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +0000558 default:
559 return armnn::ReduceOperation::Sum;
560 }
561}
562
James Conroyaba90cd2020-11-06 16:28:18 +0000563armnn::LogicalBinaryOperation ToLogicalBinaryOperation(armnnSerializer::LogicalBinaryOperation operation)
564{
565 switch (operation)
566 {
567 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalAnd:
568 return armnn::LogicalBinaryOperation::LogicalAnd;
569 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalOr:
570 return armnn::LogicalBinaryOperation::LogicalOr;
571 default:
572 throw armnn::InvalidArgumentException("Logical Binary operation unknown");
573 }
574}
575
Mike Kelly3ec30772023-03-08 13:47:17 +0000576armnn::BinaryOperation ToElementwiseBinaryOperation(armnnSerializer::BinaryOperation operation)
577{
578 switch (operation)
579 {
580 case armnnSerializer::BinaryOperation::BinaryOperation_Add:
581 return armnn::BinaryOperation::Add;
582 case armnnSerializer::BinaryOperation::BinaryOperation_Div:
583 return armnn::BinaryOperation::Div;
584 case armnnSerializer::BinaryOperation::BinaryOperation_Maximum:
585 return armnn::BinaryOperation::Maximum;
586 case armnnSerializer::BinaryOperation::BinaryOperation_Minimum:
587 return armnn::BinaryOperation::Minimum;
588 case armnnSerializer::BinaryOperation::BinaryOperation_Mul:
589 return armnn::BinaryOperation::Mul;
590 case armnnSerializer::BinaryOperation::BinaryOperation_Sub:
591 return armnn::BinaryOperation::Sub;
John Mcloughlin0ec00872023-05-15 17:03:49 +0100592 case armnnSerializer::BinaryOperation::BinaryOperation_SqDiff:
593 return armnn::BinaryOperation::SqDiff;
594 case armnnSerializer::BinaryOperation::BinaryOperation_Power:
595 return armnn::BinaryOperation::Power;
Mike Kelly3ec30772023-03-08 13:47:17 +0000596 default:
597 throw armnn::InvalidArgumentException("Binary operation unknown");
598 }
599}
600
601armnn::UnaryOperation ToElementwiseUnaryOperation(armnnSerializer::UnaryOperation operation)
josh minor4a3c6102020-01-06 16:40:46 -0600602{
603 switch (operation)
604 {
605 case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
606 return armnn::UnaryOperation::Abs;
Teresa Charlin93f0ad02023-03-23 15:28:02 +0000607 case armnnSerializer::UnaryOperation::UnaryOperation_Ceil:
608 return armnn::UnaryOperation::Ceil;
josh minor4a3c6102020-01-06 16:40:46 -0600609 case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
610 return armnn::UnaryOperation::Rsqrt;
611 case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
612 return armnn::UnaryOperation::Sqrt;
613 case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
614 return armnn::UnaryOperation::Exp;
615 case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
616 return armnn::UnaryOperation::Neg;
James Conroyaba90cd2020-11-06 16:28:18 +0000617 case armnnSerializer::UnaryOperation::UnaryOperation_LogicalNot:
618 return armnn::UnaryOperation::LogicalNot;
Teresa Charlin50de4fa2021-05-31 18:47:33 +0100619 case armnnSerializer::UnaryOperation::UnaryOperation_Log:
620 return armnn::UnaryOperation::Log;
621 case armnnSerializer::UnaryOperation::UnaryOperation_Sin:
622 return armnn::UnaryOperation::Sin;
josh minor4a3c6102020-01-06 16:40:46 -0600623 default:
624 throw armnn::InvalidArgumentException("Unary operation unknown");
625 }
626}
627
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +0100628armnn::PaddingMode ToPaddingMode(armnnSerializer::PaddingMode paddingMode)
629{
630 switch (paddingMode)
631 {
632 case armnnSerializer::PaddingMode::PaddingMode_Reflect:
633 return armnn::PaddingMode::Reflect;
634 case armnnSerializer::PaddingMode::PaddingMode_Symmetric:
635 return armnn::PaddingMode::Symmetric;
636 default:
637 return armnn::PaddingMode::Constant;
638 }
639}
640
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100641armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
642{
643 switch (method)
644 {
645 case armnnSerializer::ResizeMethod_NearestNeighbor:
646 return armnn::ResizeMethod::NearestNeighbor;
647 case armnnSerializer::ResizeMethod_Bilinear:
Aron Virginas-Tar3c9b2702019-10-31 13:45:16 +0000648 return armnn::ResizeMethod::Bilinear;
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100649 default:
650 return armnn::ResizeMethod::NearestNeighbor;
651 }
652}
653
Finn Williams85d36712021-01-26 22:30:06 +0000654armnn::TensorInfo ToTensorInfo(TensorRawPtr tensorPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000655{
656 armnn::DataType type;
657 CHECK_TENSOR_PTR(tensorPtr);
658
659 switch (tensorPtr->dataType())
660 {
Francis Murtaghddb1d062020-03-10 13:51:45 +0000661 case DataType_QAsymmS8:
662 type = armnn::DataType::QAsymmS8;
663 break;
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000664 case DataType_QSymmS8:
665 type = armnn::DataType::QSymmS8;
666 break;
Kevin May43a799c2019-02-08 16:31:42 +0000667 case DataType_QuantisedAsymm8:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000668 case DataType_QAsymmU8:
669 type = armnn::DataType::QAsymmU8;
Kevin May43a799c2019-02-08 16:31:42 +0000670 break;
Derek Lambertif90c56d2020-01-10 17:14:08 +0000671 case DataType_QSymmS16:
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000672 case DataType_QuantisedSymm16:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000673 type = armnn::DataType::QSymmS16;
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000674 break;
Mike Kellya0766c32019-02-19 17:22:07 +0000675 case DataType_Signed32:
676 type = armnn::DataType::Signed32;
677 break;
Mike Kelly1f140f72021-04-06 12:25:55 +0100678 case DataType_Signed64:
679 type = armnn::DataType::Signed64;
680 break;
Kevin May43a799c2019-02-08 16:31:42 +0000681 case DataType_Float32:
682 type = armnn::DataType::Float32;
683 break;
684 case DataType_Float16:
685 type = armnn::DataType::Float16;
686 break;
687 case DataType_Boolean:
688 type = armnn::DataType::Boolean;
689 break;
690 default:
691 {
692 CheckLocation location = CHECK_LOCATION();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100693 throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
694 tensorPtr->dataType(),
695 EnumNameDataType(tensorPtr->dataType()),
696 location.AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000697 }
698 }
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000699
Colm Donelan800b2812021-02-12 12:43:35 +0000700 float quantizationScale = tensorPtr->quantizationScale();
701 int32_t quantizationOffset = tensorPtr->quantizationOffset();
702
Finn Williams2605b232020-06-10 15:53:46 +0100703 if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::Scalar))
704 {
Colm Donelan800b2812021-02-12 12:43:35 +0000705 return armnn::TensorInfo(TensorShape{armnn::Dimensionality::Scalar},
Finn Williams2605b232020-06-10 15:53:46 +0100706 type,
707 quantizationScale,
708 quantizationOffset);
709 }
Colm Donelan800b2812021-02-12 12:43:35 +0000710 else if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::NotSpecified))
711 {
712 armnn::TensorInfo result(TensorShape{Dimensionality::NotSpecified},
713 type,
714 quantizationScale,
715 quantizationOffset);
716 return result;
717 }
Kevin May43a799c2019-02-08 16:31:42 +0000718
719 auto dimensions = tensorPtr->dimensions();
720 unsigned int size = dimensions->size();
721 std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
Colm Donelan800b2812021-02-12 12:43:35 +0000722 bool dimensionsSpecificity[armnn::MaxNumOfTensorDimensions];
723 std::fill_n(dimensionsSpecificity, armnn::MaxNumOfTensorDimensions, true);
724 // For backwards compatibility check if the dimensionSpecificity vector is present first.
725 // The default is to have dimensionSpecificity set to all true's anyway.
726 if (tensorPtr->dimensionSpecificity() != nullptr)
727 {
728 auto dimensionSpecificity = tensorPtr->dimensionSpecificity();
729 size = dimensionSpecificity->size();
730 for (unsigned int i = 0; i < size; ++i)
731 {
732 dimensionsSpecificity[i] = dimensionSpecificity->Get(i);
733 }
734 }
735 // Construct a TensorShape
736 TensorShape shape(size, outputDims.data(), dimensionsSpecificity);
Kevin May43a799c2019-02-08 16:31:42 +0000737
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000738 auto quantizationScales = tensorPtr->quantizationScales();
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000739 if (quantizationScales)
740 {
741 unsigned int quantizationScalesSize = quantizationScales->size();
742 std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
743 unsigned int quantizationDim = tensorPtr->quantizationDim();
Colm Donelan800b2812021-02-12 12:43:35 +0000744 armnn::TensorInfo result(shape,
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000745 type,
746 scales,
747 quantizationDim);
748 return result;
749 }
750
Kevin May43a799c2019-02-08 16:31:42 +0000751 // two statements (on purpose) for easier debugging:
Colm Donelan800b2812021-02-12 12:43:35 +0000752 armnn::TensorInfo result(shape,
Kevin May43a799c2019-02-08 16:31:42 +0000753 type,
754 quantizationScale,
755 quantizationOffset);
Jim Flynnb53f52a2023-09-17 10:00:25 +0100756
Kevin May43a799c2019-02-08 16:31:42 +0000757 return result;
758}
759
Finn Williams85d36712021-01-26 22:30:06 +0000760armnn::ConstTensor ToConstTensor(ConstTensorRawPtr constTensorPtr)
Mike Kellya0766c32019-02-19 17:22:07 +0000761{
762 CHECK_CONST_TENSOR_PTR(constTensorPtr);
763 armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());
Matthew Sloyan81beae32021-07-13 19:46:11 +0100764 tensorInfo.SetConstant();
Mike Kellya0766c32019-02-19 17:22:07 +0000765
766 switch (constTensorPtr->data_type())
767 {
768 case ConstTensorData_ByteData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000769 {
770 auto byteData = constTensorPtr->data_as_ByteData()->data();
771 CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
772 return armnn::ConstTensor(tensorInfo, byteData->data());
773 }
Mike Kellya0766c32019-02-19 17:22:07 +0000774 case ConstTensorData_ShortData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000775 {
776 auto shortData = constTensorPtr->data_as_ShortData()->data();
777 CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
778 return armnn::ConstTensor(tensorInfo, shortData->data());
779 }
Mike Kellya0766c32019-02-19 17:22:07 +0000780 case ConstTensorData_IntData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000781 {
782 auto intData = constTensorPtr->data_as_IntData()->data();
783 CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
784 return armnn::ConstTensor(tensorInfo, intData->data());
785 }
Mike Kellya0766c32019-02-19 17:22:07 +0000786 case ConstTensorData_LongData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000787 {
788 auto longData = constTensorPtr->data_as_LongData()->data();
789 CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
790 return armnn::ConstTensor(tensorInfo, longData->data());
791 }
Mike Kellya0766c32019-02-19 17:22:07 +0000792 default:
793 {
794 CheckLocation location = CHECK_LOCATION();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100795 throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
796 constTensorPtr->data_type(),
797 EnumNameConstTensorData(constTensorPtr->data_type()),
798 location.AsString()));
Mike Kellya0766c32019-02-19 17:22:07 +0000799 }
800 }
801}
802
Finn Williams85d36712021-01-26 22:30:06 +0000803TensorRawPtrVector IDeserializer::DeserializerImpl::GetInputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000804{
805 CHECK_LAYERS(graphPtr, 0, layerIndex);
806 auto layer = GetBaseLayer(graphPtr, layerIndex);
807 const auto& numInputs = layer->inputSlots()->size();
808
809 TensorRawPtrVector result(numInputs);
810
811 for (unsigned int i=0; i<numInputs; ++i)
812 {
813 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
814 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
815 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
816 }
817 return result;
818}
819
Finn Williams85d36712021-01-26 22:30:06 +0000820TensorRawPtrVector IDeserializer::DeserializerImpl::GetOutputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000821{
822 CHECK_LAYERS(graphPtr, 0, layerIndex);
823 auto layer = GetBaseLayer(graphPtr, layerIndex);
824 const auto& numOutputs = layer->outputSlots()->size();
825
826 TensorRawPtrVector result(numOutputs);
827
828 for (unsigned int i=0; i<numOutputs; ++i)
829 {
830 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
831 }
832 return result;
833}
834
Finn Williams85d36712021-01-26 22:30:06 +0000835void IDeserializer::DeserializerImpl::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000836{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000837 CHECK_LAYERS(graph, 0, layerIndex);
838 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100839 throw ParseException(fmt::format("Layer not supported. layerIndex: {0} "
840 "layerName: {1} / {2}",
841 layerIndex,
842 layerName,
843 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000844}
845
Finn Williams85d36712021-01-26 22:30:06 +0000846void IDeserializer::DeserializerImpl::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000847{
848 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000849 m_InputBindings.clear();
850 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000851}
852
Kevin May43a799c2019-02-08 16:31:42 +0000853
Finn Williams85d36712021-01-26 22:30:06 +0000854INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000855{
856 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000857 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
858 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000859}
860
Finn Williams85d36712021-01-26 22:30:06 +0000861armnn::INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000862{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000863 ResetParser();
Simon Obutedb5804e2022-04-14 15:49:52 +0100864 if (binaryContent.fail()) {
865 ARMNN_LOG(error) << (std::string("Cannot read input"));
866 throw ParseException("Unable to read Input stream data");
867 }
868 binaryContent.seekg(0, std::ios::end);
869 const std::streamoff size = binaryContent.tellg();
870 std::vector<char> content(static_cast<size_t>(size));
871 binaryContent.seekg(0);
872 binaryContent.read(content.data(), static_cast<std::streamsize>(size));
873 GraphPtr graph = LoadGraphFromBinary(reinterpret_cast<uint8_t*>(content.data()), static_cast<size_t>(size));
Derek Lamberti8ddae332019-02-21 16:29:43 +0000874 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000875}
876
Finn Williams85d36712021-01-26 22:30:06 +0000877GraphPtr IDeserializer::DeserializerImpl::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000878{
879 if (binaryContent == nullptr)
880 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100881 throw InvalidArgumentException(fmt::format("Invalid (null) binary content {}",
882 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000883 }
884 flatbuffers::Verifier verifier(binaryContent, len);
885 if (verifier.VerifyBuffer<SerializedGraph>() == false)
886 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100887 throw ParseException(fmt::format("Buffer doesn't conform to the expected Armnn "
888 "flatbuffers format. size:{0} {1}",
889 len,
890 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000891 }
892 return GetSerializedGraph(binaryContent);
893}
894
Finn Williams85d36712021-01-26 22:30:06 +0000895INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromGraph(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000896{
Jim Flynnb53f52a2023-09-17 10:00:25 +0100897 if (graph == nullptr)
898 {
899 throw armnn::InvalidArgumentException("CreateNetworkFromGraph: graph pointer is null");
900 }
Kevin May43a799c2019-02-08 16:31:42 +0000901 m_Network = INetwork::Create();
Kevin May43a799c2019-02-08 16:31:42 +0000902 unsigned int layerIndex = 0;
Derek Lamberti8ddae332019-02-21 16:29:43 +0000903 for (AnyLayer const* layer : *graph->layers())
Kevin May43a799c2019-02-08 16:31:42 +0000904 {
905 if (layer->layer_type() != Layer_InputLayer &&
906 layer->layer_type() != Layer_OutputLayer)
907 {
908 // lookup and call the parser function
909 auto& parserFunction = m_ParserFunctions[layer->layer_type()];
Derek Lamberti8ddae332019-02-21 16:29:43 +0000910 (this->*parserFunction)(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000911 }
912 ++layerIndex;
913 }
914
Derek Lamberti8ddae332019-02-21 16:29:43 +0000915 SetupInputLayers(graph);
916 SetupOutputLayers(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000917
918 // establish the connections from the layer outputs to the inputs of the subsequent layers
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100919 for (auto&& graphIt : m_GraphConnections)
Kevin May43a799c2019-02-08 16:31:42 +0000920 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100921 Connections& connections = graphIt.second;
922 for (auto&& outputIt : connections.outputSlots)
Kevin May43a799c2019-02-08 16:31:42 +0000923 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100924 const unsigned int outputSlotIndex = outputIt.first;
925 IOutputSlot* outputSlot = outputIt.second;
926 if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
Kevin May43a799c2019-02-08 16:31:42 +0000927 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100928 for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000929 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100930 outputSlot->Connect(*inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000931 }
Kevin May43a799c2019-02-08 16:31:42 +0000932 }
933 }
934 }
935
936 return std::move(m_Network);
937}
938
Finn Williams85d36712021-01-26 22:30:06 +0000939BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000940 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000941{
Jan Eilers8eb25602020-03-09 12:13:48 +0000942 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000943 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000944 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000945 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000946 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000947 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000948 }
949 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100950 throw ParseException(fmt::format("No input binding found for layer:{0} / {1}",
951 name,
952 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000953}
954
Finn Williams85d36712021-01-26 22:30:06 +0000955BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000956 const std::string& name) const
957{
Jan Eilers8eb25602020-03-09 12:13:48 +0000958 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000959 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000960 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000961 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000962 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000963 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000964 }
965 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100966 throw ParseException(fmt::format("No output binding found for layer:{0} / {1}",
967 name,
968 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000969}
970
Finn Williams85d36712021-01-26 22:30:06 +0000971unsigned int IDeserializer::DeserializerImpl::GetInputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000972{
973 for (unsigned int i = 0; i < graph->layers()->size(); i++)
974 {
975 auto layer = graph->layers()->Get(i);
976 if (layer->layer_type() == Layer::Layer_InputLayer)
977 {
978 auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
979 if (layerBindingId == targetId)
980 {
981 return i;
982 }
983 }
984 }
985 throw ParseException("Input layer with given layerBindingId not found");
986}
987
Finn Williams85d36712021-01-26 22:30:06 +0000988unsigned int IDeserializer::DeserializerImpl::GetOutputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000989{
990 for (unsigned int i = 0; i < graph->layers()->size(); i++)
991 {
992 auto layer = graph->layers()->Get(i);
993 if (layer->layer_type() == Layer::Layer_OutputLayer)
994 {
995 auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
996 if (layerBindingId == targetId)
997 {
998 return i;
999 }
1000 }
1001 }
1002 throw ParseException("Output layer with given layerBindingId not found");
1003}
1004
Finn Williams85d36712021-01-26 22:30:06 +00001005unsigned int IDeserializer::DeserializerImpl::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001006{
1007 for (unsigned int i = 0; i < graph->layers()->size(); i++)
1008 {
1009 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
1010 if (layer->index() == targetIndex)
1011 {
1012 return i;
1013 }
1014 }
1015 throw ParseException("Layer with given index not found");
1016}
1017
Finn Williams85d36712021-01-26 22:30:06 +00001018IDeserializer::DeserializerImpl::FeatureVersions IDeserializer::DeserializerImpl::GetFeatureVersions(GraphPtr graph)
Tee Jungaa920c52019-11-05 10:48:25 +00001019{
Finn Williams85d36712021-01-26 22:30:06 +00001020 IDeserializer::DeserializerImpl::FeatureVersions versions;
Tee Jungaa920c52019-11-05 10:48:25 +00001021
1022 if (graph->featureVersions())
1023 {
1024 versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
Jan Eilers53ef7952021-06-02 12:01:25 +01001025 versions.m_WeightsLayoutScheme = graph->featureVersions()->weightsLayoutScheme();
Matthew Sloyan81beae32021-07-13 19:46:11 +01001026 versions.m_ConstTensorsAsInputs = graph->featureVersions()->constantTensorsAsInputs();
Tee Jungaa920c52019-11-05 10:48:25 +00001027 }
1028
1029 return versions;
1030}
1031
Finn Williams85d36712021-01-26 22:30:06 +00001032void IDeserializer::DeserializerImpl::SetupInputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +00001033{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001034 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001035 const unsigned int numInputs = graph->inputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001036 m_InputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001037 m_InputBindings.reserve(numInputs);
1038
1039 for (unsigned int i = 0; i < numInputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +00001040 {
Tee Jungaa920c52019-11-05 10:48:25 +00001041 unsigned int inputLayerIndex = 0xFFFFFFFF;
1042 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
1043 {
Matthew Sloyan0663d662020-09-14 11:47:26 +01001044 const unsigned int inputId = armnn::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
Tee Jungaa920c52019-11-05 10:48:25 +00001045 inputLayerIndex = GetLayerIndexInVector(graph, inputId);
1046 }
1047 else
1048 {
1049 const int inputId = graph->inputIds()->Get(i);
1050 inputLayerIndex = GetInputLayerInVector(graph, inputId);
1051 }
1052
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001053 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001054
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001055 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
1056 LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
Jim Flynnb53f52a2023-09-17 10:00:25 +01001057 if (baseLayer->layerName()->c_str() == nullptr)
1058 {
1059 throw ParseException(fmt::format("Input with layer index [{0}] has no name", inputLayerIndex));
1060 }
Kevin May43a799c2019-02-08 16:31:42 +00001061
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001062 IConnectableLayer* inputLayer =
1063 m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +00001064
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001065 const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
1066 inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
1067 RegisterOutputSlots(graph, inputLayerIndex, inputLayer);
1068
Derek Lamberti8ddae332019-02-21 16:29:43 +00001069 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001070 m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +00001071 }
1072}
1073
Finn Williams85d36712021-01-26 22:30:06 +00001074void IDeserializer::DeserializerImpl::SetupOutputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +00001075{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001076 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001077 const unsigned int numOutputs = graph->outputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001078 m_OutputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001079 m_OutputBindings.reserve(numOutputs);
1080
1081 for (unsigned int i = 0; i < numOutputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +00001082 {
Tee Jungaa920c52019-11-05 10:48:25 +00001083 unsigned int outputLayerIndex = 0xFFFFFFFF;
1084 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
1085 {
Matthew Sloyan0663d662020-09-14 11:47:26 +01001086 const unsigned int outputId = armnn::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
Tee Jungaa920c52019-11-05 10:48:25 +00001087 outputLayerIndex = GetLayerIndexInVector(graph, outputId);
1088 }
1089 else
1090 {
1091 const int outputId = graph->outputIds()->Get(i);
1092 outputLayerIndex = GetOutputLayerInVector(graph, outputId);
1093 }
1094
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001095 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001096
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001097 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
1098 LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
Jim Flynnb53f52a2023-09-17 10:00:25 +01001099 if (baseLayer->layerName()->c_str() == nullptr)
1100 {
1101 throw ParseException(fmt::format("Output with layer index [{0}] has no name", outputLayerIndex));
1102 }
Derek Lamberti8ddae332019-02-21 16:29:43 +00001103
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001104 IConnectableLayer* outputLayer =
1105 m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +00001106
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001107 RegisterInputSlots(graph, outputLayerIndex, outputLayer);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001108 unsigned int sourceLayerIndex =
1109 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
Colm Donelan30aa3712021-04-07 17:28:01 +01001110 unsigned int outputSlotIndex =
1111 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->outputSlotIndex());
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001112 LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
Colm Donelan30aa3712021-04-07 17:28:01 +01001113 const armnn::TensorInfo& tensorInfo = ToTensorInfo(
1114 sourceBaseLayer->outputSlots()->Get(outputSlotIndex)->tensorInfo());
Derek Lamberti8ddae332019-02-21 16:29:43 +00001115 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001116 m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +00001117 }
1118}
1119
Finn Williams85d36712021-01-26 22:30:06 +00001120void IDeserializer::DeserializerImpl::RegisterOutputSlots(GraphPtr graph,
Derek Lamberti8ddae332019-02-21 16:29:43 +00001121 uint32_t layerIndex,
1122 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +00001123{
Jim Flynnb53f52a2023-09-17 10:00:25 +01001124 if (layer == nullptr)
1125 {
1126 throw ParseException(fmt::format(
1127 "RegisterOutputSlots: pointer to layer with index [{0}] is null", layerIndex));
1128 }
Derek Lamberti8ddae332019-02-21 16:29:43 +00001129 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001130 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
1131 if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
Kevin May43a799c2019-02-08 16:31:42 +00001132 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001133 throw ParseException(fmt::format("The number of outputslots ({0}) does not match the number expected ({1})"
1134 " for layer index: {2} {3}",
1135 baseLayer->outputSlots()->size(),
1136 layer->GetNumOutputSlots(),
1137 layerIndex,
1138 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +00001139 }
1140
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001141 for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +00001142 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001143 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
1144 armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
1145 // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
1146 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
Kevin May43a799c2019-02-08 16:31:42 +00001147 }
1148}
1149
Finn Williams85d36712021-01-26 22:30:06 +00001150void IDeserializer::DeserializerImpl::RegisterInputSlots(GraphPtr graph,
Matthew Sloyan81beae32021-07-13 19:46:11 +01001151 uint32_t layerIndex,
1152 armnn::IConnectableLayer* layer,
1153 std::vector<unsigned int> ignoreSlots)
Kevin May43a799c2019-02-08 16:31:42 +00001154{
Jim Flynnb53f52a2023-09-17 10:00:25 +01001155 if (layer == nullptr)
1156 {
1157 throw ParseException(fmt::format(
1158 "RegisterInputSlots: pointer to layer with index [{0}] is null", layerIndex));
1159 }
Derek Lamberti8ddae332019-02-21 16:29:43 +00001160 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001161 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
Matthew Sloyan81beae32021-07-13 19:46:11 +01001162
1163 if (baseLayer->inputSlots()->size() != (layer->GetNumInputSlots() - ignoreSlots.size()))
Kevin May43a799c2019-02-08 16:31:42 +00001164 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001165 throw ParseException(fmt::format("The number of inputslots ({0}) does not match the number expected ({1})"
1166 " for layer index:{2} {3}",
1167 baseLayer->inputSlots()->size(),
1168 layer->GetNumInputSlots(),
1169 layerIndex,
1170 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +00001171 }
1172
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001173 for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +00001174 {
Matthew Sloyan81beae32021-07-13 19:46:11 +01001175 // Check if slot should be ignored.
1176 if (std::find(ignoreSlots.begin(), ignoreSlots.end(), i) == ignoreSlots.end())
1177 {
1178 auto fbInputSlot = baseLayer->inputSlots()->Get(i);
1179 auto fbConnection = fbInputSlot->connection();
1180 armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
Mike Kelly4cc341c2023-07-07 15:43:06 +01001181
1182 // If the slot has an Overridden tensorInfo then extract it
1183 if (fbInputSlot->isOverridden())
1184 {
1185 armnn::TensorInfo overriddenTensorInfo = ToTensorInfo(fbInputSlot->overriddenTensorInfo());
1186 inputSlot->SetTensorInfo(overriddenTensorInfo);
1187 }
Matthew Sloyan81beae32021-07-13 19:46:11 +01001188 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
1189 }
Kevin May43a799c2019-02-08 16:31:42 +00001190 }
1191}
1192
Finn Williams85d36712021-01-26 22:30:06 +00001193void IDeserializer::DeserializerImpl::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001194 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001195 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +00001196{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001197 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +00001198 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001199 m_GraphConnections[sourceLayerIndex] = Connections();
1200 }
1201
1202 Connections& connections = m_GraphConnections[sourceLayerIndex];
1203 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
1204 {
1205 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +00001206 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001207 else
1208 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001209 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001210 }
1211}
Kevin May43a799c2019-02-08 16:31:42 +00001212
Finn Williams85d36712021-01-26 22:30:06 +00001213void IDeserializer::DeserializerImpl::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001214 uint32_t outputSlotIndex,
1215 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001216{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001217 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1218 {
1219 m_GraphConnections[sourceLayerIndex] = Connections();
1220 }
1221
1222 Connections& connections = m_GraphConnections[sourceLayerIndex];
1223 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1224 {
1225 throw ParseException("Same output slot index processed twice");
1226 }
1227
1228 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +00001229}
1230
Finn Williams85d36712021-01-26 22:30:06 +00001231void IDeserializer::DeserializerImpl::ParseAbs(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001232{
1233 CHECK_LAYERS(graph, 0, layerIndex);
1234 auto inputs = GetInputs(graph, layerIndex);
1235 CHECK_LOCATION();
1236 CHECK_VALID_SIZE(inputs.size(), 1);
1237
1238 auto outputs = GetOutputs(graph, layerIndex);
1239 CHECK_VALID_SIZE(outputs.size(), 1);
1240
1241 auto layerName = GetLayerName(graph, layerIndex);
1242
josh minor4a3c6102020-01-06 16:40:46 -06001243 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
1244 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001245 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1246 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1247
1248 RegisterInputSlots(graph, layerIndex, layer);
1249 RegisterOutputSlots(graph, layerIndex, layer);
1250}
1251
Finn Williams85d36712021-01-26 22:30:06 +00001252void IDeserializer::DeserializerImpl::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +00001253{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001254 CHECK_LAYERS(graph, 0, layerIndex);
1255 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001256 CHECK_LOCATION();
1257 CHECK_VALID_SIZE(inputs.size(), 1);
1258
Derek Lamberti8ddae332019-02-21 16:29:43 +00001259 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001260 CHECK_VALID_SIZE(outputs.size(), 1);
1261
Derek Lamberti8ddae332019-02-21 16:29:43 +00001262 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001263 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001264 auto serializerDescriptor = serializerLayer->descriptor();
1265
1266 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001267 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +00001268 descriptor.m_A = serializerDescriptor->a();
1269 descriptor.m_B = serializerDescriptor->b();
1270
1271 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
1272 layerName.c_str());
1273 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1274 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1275
Derek Lamberti8ddae332019-02-21 16:29:43 +00001276 RegisterInputSlots(graph, layerIndex, layer);
1277 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +00001278}
1279
Finn Williams85d36712021-01-26 22:30:06 +00001280void IDeserializer::DeserializerImpl::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001281{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001282 CHECK_LAYERS(graph, 0, layerIndex);
1283 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001284 CHECK_LOCATION();
1285 CHECK_VALID_SIZE(inputs.size(), 2);
1286
Derek Lamberti8ddae332019-02-21 16:29:43 +00001287 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001288 CHECK_VALID_SIZE(outputs.size(), 1);
1289
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001290 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00001291 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Add);
1292 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001293
1294 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1295 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1296
Derek Lamberti8ddae332019-02-21 16:29:43 +00001297 RegisterInputSlots(graph, layerIndex, layer);
1298 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001299}
1300
Finn Williams85d36712021-01-26 22:30:06 +00001301void IDeserializer::DeserializerImpl::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001302{
1303 CHECK_LAYERS(graph, 0, layerIndex);
1304 auto inputs = GetInputs(graph, layerIndex);
1305 CHECK_LOCATION();
1306 CHECK_VALID_SIZE(inputs.size(), 1);
1307
1308 auto outputs = GetOutputs(graph, layerIndex);
1309 CHECK_VALID_SIZE(outputs.size(), 1);
1310
1311 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1312 auto serializerDescriptor = serializerLayer->descriptor();
1313
1314 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001315 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001316 descriptor.m_Axis = serializerDescriptor->axis();
1317 auto layerName = GetLayerName(graph, layerIndex);
1318 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1319
1320 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1321 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1322
1323 RegisterInputSlots(graph, layerIndex, layer);
1324 RegisterOutputSlots(graph, layerIndex, layer);
1325}
1326
Samuel Yapa04f4a12022-08-19 11:14:38 +01001327void IDeserializer::DeserializerImpl::ParseBatchMatMul(GraphPtr graph, unsigned int layerIndex)
1328{
1329 CHECK_LAYERS(graph, 0, layerIndex);
1330
1331 auto inputs = GetInputs(graph, layerIndex);
1332 CHECK_LOCATION();
1333 CHECK_VALID_SIZE(inputs.size(), 2);
1334
1335 auto outputs = GetOutputs(graph, layerIndex);
1336 CHECK_VALID_SIZE(outputs.size(), 1);
1337
1338 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchMatMulLayer();
1339 auto serializerDescriptor = serializerLayer->descriptor();
1340
1341 armnn::BatchMatMulDescriptor descriptor(serializerDescriptor->transposeX(),
1342 serializerDescriptor->transposeY(),
1343 serializerDescriptor->adjointX(),
1344 serializerDescriptor->adjointY(),
1345 ToDataLayout(serializerDescriptor->dataLayoutX()),
1346 ToDataLayout(serializerDescriptor->dataLayoutY()));
1347
1348 auto layerName = GetLayerName(graph, layerIndex);
1349 IConnectableLayer* layer = m_Network->AddBatchMatMulLayer(descriptor, layerName.c_str());
1350
1351 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1352 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1353
1354 RegisterInputSlots(graph, layerIndex, layer);
1355 RegisterOutputSlots(graph, layerIndex, layer);
1356}
1357
Finn Williams85d36712021-01-26 22:30:06 +00001358void IDeserializer::DeserializerImpl::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001359{
1360 CHECK_LAYERS(graph, 0, layerIndex);
1361
Finn Williams85d36712021-01-26 22:30:06 +00001362 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001363 CHECK_VALID_SIZE(inputs.size(), 1);
1364
Finn Williams85d36712021-01-26 22:30:06 +00001365 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001366 CHECK_VALID_SIZE(outputs.size(), 1);
1367
1368 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1369 auto flatBufferCrops = flatBufferDescriptor->crops();
1370 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1371
Mike Kelly51b8c312022-05-24 11:34:02 +01001372 if (flatBufferCrops->size() % 2 != 0)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001373 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001374 throw ParseException(fmt::format("The size of crops must be divisible by 2 {}", CHECK_LOCATION().AsString()));
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001375 }
1376
1377 std::vector<std::pair<unsigned int, unsigned int>> crops;
Mike Kelly51b8c312022-05-24 11:34:02 +01001378 crops.reserve(flatBufferCrops->size() / 2);
1379 for (unsigned int i = 0; i < flatBufferCrops->size() - 1; i += 2)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001380 {
1381 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1382 }
1383
1384 armnn::BatchToSpaceNdDescriptor descriptor;
1385 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1386 descriptor.m_BlockShape =
1387 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1388 descriptor.m_Crops = crops;
1389
1390 auto layerName = GetLayerName(graph, layerIndex);
1391 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1392
1393 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1394 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1395
1396 RegisterInputSlots(graph, layerIndex, layer);
1397 RegisterOutputSlots(graph, layerIndex, layer);
1398}
1399
Finn Williams85d36712021-01-26 22:30:06 +00001400void IDeserializer::DeserializerImpl::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
ruoyan018e7fa232019-02-28 15:09:07 +00001401{
1402 CHECK_LAYERS(graph, 0, layerIndex);
1403
1404 auto inputs = GetInputs(graph, layerIndex);
1405 CHECK_VALID_SIZE(inputs.size(), 1);
1406
1407 auto outputs = GetOutputs(graph, layerIndex);
1408 CHECK_VALID_SIZE(outputs.size(), 1);
1409 auto outputInfo = ToTensorInfo(outputs[0]);
1410
ruoyan015c7ab052019-03-04 14:48:02 +00001411 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001412
1413 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1414 auto serializerDescriptor = serializerLayer->descriptor();
1415
1416 armnn::BatchNormalizationDescriptor descriptor;
1417 descriptor.m_Eps = serializerDescriptor->eps();
1418 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1419
1420 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1421 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1422 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1423 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1424
1425 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1426 mean,
1427 variance,
1428 beta,
1429 gamma,
1430 layerName.c_str());
1431 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1432
1433 RegisterInputSlots(graph, layerIndex, layer);
1434 RegisterOutputSlots(graph, layerIndex, layer);
1435}
1436
mathad01b392e982021-04-07 12:07:30 +01001437void IDeserializer::DeserializerImpl::ParseCast(GraphPtr graph, unsigned int layerIndex)
1438{
1439 CHECK_LAYERS(graph, 0, layerIndex);
1440 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1441 CHECK_LOCATION();
1442 CHECK_VALID_SIZE(inputs.size(), 1);
1443
1444 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1445 CHECK_VALID_SIZE(outputs.size(), 1);
1446
1447 auto layerName = GetLayerName(graph, layerIndex);
1448
1449 IConnectableLayer* layer = m_Network->AddCastLayer(layerName.c_str());
1450
1451 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1452 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1453
1454 RegisterInputSlots(graph, layerIndex, layer);
1455 RegisterOutputSlots(graph, layerIndex, layer);
1456}
1457
/// Deserializes a Constant layer (no inputs, exactly one output).
/// For models written before weights-layout-scheme version 1, the constant payload is
/// converted from the legacy depthwise layout [M,I,H,W] to the current [1,H,W,I*M]
/// layout before the ConstantLayer is added to the network.
void IDeserializer::DeserializerImpl::ParseConstant(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    // Constant layers have no inputs; only the single output is validated.
    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
    auto serializerInput = serializerLayer->input();

    armnn::ConstTensor input = ToConstTensor(serializerInput);
    IConnectableLayer* layer;

    // Required for when Constant Layer is used as an inputs to DepthwiseConvolution2d Layer.
    // Running a model that was created before weights layout scheme version was added to our flatbuffers
    // file ensuring older models can still be read and executed. featureVersion weights layout scheme 1
    // indicates a change in the depthwise weights layout within ArmNN from [M,I,H,W] --> [1,H,W,I*M]
    // NOTE(review): this branch rewrites EVERY constant in a pre-scheme-1 model, not only
    // depthwise weights — presumably intentional for such legacy models; confirm if touched.
    if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
    {
        // Permute weights [ M, I, H, W ] --> [ 1, H, W, I*M ]
        // Step1: [ M, I, H, W ] --> [ H, W, I, M]
        PermutationVector permutationVector = { 3, 2, 0, 1 };
        armnn::TensorInfo weightsInfo = input.GetInfo();
        // Scratch buffer that receives the permuted element data.
        std::unique_ptr<unsigned char[]> permuteBuffer(new unsigned char[weightsInfo.GetNumBytes()]);
        weightsInfo = armnnUtils::Permuted(weightsInfo, permutationVector);
        armnnUtils::Permute(weightsInfo.GetShape(), permutationVector,
                            input.GetMemoryArea(), permuteBuffer.get(),
                            GetDataTypeSize(weightsInfo.GetDataType()));

        // Step2: Reshape [ H, W, I, M] --> [ 1, H, W, I*M ]
        auto weightsShape = weightsInfo.GetShape();
        weightsInfo.SetShape({1,
                              weightsShape[0],
                              weightsShape[1],
                              weightsShape[2]*weightsShape[3]});
        weightsInfo.SetConstant(true);

        // ConstTensor does not own its memory; it points into permuteBuffer here.
        armnn::ConstTensor weightsPermuted(weightsInfo, permuteBuffer.get());

        layer = m_Network->AddConstantLayer(weightsPermuted, layerName.c_str());

        layer->GetOutputSlot(0).SetTensorInfo(weightsPermuted.GetInfo());

        RegisterOutputSlots(graph, layerIndex, layer);

        // Early return: the legacy path registers its own output slots above.
        return;
    }
    else
    {
        // Current-format model: add the constant as-is and mark the output info constant.
        layer = m_Network->AddConstantLayer(input, layerName.c_str());

        armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
        outputTensorInfo.SetConstant(true);
        layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
    }

    RegisterOutputSlots(graph, layerIndex, layer);
}
1519
Finn Williams85d36712021-01-26 22:30:06 +00001520void IDeserializer::DeserializerImpl::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001521{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001522 CHECK_LAYERS(graph, 0, layerIndex);
1523 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001524 CHECK_LOCATION();
Mike Kellya0766c32019-02-19 17:22:07 +00001525
Derek Lamberti8ddae332019-02-21 16:29:43 +00001526 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001527 CHECK_VALID_SIZE(outputs.size(), 1);
1528
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001529 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
1530
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001531 auto layerName = GetLayerName(graph, layerIndex);
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001532 auto flatbufferDescriptor = flatBufferLayer->descriptor();
Mike Kellya0766c32019-02-19 17:22:07 +00001533
1534 armnn::Convolution2dDescriptor descriptor;
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001535 descriptor.m_PadLeft = flatbufferDescriptor->padLeft();
1536 descriptor.m_PadRight = flatbufferDescriptor->padRight();
1537 descriptor.m_PadTop = flatbufferDescriptor->padTop();
1538 descriptor.m_PadBottom = flatbufferDescriptor->padBottom();
1539 descriptor.m_StrideX = flatbufferDescriptor->strideX();
1540 descriptor.m_StrideY = flatbufferDescriptor->strideY();;
1541 descriptor.m_DilationX = flatbufferDescriptor->dilationX();
1542 descriptor.m_DilationY = flatbufferDescriptor->dilationY();;
1543 descriptor.m_BiasEnabled = flatbufferDescriptor->biasEnabled();;
1544 descriptor.m_DataLayout = ToDataLayout(flatbufferDescriptor->dataLayout());
Mike Kellya0766c32019-02-19 17:22:07 +00001545
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001546 armnn::IConnectableLayer* layer;
1547 std::vector<unsigned int> ignoreSlots {};
Mike Kellya0766c32019-02-19 17:22:07 +00001548
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001549 armnn::ConstTensor biasTensor;
1550 // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
1551 // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
1552 if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
Mike Kellya0766c32019-02-19 17:22:07 +00001553 {
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001554 // If the model stores weights and biases as members of the layer we have to read them from there
1555 // but add them to their own ConstantLayer for compatibility
1556 CHECK_VALID_SIZE(inputs.size(), 1);
1557
1558 layer = m_Network->AddConvolution2dLayer(descriptor,
1559 layerName.c_str());
1560
1561 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1562 auto weightsLayer = m_Network->AddConstantLayer(weightsTensor);
1563 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
1564 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsTensor.GetInfo());
1565 ignoreSlots.emplace_back(1u);
1566
1567 if (descriptor.m_BiasEnabled)
1568 {
1569 biasTensor = ToConstTensor(flatBufferLayer->biases());
1570 auto biasLayer = m_Network->AddConstantLayer(biasTensor);
1571 biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
1572 biasLayer->GetOutputSlot(0).SetTensorInfo(biasTensor.GetInfo());
1573 ignoreSlots.emplace_back(2u);
1574 }
Mike Kellya0766c32019-02-19 17:22:07 +00001575 }
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001576 else
1577 {
1578 layer = m_Network->AddConvolution2dLayer(descriptor,
1579 layerName.c_str());
1580 uint32_t numInputs = descriptor.GetNumInputs();
1581 CHECK_VALID_SIZE(inputs.size(), numInputs);
1582 }
1583
Mike Kellya0766c32019-02-19 17:22:07 +00001584 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1585 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1586
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001587 RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
Derek Lamberti8ddae332019-02-21 16:29:43 +00001588 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001589}
1590
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001591void IDeserializer::DeserializerImpl::ParseConvolution3d(GraphPtr graph, unsigned int layerIndex)
1592{
1593 CHECK_LAYERS(graph, 0, layerIndex);
1594 auto inputs = GetInputs(graph, layerIndex);
1595 CHECK_LOCATION();
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001596
1597 auto outputs = GetOutputs(graph, layerIndex);
1598 CHECK_VALID_SIZE(outputs.size(), 1);
1599
1600 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution3dLayer();
1601 auto layerName = GetLayerName(graph, layerIndex);
1602 auto serializerDescriptor = serializerLayer->descriptor();
1603
1604 armnn::Convolution3dDescriptor descriptor;
1605 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1606 descriptor.m_PadRight = serializerDescriptor->padRight();
1607 descriptor.m_PadTop = serializerDescriptor->padTop();
1608 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1609 descriptor.m_PadFront = serializerDescriptor->padFront();
1610 descriptor.m_PadBack = serializerDescriptor->padBack();
1611 descriptor.m_StrideX = serializerDescriptor->strideX();
1612 descriptor.m_StrideY = serializerDescriptor->strideY();
1613 descriptor.m_StrideZ = serializerDescriptor->strideZ();
1614 descriptor.m_DilationX = serializerDescriptor->dilationX();
1615 descriptor.m_DilationY = serializerDescriptor->dilationY();
1616 descriptor.m_DilationZ = serializerDescriptor->dilationZ();
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001617 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001618 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1619
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001620 uint32_t numInputs = descriptor.GetNumInputs();
1621 CHECK_VALID_SIZE(inputs.size(), numInputs);
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001622
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001623 IConnectableLayer* layer = m_Network->AddConvolution3dLayer(descriptor, layerName.c_str());
1624
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001625 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1626 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1627
1628 RegisterInputSlots(graph, layerIndex, layer);
1629 RegisterOutputSlots(graph, layerIndex, layer);
1630}
1631
Finn Williams85d36712021-01-26 22:30:06 +00001632void IDeserializer::DeserializerImpl::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001633{
1634 CHECK_LAYERS(graph, 0, layerIndex);
1635
1636 auto inputs = GetInputs(graph, layerIndex);
1637 CHECK_VALID_SIZE(inputs.size(), 1);
1638
1639 auto outputs = GetOutputs(graph, layerIndex);
1640 CHECK_VALID_SIZE(outputs.size(), 1);
1641
1642 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1643
1644 armnn::DepthToSpaceDescriptor descriptor;
1645 descriptor.m_BlockSize = fbDescriptor->blockSize();
1646 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1647
1648 auto layerName = GetLayerName(graph, layerIndex);
1649 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1650
1651 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1652 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1653
1654 RegisterInputSlots(graph, layerIndex, layer);
1655 RegisterOutputSlots(graph, layerIndex, layer);
1656}
1657
Finn Williams85d36712021-01-26 22:30:06 +00001658void IDeserializer::DeserializerImpl::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001659{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001660 CHECK_LAYERS(graph, 0, layerIndex);
1661 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001662 CHECK_LOCATION();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001663
Derek Lamberti8ddae332019-02-21 16:29:43 +00001664 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001665 CHECK_VALID_SIZE(outputs.size(), 1);
1666
Derek Lamberti8ddae332019-02-21 16:29:43 +00001667 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001668 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001669 auto serializerDescriptor = serializerLayer->descriptor();
1670
1671 armnn::DepthwiseConvolution2dDescriptor descriptor;
Cathal Corbett06902652022-04-14 17:55:11 +01001672 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1673 descriptor.m_PadRight = serializerDescriptor->padRight();
1674 descriptor.m_PadTop = serializerDescriptor->padTop();
1675 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1676 descriptor.m_StrideX = serializerDescriptor->strideX();
1677 descriptor.m_StrideY = serializerDescriptor->strideY();
1678 descriptor.m_DilationX = serializerDescriptor->dilationX();
1679 descriptor.m_DilationY = serializerDescriptor->dilationY();
1680 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
1681 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001682
Jan Eilers53ef7952021-06-02 12:01:25 +01001683 IConnectableLayer* layer;
Cathal Corbett06902652022-04-14 17:55:11 +01001684 std::vector<unsigned int> ignoreSlots {};
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001685
Cathal Corbett06902652022-04-14 17:55:11 +01001686 // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
1687 // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
1688 if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001689 {
Cathal Corbett06902652022-04-14 17:55:11 +01001690 CHECK_VALID_SIZE(inputs.size(), 1);
Jan Eilers53ef7952021-06-02 12:01:25 +01001691
Cathal Corbett06902652022-04-14 17:55:11 +01001692 // If the model stores weights and biases as members of the layer we have to read them from there
1693 // but add them to their own ConstantLayer for compatibility
1694 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1695 ignoreSlots.emplace_back(1u);
Jan Eilers53ef7952021-06-02 12:01:25 +01001696
1697 layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
Jan Eilers53ef7952021-06-02 12:01:25 +01001698 layerName.c_str());
Cathal Corbett06902652022-04-14 17:55:11 +01001699
1700 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
1701 if (descriptor.m_BiasEnabled)
1702 {
1703 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
1704 ignoreSlots.emplace_back(2u);
1705
1706 auto biasLayer = m_Network->AddConstantLayer(biases);
1707 biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
1708 biasLayer->GetOutputSlot(0).SetTensorInfo(biases.GetInfo());
1709 }
1710
1711 if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
1712 {
1713 // Permute weights [ H, W, M, I ] --> [ 1, H, W, I*M ]
1714 // Step1: [ M, I, H, W ] --> [ H, W, I, M]
1715 PermutationVector permutationVector = { 3, 2, 0, 1 };
1716 armnn::TensorInfo weightsInfo = weights.GetInfo();
1717 std::unique_ptr<unsigned char[]> permuteBuffer(new unsigned char[weightsInfo.GetNumBytes()]);
1718 weightsInfo = armnnUtils::Permuted(weightsInfo, permutationVector);
1719 armnnUtils::Permute(weightsInfo.GetShape(), permutationVector,
1720 weights.GetMemoryArea(), permuteBuffer.get(),
1721 GetDataTypeSize(weightsInfo.GetDataType()));
1722
1723 // Step2: Reshape [ H, W, I, M] --> [ 1, H, W, I*M ]
1724 auto weightsShape = weightsInfo.GetShape();
1725 weightsInfo.SetShape({1,
1726 weightsShape[0],
1727 weightsShape[1],
1728 weightsShape[2]*weightsShape[3]});
1729
1730 armnn::ConstTensor weightsPermuted(weightsInfo, permuteBuffer.get());
1731
1732 auto weightsLayer = m_Network->AddConstantLayer(weightsPermuted);
1733 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
1734 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsPermuted.GetInfo());
1735 }
1736 else
1737 {
1738 auto weightsLayer = m_Network->AddConstantLayer(weights);
1739 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
1740 weightsLayer->GetOutputSlot(0).SetTensorInfo(weights.GetInfo());
1741 }
Jan Eilers53ef7952021-06-02 12:01:25 +01001742 }
1743 else
1744 {
1745 layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
Jan Eilers53ef7952021-06-02 12:01:25 +01001746 layerName.c_str());
Cathal Corbett06902652022-04-14 17:55:11 +01001747 uint32_t numInputs = descriptor.GetNumInputs();
1748 CHECK_VALID_SIZE(inputs.size(), numInputs);
Jan Eilers53ef7952021-06-02 12:01:25 +01001749 }
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001750
1751 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1752 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1753
Cathal Corbett06902652022-04-14 17:55:11 +01001754 RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
Derek Lamberti8ddae332019-02-21 16:29:43 +00001755 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001756}
1757
Finn Williams85d36712021-01-26 22:30:06 +00001758void IDeserializer::DeserializerImpl::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001759{
1760 CHECK_LAYERS(graph, 0, layerIndex);
1761 auto inputs = GetInputs(graph, layerIndex);
1762 CHECK_LOCATION();
1763 CHECK_VALID_SIZE(inputs.size(), 2);
1764
1765 auto outputs = GetOutputs(graph, layerIndex);
1766 CHECK_VALID_SIZE(outputs.size(), 4);
1767
1768 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1769 auto layerName = GetLayerName(graph, layerIndex);
1770 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1771
1772 armnn::DetectionPostProcessDescriptor descriptor;
1773 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1774 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1775 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1776 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1777 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1778 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1779 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1780 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1781 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1782 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1783 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1784
1785 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1786
1787 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1788 anchors,
1789 layerName.c_str());
1790
1791 for (unsigned int i = 0; i < 4; i++)
1792 {
1793 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1794 }
1795
1796 RegisterInputSlots(graph, layerIndex, layer);
1797 RegisterOutputSlots(graph, layerIndex, layer);
1798}
1799
Finn Williams85d36712021-01-26 22:30:06 +00001800void IDeserializer::DeserializerImpl::ParseDivision(GraphPtr graph, unsigned int layerIndex)
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001801{
1802 CHECK_LAYERS(graph, 0, layerIndex);
1803 auto inputs = GetInputs(graph, layerIndex);
1804 CHECK_LOCATION();
1805 CHECK_VALID_SIZE(inputs.size(), 2);
1806
1807 auto outputs = GetOutputs(graph, layerIndex);
1808 CHECK_VALID_SIZE(outputs.size(), 1);
1809
1810 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00001811 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Div);
1812 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001813
1814 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1815 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1816
1817 RegisterInputSlots(graph, layerIndex, layer);
1818 RegisterOutputSlots(graph, layerIndex, layer);
1819}
1820
Finn Williams85d36712021-01-26 22:30:06 +00001821void IDeserializer::DeserializerImpl::ParseEqual(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001822{
1823 CHECK_LAYERS(graph, 0, layerIndex);
1824 auto inputs = GetInputs(graph, layerIndex);
1825 CHECK_LOCATION();
1826 CHECK_VALID_SIZE(inputs.size(), 2);
1827
1828 auto outputs = GetOutputs(graph, layerIndex);
1829 CHECK_VALID_SIZE(outputs.size(), 1);
1830
1831 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001832 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1833 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001834
1835 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1836 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1837
1838 RegisterInputSlots(graph, layerIndex, layer);
1839 RegisterOutputSlots(graph, layerIndex, layer);
1840}
1841
Finn Williams85d36712021-01-26 22:30:06 +00001842void IDeserializer::DeserializerImpl::ParseFill(GraphPtr graph, unsigned int layerIndex)
Keith Davis300ad562020-06-04 16:34:23 +01001843{
1844 CHECK_LAYERS(graph, 0, layerIndex);
1845 auto inputs = GetInputs(graph, layerIndex);
1846 CHECK_LOCATION();
1847 CHECK_VALID_SIZE(inputs.size(), 1);
1848
1849 auto outputs = GetOutputs(graph, layerIndex);
1850 CHECK_VALID_SIZE(outputs.size(), 1);
1851
1852 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armaganfd0cae32021-11-08 17:18:31 +00001853 armnn::FillDescriptor descriptor;
1854 descriptor.m_Value = graph->layers()->Get(layerIndex)->layer_as_FillLayer()->descriptor()->value();
Keith Davis300ad562020-06-04 16:34:23 +01001855 IConnectableLayer* layer = m_Network->AddFillLayer(descriptor, layerName.c_str());
1856
1857 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1858 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1859
1860 RegisterInputSlots(graph, layerIndex, layer);
1861 RegisterOutputSlots(graph, layerIndex, layer);
1862}
1863
Finn Williams85d36712021-01-26 22:30:06 +00001864void IDeserializer::DeserializerImpl::ParseGreater(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001865{
1866 CHECK_LAYERS(graph, 0, layerIndex);
1867 auto inputs = GetInputs(graph, layerIndex);
1868 CHECK_LOCATION();
1869 CHECK_VALID_SIZE(inputs.size(), 2);
1870
1871 auto outputs = GetOutputs(graph, layerIndex);
1872 CHECK_VALID_SIZE(outputs.size(), 1);
1873
1874 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001875 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1876 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001877
1878 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1879 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1880
1881 RegisterInputSlots(graph, layerIndex, layer);
1882 RegisterOutputSlots(graph, layerIndex, layer);
1883}
1884
Finn Williams85d36712021-01-26 22:30:06 +00001885void IDeserializer::DeserializerImpl::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001886{
1887 CHECK_LAYERS(graph, 0, layerIndex);
1888
1889 auto inputs = GetInputs(graph, layerIndex);
1890 CHECK_VALID_SIZE(inputs.size(), 1);
1891
1892 auto outputs = GetOutputs(graph, layerIndex);
1893 CHECK_VALID_SIZE(outputs.size(), 1);
1894
1895 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1896 auto fbDescriptor = fbLayer->descriptor();
1897
1898 armnn::InstanceNormalizationDescriptor descriptor;
1899 descriptor.m_Gamma = fbDescriptor->gamma();
1900 descriptor.m_Beta = fbDescriptor->beta();
1901 descriptor.m_Eps = fbDescriptor->eps();
1902 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1903
1904 const std::string layerName = GetLayerName(graph, layerIndex);
1905 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1906
1907 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1908 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1909
1910 RegisterInputSlots(graph, layerIndex, layer);
1911 RegisterOutputSlots(graph, layerIndex, layer);
1912}
1913
Finn Williams85d36712021-01-26 22:30:06 +00001914void IDeserializer::DeserializerImpl::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001915{
1916 CHECK_LAYERS(graph, 0, layerIndex);
1917
1918 auto inputs = GetInputs(graph, layerIndex);
1919 CHECK_VALID_SIZE(inputs.size(), 1);
1920
1921 auto outputs = GetOutputs(graph, layerIndex);
1922 CHECK_VALID_SIZE(outputs.size(), 1);
1923 auto outputInfo = ToTensorInfo(outputs[0]);
1924
1925 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1926 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1927
1928 auto layerName = GetLayerName(graph, layerIndex);
1929 armnn::L2NormalizationDescriptor descriptor;
1930 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001931 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001932
1933 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1934 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1935
1936 RegisterInputSlots(graph, layerIndex, layer);
1937 RegisterOutputSlots(graph, layerIndex, layer);
1938}
1939
Finn Williams85d36712021-01-26 22:30:06 +00001940void IDeserializer::DeserializerImpl::ParseLogicalBinary(GraphPtr graph, unsigned int layerIndex)
James Conroyaba90cd2020-11-06 16:28:18 +00001941{
1942 CHECK_LAYERS(graph, 0, layerIndex);
1943 CHECK_LOCATION();
1944
1945 auto inputs = GetInputs(graph, layerIndex);
1946 CHECK_VALID_SIZE(inputs.size(), 2);
1947
1948 auto outputs = GetOutputs(graph, layerIndex);
1949 CHECK_VALID_SIZE(outputs.size(), 1);
1950
1951 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer();
1952 auto fbDescriptor = fbLayer->descriptor();
1953
1954 armnn::LogicalBinaryDescriptor descriptor;
1955 descriptor.m_Operation = ToLogicalBinaryOperation(fbDescriptor->operation());
1956
1957 const std::string& layerName = GetLayerName(graph, layerIndex);
1958 IConnectableLayer* layer = m_Network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1959
1960 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1961 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1962
1963 RegisterInputSlots(graph, layerIndex, layer);
1964 RegisterOutputSlots(graph, layerIndex, layer);
1965}
1966
Finn Williams85d36712021-01-26 22:30:06 +00001967void IDeserializer::DeserializerImpl::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan26257852019-10-14 13:00:47 +01001968{
1969 CHECK_LAYERS(graph, 0, layerIndex);
1970
Finn Williams85d36712021-01-26 22:30:06 +00001971 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001972 CHECK_VALID_SIZE(inputs.size(), 1);
1973
Finn Williams85d36712021-01-26 22:30:06 +00001974 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001975 CHECK_VALID_SIZE(outputs.size(), 1);
1976
1977 armnn::LogSoftmaxDescriptor descriptor;
1978 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1979 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1980 auto layerName = GetLayerName(graph, layerIndex);
1981
1982 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1983
1984 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1985 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1986
1987 RegisterInputSlots(graph, layerIndex, layer);
1988 RegisterOutputSlots(graph, layerIndex, layer);
1989}
1990
Finn Williams85d36712021-01-26 22:30:06 +00001991void IDeserializer::DeserializerImpl::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001992{
1993 CHECK_LAYERS(graph, 0, layerIndex);
1994 auto inputs = GetInputs(graph, layerIndex);
1995 CHECK_LOCATION();
1996 CHECK_VALID_SIZE(inputs.size(), 2);
1997
1998 auto outputs = GetOutputs(graph, layerIndex);
1999 CHECK_VALID_SIZE(outputs.size(), 1);
2000
2001 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00002002 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Minimum);
2003 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00002004
2005 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2006 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2007
2008 RegisterInputSlots(graph, layerIndex, layer);
2009 RegisterOutputSlots(graph, layerIndex, layer);
2010}
2011
Finn Williams85d36712021-01-26 22:30:06 +00002012void IDeserializer::DeserializerImpl::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00002013{
2014 CHECK_LAYERS(graph, 0, layerIndex);
2015 auto inputs = GetInputs(graph, layerIndex);
2016 CHECK_LOCATION();
2017 CHECK_VALID_SIZE(inputs.size(), 2);
2018
2019 auto outputs = GetOutputs(graph, layerIndex);
2020 CHECK_VALID_SIZE(outputs.size(), 1);
2021
2022 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00002023 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Maximum);
2024 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00002025
2026 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2027 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2028
2029 RegisterInputSlots(graph, layerIndex, layer);
2030 RegisterOutputSlots(graph, layerIndex, layer);
2031}
2032
Jim Flynne242f2d2019-05-22 14:24:13 +01002033const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
2034 unsigned int layerIndex)
2035{
2036 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
2037
2038 switch (layerType)
2039 {
2040 case Layer::Layer_ConcatLayer:
2041 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
2042 case Layer::Layer_MergerLayer:
2043 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
2044 default:
2045 throw armnn::Exception("unknown layer type, should be concat or merger");
2046 }
2047}
Simon Obute51f67772021-09-03 15:50:13 +01002048void IDeserializer::DeserializerImpl::ParseChannelShuffle(GraphPtr graph, unsigned int layerIndex)
2049{
2050 CHECK_LAYERS(graph, 0, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01002051
Simon Obute51f67772021-09-03 15:50:13 +01002052 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2053 CHECK_VALID_SIZE(inputs.size(), 1);
2054
2055 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2056 CHECK_VALID_SIZE(outputs.size(), 1);
2057
2058 armnn::ChannelShuffleDescriptor descriptor;
2059 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->axis();
2060 descriptor.m_NumGroups =
2061 graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->numGroups();
2062
2063 auto layerName = GetLayerName(graph, layerIndex);
2064 IConnectableLayer* layer = m_Network->AddChannelShuffleLayer(descriptor, layerName.c_str());
2065
2066 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2067 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2068
2069 RegisterInputSlots(graph, layerIndex, layer);
2070 RegisterOutputSlots(graph, layerIndex, layer);
2071}
Finn Williams85d36712021-01-26 22:30:06 +00002072void IDeserializer::DeserializerImpl::ParseComparison(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01002073{
2074 CHECK_LAYERS(graph, 0, layerIndex);
2075 CHECK_LOCATION();
2076
2077 auto inputs = GetInputs(graph, layerIndex);
2078 CHECK_VALID_SIZE(inputs.size(), 2);
2079
2080 auto outputs = GetOutputs(graph, layerIndex);
2081 CHECK_VALID_SIZE(outputs.size(), 1);
2082
2083 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
2084 auto fbDescriptor = fbLayer->descriptor();
2085
2086 armnn::ComparisonDescriptor descriptor;
2087 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
2088
2089 const std::string& layerName = GetLayerName(graph, layerIndex);
2090 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
2091
2092 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2093 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2094
2095 RegisterInputSlots(graph, layerIndex, layer);
2096 RegisterOutputSlots(graph, layerIndex, layer);
2097}
2098
Mike Kelly3ec30772023-03-08 13:47:17 +00002099void IDeserializer::DeserializerImpl::ParseElementwiseBinary(GraphPtr graph, unsigned int layerIndex)
2100{
2101 CHECK_LAYERS(graph, 0, layerIndex);
2102 CHECK_LOCATION();
2103
2104 auto inputs = GetInputs(graph, layerIndex);
2105 CHECK_VALID_SIZE(inputs.size(), 2);
2106
2107 auto outputs = GetOutputs(graph, layerIndex);
2108 CHECK_VALID_SIZE(outputs.size(), 1);
2109
2110 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseBinaryLayer();
2111 auto fbDescriptor = fbLayer->descriptor();
2112
2113 armnn::ElementwiseBinaryDescriptor descriptor;
2114 descriptor.m_Operation = ToElementwiseBinaryOperation(fbDescriptor->operation());
2115
2116 const std::string& layerName = GetLayerName(graph, layerIndex);
2117 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
2118
2119 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2120 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2121
2122 RegisterInputSlots(graph, layerIndex, layer);
2123 RegisterOutputSlots(graph, layerIndex, layer);
2124}
2125
Finn Williams85d36712021-01-26 22:30:06 +00002126void IDeserializer::DeserializerImpl::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
josh minor4a3c6102020-01-06 16:40:46 -06002127{
2128 CHECK_LAYERS(graph, 0, layerIndex);
2129 CHECK_LOCATION();
2130
2131 auto inputs = GetInputs(graph, layerIndex);
2132 CHECK_VALID_SIZE(inputs.size(), 1);
2133
2134 auto outputs = GetOutputs(graph, layerIndex);
2135 CHECK_VALID_SIZE(outputs.size(), 1);
2136
2137 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
2138 auto fbDescriptor = fbLayer->descriptor();
2139
2140 armnn::ElementwiseUnaryDescriptor descriptor;
Mike Kelly3ec30772023-03-08 13:47:17 +00002141 descriptor.m_Operation = ToElementwiseUnaryOperation(fbDescriptor->operation());
josh minor4a3c6102020-01-06 16:40:46 -06002142
2143 const std::string& layerName = GetLayerName(graph, layerIndex);
2144 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
2145
2146 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2147 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2148
2149 RegisterInputSlots(graph, layerIndex, layer);
2150 RegisterOutputSlots(graph, layerIndex, layer);
2151}
2152
Finn Williams85d36712021-01-26 22:30:06 +00002153void IDeserializer::DeserializerImpl::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00002154{
2155 CHECK_LAYERS(graph, 0, layerIndex);
2156 CHECK_LOCATION();
2157
2158 auto outputs = GetOutputs(graph, layerIndex);
2159 CHECK_VALID_SIZE(outputs.size(), 1);
2160
Jim Flynnac25a1b2019-02-28 10:40:49 +00002161 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01002162 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
2163 unsigned int numViews = originsDescriptor->numViews();
2164 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00002165
2166 // can now check the number of inputs == number of views
2167 auto inputs = GetInputs(graph, layerIndex);
2168 CHECK_VALID_SIZE(inputs.size(), numViews);
2169
2170 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01002171 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00002172 for (unsigned int v = 0; v < numViews; ++v)
2173 {
2174 auto originPtr = originsPtr->Get(v);
2175 for (unsigned int d = 0; d < numDimensions; ++d)
2176 {
2177 uint32_t value = originPtr->data()->Get(d);
2178 descriptor.SetViewOriginCoord(v, d, value);
2179 }
2180 }
Jim Flynne242f2d2019-05-22 14:24:13 +01002181 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00002182
Jim Flynn906f9462019-05-10 13:55:21 +01002183 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00002184 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2185 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2186
2187 RegisterInputSlots(graph, layerIndex, layer);
2188 RegisterOutputSlots(graph, layerIndex, layer);
2189}
2190
Finn Williams85d36712021-01-26 22:30:06 +00002191void IDeserializer::DeserializerImpl::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00002192{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002193 CHECK_LAYERS(graph, 0, layerIndex);
2194 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00002195 CHECK_LOCATION();
2196 CHECK_VALID_SIZE(inputs.size(), 2);
2197
Derek Lamberti8ddae332019-02-21 16:29:43 +00002198 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00002199 CHECK_VALID_SIZE(outputs.size(), 1);
2200
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002201 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00002202 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Mul);
2203 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00002204
2205 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2206 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2207
Derek Lamberti8ddae332019-02-21 16:29:43 +00002208 RegisterInputSlots(graph, layerIndex, layer);
2209 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00002210}
2211
Finn Williams85d36712021-01-26 22:30:06 +00002212void IDeserializer::DeserializerImpl::ParseFloor(GraphPtr graph, unsigned int layerIndex)
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00002213{
2214 CHECK_LAYERS(graph, 0, layerIndex);
2215 CHECK_LOCATION();
2216
2217 auto inputs = GetInputs(graph, layerIndex);
2218 CHECK_VALID_SIZE(inputs.size(), 1);
2219
2220 auto outputs = GetOutputs(graph, layerIndex);
2221 CHECK_VALID_SIZE(outputs.size(), 1);
2222
2223 auto layerName = GetLayerName(graph, layerIndex);
2224
2225 armnn::IConnectableLayer* layer;
2226
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00002227 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00002228
2229 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2230 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2231
2232 RegisterInputSlots(graph, layerIndex, layer);
2233 RegisterOutputSlots(graph, layerIndex, layer);
2234}
2235
/// Deserializes a FullyConnected layer.
///
/// Two serialization schemes are supported, selected by the feature version
/// m_ConstTensorsAsInputs:
///  - Legacy (<= 0): weights (and bias, if enabled) were stored as members of
///    the serialized layer; they are wrapped in ConstantLayers here and
///    connected to input slots 1 and 2, which are then excluded from normal
///    input-slot registration via ignoreSlots.
///  - Current (> 0): weights/bias arrive as ordinary layer inputs.
void IDeserializer::DeserializerImpl::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
    fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
    fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
    fullyConnectedDescriptor.m_ConstantWeights = flatBufferDescriptor->constantWeights();

    armnn::IConnectableLayer* layer;
    std::vector<unsigned int> ignoreSlots {};

    // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
    // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
    if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
    {
        // If the model stores weights and biases as members of the layer we have to read them from there
        // but add them to their own ConstantLayer for compatibility
        CHECK_VALID_SIZE(inputs.size(), 1);
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  layerName.c_str());

        // Wrap the legacy weights in a ConstantLayer feeding input slot 1.
        armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
        auto weightsLayer = m_Network->AddConstantLayer(weightsTensor);
        weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
        weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsTensor.GetInfo());
        // Slot 1 is connected here already, so skip it during input registration.
        ignoreSlots.emplace_back(1u);

        if (fullyConnectedDescriptor.m_BiasEnabled)
        {
            // Same treatment for the legacy bias tensor on input slot 2.
            armnn::ConstTensor biasTensor = ToConstTensor(flatBufferLayer->biases());
            auto biasLayer = m_Network->AddConstantLayer(biasTensor);
            biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
            biasLayer->GetOutputSlot(0).SetTensorInfo(biasTensor.GetInfo());
            ignoreSlots.emplace_back(2u);
        }
    }
    else
    {
        // Weights (and bias, if enabled) come in through regular input slots.
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  layerName.c_str());
        uint32_t numInputs = fullyConnectedDescriptor.GetNumInputs();
        CHECK_VALID_SIZE(inputs.size(), numInputs);
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2296
Finn Williams85d36712021-01-26 22:30:06 +00002297void IDeserializer::DeserializerImpl::ParsePad(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002298{
2299 CHECK_LAYERS(graph, 0, layerIndex);
2300
Finn Williams85d36712021-01-26 22:30:06 +00002301 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002302 CHECK_VALID_SIZE(inputs.size(), 1);
2303
Finn Williams85d36712021-01-26 22:30:06 +00002304 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002305 CHECK_VALID_SIZE(outputs.size(), 1);
2306
2307 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
2308 auto flatBufferPadList = flatBufferDescriptor->padList();
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +01002309 auto paddingMode = flatBufferDescriptor->paddingMode();
David Monahan34757812019-06-19 11:47:21 +01002310 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002311
Mike Kelly51b8c312022-05-24 11:34:02 +01002312 if (flatBufferPadList->size() % 2 != 0)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002313 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002314 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2315 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002316 }
2317
2318 std::vector<std::pair<unsigned int, unsigned int>> padList;
Mike Kelly51b8c312022-05-24 11:34:02 +01002319 padList.reserve(flatBufferPadList->size() / 2);
2320 for (unsigned int i = 0; i < flatBufferPadList->size() - 1; i += 2)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002321 {
2322 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2323 }
2324
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +01002325 armnn::PadDescriptor descriptor(padList, padValue, ToPaddingMode(paddingMode));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002326
2327 auto layerName = GetLayerName(graph, layerIndex);
2328 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
2329
2330 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2331 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2332
2333 RegisterInputSlots(graph, layerIndex, layer);
2334 RegisterOutputSlots(graph, layerIndex, layer);
2335}
2336
Finn Williams85d36712021-01-26 22:30:06 +00002337void IDeserializer::DeserializerImpl::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002338{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002339 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002340
2341 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00002342 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002343
Derek Lamberti8ddae332019-02-21 16:29:43 +00002344 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002345 CHECK_VALID_SIZE(inputs.size(), 1);
2346
Derek Lamberti8ddae332019-02-21 16:29:43 +00002347 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002348 CHECK_VALID_SIZE(outputs.size(), 1);
2349 auto outputInfo = ToTensorInfo(outputs[0]);
2350
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002351 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly51b8c312022-05-24 11:34:02 +01002352 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->size()));
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002353
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002354 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002355 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2356
Derek Lamberti8ddae332019-02-21 16:29:43 +00002357 RegisterInputSlots(graph, layerIndex, layer);
2358 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002359}
2360
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002361armnn::Pooling2dDescriptor IDeserializer::DeserializerImpl::GetPooling2dDescriptor(Pooling2dDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002362 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002363{
Jan Eilers8eb25602020-03-09 12:13:48 +00002364 IgnoreUnused(layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002365 armnn::Pooling2dDescriptor desc;
2366
2367 switch (pooling2dDesc->poolType())
2368 {
2369 case PoolingAlgorithm_Average:
2370 {
2371 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002372 break;
2373 }
2374 case PoolingAlgorithm_Max:
2375 {
2376 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002377 break;
2378 }
Teresa Charlin4c600de2021-03-11 21:59:43 +00002379 case PoolingAlgorithm_L2:
2380 {
2381 desc.m_PoolType = armnn::PoolingAlgorithm::L2;
2382 break;
2383 }
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002384 default:
2385 {
Jim Flynnb53f52a2023-09-17 10:00:25 +01002386 throw ParseException("Unsupported pooling algorithm");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002387 }
2388 }
2389
2390 switch (pooling2dDesc->outputShapeRounding())
2391 {
2392 case OutputShapeRounding_Floor:
2393 {
2394 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2395 break;
2396 }
2397 case OutputShapeRounding_Ceiling:
2398 {
2399 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
2400 break;
2401 }
2402 default:
2403 {
Jim Flynnb53f52a2023-09-17 10:00:25 +01002404 throw ParseException("Unsupported output shape rounding");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002405 }
2406 }
2407
2408 switch (pooling2dDesc->paddingMethod())
2409 {
2410 case PaddingMethod_Exclude:
2411 {
2412 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2413 break;
2414 }
2415 case PaddingMethod_IgnoreValue:
2416 {
2417 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
2418 break;
2419 }
2420 default:
2421 {
Jim Flynnb53f52a2023-09-17 10:00:25 +01002422 throw ParseException("Unsupported padding method");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002423 }
2424 }
2425
2426 switch (pooling2dDesc->dataLayout())
2427 {
2428 case DataLayout_NCHW:
2429 {
2430 desc.m_DataLayout = armnn::DataLayout::NCHW;
2431 break;
2432 }
2433 case DataLayout_NHWC:
2434 {
2435 desc.m_DataLayout = armnn::DataLayout::NHWC;
2436 break;
2437 }
2438 default:
2439 {
Jim Flynnb53f52a2023-09-17 10:00:25 +01002440 throw ParseException("Unsupported data layout");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002441 }
2442 }
2443
2444 desc.m_PadRight = pooling2dDesc->padRight();
2445 desc.m_PadLeft = pooling2dDesc->padLeft();
2446 desc.m_PadBottom = pooling2dDesc->padBottom();
2447 desc.m_PadTop = pooling2dDesc->padTop();
2448 desc.m_StrideX = pooling2dDesc->strideX();
2449 desc.m_StrideY = pooling2dDesc->strideY();
2450 desc.m_PoolWidth = pooling2dDesc->poolWidth();
2451 desc.m_PoolHeight = pooling2dDesc->poolHeight();
2452
2453 return desc;
2454}
2455
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002456armnn::Pooling3dDescriptor IDeserializer::DeserializerImpl::GetPooling3dDescriptor(Pooling3dDescriptor pooling3dDesc,
2457 unsigned int layerIndex)
2458{
2459 IgnoreUnused(layerIndex);
2460 armnn::Pooling3dDescriptor desc;
Finn Williams85d36712021-01-26 22:30:06 +00002461
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002462 switch (pooling3dDesc->poolType())
2463 {
2464 case PoolingAlgorithm_Average:
2465 {
2466 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
2467 break;
2468 }
2469 case PoolingAlgorithm_Max:
2470 {
2471 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
2472 break;
2473 }
2474 case PoolingAlgorithm_L2:
2475 {
2476 desc.m_PoolType = armnn::PoolingAlgorithm::L2;
2477 break;
2478 }
2479 default:
2480 {
Jim Flynnb53f52a2023-09-17 10:00:25 +01002481 throw ParseException("Unsupported pooling algorithm");
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002482 }
2483 }
2484
2485 switch (pooling3dDesc->outputShapeRounding())
2486 {
2487 case OutputShapeRounding_Floor:
2488 {
2489 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2490 break;
2491 }
2492 case OutputShapeRounding_Ceiling:
2493 {
2494 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
2495 break;
2496 }
2497 default:
2498 {
Jim Flynnb53f52a2023-09-17 10:00:25 +01002499 throw ParseException("Unsupported output shape rounding");
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002500 }
2501 }
2502
2503 switch (pooling3dDesc->paddingMethod())
2504 {
2505 case PaddingMethod_Exclude:
2506 {
2507 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2508 break;
2509 }
2510 case PaddingMethod_IgnoreValue:
2511 {
2512 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
2513 break;
2514 }
2515 default:
2516 {
Jim Flynnb53f52a2023-09-17 10:00:25 +01002517 throw ParseException("Unsupported padding method");
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002518 }
2519 }
2520
2521 switch (pooling3dDesc->dataLayout())
2522 {
2523 case DataLayout_NCDHW:
2524 {
2525 desc.m_DataLayout = armnn::DataLayout::NCDHW;
2526 break;
2527 }
2528 case DataLayout_NDHWC:
2529 {
2530 desc.m_DataLayout = armnn::DataLayout::NDHWC;
2531 break;
2532 }
2533 default:
2534 {
Jim Flynnb53f52a2023-09-17 10:00:25 +01002535 throw ParseException("Unsupported data layout");
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002536 }
2537 }
2538
2539 desc.m_PadRight = pooling3dDesc->padRight();
2540 desc.m_PadLeft = pooling3dDesc->padLeft();
2541 desc.m_PadBottom = pooling3dDesc->padBottom();
2542 desc.m_PadTop = pooling3dDesc->padTop();
2543 desc.m_PadFront = pooling3dDesc->padFront();
2544 desc.m_PadBack = pooling3dDesc->padBack();
2545 desc.m_StrideX = pooling3dDesc->strideX();
2546 desc.m_StrideY = pooling3dDesc->strideY();
2547 desc.m_StrideZ = pooling3dDesc->strideZ();
2548 desc.m_PoolWidth = pooling3dDesc->poolWidth();
2549 desc.m_PoolHeight = pooling3dDesc->poolHeight();
2550 desc.m_PoolDepth = pooling3dDesc->poolDepth();
2551
2552 return desc;
2553}
Finn Williams85d36712021-01-26 22:30:06 +00002554
2555void IDeserializer::DeserializerImpl::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002556{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002557 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002558
Derek Lamberti8ddae332019-02-21 16:29:43 +00002559 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00002560 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002561 CHECK_VALID_SIZE(inputs.size(), 1);
2562
Derek Lamberti8ddae332019-02-21 16:29:43 +00002563 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002564 CHECK_VALID_SIZE(outputs.size(), 1);
2565 auto outputInfo = ToTensorInfo(outputs[0]);
2566
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002567 auto pooling2dDescriptor = GetPooling2dDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002568 auto layerName = GetLayerName(graph, layerIndex);
2569 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002570 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2571
Derek Lamberti8ddae332019-02-21 16:29:43 +00002572 RegisterInputSlots(graph, layerIndex, layer);
2573 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002574}
2575
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002576void IDeserializer::DeserializerImpl::ParsePooling3d(GraphPtr graph, unsigned int layerIndex)
2577{
2578 CHECK_LAYERS(graph, 0, layerIndex);
2579
2580 auto pooling3dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->descriptor();
2581 auto inputs = GetInputs(graph, layerIndex);
2582 CHECK_VALID_SIZE(inputs.size(), 1);
2583
2584 auto outputs = GetOutputs(graph, layerIndex);
2585 CHECK_VALID_SIZE(outputs.size(), 1);
2586 auto outputInfo = ToTensorInfo(outputs[0]);
2587
2588 auto pooling3dDescriptor = GetPooling3dDescriptor(pooling3dDes, layerIndex);
2589 auto layerName = GetLayerName(graph, layerIndex);
2590 IConnectableLayer* layer = m_Network->AddPooling3dLayer(pooling3dDescriptor, layerName.c_str());
2591 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2592
2593 RegisterInputSlots(graph, layerIndex, layer);
2594 RegisterOutputSlots(graph, layerIndex, layer);
2595}
2596
Finn Williams85d36712021-01-26 22:30:06 +00002597void IDeserializer::DeserializerImpl::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
Derek Lamberti87acb272019-03-27 16:51:31 +00002598{
2599 CHECK_LAYERS(graph, 0, layerIndex);
2600
2601 auto inputs = GetInputs(graph, layerIndex);
2602 CHECK_VALID_SIZE(inputs.size(), 1);
2603
2604 auto outputs = GetOutputs(graph, layerIndex);
2605 CHECK_VALID_SIZE(outputs.size(), 1);
2606 auto outputInfo = ToTensorInfo(outputs[0]);
2607
2608 auto layerName = GetLayerName(graph, layerIndex);
2609 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
2610 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2611
2612 RegisterInputSlots(graph, layerIndex, layer);
2613 RegisterOutputSlots(graph, layerIndex, layer);
2614}
2615
Finn Williams85d36712021-01-26 22:30:06 +00002616armnn::TensorInfo IDeserializer::DeserializerImpl::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00002617 const std::vector<uint32_t>& targetDimsIn)
2618{
2619 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
2620 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
2621
2622 if (stretchDim != targetDimsIn.end())
2623 {
2624 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
2625 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002626 throw ParseException(fmt::format("At most one component of shape can be -1 {}",
2627 CHECK_LOCATION().AsString()));
Saoirse Stewart263829c2019-02-19 15:54:14 +00002628 }
2629
2630 auto targetNumElements =
Matthew Sloyan0663d662020-09-14 11:47:26 +01002631 armnn::numeric_cast<unsigned int>(
Saoirse Stewart263829c2019-02-19 15:54:14 +00002632 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
2633
2634 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
Tianle Cheng20773482023-10-03 12:01:11 +01002635 if (targetNumElements == 0)
2636 {
2637 if (inputTensorInfo.GetNumElements() == 0)
2638 {
2639 outputDims[stretchIndex] = 0;
2640 }
2641 else
2642 {
2643 throw ParseException(
2644 fmt::format("Input to reshape is a tensor with elements, but the requested shape has 0. {}",
2645 CHECK_LOCATION().AsString()));
2646 }
2647 }
2648 else
2649 {
2650 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
2651 }
Saoirse Stewart263829c2019-02-19 15:54:14 +00002652 }
2653
2654 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
2655
2656 armnn::TensorInfo reshapeInfo = inputTensorInfo;
2657 reshapeInfo.SetShape(outputShape);
2658
2659 return reshapeInfo;
2660}
2661
Finn Williams85d36712021-01-26 22:30:06 +00002662void IDeserializer::DeserializerImpl::ParseRank(GraphPtr graph, unsigned int layerIndex)
Finn Williams2605b232020-06-10 15:53:46 +01002663{
2664 CHECK_LAYERS(graph, 0, layerIndex);
2665
Finn Williams85d36712021-01-26 22:30:06 +00002666 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002667 CHECK_VALID_SIZE(inputs.size(), 1);
2668
Finn Williams85d36712021-01-26 22:30:06 +00002669 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002670 CHECK_VALID_SIZE(outputs.size(), 1);
2671
2672 auto layerName = GetLayerName(graph, layerIndex);
2673 IConnectableLayer* layer = m_Network->AddRankLayer( layerName.c_str());
2674
2675 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2676 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2677
2678 RegisterInputSlots(graph, layerIndex, layer);
2679 RegisterOutputSlots(graph, layerIndex, layer);
2680}
2681
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002682void IDeserializer::DeserializerImpl::ParseReduce(GraphPtr graph, unsigned int layerIndex)
2683{
2684 CHECK_LAYERS(graph, 0, layerIndex);
2685 CHECK_LOCATION();
2686
2687 auto inputs = GetInputs(graph, layerIndex);
2688 CHECK_VALID_SIZE(inputs.size(), 1);
2689
2690 auto outputs = GetOutputs(graph, layerIndex);
2691 CHECK_VALID_SIZE(outputs.size(), 1);
2692
2693 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ReduceLayer();
2694 auto fbDescriptor = fbLayer->descriptor();
2695 auto flatBufferAxis = fbDescriptor->axis();
2696
2697 armnn::ReduceDescriptor descriptor;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002698 descriptor.m_KeepDims = fbDescriptor->keepDims();
2699 descriptor.m_vAxis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2700 descriptor.m_ReduceOperation = ToReduceOperation(fbDescriptor->reduceOperation());
2701
2702 const std::string& layerName = GetLayerName(graph, layerIndex);
2703 IConnectableLayer* layer = m_Network->AddReduceLayer(descriptor, layerName.c_str());
2704
2705 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2706 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2707
2708 RegisterInputSlots(graph, layerIndex, layer);
2709 RegisterOutputSlots(graph, layerIndex, layer);
2710}
2711
Finn Williams85d36712021-01-26 22:30:06 +00002712void IDeserializer::DeserializerImpl::ParseReshape(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart263829c2019-02-19 15:54:14 +00002713{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002714 CHECK_LAYERS(graph, 0, layerIndex);
2715 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002716
Derek Lamberti8ddae332019-02-21 16:29:43 +00002717 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002718 CHECK_VALID_SIZE(outputs.size(), 1);
2719
2720 armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
2721 armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);
2722
Derek Lamberti8ddae332019-02-21 16:29:43 +00002723 const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
Saoirse Stewart263829c2019-02-19 15:54:14 +00002724 std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());
2725
Finn Williams85d36712021-01-26 22:30:06 +00002726 armnn::TensorInfo reshapeOutputTensorInfo = DeserializerImpl::OutputShapeOfReshape(inputTensorInfo, outputDims);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002727 const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
2728
2729 const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
2730 outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());
2731
2732 if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
2733 {
2734 std::stringstream ss;
2735 ss << "New shape defined in reshape parameters "
2736 << reshapeOutputTensorShape
2737 << " does not equal output shape "
2738 << actualOutputTensorInfo.GetShape()
2739 << ": "
2740 << CHECK_LOCATION().AsString();
2741 throw ParseException(ss.str());
2742 }
2743
2744 armnn::ReshapeDescriptor reshapeDesc;
2745 reshapeDesc.m_TargetShape = reshapeOutputTensorShape;
2746
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002747 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002748 IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
2749 layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);
2750
Derek Lamberti8ddae332019-02-21 16:29:43 +00002751 RegisterInputSlots(graph, layerIndex, layer);
2752 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002753}
2754
Finn Williams85d36712021-01-26 22:30:06 +00002755void IDeserializer::DeserializerImpl::ParseResize(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002756{
2757 CHECK_LAYERS(graph, 0, layerIndex);
2758
Finn Williams85d36712021-01-26 22:30:06 +00002759 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002760 CHECK_VALID_SIZE(inputs.size(), 1);
2761
Finn Williams85d36712021-01-26 22:30:06 +00002762 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002763 CHECK_VALID_SIZE(outputs.size(), 1);
2764
2765 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2766
2767 armnn::ResizeDescriptor descriptor;
2768 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
2769 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
2770 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
2771 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002772 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2773 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002774
2775 auto layerName = GetLayerName(graph, layerIndex);
2776 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2777
2778 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2779 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2780
2781 RegisterInputSlots(graph, layerIndex, layer);
2782 RegisterOutputSlots(graph, layerIndex, layer);
2783}
2784
Tracy Narine944fb502023-07-04 15:08:57 +01002785void IDeserializer::DeserializerImpl::ParseReverseV2(GraphPtr graph, unsigned int layerIndex)
2786{
2787 CHECK_LAYERS(graph, 0, layerIndex);
2788
2789 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Tracy Narinebb8d7592023-07-13 16:50:54 +01002790 CHECK_VALID_SIZE(inputs.size(), 2);
Tracy Narine944fb502023-07-04 15:08:57 +01002791
2792 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2793 CHECK_VALID_SIZE(outputs.size(), 1);
2794
Tracy Narine944fb502023-07-04 15:08:57 +01002795 auto layerName = GetLayerName(graph, layerIndex);
Tracy Narinebb8d7592023-07-13 16:50:54 +01002796 IConnectableLayer* layer = m_Network->AddReverseV2Layer(layerName.c_str());
Tracy Narine944fb502023-07-04 15:08:57 +01002797
2798 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2799 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2800
2801 RegisterInputSlots(graph, layerIndex, layer);
2802 RegisterOutputSlots(graph, layerIndex, layer);
2803}
Jan Eilers1b2654f2021-09-24 15:45:46 +01002804
/// @note The ResizeBilinear operation was deprecated and removed in favor of the Resize operation.
/// This function is kept for backwards compatibility.
Finn Williams85d36712021-01-26 22:30:06 +00002807void IDeserializer::DeserializerImpl::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002808{
2809 CHECK_LAYERS(graph, 0, layerIndex);
2810
Finn Williams85d36712021-01-26 22:30:06 +00002811 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002812 CHECK_VALID_SIZE(inputs.size(), 1);
2813
Finn Williams85d36712021-01-26 22:30:06 +00002814 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002815 CHECK_VALID_SIZE(outputs.size(), 1);
2816
2817 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2818
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002819 armnn::ResizeDescriptor descriptor;
2820 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002821 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002822 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
2823 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002824 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2825 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002826
2827 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002828 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002829
2830 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2831 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2832
2833 RegisterInputSlots(graph, layerIndex, layer);
2834 RegisterOutputSlots(graph, layerIndex, layer);
2835}
2836
Keith Davis3ae3f972021-05-21 16:33:48 +01002837void IDeserializer::DeserializerImpl::ParseShape(GraphPtr graph, unsigned int layerIndex)
2838{
2839 CHECK_LAYERS(graph, 0, layerIndex);
2840
2841 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2842 CHECK_VALID_SIZE(inputs.size(), 1);
2843
2844 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2845 CHECK_VALID_SIZE(outputs.size(), 1);
2846
2847 auto layerName = GetLayerName(graph, layerIndex);
2848 IConnectableLayer* layer = m_Network->AddShapeLayer( layerName.c_str());
2849
2850 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2851 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2852
2853 RegisterInputSlots(graph, layerIndex, layer);
2854 RegisterOutputSlots(graph, layerIndex, layer);
2855}
2856
Finn Williams85d36712021-01-26 22:30:06 +00002857void IDeserializer::DeserializerImpl::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002858{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002859 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002860
Finn Williams85d36712021-01-26 22:30:06 +00002861 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002862 CHECK_VALID_SIZE(inputs.size(), 1);
2863
Finn Williams85d36712021-01-26 22:30:06 +00002864 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002865 CHECK_VALID_SIZE(outputs.size(), 1);
2866
2867 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00002868 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Sadik Armaganfd0cae32021-11-08 17:18:31 +00002869 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->axis();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002870 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002871
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002872 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2873
2874 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2875 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2876
Derek Lamberti8ddae332019-02-21 16:29:43 +00002877 RegisterInputSlots(graph, layerIndex, layer);
2878 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00002879}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002880
Finn Williams85d36712021-01-26 22:30:06 +00002881void IDeserializer::DeserializerImpl::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002882{
2883 CHECK_LAYERS(graph, 0, layerIndex);
2884
Finn Williams85d36712021-01-26 22:30:06 +00002885 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002886 CHECK_VALID_SIZE(inputs.size(), 1);
2887
Finn Williams85d36712021-01-26 22:30:06 +00002888 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002889 CHECK_VALID_SIZE(outputs.size(), 1);
2890
2891 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2892 auto flatBufferPadList = flatBufferDescriptor->padList();
2893 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
2894
Mike Kelly51b8c312022-05-24 11:34:02 +01002895 if (flatBufferPadList->size() % 2 != 0)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002896 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002897 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2898 CHECK_LOCATION().AsString()));
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002899 }
2900
2901 std::vector<std::pair<unsigned int, unsigned int>> padList;
Mike Kelly51b8c312022-05-24 11:34:02 +01002902 padList.reserve(flatBufferPadList->size() / 2);
2903 for (unsigned int i = 0; i < flatBufferPadList->size() - 1; i += 2)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002904 {
2905 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2906 }
2907
2908 armnn::SpaceToBatchNdDescriptor descriptor;
2909 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2910 descriptor.m_BlockShape =
2911 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2912 descriptor.m_PadList = padList;
2913
2914 auto layerName = GetLayerName(graph, layerIndex);
2915 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2916
2917 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2918 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2919
2920 RegisterInputSlots(graph, layerIndex, layer);
2921 RegisterOutputSlots(graph, layerIndex, layer);
2922}
2923
Finn Williams85d36712021-01-26 22:30:06 +00002924void IDeserializer::DeserializerImpl::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002925{
2926 CHECK_LAYERS(graph, 0, layerIndex);
2927
Finn Williams85d36712021-01-26 22:30:06 +00002928 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002929 CHECK_VALID_SIZE(inputs.size(), 1);
2930
Finn Williams85d36712021-01-26 22:30:06 +00002931 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002932 CHECK_VALID_SIZE(outputs.size(), 1);
2933
2934 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2935
2936 armnn::SpaceToDepthDescriptor descriptor;
2937 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2938 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2939
2940 auto layerName = GetLayerName(graph, layerIndex);
2941 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2942
2943 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2944 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2945
2946 RegisterInputSlots(graph, layerIndex, layer);
2947 RegisterOutputSlots(graph, layerIndex, layer);
2948}
2949
Finn Williams85d36712021-01-26 22:30:06 +00002950armnn::NormalizationDescriptor IDeserializer::DeserializerImpl::GetNormalizationDescriptor(
2951 NormalizationDescriptorPtr normalizationDescriptor,
Nina Drozd57728782019-02-27 10:53:27 +00002952 unsigned int layerIndex)
2953{
Jan Eilers8eb25602020-03-09 12:13:48 +00002954 IgnoreUnused(layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002955 armnn::NormalizationDescriptor desc;
2956
2957 switch (normalizationDescriptor->normChannelType())
2958 {
2959 case NormalizationAlgorithmChannel_Across:
2960 {
2961 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
2962 break;
2963 }
2964 case NormalizationAlgorithmChannel_Within:
2965 {
2966 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
2967 break;
2968 }
2969 default:
2970 {
Jim Flynnb53f52a2023-09-17 10:00:25 +01002971 throw ParseException("Unsupported normalization channel type");
Nina Drozd57728782019-02-27 10:53:27 +00002972 }
2973 }
2974
2975 switch (normalizationDescriptor->normMethodType())
2976 {
2977 case NormalizationAlgorithmMethod_LocalBrightness:
2978 {
2979 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
2980 break;
2981 }
2982 case NormalizationAlgorithmMethod_LocalContrast:
2983 {
2984 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
2985 break;
2986 }
2987 default:
2988 {
Jim Flynnb53f52a2023-09-17 10:00:25 +01002989 throw ParseException("Unsupported normalization method type");
Nina Drozd57728782019-02-27 10:53:27 +00002990 }
2991 }
2992
2993 switch (normalizationDescriptor->dataLayout())
2994 {
2995 case DataLayout_NCHW:
2996 {
2997 desc.m_DataLayout = armnn::DataLayout::NCHW;
2998 break;
2999 }
3000 case DataLayout_NHWC:
3001 {
3002 desc.m_DataLayout = armnn::DataLayout::NHWC;
3003 break;
3004 }
3005 default:
3006 {
Jim Flynnb53f52a2023-09-17 10:00:25 +01003007 throw ParseException("Unsupported data layout");
Nina Drozd57728782019-02-27 10:53:27 +00003008 }
3009 }
3010
3011 desc.m_Alpha = normalizationDescriptor->alpha();
3012 desc.m_Beta = normalizationDescriptor->beta();
3013 desc.m_K = normalizationDescriptor->k();
3014 desc.m_NormSize = normalizationDescriptor->normSize();
3015
3016 return desc;
3017}
3018
Finn Williams85d36712021-01-26 22:30:06 +00003019void IDeserializer::DeserializerImpl::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
Nina Drozd57728782019-02-27 10:53:27 +00003020{
3021 CHECK_LAYERS(graph, 0, layerIndex);
3022
3023 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
3024
Finn Williams85d36712021-01-26 22:30:06 +00003025 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00003026 CHECK_VALID_SIZE(inputs.size(), 1);
3027
Finn Williams85d36712021-01-26 22:30:06 +00003028 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00003029 CHECK_VALID_SIZE(outputs.size(), 1);
3030
3031 auto outputInfo = ToTensorInfo(outputs[0]);
3032
3033 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
3034 auto layerName = GetLayerName(graph, layerIndex);
3035
3036 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
3037 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
3038
3039 RegisterInputSlots(graph, layerIndex, layer);
3040 RegisterOutputSlots(graph, layerIndex, layer);
3041}
3042
Finn Williams85d36712021-01-26 22:30:06 +00003043void IDeserializer::DeserializerImpl::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan8b42a382019-03-01 14:24:49 +00003044{
3045 CHECK_LAYERS(graph, 0, layerIndex);
3046 auto inputs = GetInputs(graph, layerIndex);
3047 CHECK_LOCATION();
3048 CHECK_VALID_SIZE(inputs.size(), 1);
3049
3050 auto outputs = GetOutputs(graph, layerIndex);
3051 CHECK_VALID_SIZE(outputs.size(), 1);
3052
3053 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagan8b42a382019-03-01 14:24:49 +00003054
josh minor4a3c6102020-01-06 16:40:46 -06003055 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
3056 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
Sadik Armagan8b42a382019-03-01 14:24:49 +00003057 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3058 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3059
3060 RegisterInputSlots(graph, layerIndex, layer);
3061 RegisterOutputSlots(graph, layerIndex, layer);
3062}
3063
Finn Williams85d36712021-01-26 22:30:06 +00003064void IDeserializer::DeserializerImpl::ParseSlice(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01003065{
3066 CHECK_LAYERS(graph, 0, layerIndex);
3067
3068 auto inputs = GetInputs(graph, layerIndex);
3069 CHECK_VALID_SIZE(inputs.size(), 1);
3070
3071 auto outputs = GetOutputs(graph, layerIndex);
3072 CHECK_VALID_SIZE(outputs.size(), 1);
3073
3074 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
3075
3076 auto fbBegin = fbDescriptor->begin();
3077 auto fbSize = fbDescriptor->size();
3078
Mike Kelly51b8c312022-05-24 11:34:02 +01003079 if (fbBegin->size() != fbSize->size())
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01003080 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01003081 throw ParseException(fmt::format("Begin and size descriptors must have the same length {}",
3082 CHECK_LOCATION().AsString()));
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01003083 }
3084
3085 armnn::SliceDescriptor descriptor;
3086 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
3087 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
3088
3089 auto layerName = GetLayerName(graph, layerIndex);
3090 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
3091
3092 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3093 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3094
3095 RegisterInputSlots(graph, layerIndex, layer);
3096 RegisterOutputSlots(graph, layerIndex, layer);
3097}
3098
Finn Williams85d36712021-01-26 22:30:06 +00003099void IDeserializer::DeserializerImpl::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003100{
3101 CHECK_LAYERS(graph, 0, layerIndex);
3102
Finn Williams85d36712021-01-26 22:30:06 +00003103 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003104 CHECK_VALID_SIZE(inputs.size(), 1);
3105
Finn Williams85d36712021-01-26 22:30:06 +00003106 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003107 CHECK_VALID_SIZE(outputs.size(), 1);
3108
3109 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
3110
3111 auto flatBufferBegin = flatBufferDescriptor->begin();
3112 auto flatBufferEnd = flatBufferDescriptor->end();
3113 auto flatBufferStride = flatBufferDescriptor->stride();
3114
Mike Kelly51b8c312022-05-24 11:34:02 +01003115 if (!(flatBufferBegin->size() == flatBufferEnd->size() &&
3116 flatBufferBegin->size() == flatBufferStride->size()))
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003117 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01003118 throw ParseException(fmt::format("The size of the begin, end, and stride must be equal {}",
3119 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00003120 }
3121
3122 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
3123 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
3124 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
3125
3126 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
3127 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
3128 descriptor.m_EndMask = flatBufferDescriptor->endMask();
3129 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
3130 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
3131 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
3132 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
3133
3134 auto layerName = GetLayerName(graph, layerIndex);
3135 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
3136
3137 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3138 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3139
3140 RegisterInputSlots(graph, layerIndex, layer);
3141 RegisterOutputSlots(graph, layerIndex, layer);
3142}
3143
Finn Williams85d36712021-01-26 22:30:06 +00003144void IDeserializer::DeserializerImpl::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
Conor Kennedyda1f9752019-03-01 14:37:12 +00003145{
3146 CHECK_LAYERS(graph, 0, layerIndex);
3147 auto inputs = GetInputs(graph, layerIndex);
3148 CHECK_LOCATION();
3149 CHECK_VALID_SIZE(inputs.size(), 2);
3150
3151 auto outputs = GetOutputs(graph, layerIndex);
3152 CHECK_VALID_SIZE(outputs.size(), 1);
3153
3154 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly3ec30772023-03-08 13:47:17 +00003155 armnn::ElementwiseBinaryDescriptor descriptor(armnn::BinaryOperation::Sub);
3156 IConnectableLayer* layer = m_Network->AddElementwiseBinaryLayer(descriptor, layerName.c_str());
Conor Kennedyda1f9752019-03-01 14:37:12 +00003157
3158 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3159 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3160
3161 RegisterInputSlots(graph, layerIndex, layer);
3162 RegisterOutputSlots(graph, layerIndex, layer);
3163}
3164
Finn Williams85d36712021-01-26 22:30:06 +00003165void IDeserializer::DeserializerImpl::ParseGather(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003166{
3167 CHECK_LAYERS(graph, 0, layerIndex);
3168
Finn Williams85d36712021-01-26 22:30:06 +00003169 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003170 CHECK_VALID_SIZE(inputs.size(), 2);
3171
Finn Williams85d36712021-01-26 22:30:06 +00003172 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003173 CHECK_VALID_SIZE(outputs.size(), 1);
3174
Teresa Charlin52664732020-06-29 16:27:03 +01003175 armnn::GatherDescriptor descriptor;
3176 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_GatherLayer()->descriptor()->axis();
3177
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003178 auto layerName = GetLayerName(graph, layerIndex);
Teresa Charlin52664732020-06-29 16:27:03 +01003179 IConnectableLayer* layer = m_Network->AddGatherLayer(descriptor, layerName.c_str());
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003180
3181 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003182 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3183
3184 RegisterInputSlots(graph, layerIndex, layer);
3185 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003186}
3187
Teresa Charlin6966bfa2022-04-25 17:14:50 +01003188void IDeserializer::DeserializerImpl::ParseGatherNd(GraphPtr graph, unsigned int layerIndex)
3189{
3190 CHECK_LAYERS(graph, 0, layerIndex);
3191
3192 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
3193 CHECK_VALID_SIZE(inputs.size(), 2);
3194
3195 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
3196 CHECK_VALID_SIZE(outputs.size(), 1);
3197
3198 auto layerName = GetLayerName(graph, layerIndex);
3199 IConnectableLayer* layer = m_Network->AddGatherNdLayer(layerName.c_str());
3200
3201 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3202 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3203
3204 RegisterInputSlots(graph, layerIndex, layer);
3205 RegisterOutputSlots(graph, layerIndex, layer);
3206}
3207
Finn Williams85d36712021-01-26 22:30:06 +00003208void IDeserializer::DeserializerImpl::ParseMean(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003209{
3210 CHECK_LAYERS(graph, 0, layerIndex);
3211
Finn Williams85d36712021-01-26 22:30:06 +00003212 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003213 CHECK_VALID_SIZE(inputs.size(), 1);
3214
Finn Williams85d36712021-01-26 22:30:06 +00003215 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003216 CHECK_VALID_SIZE(outputs.size(), 1);
3217
3218 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
3219 auto flatBufferAxis = flatBufferDescriptor->axis();
3220 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
3221
3222 armnn::MeanDescriptor descriptor;
3223 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
3224 descriptor.m_KeepDims = flatBufferKeepDims;
3225
3226 auto layerName = GetLayerName(graph, layerIndex);
3227 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
3228
3229 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3230 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3231
3232 RegisterInputSlots(graph, layerIndex, layer);
3233 RegisterOutputSlots(graph, layerIndex, layer);
3234}
3235
Finn Williams85d36712021-01-26 22:30:06 +00003236void IDeserializer::DeserializerImpl::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
Jim Flynn18ce3382019-03-08 11:08:30 +00003237{
3238 CHECK_LAYERS(graph, 0, layerIndex);
3239
Finn Williams85d36712021-01-26 22:30:06 +00003240 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00003241 CHECK_VALID_SIZE(inputs.size(), 1);
3242
Finn Williams85d36712021-01-26 22:30:06 +00003243 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00003244
3245 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
3246 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
3247 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
3248 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
3249 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
3250 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
3251
3252 // Check numViews and numDimensions corresponds to the ones already serialized ...
3253 // numViews == flatBufferViewSizes.size();
3254 // foreach: numDimensions == flatBufferViewSizes[x].size();
3255
3256 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
3257 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
3258 {
3259 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
3260 {
3261 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
3262 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
3263 }
3264 }
3265
3266 auto layerName = GetLayerName(graph, layerIndex);
3267 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
3268
3269 // I could have as many outputs as views ...
3270 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
3271 {
3272 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
3273 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
3274 }
3275
3276 RegisterInputSlots(graph, layerIndex, layer);
3277 RegisterOutputSlots(graph, layerIndex, layer);
3278}
3279
Finn Williams85d36712021-01-26 22:30:06 +00003280armnn::LstmDescriptor IDeserializer::DeserializerImpl::GetLstmDescriptor(LstmDescriptorPtr lstmDescriptor)
Jim Flynn11af3752019-03-19 17:22:29 +00003281{
3282 armnn::LstmDescriptor desc;
3283
3284 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
3285 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
3286 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
3287 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
3288 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
3289 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01003290 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00003291
3292 return desc;
3293}
3294
// Deserializes an LSTM layer: rebuilds the LstmDescriptor and every weight/bias
// tensor from the flatbuffer input params, then adds the layer and sets the
// tensor info on its four output slots.
void IDeserializer::DeserializerImpl::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters - always serialized.
    // NOTE: lstmInputParams only stores raw pointers, so every ConstTensor local
    // below must stay alive until the AddLstmLayer() call.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate parameters - only serialized when CIFG is disabled.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Optional projection parameters.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Optional peephole parameters.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Optional layer-normalisation parameters; the input-gate weights are
    // additionally gated on CIFG being disabled.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (lstmDescriptor.m_LayerNormEnabled)
    {
        if (!lstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    // Propagate the serialized tensor info onto each of the four output slots.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3410
Finn Williams85d36712021-01-26 22:30:06 +00003411armnn::QLstmDescriptor IDeserializer::DeserializerImpl::GetQLstmDescriptor(QLstmDescriptorPtr qLstmDescriptor)
James Conroy8d333182020-05-13 10:27:58 +01003412{
3413 armnn::QLstmDescriptor desc;
3414
3415 desc.m_CifgEnabled = qLstmDescriptor->cifgEnabled();
3416 desc.m_PeepholeEnabled = qLstmDescriptor->peepholeEnabled();
3417 desc.m_ProjectionEnabled = qLstmDescriptor->projectionEnabled();
3418 desc.m_LayerNormEnabled = qLstmDescriptor->layerNormEnabled();
3419
3420 desc.m_CellClip = qLstmDescriptor->cellClip();
3421 desc.m_ProjectionClip = qLstmDescriptor->projectionClip();
3422
3423 desc.m_InputIntermediateScale = qLstmDescriptor->inputIntermediateScale();
3424 desc.m_ForgetIntermediateScale = qLstmDescriptor->forgetIntermediateScale();
3425 desc.m_CellIntermediateScale = qLstmDescriptor->cellIntermediateScale();
3426 desc.m_OutputIntermediateScale = qLstmDescriptor->outputIntermediateScale();
3427
3428 desc.m_HiddenStateScale = qLstmDescriptor->hiddenStateScale();
3429 desc.m_HiddenStateZeroPoint = qLstmDescriptor->hiddenStateZeroPoint();
3430
3431 return desc;
3432}
3433
// Deserializes a QLstm layer: rebuilds the QLstmDescriptor and all mandatory and
// optional weight/bias tensors from the flatbuffer, then adds the layer and sets
// the tensor info on its three output slots.
void IDeserializer::DeserializerImpl::ParseQLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 3);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto qLstmDescriptor = GetQLstmDescriptor(flatBufferDescriptor);
    armnn::LstmInputParams qLstmInputParams;

    // Mandatory params
    // NOTE: qLstmInputParams only stores raw pointers, so every ConstTensor local
    // below must stay alive until the AddQLstmLayer() call.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    qLstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    qLstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    qLstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    qLstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    qLstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    qLstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    qLstmInputParams.m_ForgetGateBias = &forgetGateBias;
    qLstmInputParams.m_CellBias = &cellBias;
    qLstmInputParams.m_OutputGateBias = &outputGateBias;

    // Optional CIFG params - only serialized when CIFG is disabled.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor inputGateBias;

    if (!qLstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        qLstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        qLstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        qLstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Optional projection params
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;

    if (qLstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        qLstmInputParams.m_ProjectionWeights = &projectionWeights;
        qLstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Optional peephole params; the cell-to-input weights are additionally
    // gated on CIFG being disabled.
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;

    if (qLstmDescriptor.m_PeepholeEnabled)
    {
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
            qLstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        }

        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        qLstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        qLstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Optional layer norm params; the input-gate weights are additionally
    // gated on CIFG being disabled.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;

    if (qLstmDescriptor.m_LayerNormEnabled)
    {
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            qLstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }

        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        qLstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        qLstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        qLstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());

    // Propagate the serialized tensor info onto the three output slots.
    armnn::TensorInfo outputStateOutInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputStateOutInfo);

    armnn::TensorInfo cellStateOutInfo = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(cellStateOutInfo);

    armnn::TensorInfo outputInfo = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3559
// Deserializes a QuantizedLstm layer. All twelve weight/bias tensors are
// mandatory for this layer type (no optional CIFG/peephole/projection blocks).
void IDeserializer::DeserializerImpl::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    armnn::QuantizedLstmInputParams lstmInputParams;

    // NOTE: lstmInputParams only stores raw pointers, so every ConstTensor local
    // below must stay alive until the AddQuantizedLstmLayer() call.
    armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_InputGateBias = &inputGateBias;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());

    // Propagate the serialized tensor info onto the two output slots.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3613
Finn Williams85d36712021-01-26 22:30:06 +00003614void IDeserializer::DeserializerImpl::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003615{
3616 CHECK_LAYERS(graph, 0, layerIndex);
3617
Finn Williams85d36712021-01-26 22:30:06 +00003618 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003619 CHECK_VALID_SIZE(inputs.size(), 1);
3620
Finn Williams85d36712021-01-26 22:30:06 +00003621 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003622 CHECK_VALID_SIZE(outputs.size(), 1);
3623
3624 const std::string layerName = GetLayerName(graph, layerIndex);
3625 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
3626
3627 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3628 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3629
3630 RegisterInputSlots(graph, layerIndex, layer);
3631 RegisterOutputSlots(graph, layerIndex, layer);
3632}
3633
Finn Williams85d36712021-01-26 22:30:06 +00003634void IDeserializer::DeserializerImpl::ParseMerge(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003635{
3636 CHECK_LAYERS(graph, 0, layerIndex);
3637
Finn Williams85d36712021-01-26 22:30:06 +00003638 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003639 CHECK_VALID_SIZE(inputs.size(), 2);
3640
Finn Williams85d36712021-01-26 22:30:06 +00003641 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003642 CHECK_VALID_SIZE(outputs.size(), 1);
3643
3644 const std::string layerName = GetLayerName(graph, layerIndex);
3645 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
3646
3647 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3648 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3649
3650 RegisterInputSlots(graph, layerIndex, layer);
3651 RegisterOutputSlots(graph, layerIndex, layer);
3652}
3653
Finn Williams85d36712021-01-26 22:30:06 +00003654void IDeserializer::DeserializerImpl::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganeff363d2019-04-05 15:25:46 +01003655{
3656 CHECK_LAYERS(graph, 0, layerIndex);
3657 auto inputs = GetInputs(graph, layerIndex);
3658 CHECK_LOCATION();
3659 CHECK_VALID_SIZE(inputs.size(), 2);
3660
3661 auto outputs = GetOutputs(graph, layerIndex);
3662 CHECK_VALID_SIZE(outputs.size(), 2);
3663
3664 auto layerName = GetLayerName(graph, layerIndex);
3665 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
3666
3667 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
3668 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
3669
3670 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
3671 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
3672
3673 RegisterInputSlots(graph, layerIndex, layer);
3674 RegisterOutputSlots(graph, layerIndex, layer);
3675}
3676
David Monahan616b22f2023-07-25 12:08:10 +01003677void IDeserializer::DeserializerImpl::ParseTile(GraphPtr graph, unsigned int layerIndex)
3678{
3679 CHECK_LAYERS(graph, 0, layerIndex);
3680 auto inputs = GetInputs(graph, layerIndex);
3681 CHECK_LOCATION();
3682 CHECK_VALID_SIZE(inputs.size(), 1);
3683
3684 auto outputs = GetOutputs(graph, layerIndex);
3685 CHECK_VALID_SIZE(outputs.size(), 1);
3686
3687 auto TileLayer = graph->layers()->Get(layerIndex)->layer_as_TileLayer();
3688 auto layerName = GetLayerName(graph, layerIndex);
3689 auto flatBufferDescriptor = TileLayer->descriptor();
3690 auto flatBufferMultiples = flatBufferDescriptor->m_Multiples();
3691
3692 armnn::TileDescriptor tileDescriptor;
3693 tileDescriptor.m_Multiples = std::vector<unsigned int>(flatBufferMultiples->begin(), flatBufferMultiples->end());
3694
3695 IConnectableLayer* layer = m_Network->AddTileLayer(tileDescriptor, layerName.c_str());
3696
3697 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
3698 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
3699
3700 RegisterInputSlots(graph, layerIndex, layer);
3701 RegisterOutputSlots(graph, layerIndex, layer);
3702}
3703
Finn Williams85d36712021-01-26 22:30:06 +00003704void IDeserializer::DeserializerImpl::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01003705{
3706 CHECK_LAYERS(graph, 0, layerIndex);
3707 auto inputs = GetInputs(graph, layerIndex);
3708 CHECK_LOCATION();
3709 CHECK_VALID_SIZE(inputs.size(), 2);
3710
3711 auto outputs = GetOutputs(graph, layerIndex);
3712 CHECK_VALID_SIZE(outputs.size(), 1);
3713
3714 auto layerName = GetLayerName(graph, layerIndex);
3715 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
3716
3717 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3718 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3719
3720 RegisterInputSlots(graph, layerIndex, layer);
3721 RegisterOutputSlots(graph, layerIndex, layer);
3722}
3723
Finn Williams85d36712021-01-26 22:30:06 +00003724void IDeserializer::DeserializerImpl::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
Mike Kellyc9ea45a2020-02-28 18:11:58 +00003725{
3726 CHECK_LAYERS(graph, 0, layerIndex);
3727
3728 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
3729
3730 auto inputs = GetInputs(graph, layerIndex);
3731 CHECK_VALID_SIZE(inputs.size(), 1);
3732
3733 auto outputs = GetOutputs(graph, layerIndex);
3734 CHECK_VALID_SIZE(outputs.size(), 1);
3735 auto outputInfo = ToTensorInfo(outputs[0]);
3736
3737 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly51b8c312022-05-24 11:34:02 +01003738 const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->size()));
Mike Kellyc9ea45a2020-02-28 18:11:58 +00003739
3740 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
3741 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
3742
3743 RegisterInputSlots(graph, layerIndex, layer);
3744 RegisterOutputSlots(graph, layerIndex, layer);
3745}
3746
Finn Williams85d36712021-01-26 22:30:06 +00003747void IDeserializer::DeserializerImpl::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01003748{
3749 CHECK_LAYERS(graph, 0, layerIndex);
3750
3751 auto inputs = GetInputs(graph, layerIndex);
3752 CHECK_VALID_SIZE(inputs.size(), 1);
3753
3754 auto outputs = GetOutputs(graph, layerIndex);
3755 CHECK_VALID_SIZE(outputs.size(), 1);
3756
3757 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
3758 auto layerName = GetLayerName(graph, layerIndex);
3759 auto serializerDescriptor = serializerLayer->descriptor();
3760
3761 armnn::TransposeConvolution2dDescriptor descriptor;
3762 descriptor.m_PadLeft = serializerDescriptor->padLeft();
3763 descriptor.m_PadRight = serializerDescriptor->padRight();
3764 descriptor.m_PadTop = serializerDescriptor->padTop();
3765 descriptor.m_PadBottom = serializerDescriptor->padBottom();
3766 descriptor.m_StrideX = serializerDescriptor->strideX();
3767 descriptor.m_StrideY = serializerDescriptor->strideY();;
3768 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
3769 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
3770
3771 // weights & biases
3772 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
3773 armnn::Optional<armnn::ConstTensor> optionalBiases;
3774 if (descriptor.m_BiasEnabled)
3775 {
3776 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
3777 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
3778 }
3779
3780 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
3781 weights,
3782 optionalBiases,
3783 layerName.c_str());
3784
3785 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3786 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3787
3788 RegisterInputSlots(graph, layerIndex, layer);
3789 RegisterOutputSlots(graph, layerIndex, layer);
3790}
3791
Finn Williams85d36712021-01-26 22:30:06 +00003792void IDeserializer::DeserializerImpl::ParseStack(GraphPtr graph, unsigned int layerIndex)
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003793{
3794 CHECK_LAYERS(graph, 0, layerIndex);
3795 auto inputs = GetInputs(graph, layerIndex);
3796
3797 auto outputs = GetOutputs(graph, layerIndex);
3798 CHECK_VALID_SIZE(outputs.size(), 1);
3799
3800 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
3801 unsigned int axis = flatBufferDescriptor->axis();
3802 unsigned int numInputs = flatBufferDescriptor->numInputs();
3803 CHECK_VALID_SIZE(inputs.size(), numInputs);
3804
3805 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
3806 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
3807 flatBufferInputShape->begin() + flatBufferInputShape->size());
3808
3809 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
3810 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
3811
3812 for (unsigned int i=0; i<inputs.size(); ++i)
3813 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01003814 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003815 if (descriptor.m_InputShape != inputShape)
3816 {
3817 std::stringstream ss;
3818 ss << "Shape of input "
3819 << i
3820 << " "
3821 << inputShape
3822 << " does not equal defined input shape "
3823 << descriptor.m_InputShape
3824 << ": "
3825 << CHECK_LOCATION().AsString();
3826 throw ParseException(ss.str());
3827 }
3828 }
3829
3830 auto layerName = GetLayerName(graph, layerIndex);
3831 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
3832
3833 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3834 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3835
3836 RegisterInputSlots(graph, layerIndex, layer);
3837 RegisterOutputSlots(graph, layerIndex, layer);
3838}
3839
Finn Williams85d36712021-01-26 22:30:06 +00003840void IDeserializer::DeserializerImpl::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar85121a22019-10-23 10:41:35 +01003841{
3842 CHECK_LAYERS(graph, 0, layerIndex);
3843
3844 auto inputs = GetInputs(graph, layerIndex);
3845 auto outputs = GetOutputs(graph, layerIndex);
3846
3847 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
3848 auto fbDescriptor = fbLayer->descriptor();
3849
3850 armnn::StandInDescriptor descriptor;
3851 descriptor.m_NumInputs = fbDescriptor->numInputs();
3852 descriptor.m_NumOutputs = fbDescriptor->numOutputs();
3853
3854 CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
3855 CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);
3856
3857 const std::string layerName = GetLayerName(graph, layerIndex);
3858 armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());
3859
3860 for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
3861 {
3862 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
3863 layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
3864 }
3865
3866 RegisterInputSlots(graph, layerIndex, layer);
3867 RegisterOutputSlots(graph, layerIndex, layer);
3868}
3869
Narumol Prangnawarata0162e12021-07-23 14:47:49 +01003870armnn::UnidirectionalSequenceLstmDescriptor IDeserializer::DeserializerImpl::GetUnidirectionalSequenceLstmDescriptor(
3871 UnidirectionalSequenceLstmDescriptorPtr descriptor)
3872{
3873 armnn::UnidirectionalSequenceLstmDescriptor desc;
3874
3875 desc.m_ActivationFunc = descriptor->activationFunc();
3876 desc.m_ClippingThresCell = descriptor->clippingThresCell();
3877 desc.m_ClippingThresProj = descriptor->clippingThresProj();
3878 desc.m_CifgEnabled = descriptor->cifgEnabled();
3879 desc.m_PeepholeEnabled = descriptor->peepholeEnabled();
3880 desc.m_ProjectionEnabled = descriptor->projectionEnabled();
3881 desc.m_LayerNormEnabled = descriptor->layerNormEnabled();
3882 desc.m_TimeMajor = descriptor->timeMajor();
3883
3884 return desc;
3885}
3886
// Deserializes a UnidirectionalSequenceLstm layer (3 inputs, 3 outputs).
// The LstmInputParams struct holds raw pointers, so every ConstTensor it
// points at is declared in this function's scope (even when only filled in
// conditionally) and must stay alive until AddUnidirectionalSequenceLstmLayer
// has consumed the params.
void IDeserializer::DeserializerImpl::ParseUnidirectionalSequenceLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 3);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto descriptor = GetUnidirectionalSequenceLstmDescriptor(flatBufferDescriptor);

    armnn::LstmInputParams lstmInputParams;

    // Parameters that are always serialized, regardless of descriptor flags.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate parameters are only serialized when CIFG is disabled.
    // Declared outside the 'if' so the pointers stored below remain valid.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!descriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;

        // cellToInputWeights additionally requires peephole to be enabled.
        if (descriptor.m_PeepholeEnabled)
        {
            cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
            lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        }
    }

    // Projection parameters, present only when projection is enabled.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (descriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Peephole parameters for the forget/output gates.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (descriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Layer-normalization weights; the input-gate variant is again skipped
    // when CIFG is enabled (the input gate does not exist in that case).
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (descriptor.m_LayerNormEnabled)
    {
        if (!descriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddUnidirectionalSequenceLstmLayer(descriptor,
                                                                             lstmInputParams,
                                                                             layerName.c_str());

    // Three outputs, each with its own serialized tensor info.
    armnn::TensorInfo outputTensorInfo0 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo0);

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
4005
Derek Lamberti0028d1b2019-02-20 13:57:42 +00004006} // namespace armnnDeserializer