blob: 702b0605124642efdd13576d697769ba0ea5a747 [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
Finn Williams2605b232020-06-10 15:53:46 +01002// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
Kevin May43a799c2019-02-08 16:31:42 +00003// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
Matthew Benthamff130e22020-01-17 11:47:42 +00008#include <armnn/Descriptors.hpp>
Kevin May43a799c2019-02-08 16:31:42 +00009#include <armnn/Exceptions.hpp>
Matthew Benthamff130e22020-01-17 11:47:42 +000010#include <armnn/TypesUtils.hpp>
11#include <armnn/LstmParams.hpp>
12#include <armnn/QuantizedLstmParams.hpp>
Simon Obutedb5804e2022-04-14 15:49:52 +010013#include <armnn/Logging.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000014
Matteo Martincighe011d202019-11-28 11:35:47 +000015#include <armnnUtils/Permute.hpp>
Mike Kellyc9ea45a2020-02-28 18:11:58 +000016#include <armnnUtils/Transpose.hpp>
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +010017#include <armnn/utility/Assert.hpp>
Jan Eilers8eb25602020-03-09 12:13:48 +000018#include <armnn/utility/IgnoreUnused.hpp>
Matthew Sloyan0663d662020-09-14 11:47:26 +010019#include <armnn/utility/NumericCast.hpp>
Matteo Martincighe011d202019-11-28 11:35:47 +000020
Kevin May43a799c2019-02-08 16:31:42 +000021#include <ParserHelper.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000022#include <VerificationHelpers.hpp>
23
Colm Donelan5b5c2222020-09-09 12:48:16 +010024#include <fmt/format.h>
Kevin May43a799c2019-02-08 16:31:42 +000025
Kevin May43a799c2019-02-08 16:31:42 +000026#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000027#include <algorithm>
28#include <limits>
29#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000030
31using armnn::ParseException;
32using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000033using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000034
Derek Lamberti0028d1b2019-02-20 13:57:42 +000035namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000036{
Kevin May43a799c2019-02-08 16:31:42 +000037
// Public facade: all deserialization state and logic live in the pImpl
// (DeserializerImpl); the facade only forwards calls to it.
IDeserializer::IDeserializer() : pDeserializerImpl(new DeserializerImpl()){}

// Out-of-line so the unique_ptr to the (here complete) impl type can be destroyed.
IDeserializer::~IDeserializer() = default;
41
42IDeserializer *IDeserializer::CreateRaw()
43{
44 return new IDeserializer();
45}
46
// Preferred factory: wraps CreateRaw in a smart pointer whose deleter is
// Destroy, so the object is freed on the correct side of the ABI boundary.
IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}
51
// Counterpart to CreateRaw: deletes a deserializer allocated by this library.
void IDeserializer::Destroy(IDeserializer *parser)
{
    delete parser;
}
56
// Deserialize a FlatBuffers-serialized network held in memory.
// Forwards to the pImpl, which does all parsing and network construction.
armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(const std::vector<uint8_t> &binaryContent)
{
    return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
}
61
// Stream overload: reads the serialized network from 'binaryContent'
// (e.g. an open file) and forwards to the pImpl.
armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(std::istream &binaryContent)
{
    return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
}
66
// Look up the binding info (binding id + tensor info) for a named network
// input; 'layerId' selects the subgraph/layer set the input belongs to.
BindingPointInfo IDeserializer::GetNetworkInputBindingInfo(unsigned int layerId, const std::string &name) const
{
    return pDeserializerImpl->GetNetworkInputBindingInfo(layerId, name);
}
71
// Output-side counterpart of GetNetworkInputBindingInfo.
BindingPointInfo IDeserializer::GetNetworkOutputBindingInfo(unsigned int layerId, const std::string &name) const
{
    return pDeserializerImpl->GetNetworkOutputBindingInfo(layerId, name);
}
76
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000077namespace
78{
Kevin May43a799c2019-02-08 16:31:42 +000079
// Sentinel layer index for the implicit (virtual) input/output binding layers,
// which have no entry in the serialized layer list. CheckLayers treats this
// value as always valid.
const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
81
Finn Williams85d36712021-01-26 22:30:06 +000082 void CheckGraph(const GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000083 unsigned int layersIndex,
84 const CheckLocation& location)
85{
86 if (graph->layers() == nullptr)
87 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010088 throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
89 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
90 "layers:{1} at {2}",
91 location.m_Function,
92 layersIndex,
93 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +000094 }
95 else if (layersIndex >= graph->layers()->size())
96 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010097 throw ParseException(fmt::format("{0} was called with an invalid layers index. layers:{1} at {2}",
98 location.m_Function,
99 layersIndex,
100 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000101 }
102}
103
// Validate graph/layer indices before dereferencing: the graph must be
// unpacked (non-null layer list), 'layersIndex' must be in range, and
// 'layerIndex' must refer to an existing layer — or be the VIRTUAL_LAYER_ID
// sentinel used for the implicit input/output binding layers.
// Throws armnn::ParseException on any violation.
void CheckLayers(const GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
                                         "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(fmt::format("{0} was called with an invalid layers index. "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    // NOTE(review): 'graph->layers()[layersIndex]' indexes the FlatBuffers
    // Vector *pointer* (pointer arithmetic), not the vector contents; it is
    // only equivalent to (*graph->layers()).size() when layersIndex == 0.
    // Presumably layersIndex is always 0 here — confirm against callers.
    else if (layerIndex >= graph->layers()[layersIndex].size()
        && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(fmt::format("{0} was called with an invalid layer index. "
                                         "layers:{1} layer:{2} at {3}",
                                         location.m_Function,
                                         layersIndex,
                                         layerIndex,
                                         location.FileLine()));
    }
}
137
Finn Williams85d36712021-01-26 22:30:06 +0000138void CheckTensorPtr(TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000139 const CheckLocation& location)
140{
141 if (rawPtr == nullptr)
142 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100143 throw ParseException(fmt::format("{0} was called with a null tensor pointer. at {1}",
144 location.m_Function,
145 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000146 }
147}
148
Finn Williams85d36712021-01-26 22:30:06 +0000149void CheckConstTensorPtr(ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000150 const CheckLocation& location)
151{
152 if (rawPtr == nullptr)
153 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100154 throw ParseException(fmt::format("{0} was called with a null const tensor pointer. at {1}",
155 location.m_Function,
156 location.FileLine()));
Mike Kellya0766c32019-02-19 17:22:07 +0000157 }
158}
159
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000160void CheckConstTensorSize(const unsigned int constTensorSize,
161 const unsigned int tensorSize,
162 const CheckLocation& location)
163{
164 if (constTensorSize != tensorSize)
165 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100166 throw ParseException(fmt::format("{0} wrong number of components supplied to tensor. at:{1}",
167 location.m_Function,
168 location.FileLine()));
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000169 }
170}
171
// Convenience wrappers that capture the call site via CHECK_LOCATION()
// before delegating to the validation helpers above.
#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
} // anonymous namespace
187
Saoirse Stewart263829c2019-02-19 15:54:14 +0000188bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
189{
190 const unsigned int actualSize = actual.GetNumDimensions();
191 if (actualSize != expected.size())
192 {
193 return false;
194 }
195
196 for (unsigned int i = 0u; i < actualSize; i++)
197 {
198 if (actual[i] != static_cast<unsigned int>(expected[i]))
199 {
200 return false;
201 }
202 }
203
204 return true;
205}
206
// Build the layer-type -> parse-member-function dispatch table. Every slot
// defaults to ParseUnsupportedLayer so unknown/unregistered layer enums fail
// loudly rather than crash; known types are then registered explicitly.
IDeserializer::DeserializerImpl::DeserializerImpl()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &IDeserializer::DeserializerImpl::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer] = &DeserializerImpl::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer] = &DeserializerImpl::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &DeserializerImpl::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer] = &DeserializerImpl::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchMatMulLayer] = &DeserializerImpl::ParseBatchMatMul;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &DeserializerImpl::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &DeserializerImpl::ParseBatchNormalization;
    m_ParserFunctions[Layer_CastLayer] = &DeserializerImpl::ParseCast;
    m_ParserFunctions[Layer_ChannelShuffleLayer] = &DeserializerImpl::ParseChannelShuffle;
    m_ParserFunctions[Layer_ComparisonLayer] = &DeserializerImpl::ParseComparison;
    m_ParserFunctions[Layer_ConcatLayer] = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer] = &DeserializerImpl::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &DeserializerImpl::ParseConvolution2d;
    m_ParserFunctions[Layer_Convolution3dLayer] = &DeserializerImpl::ParseConvolution3d;
    m_ParserFunctions[Layer_DepthToSpaceLayer] = &DeserializerImpl::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &DeserializerImpl::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &DeserializerImpl::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &DeserializerImpl::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &DeserializerImpl::ParseDivision;
    m_ParserFunctions[Layer_ElementwiseUnaryLayer] = &DeserializerImpl::ParseElementwiseUnary;
    m_ParserFunctions[Layer_EqualLayer] = &DeserializerImpl::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &DeserializerImpl::ParseFullyConnected;
    m_ParserFunctions[Layer_FillLayer] = &DeserializerImpl::ParseFill;
    m_ParserFunctions[Layer_FloorLayer] = &DeserializerImpl::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &DeserializerImpl::ParseGather;
    m_ParserFunctions[Layer_GatherNdLayer] = &DeserializerImpl::ParseGatherNd;
    m_ParserFunctions[Layer_GreaterLayer] = &DeserializerImpl::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer] = &DeserializerImpl::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &DeserializerImpl::ParseL2Normalization;
    m_ParserFunctions[Layer_LogicalBinaryLayer] = &DeserializerImpl::ParseLogicalBinary;
    m_ParserFunctions[Layer_LogSoftmaxLayer] = &DeserializerImpl::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer] = &DeserializerImpl::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &DeserializerImpl::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &DeserializerImpl::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &DeserializerImpl::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer] = &DeserializerImpl::ParseMerge;
    // Merger is the deprecated name for Concat; both map to the same parser.
    m_ParserFunctions[Layer_MergerLayer] = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer] = &DeserializerImpl::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &DeserializerImpl::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &DeserializerImpl::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &DeserializerImpl::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &DeserializerImpl::ParsePooling2d;
    m_ParserFunctions[Layer_Pooling3dLayer] = &DeserializerImpl::ParsePooling3d;
    m_ParserFunctions[Layer_PreluLayer] = &DeserializerImpl::ParsePrelu;
    m_ParserFunctions[Layer_QLstmLayer] = &DeserializerImpl::ParseQLstm;
    m_ParserFunctions[Layer_QuantizeLayer] = &DeserializerImpl::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer] = &DeserializerImpl::ParseQuantizedLstm;
    m_ParserFunctions[Layer_RankLayer] = &DeserializerImpl::ParseRank;
    m_ParserFunctions[Layer_ReduceLayer] = &DeserializerImpl::ParseReduce;
    m_ParserFunctions[Layer_ReshapeLayer] = &DeserializerImpl::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &DeserializerImpl::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer] = &DeserializerImpl::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer] = &DeserializerImpl::ParseRsqrt;
    m_ParserFunctions[Layer_ShapeLayer] = &DeserializerImpl::ParseShape;
    m_ParserFunctions[Layer_SliceLayer] = &DeserializerImpl::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer] = &DeserializerImpl::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &DeserializerImpl::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer] = &DeserializerImpl::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer] = &DeserializerImpl::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer] = &DeserializerImpl::ParseStack;
    m_ParserFunctions[Layer_StandInLayer] = &DeserializerImpl::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer] = &DeserializerImpl::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &DeserializerImpl::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer] = &DeserializerImpl::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &DeserializerImpl::ParseTransposeConvolution2d;
    m_ParserFunctions[Layer_TransposeLayer] = &DeserializerImpl::ParseTranspose;
    m_ParserFunctions[Layer_UnidirectionalSequenceLstmLayer] = &DeserializerImpl::ParseUnidirectionalSequenceLstm;
}
281
// Return the LayerBase table shared by every serialized layer type, by
// switching on the FlatBuffers union tag and calling the matching
// layer_as_*() accessor. Input/Output layers are bindable layers whose
// LayerBase is nested one level deeper (hence ->base()->base()).
// Throws armnn::ParseException for Layer_NONE or an unrecognized tag.
LayerBaseRawPtr IDeserializer::DeserializerImpl::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchMatMulLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchMatMulLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_CastLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_CastLayer()->base();
        case Layer::Layer_ChannelShuffleLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->base();
        case Layer::Layer_ComparisonLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_Convolution3dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution3dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_ElementwiseUnaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FillLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FillLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GatherNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherNdLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            // Bindable layer: extra ->base() to reach the common LayerBase.
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_InstanceNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LogicalBinaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer()->base();
        case Layer::Layer_LogSoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            // Deprecated alias of Concat retained for old serialized models.
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            // Bindable layer: extra ->base() to reach the common LayerBase.
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_Pooling3dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_RankLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
        case Layer::Layer_ReduceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReduceLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_ShapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ShapeLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StandInLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_TransposeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
        case Layer::Layer_UnidirectionalSequenceLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(fmt::format("Layer type {} not recognized", layerType));
    }
}
431
// Return the serialized name of the layer at 'index'.
// GetBaseLayer throws for unknown layer types, so 'layer' is expected to be
// non-null here; the assert documents that invariant.
std::string IDeserializer::DeserializerImpl::GetLayerName(const GraphPtr& graph, unsigned int index)
{
    auto layer = GetBaseLayer(graph, index);
    assert(layer);
    return layer->layerName()->str();
}
438
Finn Williams85d36712021-01-26 22:30:06 +0000439int32_t IDeserializer::DeserializerImpl::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000440{
441 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
442
443 if (layerType == Layer::Layer_InputLayer)
444 {
445 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
446 }
447 else if ( layerType == Layer::Layer_OutputLayer )
448 {
449 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
450 }
451 return 0;
452}
453
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000454armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000455{
456 switch (dataLayout)
457 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000458 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000459 return armnn::DataLayout::NHWC;
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100460 case armnnSerializer::DataLayout::DataLayout_NDHWC:
461 return armnn::DataLayout::NDHWC;
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +0100462 case armnnSerializer::DataLayout::DataLayout_NCDHW:
463 return armnn::DataLayout::NCDHW;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000464 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000465 default:
466 return armnn::DataLayout::NCHW;
467 }
468}
469
// Map the serialized ActivationFunction enum onto the runtime
// armnn::ActivationFunction. Unrecognized values silently fall back to
// Sigmoid (the default case below).
armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        case armnnSerializer::ActivationFunction_Elu:
            return armnn::ActivationFunction::Elu;
        case armnnSerializer::ActivationFunction_HardSwish:
            return armnn::ActivationFunction::HardSwish;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}
500
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100501armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
502{
503 switch (function)
504 {
505 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
506 return armnn::ArgMinMaxFunction::Max;
507 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
508 default:
509 return armnn::ArgMinMaxFunction::Min;
510 }
511}
512
// Map the serialized ComparisonOperation enum onto the runtime
// armnn::ComparisonOperation. Unrecognized values fall back to NotEqual
// (the default case below).
armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
            return armnn::ComparisonOperation::Equal;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
            return armnn::ComparisonOperation::Greater;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
            return armnn::ComparisonOperation::GreaterOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
            return armnn::ComparisonOperation::Less;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
            return armnn::ComparisonOperation::LessOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
        default:
            return armnn::ComparisonOperation::NotEqual;
    }
}
532
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +0000533armnn::ReduceOperation ToReduceOperation(armnnSerializer::ReduceOperation operation)
534{
535 switch (operation)
536 {
537 case armnnSerializer::ReduceOperation::ReduceOperation_Sum:
538 return armnn::ReduceOperation::Sum;
539 case armnnSerializer::ReduceOperation::ReduceOperation_Max:
540 return armnn::ReduceOperation::Max;
541 case armnnSerializer::ReduceOperation::ReduceOperation_Mean:
542 return armnn::ReduceOperation::Mean;
543 case armnnSerializer::ReduceOperation::ReduceOperation_Min:
544 return armnn::ReduceOperation::Min;
Teresa Charlin4e3e8312021-08-05 12:34:37 +0100545 case armnnSerializer::ReduceOperation::ReduceOperation_Prod:
546 return armnn::ReduceOperation::Prod;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +0000547 default:
548 return armnn::ReduceOperation::Sum;
549 }
550}
551
James Conroyaba90cd2020-11-06 16:28:18 +0000552armnn::LogicalBinaryOperation ToLogicalBinaryOperation(armnnSerializer::LogicalBinaryOperation operation)
553{
554 switch (operation)
555 {
556 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalAnd:
557 return armnn::LogicalBinaryOperation::LogicalAnd;
558 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalOr:
559 return armnn::LogicalBinaryOperation::LogicalOr;
560 default:
561 throw armnn::InvalidArgumentException("Logical Binary operation unknown");
562 }
563}
564
josh minor4a3c6102020-01-06 16:40:46 -0600565armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
566{
567 switch (operation)
568 {
569 case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
570 return armnn::UnaryOperation::Abs;
571 case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
572 return armnn::UnaryOperation::Rsqrt;
573 case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
574 return armnn::UnaryOperation::Sqrt;
575 case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
576 return armnn::UnaryOperation::Exp;
577 case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
578 return armnn::UnaryOperation::Neg;
James Conroyaba90cd2020-11-06 16:28:18 +0000579 case armnnSerializer::UnaryOperation::UnaryOperation_LogicalNot:
580 return armnn::UnaryOperation::LogicalNot;
Teresa Charlin50de4fa2021-05-31 18:47:33 +0100581 case armnnSerializer::UnaryOperation::UnaryOperation_Log:
582 return armnn::UnaryOperation::Log;
583 case armnnSerializer::UnaryOperation::UnaryOperation_Sin:
584 return armnn::UnaryOperation::Sin;
josh minor4a3c6102020-01-06 16:40:46 -0600585 default:
586 throw armnn::InvalidArgumentException("Unary operation unknown");
587 }
588}
589
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +0100590armnn::PaddingMode ToPaddingMode(armnnSerializer::PaddingMode paddingMode)
591{
592 switch (paddingMode)
593 {
594 case armnnSerializer::PaddingMode::PaddingMode_Reflect:
595 return armnn::PaddingMode::Reflect;
596 case armnnSerializer::PaddingMode::PaddingMode_Symmetric:
597 return armnn::PaddingMode::Symmetric;
598 default:
599 return armnn::PaddingMode::Constant;
600 }
601}
602
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100603armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
604{
605 switch (method)
606 {
607 case armnnSerializer::ResizeMethod_NearestNeighbor:
608 return armnn::ResizeMethod::NearestNeighbor;
609 case armnnSerializer::ResizeMethod_Bilinear:
Aron Virginas-Tar3c9b2702019-10-31 13:45:16 +0000610 return armnn::ResizeMethod::Bilinear;
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100611 default:
612 return armnn::ResizeMethod::NearestNeighbor;
613 }
614}
615
Finn Williams85d36712021-01-26 22:30:06 +0000616armnn::TensorInfo ToTensorInfo(TensorRawPtr tensorPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000617{
618 armnn::DataType type;
619 CHECK_TENSOR_PTR(tensorPtr);
620
621 switch (tensorPtr->dataType())
622 {
Francis Murtaghddb1d062020-03-10 13:51:45 +0000623 case DataType_QAsymmS8:
624 type = armnn::DataType::QAsymmS8;
625 break;
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000626 case DataType_QSymmS8:
627 type = armnn::DataType::QSymmS8;
628 break;
Kevin May43a799c2019-02-08 16:31:42 +0000629 case DataType_QuantisedAsymm8:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000630 case DataType_QAsymmU8:
631 type = armnn::DataType::QAsymmU8;
Kevin May43a799c2019-02-08 16:31:42 +0000632 break;
Derek Lambertif90c56d2020-01-10 17:14:08 +0000633 case DataType_QSymmS16:
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000634 case DataType_QuantisedSymm16:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000635 type = armnn::DataType::QSymmS16;
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000636 break;
Mike Kellya0766c32019-02-19 17:22:07 +0000637 case DataType_Signed32:
638 type = armnn::DataType::Signed32;
639 break;
Mike Kelly1f140f72021-04-06 12:25:55 +0100640 case DataType_Signed64:
641 type = armnn::DataType::Signed64;
642 break;
Kevin May43a799c2019-02-08 16:31:42 +0000643 case DataType_Float32:
644 type = armnn::DataType::Float32;
645 break;
646 case DataType_Float16:
647 type = armnn::DataType::Float16;
648 break;
649 case DataType_Boolean:
650 type = armnn::DataType::Boolean;
651 break;
652 default:
653 {
654 CheckLocation location = CHECK_LOCATION();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100655 throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
656 tensorPtr->dataType(),
657 EnumNameDataType(tensorPtr->dataType()),
658 location.AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000659 }
660 }
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000661
Colm Donelan800b2812021-02-12 12:43:35 +0000662 float quantizationScale = tensorPtr->quantizationScale();
663 int32_t quantizationOffset = tensorPtr->quantizationOffset();
664
Finn Williams2605b232020-06-10 15:53:46 +0100665 if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::Scalar))
666 {
Colm Donelan800b2812021-02-12 12:43:35 +0000667 return armnn::TensorInfo(TensorShape{armnn::Dimensionality::Scalar},
Finn Williams2605b232020-06-10 15:53:46 +0100668 type,
669 quantizationScale,
670 quantizationOffset);
671 }
Colm Donelan800b2812021-02-12 12:43:35 +0000672 else if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::NotSpecified))
673 {
674 armnn::TensorInfo result(TensorShape{Dimensionality::NotSpecified},
675 type,
676 quantizationScale,
677 quantizationOffset);
678 return result;
679 }
Kevin May43a799c2019-02-08 16:31:42 +0000680
681 auto dimensions = tensorPtr->dimensions();
682 unsigned int size = dimensions->size();
683 std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
Colm Donelan800b2812021-02-12 12:43:35 +0000684 bool dimensionsSpecificity[armnn::MaxNumOfTensorDimensions];
685 std::fill_n(dimensionsSpecificity, armnn::MaxNumOfTensorDimensions, true);
686 // For backwards compatibility check if the dimensionSpecificity vector is present first.
687 // The default is to have dimensionSpecificity set to all true's anyway.
688 if (tensorPtr->dimensionSpecificity() != nullptr)
689 {
690 auto dimensionSpecificity = tensorPtr->dimensionSpecificity();
691 size = dimensionSpecificity->size();
692 for (unsigned int i = 0; i < size; ++i)
693 {
694 dimensionsSpecificity[i] = dimensionSpecificity->Get(i);
695 }
696 }
697 // Construct a TensorShape
698 TensorShape shape(size, outputDims.data(), dimensionsSpecificity);
Kevin May43a799c2019-02-08 16:31:42 +0000699
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000700 auto quantizationScales = tensorPtr->quantizationScales();
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000701 if (quantizationScales)
702 {
703 unsigned int quantizationScalesSize = quantizationScales->size();
704 std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
705 unsigned int quantizationDim = tensorPtr->quantizationDim();
Colm Donelan800b2812021-02-12 12:43:35 +0000706 armnn::TensorInfo result(shape,
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000707 type,
708 scales,
709 quantizationDim);
710 return result;
711 }
712
Kevin May43a799c2019-02-08 16:31:42 +0000713 // two statements (on purpose) for easier debugging:
Colm Donelan800b2812021-02-12 12:43:35 +0000714 armnn::TensorInfo result(shape,
Kevin May43a799c2019-02-08 16:31:42 +0000715 type,
716 quantizationScale,
717 quantizationOffset);
Colm Donelan800b2812021-02-12 12:43:35 +0000718
Kevin May43a799c2019-02-08 16:31:42 +0000719 return result;
720}
721
Finn Williams85d36712021-01-26 22:30:06 +0000722armnn::ConstTensor ToConstTensor(ConstTensorRawPtr constTensorPtr)
Mike Kellya0766c32019-02-19 17:22:07 +0000723{
724 CHECK_CONST_TENSOR_PTR(constTensorPtr);
725 armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());
Matthew Sloyan81beae32021-07-13 19:46:11 +0100726 tensorInfo.SetConstant();
Mike Kellya0766c32019-02-19 17:22:07 +0000727
728 switch (constTensorPtr->data_type())
729 {
730 case ConstTensorData_ByteData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000731 {
732 auto byteData = constTensorPtr->data_as_ByteData()->data();
733 CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
734 return armnn::ConstTensor(tensorInfo, byteData->data());
735 }
Mike Kellya0766c32019-02-19 17:22:07 +0000736 case ConstTensorData_ShortData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000737 {
738 auto shortData = constTensorPtr->data_as_ShortData()->data();
739 CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
740 return armnn::ConstTensor(tensorInfo, shortData->data());
741 }
Mike Kellya0766c32019-02-19 17:22:07 +0000742 case ConstTensorData_IntData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000743 {
744 auto intData = constTensorPtr->data_as_IntData()->data();
745 CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
746 return armnn::ConstTensor(tensorInfo, intData->data());
747 }
Mike Kellya0766c32019-02-19 17:22:07 +0000748 case ConstTensorData_LongData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000749 {
750 auto longData = constTensorPtr->data_as_LongData()->data();
751 CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
752 return armnn::ConstTensor(tensorInfo, longData->data());
753 }
Mike Kellya0766c32019-02-19 17:22:07 +0000754 default:
755 {
756 CheckLocation location = CHECK_LOCATION();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100757 throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
758 constTensorPtr->data_type(),
759 EnumNameConstTensorData(constTensorPtr->data_type()),
760 location.AsString()));
Mike Kellya0766c32019-02-19 17:22:07 +0000761 }
762 }
763}
764
Finn Williams85d36712021-01-26 22:30:06 +0000765TensorRawPtrVector IDeserializer::DeserializerImpl::GetInputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000766{
767 CHECK_LAYERS(graphPtr, 0, layerIndex);
768 auto layer = GetBaseLayer(graphPtr, layerIndex);
769 const auto& numInputs = layer->inputSlots()->size();
770
771 TensorRawPtrVector result(numInputs);
772
773 for (unsigned int i=0; i<numInputs; ++i)
774 {
775 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
776 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
777 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
778 }
779 return result;
780}
781
Finn Williams85d36712021-01-26 22:30:06 +0000782TensorRawPtrVector IDeserializer::DeserializerImpl::GetOutputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000783{
784 CHECK_LAYERS(graphPtr, 0, layerIndex);
785 auto layer = GetBaseLayer(graphPtr, layerIndex);
786 const auto& numOutputs = layer->outputSlots()->size();
787
788 TensorRawPtrVector result(numOutputs);
789
790 for (unsigned int i=0; i<numOutputs; ++i)
791 {
792 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
793 }
794 return result;
795}
796
Finn Williams85d36712021-01-26 22:30:06 +0000797void IDeserializer::DeserializerImpl::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000798{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000799 CHECK_LAYERS(graph, 0, layerIndex);
800 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100801 throw ParseException(fmt::format("Layer not supported. layerIndex: {0} "
802 "layerName: {1} / {2}",
803 layerIndex,
804 layerName,
805 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000806}
807
Finn Williams85d36712021-01-26 22:30:06 +0000808void IDeserializer::DeserializerImpl::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000809{
810 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000811 m_InputBindings.clear();
812 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000813}
814
Kevin May43a799c2019-02-08 16:31:42 +0000815
Finn Williams85d36712021-01-26 22:30:06 +0000816INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000817{
818 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000819 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
820 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000821}
822
Finn Williams85d36712021-01-26 22:30:06 +0000823armnn::INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000824{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000825 ResetParser();
Simon Obutedb5804e2022-04-14 15:49:52 +0100826 if (binaryContent.fail()) {
827 ARMNN_LOG(error) << (std::string("Cannot read input"));
828 throw ParseException("Unable to read Input stream data");
829 }
830 binaryContent.seekg(0, std::ios::end);
831 const std::streamoff size = binaryContent.tellg();
832 std::vector<char> content(static_cast<size_t>(size));
833 binaryContent.seekg(0);
834 binaryContent.read(content.data(), static_cast<std::streamsize>(size));
835 GraphPtr graph = LoadGraphFromBinary(reinterpret_cast<uint8_t*>(content.data()), static_cast<size_t>(size));
Derek Lamberti8ddae332019-02-21 16:29:43 +0000836 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000837}
838
Finn Williams85d36712021-01-26 22:30:06 +0000839GraphPtr IDeserializer::DeserializerImpl::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000840{
841 if (binaryContent == nullptr)
842 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100843 throw InvalidArgumentException(fmt::format("Invalid (null) binary content {}",
844 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000845 }
846 flatbuffers::Verifier verifier(binaryContent, len);
847 if (verifier.VerifyBuffer<SerializedGraph>() == false)
848 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100849 throw ParseException(fmt::format("Buffer doesn't conform to the expected Armnn "
850 "flatbuffers format. size:{0} {1}",
851 len,
852 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000853 }
854 return GetSerializedGraph(binaryContent);
855}
856
INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromGraph(GraphPtr graph)
{
    // Translate a verified flatbuffers graph into an armnn::INetwork:
    //  1) run the registered parser for every non-Input/Output layer,
    //  2) add the bound input and output layers,
    //  3) wire up all connections recorded in m_GraphConnections.
    m_Network = INetwork::Create();
    ARMNN_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        // Input/Output layers are handled separately by SetupInputLayers /
        // SetupOutputLayers below.
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            // Only connect output slots that have at least one recorded consumer.
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    // Hand ownership of the built network to the caller.
    return std::move(m_Network);
}
897
Finn Williams85d36712021-01-26 22:30:06 +0000898BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000899 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000900{
Jan Eilers8eb25602020-03-09 12:13:48 +0000901 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000902 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000903 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000904 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000905 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000906 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000907 }
908 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100909 throw ParseException(fmt::format("No input binding found for layer:{0} / {1}",
910 name,
911 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000912}
913
Finn Williams85d36712021-01-26 22:30:06 +0000914BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000915 const std::string& name) const
916{
Jan Eilers8eb25602020-03-09 12:13:48 +0000917 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000918 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000919 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000920 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000921 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000922 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000923 }
924 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100925 throw ParseException(fmt::format("No output binding found for layer:{0} / {1}",
926 name,
927 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000928}
929
Finn Williams85d36712021-01-26 22:30:06 +0000930unsigned int IDeserializer::DeserializerImpl::GetInputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000931{
932 for (unsigned int i = 0; i < graph->layers()->size(); i++)
933 {
934 auto layer = graph->layers()->Get(i);
935 if (layer->layer_type() == Layer::Layer_InputLayer)
936 {
937 auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
938 if (layerBindingId == targetId)
939 {
940 return i;
941 }
942 }
943 }
944 throw ParseException("Input layer with given layerBindingId not found");
945}
946
Finn Williams85d36712021-01-26 22:30:06 +0000947unsigned int IDeserializer::DeserializerImpl::GetOutputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000948{
949 for (unsigned int i = 0; i < graph->layers()->size(); i++)
950 {
951 auto layer = graph->layers()->Get(i);
952 if (layer->layer_type() == Layer::Layer_OutputLayer)
953 {
954 auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
955 if (layerBindingId == targetId)
956 {
957 return i;
958 }
959 }
960 }
961 throw ParseException("Output layer with given layerBindingId not found");
962}
963
Finn Williams85d36712021-01-26 22:30:06 +0000964unsigned int IDeserializer::DeserializerImpl::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100965{
966 for (unsigned int i = 0; i < graph->layers()->size(); i++)
967 {
968 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
969 if (layer->index() == targetIndex)
970 {
971 return i;
972 }
973 }
974 throw ParseException("Layer with given index not found");
975}
976
Finn Williams85d36712021-01-26 22:30:06 +0000977IDeserializer::DeserializerImpl::FeatureVersions IDeserializer::DeserializerImpl::GetFeatureVersions(GraphPtr graph)
Tee Jungaa920c52019-11-05 10:48:25 +0000978{
Finn Williams85d36712021-01-26 22:30:06 +0000979 IDeserializer::DeserializerImpl::FeatureVersions versions;
Tee Jungaa920c52019-11-05 10:48:25 +0000980
981 if (graph->featureVersions())
982 {
983 versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
Jan Eilers53ef7952021-06-02 12:01:25 +0100984 versions.m_WeightsLayoutScheme = graph->featureVersions()->weightsLayoutScheme();
Matthew Sloyan81beae32021-07-13 19:46:11 +0100985 versions.m_ConstTensorsAsInputs = graph->featureVersions()->constantTensorsAsInputs();
Tee Jungaa920c52019-11-05 10:48:25 +0000986 }
987
988 return versions;
989}
990
void IDeserializer::DeserializerImpl::SetupInputLayers(GraphPtr graph)
{
    // Create an armnn InputLayer for every serialized input id, record its
    // binding info (binding id + TensorInfo) in m_InputBindings and register
    // its output slots for later connection.
    CHECK_GRAPH(graph, 0);
    const unsigned int numInputs = graph->inputIds()->size();
    m_InputBindings.clear();
    m_InputBindings.reserve(numInputs);

    for (unsigned int i = 0; i < numInputs; i++)
    {
        unsigned int inputLayerIndex = 0xFFFFFFFF;
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            // Legacy scheme (0): the serialized id is the layer's index property.
            const unsigned int inputId = armnn::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
            inputLayerIndex = GetLayerIndexInVector(graph, inputId);
        }
        else
        {
            // Newer scheme: the serialized id is the user-assigned binding id.
            const int inputId = graph->inputIds()->Get(i);
            inputLayerIndex = GetInputLayerInVector(graph, inputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");

        IConnectableLayer* inputLayer =
            m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());

        // Input layers have exactly one output slot; give it the serialized TensorInfo.
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
        inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
        RegisterOutputSlots(graph, inputLayerIndex, inputLayer);

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
1029
void IDeserializer::DeserializerImpl::SetupOutputLayers(GraphPtr graph)
{
    // Create an armnn OutputLayer for every serialized output id and record
    // its binding info in m_OutputBindings; the TensorInfo is taken from the
    // source layer's output slot that feeds this output.
    CHECK_GRAPH(graph, 0);
    const unsigned int numOutputs = graph->outputIds()->size();
    m_OutputBindings.clear();
    m_OutputBindings.reserve(numOutputs);

    for (unsigned int i = 0; i < numOutputs; i++)
    {
        unsigned int outputLayerIndex = 0xFFFFFFFF;
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            // Legacy scheme (0): the serialized id is the layer's index property.
            const unsigned int outputId = armnn::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
            outputLayerIndex = GetLayerIndexInVector(graph, outputId);
        }
        else
        {
            // Newer scheme: the serialized id is the user-assigned binding id.
            const int outputId = graph->outputIds()->Get(i);
            outputLayerIndex = GetOutputLayerInVector(graph, outputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Output has no name.");

        IConnectableLayer* outputLayer =
            m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());

        RegisterInputSlots(graph, outputLayerIndex, outputLayer);
        unsigned int sourceLayerIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
        // NOTE(review): the slot index is also passed through GetLayerIndexInVector
        // here, which maps layer indices rather than slot indices - looks odd;
        // verify against the serializer's connection encoding.
        unsigned int outputSlotIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->outputSlotIndex());
        LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(
            sourceBaseLayer->outputSlots()->Get(outputSlotIndex)->tensorInfo());
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
1072
void IDeserializer::DeserializerImpl::RegisterOutputSlots(GraphPtr graph,
                                                          uint32_t layerIndex,
                                                          IConnectableLayer* layer)
{
    // Record every output slot of the freshly-created armnn layer in
    // m_GraphConnections, keyed by the serialized layer's index property,
    // so CreateNetworkFromGraph can wire the connections afterwards.
    // Throws ParseException when the serialized and created slot counts differ.
    CHECK_LAYERS(graph, 0, layerIndex);
    ARMNN_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
    if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
    {
        throw ParseException(fmt::format("The number of outputslots ({0}) does not match the number expected ({1})"
                                         " for layer index: {2} {3}",
                                         baseLayer->outputSlots()->size(),
                                         layer->GetNumOutputSlots(),
                                         layerIndex,
                                         CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
    {
        const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
        armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
        // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
        RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
    }
}
1098
void IDeserializer::DeserializerImpl::RegisterInputSlots(GraphPtr graph,
                                                         uint32_t layerIndex,
                                                         armnn::IConnectableLayer* layer,
                                                         std::vector<unsigned int> ignoreSlots)
{
    // Record which serialized connection feeds each input slot of the
    // freshly-created armnn layer; actual wiring happens later. Slots listed
    // in ignoreSlots (e.g. slots fed by constant-tensor inputs that are
    // handled elsewhere) are skipped.
    // Throws ParseException when the serialized and expected slot counts differ.
    CHECK_LAYERS(graph, 0, layerIndex);
    ARMNN_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);

    if (baseLayer->inputSlots()->size() != (layer->GetNumInputSlots() - ignoreSlots.size()))
    {
        throw ParseException(fmt::format("The number of inputslots ({0}) does not match the number expected ({1})"
                                         " for layer index:{2} {3}",
                                         baseLayer->inputSlots()->size(),
                                         layer->GetNumInputSlots(),
                                         layerIndex,
                                         CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
    {
        // Check if slot should be ignored.
        if (std::find(ignoreSlots.begin(), ignoreSlots.end(), i) == ignoreSlots.end())
        {
            auto fbInputSlot = baseLayer->inputSlots()->Get(i);
            auto fbConnection = fbInputSlot->connection();
            armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
            RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
        }
    }
}
1130
Finn Williams85d36712021-01-26 22:30:06 +00001131void IDeserializer::DeserializerImpl::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001132 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001133 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +00001134{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001135 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +00001136 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001137 m_GraphConnections[sourceLayerIndex] = Connections();
1138 }
1139
1140 Connections& connections = m_GraphConnections[sourceLayerIndex];
1141 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
1142 {
1143 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +00001144 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001145 else
1146 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001147 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001148 }
1149}
Kevin May43a799c2019-02-08 16:31:42 +00001150
Finn Williams85d36712021-01-26 22:30:06 +00001151void IDeserializer::DeserializerImpl::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001152 uint32_t outputSlotIndex,
1153 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001154{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001155 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1156 {
1157 m_GraphConnections[sourceLayerIndex] = Connections();
1158 }
1159
1160 Connections& connections = m_GraphConnections[sourceLayerIndex];
1161 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1162 {
1163 throw ParseException("Same output slot index processed twice");
1164 }
1165
1166 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +00001167}
1168
Finn Williams85d36712021-01-26 22:30:06 +00001169void IDeserializer::DeserializerImpl::ParseAbs(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001170{
1171 CHECK_LAYERS(graph, 0, layerIndex);
1172 auto inputs = GetInputs(graph, layerIndex);
1173 CHECK_LOCATION();
1174 CHECK_VALID_SIZE(inputs.size(), 1);
1175
1176 auto outputs = GetOutputs(graph, layerIndex);
1177 CHECK_VALID_SIZE(outputs.size(), 1);
1178
1179 auto layerName = GetLayerName(graph, layerIndex);
1180
josh minor4a3c6102020-01-06 16:40:46 -06001181 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
1182 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001183 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1184 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1185
1186 RegisterInputSlots(graph, layerIndex, layer);
1187 RegisterOutputSlots(graph, layerIndex, layer);
1188}
1189
Finn Williams85d36712021-01-26 22:30:06 +00001190void IDeserializer::DeserializerImpl::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +00001191{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001192 CHECK_LAYERS(graph, 0, layerIndex);
1193 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001194 CHECK_LOCATION();
1195 CHECK_VALID_SIZE(inputs.size(), 1);
1196
Derek Lamberti8ddae332019-02-21 16:29:43 +00001197 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001198 CHECK_VALID_SIZE(outputs.size(), 1);
1199
Derek Lamberti8ddae332019-02-21 16:29:43 +00001200 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001201 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001202 auto serializerDescriptor = serializerLayer->descriptor();
1203
1204 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001205 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +00001206 descriptor.m_A = serializerDescriptor->a();
1207 descriptor.m_B = serializerDescriptor->b();
1208
1209 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
1210 layerName.c_str());
1211 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1212 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1213
Derek Lamberti8ddae332019-02-21 16:29:43 +00001214 RegisterInputSlots(graph, layerIndex, layer);
1215 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +00001216}
1217
Finn Williams85d36712021-01-26 22:30:06 +00001218void IDeserializer::DeserializerImpl::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001219{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001220 CHECK_LAYERS(graph, 0, layerIndex);
1221 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001222 CHECK_LOCATION();
1223 CHECK_VALID_SIZE(inputs.size(), 2);
1224
Derek Lamberti8ddae332019-02-21 16:29:43 +00001225 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001226 CHECK_VALID_SIZE(outputs.size(), 1);
1227
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001228 auto layerName = GetLayerName(graph, layerIndex);
1229 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001230
1231 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1232 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1233
Derek Lamberti8ddae332019-02-21 16:29:43 +00001234 RegisterInputSlots(graph, layerIndex, layer);
1235 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001236}
1237
Finn Williams85d36712021-01-26 22:30:06 +00001238void IDeserializer::DeserializerImpl::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001239{
1240 CHECK_LAYERS(graph, 0, layerIndex);
1241 auto inputs = GetInputs(graph, layerIndex);
1242 CHECK_LOCATION();
1243 CHECK_VALID_SIZE(inputs.size(), 1);
1244
1245 auto outputs = GetOutputs(graph, layerIndex);
1246 CHECK_VALID_SIZE(outputs.size(), 1);
1247
1248 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1249 auto serializerDescriptor = serializerLayer->descriptor();
1250
1251 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001252 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001253 descriptor.m_Axis = serializerDescriptor->axis();
1254 auto layerName = GetLayerName(graph, layerIndex);
1255 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1256
1257 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1258 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1259
1260 RegisterInputSlots(graph, layerIndex, layer);
1261 RegisterOutputSlots(graph, layerIndex, layer);
1262}
1263
Samuel Yapb9e6b5c2022-08-19 11:14:38 +01001264void IDeserializer::DeserializerImpl::ParseBatchMatMul(GraphPtr graph, unsigned int layerIndex)
1265{
1266 CHECK_LAYERS(graph, 0, layerIndex);
1267
1268 auto inputs = GetInputs(graph, layerIndex);
1269 CHECK_LOCATION();
1270 CHECK_VALID_SIZE(inputs.size(), 2);
1271
1272 auto outputs = GetOutputs(graph, layerIndex);
1273 CHECK_VALID_SIZE(outputs.size(), 1);
1274
1275 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchMatMulLayer();
1276 auto serializerDescriptor = serializerLayer->descriptor();
1277
1278 armnn::BatchMatMulDescriptor descriptor(serializerDescriptor->transposeX(),
1279 serializerDescriptor->transposeY(),
1280 serializerDescriptor->adjointX(),
1281 serializerDescriptor->adjointY(),
1282 ToDataLayout(serializerDescriptor->dataLayoutX()),
1283 ToDataLayout(serializerDescriptor->dataLayoutY()));
1284
1285 auto layerName = GetLayerName(graph, layerIndex);
1286 IConnectableLayer* layer = m_Network->AddBatchMatMulLayer(descriptor, layerName.c_str());
1287
1288 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1289 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1290
1291 RegisterInputSlots(graph, layerIndex, layer);
1292 RegisterOutputSlots(graph, layerIndex, layer);
1293}
1294
Finn Williams85d36712021-01-26 22:30:06 +00001295void IDeserializer::DeserializerImpl::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001296{
1297 CHECK_LAYERS(graph, 0, layerIndex);
1298
Finn Williams85d36712021-01-26 22:30:06 +00001299 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001300 CHECK_VALID_SIZE(inputs.size(), 1);
1301
Finn Williams85d36712021-01-26 22:30:06 +00001302 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001303 CHECK_VALID_SIZE(outputs.size(), 1);
1304
1305 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1306 auto flatBufferCrops = flatBufferDescriptor->crops();
1307 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1308
Mike Kelly51b8c312022-05-24 11:34:02 +01001309 if (flatBufferCrops->size() % 2 != 0)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001310 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001311 throw ParseException(fmt::format("The size of crops must be divisible by 2 {}", CHECK_LOCATION().AsString()));
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001312 }
1313
1314 std::vector<std::pair<unsigned int, unsigned int>> crops;
Mike Kelly51b8c312022-05-24 11:34:02 +01001315 crops.reserve(flatBufferCrops->size() / 2);
1316 for (unsigned int i = 0; i < flatBufferCrops->size() - 1; i += 2)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001317 {
1318 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1319 }
1320
1321 armnn::BatchToSpaceNdDescriptor descriptor;
1322 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1323 descriptor.m_BlockShape =
1324 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1325 descriptor.m_Crops = crops;
1326
1327 auto layerName = GetLayerName(graph, layerIndex);
1328 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1329
1330 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1331 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1332
1333 RegisterInputSlots(graph, layerIndex, layer);
1334 RegisterOutputSlots(graph, layerIndex, layer);
1335}
1336
Finn Williams85d36712021-01-26 22:30:06 +00001337void IDeserializer::DeserializerImpl::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
ruoyan018e7fa232019-02-28 15:09:07 +00001338{
1339 CHECK_LAYERS(graph, 0, layerIndex);
1340
1341 auto inputs = GetInputs(graph, layerIndex);
1342 CHECK_VALID_SIZE(inputs.size(), 1);
1343
1344 auto outputs = GetOutputs(graph, layerIndex);
1345 CHECK_VALID_SIZE(outputs.size(), 1);
1346 auto outputInfo = ToTensorInfo(outputs[0]);
1347
ruoyan015c7ab052019-03-04 14:48:02 +00001348 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001349
1350 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1351 auto serializerDescriptor = serializerLayer->descriptor();
1352
1353 armnn::BatchNormalizationDescriptor descriptor;
1354 descriptor.m_Eps = serializerDescriptor->eps();
1355 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1356
1357 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1358 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1359 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1360 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1361
1362 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1363 mean,
1364 variance,
1365 beta,
1366 gamma,
1367 layerName.c_str());
1368 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1369
1370 RegisterInputSlots(graph, layerIndex, layer);
1371 RegisterOutputSlots(graph, layerIndex, layer);
1372}
1373
mathad01b392e982021-04-07 12:07:30 +01001374void IDeserializer::DeserializerImpl::ParseCast(GraphPtr graph, unsigned int layerIndex)
1375{
1376 CHECK_LAYERS(graph, 0, layerIndex);
1377 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1378 CHECK_LOCATION();
1379 CHECK_VALID_SIZE(inputs.size(), 1);
1380
1381 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1382 CHECK_VALID_SIZE(outputs.size(), 1);
1383
1384 auto layerName = GetLayerName(graph, layerIndex);
1385
1386 IConnectableLayer* layer = m_Network->AddCastLayer(layerName.c_str());
1387
1388 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1389 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1390
1391 RegisterInputSlots(graph, layerIndex, layer);
1392 RegisterOutputSlots(graph, layerIndex, layer);
1393}
1394
// Deserializes a Constant layer. Constant layers have no inputs, so only output
// slots are registered.
//
// NOTE(review): the legacy branch below applies a depthwise-weights permutation to
// EVERY constant tensor in a pre-weights-layout-scheme model — presumably such
// models only used Constant layers as depthwise weights; confirm against the
// serializer history before changing.
void IDeserializer::DeserializerImpl::ParseConstant(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
    auto serializerInput = serializerLayer->input();

    armnn::ConstTensor input = ToConstTensor(serializerInput);
    IConnectableLayer* layer;

    // Required for when Constant Layer is used as an inputs to DepthwiseConvolution2d Layer.
    // Running a model that was created before weights layout scheme version was added to our flatbuffers
    // file ensuring older models can still be read and executed. featureVersion weights layout scheme 1
    // indicates a change in the depthwise weights layout within ArmNN from [M,I,H,W] --> [1,H,W,I*M]
    if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
    {
        // Permute weights [ H, W, M, I ] --> [ 1, H, W, I*M ]
        // Step1: [ M, I, H, W ] --> [ H, W, I, M]
        PermutationVector permutationVector = { 3, 2, 0, 1 };
        armnn::TensorInfo weightsInfo = input.GetInfo();
        // Scratch buffer to receive the permuted element data.
        std::unique_ptr<unsigned char[]> permuteBuffer(new unsigned char[weightsInfo.GetNumBytes()]);
        weightsInfo = armnnUtils::Permuted(weightsInfo, permutationVector);
        armnnUtils::Permute(weightsInfo.GetShape(), permutationVector,
                            input.GetMemoryArea(), permuteBuffer.get(),
                            GetDataTypeSize(weightsInfo.GetDataType()));

        // Step2: Reshape [ H, W, I, M] --> [ 1, H, W, I*M ]
        auto weightsShape = weightsInfo.GetShape();
        weightsInfo.SetShape({1,
                              weightsShape[0],
                              weightsShape[1],
                              weightsShape[2]*weightsShape[3]});
        weightsInfo.SetConstant(true);

        // NOTE(review): weightsPermuted points into permuteBuffer, which dies at the end
        // of this scope — presumably AddConstantLayer copies the tensor data; confirm.
        armnn::ConstTensor weightsPermuted(weightsInfo, permuteBuffer.get());

        layer = m_Network->AddConstantLayer(weightsPermuted, layerName.c_str());

        layer->GetOutputSlot(0).SetTensorInfo(weightsPermuted.GetInfo());

        RegisterOutputSlots(graph, layerIndex, layer);

        // Early return: output slots are already registered for the permuted tensor.
        return;
    }
    else
    {
        // Current models: keep the tensor as serialized, but mark the output info constant.
        layer = m_Network->AddConstantLayer(input, layerName.c_str());

        armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
        outputTensorInfo.SetConstant(true);
        layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
    }

    RegisterOutputSlots(graph, layerIndex, layer);
}
1456
Finn Williams85d36712021-01-26 22:30:06 +00001457void IDeserializer::DeserializerImpl::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001458{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001459 CHECK_LAYERS(graph, 0, layerIndex);
1460 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001461 CHECK_LOCATION();
Mike Kellya0766c32019-02-19 17:22:07 +00001462
Derek Lamberti8ddae332019-02-21 16:29:43 +00001463 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001464 CHECK_VALID_SIZE(outputs.size(), 1);
1465
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001466 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
1467
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001468 auto layerName = GetLayerName(graph, layerIndex);
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001469 auto flatbufferDescriptor = flatBufferLayer->descriptor();
Mike Kellya0766c32019-02-19 17:22:07 +00001470
1471 armnn::Convolution2dDescriptor descriptor;
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001472 descriptor.m_PadLeft = flatbufferDescriptor->padLeft();
1473 descriptor.m_PadRight = flatbufferDescriptor->padRight();
1474 descriptor.m_PadTop = flatbufferDescriptor->padTop();
1475 descriptor.m_PadBottom = flatbufferDescriptor->padBottom();
1476 descriptor.m_StrideX = flatbufferDescriptor->strideX();
1477 descriptor.m_StrideY = flatbufferDescriptor->strideY();;
1478 descriptor.m_DilationX = flatbufferDescriptor->dilationX();
1479 descriptor.m_DilationY = flatbufferDescriptor->dilationY();;
1480 descriptor.m_BiasEnabled = flatbufferDescriptor->biasEnabled();;
1481 descriptor.m_DataLayout = ToDataLayout(flatbufferDescriptor->dataLayout());
Mike Kellya0766c32019-02-19 17:22:07 +00001482
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001483 armnn::IConnectableLayer* layer;
1484 std::vector<unsigned int> ignoreSlots {};
Mike Kellya0766c32019-02-19 17:22:07 +00001485
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001486 armnn::ConstTensor biasTensor;
1487 // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
1488 // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
1489 if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
Mike Kellya0766c32019-02-19 17:22:07 +00001490 {
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001491 // If the model stores weights and biases as members of the layer we have to read them from there
1492 // but add them to their own ConstantLayer for compatibility
1493 CHECK_VALID_SIZE(inputs.size(), 1);
1494
1495 layer = m_Network->AddConvolution2dLayer(descriptor,
1496 layerName.c_str());
1497
1498 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1499 auto weightsLayer = m_Network->AddConstantLayer(weightsTensor);
1500 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
1501 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsTensor.GetInfo());
1502 ignoreSlots.emplace_back(1u);
1503
1504 if (descriptor.m_BiasEnabled)
1505 {
1506 biasTensor = ToConstTensor(flatBufferLayer->biases());
1507 auto biasLayer = m_Network->AddConstantLayer(biasTensor);
1508 biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
1509 biasLayer->GetOutputSlot(0).SetTensorInfo(biasTensor.GetInfo());
1510 ignoreSlots.emplace_back(2u);
1511 }
Mike Kellya0766c32019-02-19 17:22:07 +00001512 }
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001513 else
1514 {
1515 layer = m_Network->AddConvolution2dLayer(descriptor,
1516 layerName.c_str());
1517 uint32_t numInputs = descriptor.GetNumInputs();
1518 CHECK_VALID_SIZE(inputs.size(), numInputs);
1519 }
1520
Mike Kellya0766c32019-02-19 17:22:07 +00001521 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1522 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1523
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001524 RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
Derek Lamberti8ddae332019-02-21 16:29:43 +00001525 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001526}
1527
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001528void IDeserializer::DeserializerImpl::ParseConvolution3d(GraphPtr graph, unsigned int layerIndex)
1529{
1530 CHECK_LAYERS(graph, 0, layerIndex);
1531 auto inputs = GetInputs(graph, layerIndex);
1532 CHECK_LOCATION();
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001533
1534 auto outputs = GetOutputs(graph, layerIndex);
1535 CHECK_VALID_SIZE(outputs.size(), 1);
1536
1537 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution3dLayer();
1538 auto layerName = GetLayerName(graph, layerIndex);
1539 auto serializerDescriptor = serializerLayer->descriptor();
1540
1541 armnn::Convolution3dDescriptor descriptor;
1542 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1543 descriptor.m_PadRight = serializerDescriptor->padRight();
1544 descriptor.m_PadTop = serializerDescriptor->padTop();
1545 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1546 descriptor.m_PadFront = serializerDescriptor->padFront();
1547 descriptor.m_PadBack = serializerDescriptor->padBack();
1548 descriptor.m_StrideX = serializerDescriptor->strideX();
1549 descriptor.m_StrideY = serializerDescriptor->strideY();
1550 descriptor.m_StrideZ = serializerDescriptor->strideZ();
1551 descriptor.m_DilationX = serializerDescriptor->dilationX();
1552 descriptor.m_DilationY = serializerDescriptor->dilationY();
1553 descriptor.m_DilationZ = serializerDescriptor->dilationZ();
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001554 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001555 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1556
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001557 uint32_t numInputs = descriptor.GetNumInputs();
1558 CHECK_VALID_SIZE(inputs.size(), numInputs);
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001559
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001560 IConnectableLayer* layer = m_Network->AddConvolution3dLayer(descriptor, layerName.c_str());
1561
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001562 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1563 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1564
1565 RegisterInputSlots(graph, layerIndex, layer);
1566 RegisterOutputSlots(graph, layerIndex, layer);
1567}
1568
Finn Williams85d36712021-01-26 22:30:06 +00001569void IDeserializer::DeserializerImpl::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001570{
1571 CHECK_LAYERS(graph, 0, layerIndex);
1572
1573 auto inputs = GetInputs(graph, layerIndex);
1574 CHECK_VALID_SIZE(inputs.size(), 1);
1575
1576 auto outputs = GetOutputs(graph, layerIndex);
1577 CHECK_VALID_SIZE(outputs.size(), 1);
1578
1579 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1580
1581 armnn::DepthToSpaceDescriptor descriptor;
1582 descriptor.m_BlockSize = fbDescriptor->blockSize();
1583 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1584
1585 auto layerName = GetLayerName(graph, layerIndex);
1586 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1587
1588 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1589 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1590
1591 RegisterInputSlots(graph, layerIndex, layer);
1592 RegisterOutputSlots(graph, layerIndex, layer);
1593}
1594
Finn Williams85d36712021-01-26 22:30:06 +00001595void IDeserializer::DeserializerImpl::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001596{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001597 CHECK_LAYERS(graph, 0, layerIndex);
1598 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001599 CHECK_LOCATION();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001600
Derek Lamberti8ddae332019-02-21 16:29:43 +00001601 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001602 CHECK_VALID_SIZE(outputs.size(), 1);
1603
Derek Lamberti8ddae332019-02-21 16:29:43 +00001604 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001605 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001606 auto serializerDescriptor = serializerLayer->descriptor();
1607
1608 armnn::DepthwiseConvolution2dDescriptor descriptor;
Cathal Corbett06902652022-04-14 17:55:11 +01001609 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1610 descriptor.m_PadRight = serializerDescriptor->padRight();
1611 descriptor.m_PadTop = serializerDescriptor->padTop();
1612 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1613 descriptor.m_StrideX = serializerDescriptor->strideX();
1614 descriptor.m_StrideY = serializerDescriptor->strideY();
1615 descriptor.m_DilationX = serializerDescriptor->dilationX();
1616 descriptor.m_DilationY = serializerDescriptor->dilationY();
1617 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
1618 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001619
Jan Eilers53ef7952021-06-02 12:01:25 +01001620 IConnectableLayer* layer;
Cathal Corbett06902652022-04-14 17:55:11 +01001621 std::vector<unsigned int> ignoreSlots {};
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001622
Cathal Corbett06902652022-04-14 17:55:11 +01001623 // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
1624 // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
1625 if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001626 {
Cathal Corbett06902652022-04-14 17:55:11 +01001627 CHECK_VALID_SIZE(inputs.size(), 1);
Jan Eilers53ef7952021-06-02 12:01:25 +01001628
Cathal Corbett06902652022-04-14 17:55:11 +01001629 // If the model stores weights and biases as members of the layer we have to read them from there
1630 // but add them to their own ConstantLayer for compatibility
1631 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1632 ignoreSlots.emplace_back(1u);
Jan Eilers53ef7952021-06-02 12:01:25 +01001633
1634 layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
Jan Eilers53ef7952021-06-02 12:01:25 +01001635 layerName.c_str());
Cathal Corbett06902652022-04-14 17:55:11 +01001636
1637 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
1638 if (descriptor.m_BiasEnabled)
1639 {
1640 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
1641 ignoreSlots.emplace_back(2u);
1642
1643 auto biasLayer = m_Network->AddConstantLayer(biases);
1644 biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
1645 biasLayer->GetOutputSlot(0).SetTensorInfo(biases.GetInfo());
1646 }
1647
1648 if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
1649 {
1650 // Permute weights [ H, W, M, I ] --> [ 1, H, W, I*M ]
1651 // Step1: [ M, I, H, W ] --> [ H, W, I, M]
1652 PermutationVector permutationVector = { 3, 2, 0, 1 };
1653 armnn::TensorInfo weightsInfo = weights.GetInfo();
1654 std::unique_ptr<unsigned char[]> permuteBuffer(new unsigned char[weightsInfo.GetNumBytes()]);
1655 weightsInfo = armnnUtils::Permuted(weightsInfo, permutationVector);
1656 armnnUtils::Permute(weightsInfo.GetShape(), permutationVector,
1657 weights.GetMemoryArea(), permuteBuffer.get(),
1658 GetDataTypeSize(weightsInfo.GetDataType()));
1659
1660 // Step2: Reshape [ H, W, I, M] --> [ 1, H, W, I*M ]
1661 auto weightsShape = weightsInfo.GetShape();
1662 weightsInfo.SetShape({1,
1663 weightsShape[0],
1664 weightsShape[1],
1665 weightsShape[2]*weightsShape[3]});
1666
1667 armnn::ConstTensor weightsPermuted(weightsInfo, permuteBuffer.get());
1668
1669 auto weightsLayer = m_Network->AddConstantLayer(weightsPermuted);
1670 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
1671 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsPermuted.GetInfo());
1672 }
1673 else
1674 {
1675 auto weightsLayer = m_Network->AddConstantLayer(weights);
1676 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
1677 weightsLayer->GetOutputSlot(0).SetTensorInfo(weights.GetInfo());
1678 }
Jan Eilers53ef7952021-06-02 12:01:25 +01001679 }
1680 else
1681 {
1682 layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
Jan Eilers53ef7952021-06-02 12:01:25 +01001683 layerName.c_str());
Cathal Corbett06902652022-04-14 17:55:11 +01001684 uint32_t numInputs = descriptor.GetNumInputs();
1685 CHECK_VALID_SIZE(inputs.size(), numInputs);
Jan Eilers53ef7952021-06-02 12:01:25 +01001686 }
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001687
1688 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1689 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1690
Cathal Corbett06902652022-04-14 17:55:11 +01001691 RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
Derek Lamberti8ddae332019-02-21 16:29:43 +00001692 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001693}
1694
Finn Williams85d36712021-01-26 22:30:06 +00001695void IDeserializer::DeserializerImpl::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001696{
1697 CHECK_LAYERS(graph, 0, layerIndex);
1698 auto inputs = GetInputs(graph, layerIndex);
1699 CHECK_LOCATION();
1700 CHECK_VALID_SIZE(inputs.size(), 2);
1701
1702 auto outputs = GetOutputs(graph, layerIndex);
1703 CHECK_VALID_SIZE(outputs.size(), 4);
1704
1705 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1706 auto layerName = GetLayerName(graph, layerIndex);
1707 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1708
1709 armnn::DetectionPostProcessDescriptor descriptor;
1710 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1711 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1712 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1713 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1714 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1715 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1716 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1717 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1718 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1719 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1720 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1721
1722 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1723
1724 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1725 anchors,
1726 layerName.c_str());
1727
1728 for (unsigned int i = 0; i < 4; i++)
1729 {
1730 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1731 }
1732
1733 RegisterInputSlots(graph, layerIndex, layer);
1734 RegisterOutputSlots(graph, layerIndex, layer);
1735}
1736
Finn Williams85d36712021-01-26 22:30:06 +00001737void IDeserializer::DeserializerImpl::ParseDivision(GraphPtr graph, unsigned int layerIndex)
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001738{
1739 CHECK_LAYERS(graph, 0, layerIndex);
1740 auto inputs = GetInputs(graph, layerIndex);
1741 CHECK_LOCATION();
1742 CHECK_VALID_SIZE(inputs.size(), 2);
1743
1744 auto outputs = GetOutputs(graph, layerIndex);
1745 CHECK_VALID_SIZE(outputs.size(), 1);
1746
1747 auto layerName = GetLayerName(graph, layerIndex);
1748 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1749
1750 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1751 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1752
1753 RegisterInputSlots(graph, layerIndex, layer);
1754 RegisterOutputSlots(graph, layerIndex, layer);
1755}
1756
Finn Williams85d36712021-01-26 22:30:06 +00001757void IDeserializer::DeserializerImpl::ParseEqual(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001758{
1759 CHECK_LAYERS(graph, 0, layerIndex);
1760 auto inputs = GetInputs(graph, layerIndex);
1761 CHECK_LOCATION();
1762 CHECK_VALID_SIZE(inputs.size(), 2);
1763
1764 auto outputs = GetOutputs(graph, layerIndex);
1765 CHECK_VALID_SIZE(outputs.size(), 1);
1766
1767 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001768 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1769 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001770
1771 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1772 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1773
1774 RegisterInputSlots(graph, layerIndex, layer);
1775 RegisterOutputSlots(graph, layerIndex, layer);
1776}
1777
Finn Williams85d36712021-01-26 22:30:06 +00001778void IDeserializer::DeserializerImpl::ParseFill(GraphPtr graph, unsigned int layerIndex)
Keith Davis300ad562020-06-04 16:34:23 +01001779{
1780 CHECK_LAYERS(graph, 0, layerIndex);
1781 auto inputs = GetInputs(graph, layerIndex);
1782 CHECK_LOCATION();
1783 CHECK_VALID_SIZE(inputs.size(), 1);
1784
1785 auto outputs = GetOutputs(graph, layerIndex);
1786 CHECK_VALID_SIZE(outputs.size(), 1);
1787
1788 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armaganfd0cae32021-11-08 17:18:31 +00001789 armnn::FillDescriptor descriptor;
1790 descriptor.m_Value = graph->layers()->Get(layerIndex)->layer_as_FillLayer()->descriptor()->value();
Keith Davis300ad562020-06-04 16:34:23 +01001791 IConnectableLayer* layer = m_Network->AddFillLayer(descriptor, layerName.c_str());
1792
1793 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1794 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1795
1796 RegisterInputSlots(graph, layerIndex, layer);
1797 RegisterOutputSlots(graph, layerIndex, layer);
1798}
1799
Finn Williams85d36712021-01-26 22:30:06 +00001800void IDeserializer::DeserializerImpl::ParseGreater(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001801{
1802 CHECK_LAYERS(graph, 0, layerIndex);
1803 auto inputs = GetInputs(graph, layerIndex);
1804 CHECK_LOCATION();
1805 CHECK_VALID_SIZE(inputs.size(), 2);
1806
1807 auto outputs = GetOutputs(graph, layerIndex);
1808 CHECK_VALID_SIZE(outputs.size(), 1);
1809
1810 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001811 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1812 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001813
1814 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1815 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1816
1817 RegisterInputSlots(graph, layerIndex, layer);
1818 RegisterOutputSlots(graph, layerIndex, layer);
1819}
1820
Finn Williams85d36712021-01-26 22:30:06 +00001821void IDeserializer::DeserializerImpl::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001822{
1823 CHECK_LAYERS(graph, 0, layerIndex);
1824
1825 auto inputs = GetInputs(graph, layerIndex);
1826 CHECK_VALID_SIZE(inputs.size(), 1);
1827
1828 auto outputs = GetOutputs(graph, layerIndex);
1829 CHECK_VALID_SIZE(outputs.size(), 1);
1830
1831 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1832 auto fbDescriptor = fbLayer->descriptor();
1833
1834 armnn::InstanceNormalizationDescriptor descriptor;
1835 descriptor.m_Gamma = fbDescriptor->gamma();
1836 descriptor.m_Beta = fbDescriptor->beta();
1837 descriptor.m_Eps = fbDescriptor->eps();
1838 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1839
1840 const std::string layerName = GetLayerName(graph, layerIndex);
1841 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1842
1843 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1844 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1845
1846 RegisterInputSlots(graph, layerIndex, layer);
1847 RegisterOutputSlots(graph, layerIndex, layer);
1848}
1849
Finn Williams85d36712021-01-26 22:30:06 +00001850void IDeserializer::DeserializerImpl::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001851{
1852 CHECK_LAYERS(graph, 0, layerIndex);
1853
1854 auto inputs = GetInputs(graph, layerIndex);
1855 CHECK_VALID_SIZE(inputs.size(), 1);
1856
1857 auto outputs = GetOutputs(graph, layerIndex);
1858 CHECK_VALID_SIZE(outputs.size(), 1);
1859 auto outputInfo = ToTensorInfo(outputs[0]);
1860
1861 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1862 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1863
1864 auto layerName = GetLayerName(graph, layerIndex);
1865 armnn::L2NormalizationDescriptor descriptor;
1866 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001867 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001868
1869 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1870 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1871
1872 RegisterInputSlots(graph, layerIndex, layer);
1873 RegisterOutputSlots(graph, layerIndex, layer);
1874}
1875
Finn Williams85d36712021-01-26 22:30:06 +00001876void IDeserializer::DeserializerImpl::ParseLogicalBinary(GraphPtr graph, unsigned int layerIndex)
James Conroyaba90cd2020-11-06 16:28:18 +00001877{
1878 CHECK_LAYERS(graph, 0, layerIndex);
1879 CHECK_LOCATION();
1880
1881 auto inputs = GetInputs(graph, layerIndex);
1882 CHECK_VALID_SIZE(inputs.size(), 2);
1883
1884 auto outputs = GetOutputs(graph, layerIndex);
1885 CHECK_VALID_SIZE(outputs.size(), 1);
1886
1887 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer();
1888 auto fbDescriptor = fbLayer->descriptor();
1889
1890 armnn::LogicalBinaryDescriptor descriptor;
1891 descriptor.m_Operation = ToLogicalBinaryOperation(fbDescriptor->operation());
1892
1893 const std::string& layerName = GetLayerName(graph, layerIndex);
1894 IConnectableLayer* layer = m_Network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1895
1896 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1897 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1898
1899 RegisterInputSlots(graph, layerIndex, layer);
1900 RegisterOutputSlots(graph, layerIndex, layer);
1901}
1902
Finn Williams85d36712021-01-26 22:30:06 +00001903void IDeserializer::DeserializerImpl::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan26257852019-10-14 13:00:47 +01001904{
1905 CHECK_LAYERS(graph, 0, layerIndex);
1906
Finn Williams85d36712021-01-26 22:30:06 +00001907 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001908 CHECK_VALID_SIZE(inputs.size(), 1);
1909
Finn Williams85d36712021-01-26 22:30:06 +00001910 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001911 CHECK_VALID_SIZE(outputs.size(), 1);
1912
1913 armnn::LogSoftmaxDescriptor descriptor;
1914 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1915 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1916 auto layerName = GetLayerName(graph, layerIndex);
1917
1918 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1919
1920 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1921 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1922
1923 RegisterInputSlots(graph, layerIndex, layer);
1924 RegisterOutputSlots(graph, layerIndex, layer);
1925}
1926
Finn Williams85d36712021-01-26 22:30:06 +00001927void IDeserializer::DeserializerImpl::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001928{
1929 CHECK_LAYERS(graph, 0, layerIndex);
1930 auto inputs = GetInputs(graph, layerIndex);
1931 CHECK_LOCATION();
1932 CHECK_VALID_SIZE(inputs.size(), 2);
1933
1934 auto outputs = GetOutputs(graph, layerIndex);
1935 CHECK_VALID_SIZE(outputs.size(), 1);
1936
1937 auto layerName = GetLayerName(graph, layerIndex);
1938 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1939
1940 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1941 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1942
1943 RegisterInputSlots(graph, layerIndex, layer);
1944 RegisterOutputSlots(graph, layerIndex, layer);
1945}
1946
Finn Williams85d36712021-01-26 22:30:06 +00001947void IDeserializer::DeserializerImpl::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001948{
1949 CHECK_LAYERS(graph, 0, layerIndex);
1950 auto inputs = GetInputs(graph, layerIndex);
1951 CHECK_LOCATION();
1952 CHECK_VALID_SIZE(inputs.size(), 2);
1953
1954 auto outputs = GetOutputs(graph, layerIndex);
1955 CHECK_VALID_SIZE(outputs.size(), 1);
1956
1957 auto layerName = GetLayerName(graph, layerIndex);
1958 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1959
1960 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1961 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1962
1963 RegisterInputSlots(graph, layerIndex, layer);
1964 RegisterOutputSlots(graph, layerIndex, layer);
1965}
1966
Jim Flynne242f2d2019-05-22 14:24:13 +01001967const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1968 unsigned int layerIndex)
1969{
1970 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1971
1972 switch (layerType)
1973 {
1974 case Layer::Layer_ConcatLayer:
1975 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1976 case Layer::Layer_MergerLayer:
1977 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1978 default:
1979 throw armnn::Exception("unknown layer type, should be concat or merger");
1980 }
1981}
Simon Obute51f67772021-09-03 15:50:13 +01001982void IDeserializer::DeserializerImpl::ParseChannelShuffle(GraphPtr graph, unsigned int layerIndex)
1983{
1984 CHECK_LAYERS(graph, 0, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001985
Simon Obute51f67772021-09-03 15:50:13 +01001986 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1987 CHECK_VALID_SIZE(inputs.size(), 1);
1988
1989 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1990 CHECK_VALID_SIZE(outputs.size(), 1);
1991
1992 armnn::ChannelShuffleDescriptor descriptor;
1993 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->axis();
1994 descriptor.m_NumGroups =
1995 graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->numGroups();
1996
1997 auto layerName = GetLayerName(graph, layerIndex);
1998 IConnectableLayer* layer = m_Network->AddChannelShuffleLayer(descriptor, layerName.c_str());
1999
2000 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2001 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2002
2003 RegisterInputSlots(graph, layerIndex, layer);
2004 RegisterOutputSlots(graph, layerIndex, layer);
2005}
Finn Williams85d36712021-01-26 22:30:06 +00002006void IDeserializer::DeserializerImpl::ParseComparison(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01002007{
2008 CHECK_LAYERS(graph, 0, layerIndex);
2009 CHECK_LOCATION();
2010
2011 auto inputs = GetInputs(graph, layerIndex);
2012 CHECK_VALID_SIZE(inputs.size(), 2);
2013
2014 auto outputs = GetOutputs(graph, layerIndex);
2015 CHECK_VALID_SIZE(outputs.size(), 1);
2016
2017 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
2018 auto fbDescriptor = fbLayer->descriptor();
2019
2020 armnn::ComparisonDescriptor descriptor;
2021 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
2022
2023 const std::string& layerName = GetLayerName(graph, layerIndex);
2024 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
2025
2026 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2027 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2028
2029 RegisterInputSlots(graph, layerIndex, layer);
2030 RegisterOutputSlots(graph, layerIndex, layer);
2031}
2032
Finn Williams85d36712021-01-26 22:30:06 +00002033void IDeserializer::DeserializerImpl::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
josh minor4a3c6102020-01-06 16:40:46 -06002034{
2035 CHECK_LAYERS(graph, 0, layerIndex);
2036 CHECK_LOCATION();
2037
2038 auto inputs = GetInputs(graph, layerIndex);
2039 CHECK_VALID_SIZE(inputs.size(), 1);
2040
2041 auto outputs = GetOutputs(graph, layerIndex);
2042 CHECK_VALID_SIZE(outputs.size(), 1);
2043
2044 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
2045 auto fbDescriptor = fbLayer->descriptor();
2046
2047 armnn::ElementwiseUnaryDescriptor descriptor;
2048 descriptor.m_Operation = ToUnaryOperation(fbDescriptor->operation());
2049
2050 const std::string& layerName = GetLayerName(graph, layerIndex);
2051 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
2052
2053 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2054 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2055
2056 RegisterInputSlots(graph, layerIndex, layer);
2057 RegisterOutputSlots(graph, layerIndex, layer);
2058}
2059
Finn Williams85d36712021-01-26 22:30:06 +00002060void IDeserializer::DeserializerImpl::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00002061{
2062 CHECK_LAYERS(graph, 0, layerIndex);
2063 CHECK_LOCATION();
2064
2065 auto outputs = GetOutputs(graph, layerIndex);
2066 CHECK_VALID_SIZE(outputs.size(), 1);
2067
Jim Flynnac25a1b2019-02-28 10:40:49 +00002068 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01002069 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
2070 unsigned int numViews = originsDescriptor->numViews();
2071 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00002072
2073 // can now check the number of inputs == number of views
2074 auto inputs = GetInputs(graph, layerIndex);
2075 CHECK_VALID_SIZE(inputs.size(), numViews);
2076
2077 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01002078 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00002079 for (unsigned int v = 0; v < numViews; ++v)
2080 {
2081 auto originPtr = originsPtr->Get(v);
2082 for (unsigned int d = 0; d < numDimensions; ++d)
2083 {
2084 uint32_t value = originPtr->data()->Get(d);
2085 descriptor.SetViewOriginCoord(v, d, value);
2086 }
2087 }
Jim Flynne242f2d2019-05-22 14:24:13 +01002088 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00002089
Jim Flynn906f9462019-05-10 13:55:21 +01002090 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00002091 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2092 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2093
2094 RegisterInputSlots(graph, layerIndex, layer);
2095 RegisterOutputSlots(graph, layerIndex, layer);
2096}
2097
Finn Williams85d36712021-01-26 22:30:06 +00002098void IDeserializer::DeserializerImpl::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00002099{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002100 CHECK_LAYERS(graph, 0, layerIndex);
2101 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00002102 CHECK_LOCATION();
2103 CHECK_VALID_SIZE(inputs.size(), 2);
2104
Derek Lamberti8ddae332019-02-21 16:29:43 +00002105 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00002106 CHECK_VALID_SIZE(outputs.size(), 1);
2107
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002108 auto layerName = GetLayerName(graph, layerIndex);
2109 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00002110
2111 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2112 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2113
Derek Lamberti8ddae332019-02-21 16:29:43 +00002114 RegisterInputSlots(graph, layerIndex, layer);
2115 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00002116}
2117
Finn Williams85d36712021-01-26 22:30:06 +00002118void IDeserializer::DeserializerImpl::ParseFloor(GraphPtr graph, unsigned int layerIndex)
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00002119{
2120 CHECK_LAYERS(graph, 0, layerIndex);
2121 CHECK_LOCATION();
2122
2123 auto inputs = GetInputs(graph, layerIndex);
2124 CHECK_VALID_SIZE(inputs.size(), 1);
2125
2126 auto outputs = GetOutputs(graph, layerIndex);
2127 CHECK_VALID_SIZE(outputs.size(), 1);
2128
2129 auto layerName = GetLayerName(graph, layerIndex);
2130
2131 armnn::IConnectableLayer* layer;
2132
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00002133 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00002134
2135 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2136 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2137
2138 RegisterInputSlots(graph, layerIndex, layer);
2139 RegisterOutputSlots(graph, layerIndex, layer);
2140}
2141
// Rebuilds a FullyConnected layer. Two serialization formats exist:
//  - old models (m_ConstTensorsAsInputs <= 0) store weights/bias as members of the
//    serialized layer; they are re-created here as ConstantLayers wired into input
//    slots 1 and 2.
//  - new models pass weights/bias as ordinary graph inputs, so the slot count is
//    taken from the descriptor.
void IDeserializer::DeserializerImpl::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
    fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
    fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
    fullyConnectedDescriptor.m_ConstantWeights = flatBufferDescriptor->constantWeights();

    armnn::IConnectableLayer* layer;
    // Slots connected directly to ConstantLayers below are excluded from input registration.
    std::vector<unsigned int> ignoreSlots {};

    // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
    // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
    if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
    {
        // If the model stores weights and biases as members of the layer we have to read them from there
        // but add them to their own ConstantLayer for compatibility
        CHECK_VALID_SIZE(inputs.size(), 1);
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  layerName.c_str());

        // Weights go into input slot 1 via a dedicated ConstantLayer.
        armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
        auto weightsLayer = m_Network->AddConstantLayer(weightsTensor);
        weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
        weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsTensor.GetInfo());
        ignoreSlots.emplace_back(1u);

        if (fullyConnectedDescriptor.m_BiasEnabled)
        {
            // Bias (only when enabled) goes into input slot 2 the same way.
            armnn::ConstTensor biasTensor = ToConstTensor(flatBufferLayer->biases());
            auto biasLayer = m_Network->AddConstantLayer(biasTensor);
            biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
            biasLayer->GetOutputSlot(0).SetTensorInfo(biasTensor.GetInfo());
            ignoreSlots.emplace_back(2u);
        }
    }
    else
    {
        // New-style model: weights/bias arrive as graph inputs, so just validate the count.
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  layerName.c_str());
        uint32_t numInputs = fullyConnectedDescriptor.GetNumInputs();
        CHECK_VALID_SIZE(inputs.size(), numInputs);
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2202
Finn Williams85d36712021-01-26 22:30:06 +00002203void IDeserializer::DeserializerImpl::ParsePad(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002204{
2205 CHECK_LAYERS(graph, 0, layerIndex);
2206
Finn Williams85d36712021-01-26 22:30:06 +00002207 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002208 CHECK_VALID_SIZE(inputs.size(), 1);
2209
Finn Williams85d36712021-01-26 22:30:06 +00002210 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002211 CHECK_VALID_SIZE(outputs.size(), 1);
2212
2213 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
2214 auto flatBufferPadList = flatBufferDescriptor->padList();
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +01002215 auto paddingMode = flatBufferDescriptor->paddingMode();
David Monahan34757812019-06-19 11:47:21 +01002216 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002217
Mike Kelly51b8c312022-05-24 11:34:02 +01002218 if (flatBufferPadList->size() % 2 != 0)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002219 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002220 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2221 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002222 }
2223
2224 std::vector<std::pair<unsigned int, unsigned int>> padList;
Mike Kelly51b8c312022-05-24 11:34:02 +01002225 padList.reserve(flatBufferPadList->size() / 2);
2226 for (unsigned int i = 0; i < flatBufferPadList->size() - 1; i += 2)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002227 {
2228 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2229 }
2230
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +01002231 armnn::PadDescriptor descriptor(padList, padValue, ToPaddingMode(paddingMode));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002232
2233 auto layerName = GetLayerName(graph, layerIndex);
2234 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
2235
2236 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2237 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2238
2239 RegisterInputSlots(graph, layerIndex, layer);
2240 RegisterOutputSlots(graph, layerIndex, layer);
2241}
2242
Finn Williams85d36712021-01-26 22:30:06 +00002243void IDeserializer::DeserializerImpl::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002244{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002245 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002246
2247 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00002248 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002249
Derek Lamberti8ddae332019-02-21 16:29:43 +00002250 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002251 CHECK_VALID_SIZE(inputs.size(), 1);
2252
Derek Lamberti8ddae332019-02-21 16:29:43 +00002253 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002254 CHECK_VALID_SIZE(outputs.size(), 1);
2255 auto outputInfo = ToTensorInfo(outputs[0]);
2256
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002257 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly51b8c312022-05-24 11:34:02 +01002258 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->size()));
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002259
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002260 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002261 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2262
Derek Lamberti8ddae332019-02-21 16:29:43 +00002263 RegisterInputSlots(graph, layerIndex, layer);
2264 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002265}
2266
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002267armnn::Pooling2dDescriptor IDeserializer::DeserializerImpl::GetPooling2dDescriptor(Pooling2dDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002268 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002269{
Jan Eilers8eb25602020-03-09 12:13:48 +00002270 IgnoreUnused(layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002271 armnn::Pooling2dDescriptor desc;
2272
2273 switch (pooling2dDesc->poolType())
2274 {
2275 case PoolingAlgorithm_Average:
2276 {
2277 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002278 break;
2279 }
2280 case PoolingAlgorithm_Max:
2281 {
2282 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002283 break;
2284 }
Teresa Charlin4c600de2021-03-11 21:59:43 +00002285 case PoolingAlgorithm_L2:
2286 {
2287 desc.m_PoolType = armnn::PoolingAlgorithm::L2;
2288 break;
2289 }
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002290 default:
2291 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002292 ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002293 }
2294 }
2295
2296 switch (pooling2dDesc->outputShapeRounding())
2297 {
2298 case OutputShapeRounding_Floor:
2299 {
2300 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2301 break;
2302 }
2303 case OutputShapeRounding_Ceiling:
2304 {
2305 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
2306 break;
2307 }
2308 default:
2309 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002310 ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002311 }
2312 }
2313
2314 switch (pooling2dDesc->paddingMethod())
2315 {
2316 case PaddingMethod_Exclude:
2317 {
2318 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2319 break;
2320 }
2321 case PaddingMethod_IgnoreValue:
2322 {
2323 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
2324 break;
2325 }
2326 default:
2327 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002328 ARMNN_ASSERT_MSG(false, "Unsupported padding method");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002329 }
2330 }
2331
2332 switch (pooling2dDesc->dataLayout())
2333 {
2334 case DataLayout_NCHW:
2335 {
2336 desc.m_DataLayout = armnn::DataLayout::NCHW;
2337 break;
2338 }
2339 case DataLayout_NHWC:
2340 {
2341 desc.m_DataLayout = armnn::DataLayout::NHWC;
2342 break;
2343 }
2344 default:
2345 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002346 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002347 }
2348 }
2349
2350 desc.m_PadRight = pooling2dDesc->padRight();
2351 desc.m_PadLeft = pooling2dDesc->padLeft();
2352 desc.m_PadBottom = pooling2dDesc->padBottom();
2353 desc.m_PadTop = pooling2dDesc->padTop();
2354 desc.m_StrideX = pooling2dDesc->strideX();
2355 desc.m_StrideY = pooling2dDesc->strideY();
2356 desc.m_PoolWidth = pooling2dDesc->poolWidth();
2357 desc.m_PoolHeight = pooling2dDesc->poolHeight();
2358
2359 return desc;
2360}
2361
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002362armnn::Pooling3dDescriptor IDeserializer::DeserializerImpl::GetPooling3dDescriptor(Pooling3dDescriptor pooling3dDesc,
2363 unsigned int layerIndex)
2364{
2365 IgnoreUnused(layerIndex);
2366 armnn::Pooling3dDescriptor desc;
Finn Williams85d36712021-01-26 22:30:06 +00002367
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002368 switch (pooling3dDesc->poolType())
2369 {
2370 case PoolingAlgorithm_Average:
2371 {
2372 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
2373 break;
2374 }
2375 case PoolingAlgorithm_Max:
2376 {
2377 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
2378 break;
2379 }
2380 case PoolingAlgorithm_L2:
2381 {
2382 desc.m_PoolType = armnn::PoolingAlgorithm::L2;
2383 break;
2384 }
2385 default:
2386 {
2387 ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
2388 }
2389 }
2390
2391 switch (pooling3dDesc->outputShapeRounding())
2392 {
2393 case OutputShapeRounding_Floor:
2394 {
2395 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2396 break;
2397 }
2398 case OutputShapeRounding_Ceiling:
2399 {
2400 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
2401 break;
2402 }
2403 default:
2404 {
2405 ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
2406 }
2407 }
2408
2409 switch (pooling3dDesc->paddingMethod())
2410 {
2411 case PaddingMethod_Exclude:
2412 {
2413 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2414 break;
2415 }
2416 case PaddingMethod_IgnoreValue:
2417 {
2418 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
2419 break;
2420 }
2421 default:
2422 {
2423 ARMNN_ASSERT_MSG(false, "Unsupported padding method");
2424 }
2425 }
2426
2427 switch (pooling3dDesc->dataLayout())
2428 {
2429 case DataLayout_NCDHW:
2430 {
2431 desc.m_DataLayout = armnn::DataLayout::NCDHW;
2432 break;
2433 }
2434 case DataLayout_NDHWC:
2435 {
2436 desc.m_DataLayout = armnn::DataLayout::NDHWC;
2437 break;
2438 }
2439 default:
2440 {
2441 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
2442 }
2443 }
2444
2445 desc.m_PadRight = pooling3dDesc->padRight();
2446 desc.m_PadLeft = pooling3dDesc->padLeft();
2447 desc.m_PadBottom = pooling3dDesc->padBottom();
2448 desc.m_PadTop = pooling3dDesc->padTop();
2449 desc.m_PadFront = pooling3dDesc->padFront();
2450 desc.m_PadBack = pooling3dDesc->padBack();
2451 desc.m_StrideX = pooling3dDesc->strideX();
2452 desc.m_StrideY = pooling3dDesc->strideY();
2453 desc.m_StrideZ = pooling3dDesc->strideZ();
2454 desc.m_PoolWidth = pooling3dDesc->poolWidth();
2455 desc.m_PoolHeight = pooling3dDesc->poolHeight();
2456 desc.m_PoolDepth = pooling3dDesc->poolDepth();
2457
2458 return desc;
2459}
Finn Williams85d36712021-01-26 22:30:06 +00002460
2461void IDeserializer::DeserializerImpl::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002462{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002463 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002464
Derek Lamberti8ddae332019-02-21 16:29:43 +00002465 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00002466 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002467 CHECK_VALID_SIZE(inputs.size(), 1);
2468
Derek Lamberti8ddae332019-02-21 16:29:43 +00002469 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002470 CHECK_VALID_SIZE(outputs.size(), 1);
2471 auto outputInfo = ToTensorInfo(outputs[0]);
2472
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002473 auto pooling2dDescriptor = GetPooling2dDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002474 auto layerName = GetLayerName(graph, layerIndex);
2475 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002476 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2477
Derek Lamberti8ddae332019-02-21 16:29:43 +00002478 RegisterInputSlots(graph, layerIndex, layer);
2479 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002480}
2481
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002482void IDeserializer::DeserializerImpl::ParsePooling3d(GraphPtr graph, unsigned int layerIndex)
2483{
2484 CHECK_LAYERS(graph, 0, layerIndex);
2485
2486 auto pooling3dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->descriptor();
2487 auto inputs = GetInputs(graph, layerIndex);
2488 CHECK_VALID_SIZE(inputs.size(), 1);
2489
2490 auto outputs = GetOutputs(graph, layerIndex);
2491 CHECK_VALID_SIZE(outputs.size(), 1);
2492 auto outputInfo = ToTensorInfo(outputs[0]);
2493
2494 auto pooling3dDescriptor = GetPooling3dDescriptor(pooling3dDes, layerIndex);
2495 auto layerName = GetLayerName(graph, layerIndex);
2496 IConnectableLayer* layer = m_Network->AddPooling3dLayer(pooling3dDescriptor, layerName.c_str());
2497 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2498
2499 RegisterInputSlots(graph, layerIndex, layer);
2500 RegisterOutputSlots(graph, layerIndex, layer);
2501}
2502
Finn Williams85d36712021-01-26 22:30:06 +00002503void IDeserializer::DeserializerImpl::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
Derek Lamberti87acb272019-03-27 16:51:31 +00002504{
2505 CHECK_LAYERS(graph, 0, layerIndex);
2506
2507 auto inputs = GetInputs(graph, layerIndex);
2508 CHECK_VALID_SIZE(inputs.size(), 1);
2509
2510 auto outputs = GetOutputs(graph, layerIndex);
2511 CHECK_VALID_SIZE(outputs.size(), 1);
2512 auto outputInfo = ToTensorInfo(outputs[0]);
2513
2514 auto layerName = GetLayerName(graph, layerIndex);
2515 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
2516 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2517
2518 RegisterInputSlots(graph, layerIndex, layer);
2519 RegisterOutputSlots(graph, layerIndex, layer);
2520}
2521
/// Computes the output TensorInfo of a Reshape from the input tensor and the requested
/// target dimensions. At most one target dimension may be -1 (a "stretch" dimension);
/// its extent is inferred so that the total element count is preserved.
/// @param inputTensorInfo Tensor being reshaped; supplies element count and all
///                        non-shape properties (data type, quantization) of the result.
/// @param targetDimsIn    Requested dimensions; -1 is stored as 0xFFFFFFFF in uint32_t.
/// @return TensorInfo identical to inputTensorInfo except for the resolved shape.
/// @throws ParseException if more than one stretch (-1) dimension is present.
armnn::TensorInfo IDeserializer::DeserializerImpl::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
                                                                        const std::vector<uint32_t>& targetDimsIn)
{
    std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
    // -1 converts to 0xFFFFFFFF for the unsigned comparison, so this locates the
    // stretch marker even though the container holds uint32_t.
    const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);

    if (stretchDim != targetDimsIn.end())
    {
        // A second -1 after the first one is ambiguous and therefore rejected.
        if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
        {
            throw ParseException(fmt::format("At most one component of shape can be -1 {}",
                                             CHECK_LOCATION().AsString()));
        }

        // Accumulate with int32_t arithmetic: the stretch entry re-converts to -1 and
        // cancels the -1 initial value, leaving the positive product of the known dims.
        auto targetNumElements =
            armnn::numeric_cast<unsigned int>(
                std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));

        auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
        // Infer the stretch extent so the reshape keeps the input's element count.
        outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
    }

    TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());

    // Copy the input info so data type / quantization survive; only the shape changes.
    armnn::TensorInfo reshapeInfo = inputTensorInfo;
    reshapeInfo.SetShape(outputShape);

    return reshapeInfo;
}
2551
Finn Williams85d36712021-01-26 22:30:06 +00002552void IDeserializer::DeserializerImpl::ParseRank(GraphPtr graph, unsigned int layerIndex)
Finn Williams2605b232020-06-10 15:53:46 +01002553{
2554 CHECK_LAYERS(graph, 0, layerIndex);
2555
Finn Williams85d36712021-01-26 22:30:06 +00002556 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002557 CHECK_VALID_SIZE(inputs.size(), 1);
2558
Finn Williams85d36712021-01-26 22:30:06 +00002559 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002560 CHECK_VALID_SIZE(outputs.size(), 1);
2561
2562 auto layerName = GetLayerName(graph, layerIndex);
2563 IConnectableLayer* layer = m_Network->AddRankLayer( layerName.c_str());
2564
2565 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2566 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2567
2568 RegisterInputSlots(graph, layerIndex, layer);
2569 RegisterOutputSlots(graph, layerIndex, layer);
2570}
2571
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002572void IDeserializer::DeserializerImpl::ParseReduce(GraphPtr graph, unsigned int layerIndex)
2573{
2574 CHECK_LAYERS(graph, 0, layerIndex);
2575 CHECK_LOCATION();
2576
2577 auto inputs = GetInputs(graph, layerIndex);
2578 CHECK_VALID_SIZE(inputs.size(), 1);
2579
2580 auto outputs = GetOutputs(graph, layerIndex);
2581 CHECK_VALID_SIZE(outputs.size(), 1);
2582
2583 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ReduceLayer();
2584 auto fbDescriptor = fbLayer->descriptor();
2585 auto flatBufferAxis = fbDescriptor->axis();
2586
2587 armnn::ReduceDescriptor descriptor;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002588 descriptor.m_KeepDims = fbDescriptor->keepDims();
2589 descriptor.m_vAxis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2590 descriptor.m_ReduceOperation = ToReduceOperation(fbDescriptor->reduceOperation());
2591
2592 const std::string& layerName = GetLayerName(graph, layerIndex);
2593 IConnectableLayer* layer = m_Network->AddReduceLayer(descriptor, layerName.c_str());
2594
2595 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2596 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2597
2598 RegisterInputSlots(graph, layerIndex, layer);
2599 RegisterOutputSlots(graph, layerIndex, layer);
2600}
2601
Finn Williams85d36712021-01-26 22:30:06 +00002602void IDeserializer::DeserializerImpl::ParseReshape(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart263829c2019-02-19 15:54:14 +00002603{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002604 CHECK_LAYERS(graph, 0, layerIndex);
2605 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002606
Derek Lamberti8ddae332019-02-21 16:29:43 +00002607 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002608 CHECK_VALID_SIZE(outputs.size(), 1);
2609
2610 armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
2611 armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);
2612
Derek Lamberti8ddae332019-02-21 16:29:43 +00002613 const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
Saoirse Stewart263829c2019-02-19 15:54:14 +00002614 std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());
2615
Finn Williams85d36712021-01-26 22:30:06 +00002616 armnn::TensorInfo reshapeOutputTensorInfo = DeserializerImpl::OutputShapeOfReshape(inputTensorInfo, outputDims);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002617 const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
2618
2619 const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
2620 outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());
2621
2622 if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
2623 {
2624 std::stringstream ss;
2625 ss << "New shape defined in reshape parameters "
2626 << reshapeOutputTensorShape
2627 << " does not equal output shape "
2628 << actualOutputTensorInfo.GetShape()
2629 << ": "
2630 << CHECK_LOCATION().AsString();
2631 throw ParseException(ss.str());
2632 }
2633
2634 armnn::ReshapeDescriptor reshapeDesc;
2635 reshapeDesc.m_TargetShape = reshapeOutputTensorShape;
2636
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002637 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002638 IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
2639 layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);
2640
Derek Lamberti8ddae332019-02-21 16:29:43 +00002641 RegisterInputSlots(graph, layerIndex, layer);
2642 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002643}
2644
Finn Williams85d36712021-01-26 22:30:06 +00002645void IDeserializer::DeserializerImpl::ParseResize(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002646{
2647 CHECK_LAYERS(graph, 0, layerIndex);
2648
Finn Williams85d36712021-01-26 22:30:06 +00002649 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002650 CHECK_VALID_SIZE(inputs.size(), 1);
2651
Finn Williams85d36712021-01-26 22:30:06 +00002652 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002653 CHECK_VALID_SIZE(outputs.size(), 1);
2654
2655 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2656
2657 armnn::ResizeDescriptor descriptor;
2658 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
2659 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
2660 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
2661 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002662 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2663 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002664
2665 auto layerName = GetLayerName(graph, layerIndex);
2666 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2667
2668 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2669 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2670
2671 RegisterInputSlots(graph, layerIndex, layer);
2672 RegisterOutputSlots(graph, layerIndex, layer);
2673}
2674
Jan Eilers1b2654f2021-09-24 15:45:46 +01002675
2676/// @Note The ResizeBiliniar operation was deprecated and removed in favor of the Resize operation.
2677/// This function is kept for backwards compatibility.
Finn Williams85d36712021-01-26 22:30:06 +00002678void IDeserializer::DeserializerImpl::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002679{
2680 CHECK_LAYERS(graph, 0, layerIndex);
2681
Finn Williams85d36712021-01-26 22:30:06 +00002682 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002683 CHECK_VALID_SIZE(inputs.size(), 1);
2684
Finn Williams85d36712021-01-26 22:30:06 +00002685 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002686 CHECK_VALID_SIZE(outputs.size(), 1);
2687
2688 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2689
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002690 armnn::ResizeDescriptor descriptor;
2691 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002692 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002693 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
2694 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002695 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2696 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002697
2698 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002699 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002700
2701 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2702 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2703
2704 RegisterInputSlots(graph, layerIndex, layer);
2705 RegisterOutputSlots(graph, layerIndex, layer);
2706}
2707
Keith Davis3ae3f972021-05-21 16:33:48 +01002708void IDeserializer::DeserializerImpl::ParseShape(GraphPtr graph, unsigned int layerIndex)
2709{
2710 CHECK_LAYERS(graph, 0, layerIndex);
2711
2712 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2713 CHECK_VALID_SIZE(inputs.size(), 1);
2714
2715 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2716 CHECK_VALID_SIZE(outputs.size(), 1);
2717
2718 auto layerName = GetLayerName(graph, layerIndex);
2719 IConnectableLayer* layer = m_Network->AddShapeLayer( layerName.c_str());
2720
2721 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2722 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2723
2724 RegisterInputSlots(graph, layerIndex, layer);
2725 RegisterOutputSlots(graph, layerIndex, layer);
2726}
2727
Finn Williams85d36712021-01-26 22:30:06 +00002728void IDeserializer::DeserializerImpl::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002729{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002730 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002731
Finn Williams85d36712021-01-26 22:30:06 +00002732 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002733 CHECK_VALID_SIZE(inputs.size(), 1);
2734
Finn Williams85d36712021-01-26 22:30:06 +00002735 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002736 CHECK_VALID_SIZE(outputs.size(), 1);
2737
2738 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00002739 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Sadik Armaganfd0cae32021-11-08 17:18:31 +00002740 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->axis();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002741 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002742
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002743 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2744
2745 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2746 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2747
Derek Lamberti8ddae332019-02-21 16:29:43 +00002748 RegisterInputSlots(graph, layerIndex, layer);
2749 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00002750}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002751
Finn Williams85d36712021-01-26 22:30:06 +00002752void IDeserializer::DeserializerImpl::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002753{
2754 CHECK_LAYERS(graph, 0, layerIndex);
2755
Finn Williams85d36712021-01-26 22:30:06 +00002756 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002757 CHECK_VALID_SIZE(inputs.size(), 1);
2758
Finn Williams85d36712021-01-26 22:30:06 +00002759 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002760 CHECK_VALID_SIZE(outputs.size(), 1);
2761
2762 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2763 auto flatBufferPadList = flatBufferDescriptor->padList();
2764 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
2765
Mike Kelly51b8c312022-05-24 11:34:02 +01002766 if (flatBufferPadList->size() % 2 != 0)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002767 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002768 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2769 CHECK_LOCATION().AsString()));
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002770 }
2771
2772 std::vector<std::pair<unsigned int, unsigned int>> padList;
Mike Kelly51b8c312022-05-24 11:34:02 +01002773 padList.reserve(flatBufferPadList->size() / 2);
2774 for (unsigned int i = 0; i < flatBufferPadList->size() - 1; i += 2)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002775 {
2776 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2777 }
2778
2779 armnn::SpaceToBatchNdDescriptor descriptor;
2780 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2781 descriptor.m_BlockShape =
2782 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2783 descriptor.m_PadList = padList;
2784
2785 auto layerName = GetLayerName(graph, layerIndex);
2786 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2787
2788 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2789 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2790
2791 RegisterInputSlots(graph, layerIndex, layer);
2792 RegisterOutputSlots(graph, layerIndex, layer);
2793}
2794
Finn Williams85d36712021-01-26 22:30:06 +00002795void IDeserializer::DeserializerImpl::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002796{
2797 CHECK_LAYERS(graph, 0, layerIndex);
2798
Finn Williams85d36712021-01-26 22:30:06 +00002799 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002800 CHECK_VALID_SIZE(inputs.size(), 1);
2801
Finn Williams85d36712021-01-26 22:30:06 +00002802 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002803 CHECK_VALID_SIZE(outputs.size(), 1);
2804
2805 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2806
2807 armnn::SpaceToDepthDescriptor descriptor;
2808 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2809 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2810
2811 auto layerName = GetLayerName(graph, layerIndex);
2812 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2813
2814 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2815 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2816
2817 RegisterInputSlots(graph, layerIndex, layer);
2818 RegisterOutputSlots(graph, layerIndex, layer);
2819}
2820
/// Translates a serialized (flatbuffer) normalization descriptor into an
/// armnn::NormalizationDescriptor.
/// @param normalizationDescriptor Flatbuffer descriptor read from the graph.
/// @param layerIndex Index of the layer being deserialized (unused; kept for
///                   signature symmetry with the other Get*Descriptor helpers).
/// @return Populated armnn::NormalizationDescriptor.
armnn::NormalizationDescriptor IDeserializer::DeserializerImpl::GetNormalizationDescriptor(
    NormalizationDescriptorPtr normalizationDescriptor,
    unsigned int layerIndex)
{
    IgnoreUnused(layerIndex);
    armnn::NormalizationDescriptor desc;

    switch (normalizationDescriptor->normChannelType())
    {
        case NormalizationAlgorithmChannel_Across:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
            break;
        }
        case NormalizationAlgorithmChannel_Within:
        {
            desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
            break;
        }
        default:
        {
            // NOTE(review): assert-only default — in release builds an unknown enum
            // value silently leaves desc.m_NormChannelType at its default.
            ARMNN_ASSERT_MSG(false, "Unsupported normalization channel type");
        }
    }

    switch (normalizationDescriptor->normMethodType())
    {
        case NormalizationAlgorithmMethod_LocalBrightness:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
            break;
        }
        case NormalizationAlgorithmMethod_LocalContrast:
        {
            desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
            break;
        }
        default:
        {
            // Same release-mode caveat as above: field keeps its default value.
            ARMNN_ASSERT_MSG(false, "Unsupported normalization method type");
        }
    }

    switch (normalizationDescriptor->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            // Same release-mode caveat as above: field keeps its default value.
            ARMNN_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    // Scalar parameters copy across directly.
    desc.m_Alpha = normalizationDescriptor->alpha();
    desc.m_Beta = normalizationDescriptor->beta();
    desc.m_K = normalizationDescriptor->k();
    desc.m_NormSize = normalizationDescriptor->normSize();

    return desc;
}
2889
Finn Williams85d36712021-01-26 22:30:06 +00002890void IDeserializer::DeserializerImpl::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
Nina Drozd57728782019-02-27 10:53:27 +00002891{
2892 CHECK_LAYERS(graph, 0, layerIndex);
2893
2894 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2895
Finn Williams85d36712021-01-26 22:30:06 +00002896 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002897 CHECK_VALID_SIZE(inputs.size(), 1);
2898
Finn Williams85d36712021-01-26 22:30:06 +00002899 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002900 CHECK_VALID_SIZE(outputs.size(), 1);
2901
2902 auto outputInfo = ToTensorInfo(outputs[0]);
2903
2904 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2905 auto layerName = GetLayerName(graph, layerIndex);
2906
2907 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2908 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2909
2910 RegisterInputSlots(graph, layerIndex, layer);
2911 RegisterOutputSlots(graph, layerIndex, layer);
2912}
2913
Finn Williams85d36712021-01-26 22:30:06 +00002914void IDeserializer::DeserializerImpl::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan8b42a382019-03-01 14:24:49 +00002915{
2916 CHECK_LAYERS(graph, 0, layerIndex);
2917 auto inputs = GetInputs(graph, layerIndex);
2918 CHECK_LOCATION();
2919 CHECK_VALID_SIZE(inputs.size(), 1);
2920
2921 auto outputs = GetOutputs(graph, layerIndex);
2922 CHECK_VALID_SIZE(outputs.size(), 1);
2923
2924 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagan8b42a382019-03-01 14:24:49 +00002925
josh minor4a3c6102020-01-06 16:40:46 -06002926 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
2927 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
Sadik Armagan8b42a382019-03-01 14:24:49 +00002928 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2929 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2930
2931 RegisterInputSlots(graph, layerIndex, layer);
2932 RegisterOutputSlots(graph, layerIndex, layer);
2933}
2934
Finn Williams85d36712021-01-26 22:30:06 +00002935void IDeserializer::DeserializerImpl::ParseSlice(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002936{
2937 CHECK_LAYERS(graph, 0, layerIndex);
2938
2939 auto inputs = GetInputs(graph, layerIndex);
2940 CHECK_VALID_SIZE(inputs.size(), 1);
2941
2942 auto outputs = GetOutputs(graph, layerIndex);
2943 CHECK_VALID_SIZE(outputs.size(), 1);
2944
2945 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2946
2947 auto fbBegin = fbDescriptor->begin();
2948 auto fbSize = fbDescriptor->size();
2949
Mike Kelly51b8c312022-05-24 11:34:02 +01002950 if (fbBegin->size() != fbSize->size())
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002951 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002952 throw ParseException(fmt::format("Begin and size descriptors must have the same length {}",
2953 CHECK_LOCATION().AsString()));
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002954 }
2955
2956 armnn::SliceDescriptor descriptor;
2957 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
2958 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
2959
2960 auto layerName = GetLayerName(graph, layerIndex);
2961 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2962
2963 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2964 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2965
2966 RegisterInputSlots(graph, layerIndex, layer);
2967 RegisterOutputSlots(graph, layerIndex, layer);
2968}
2969
Finn Williams85d36712021-01-26 22:30:06 +00002970void IDeserializer::DeserializerImpl::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002971{
2972 CHECK_LAYERS(graph, 0, layerIndex);
2973
Finn Williams85d36712021-01-26 22:30:06 +00002974 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002975 CHECK_VALID_SIZE(inputs.size(), 1);
2976
Finn Williams85d36712021-01-26 22:30:06 +00002977 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002978 CHECK_VALID_SIZE(outputs.size(), 1);
2979
2980 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2981
2982 auto flatBufferBegin = flatBufferDescriptor->begin();
2983 auto flatBufferEnd = flatBufferDescriptor->end();
2984 auto flatBufferStride = flatBufferDescriptor->stride();
2985
Mike Kelly51b8c312022-05-24 11:34:02 +01002986 if (!(flatBufferBegin->size() == flatBufferEnd->size() &&
2987 flatBufferBegin->size() == flatBufferStride->size()))
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002988 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002989 throw ParseException(fmt::format("The size of the begin, end, and stride must be equal {}",
2990 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002991 }
2992
2993 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2994 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2995 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
2996
2997 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
2998 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2999 descriptor.m_EndMask = flatBufferDescriptor->endMask();
3000 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
3001 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
3002 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
3003 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
3004
3005 auto layerName = GetLayerName(graph, layerIndex);
3006 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
3007
3008 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3009 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3010
3011 RegisterInputSlots(graph, layerIndex, layer);
3012 RegisterOutputSlots(graph, layerIndex, layer);
3013}
3014
Finn Williams85d36712021-01-26 22:30:06 +00003015void IDeserializer::DeserializerImpl::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
Conor Kennedyda1f9752019-03-01 14:37:12 +00003016{
3017 CHECK_LAYERS(graph, 0, layerIndex);
3018 auto inputs = GetInputs(graph, layerIndex);
3019 CHECK_LOCATION();
3020 CHECK_VALID_SIZE(inputs.size(), 2);
3021
3022 auto outputs = GetOutputs(graph, layerIndex);
3023 CHECK_VALID_SIZE(outputs.size(), 1);
3024
3025 auto layerName = GetLayerName(graph, layerIndex);
3026 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
3027
3028 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3029 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3030
3031 RegisterInputSlots(graph, layerIndex, layer);
3032 RegisterOutputSlots(graph, layerIndex, layer);
3033}
3034
Finn Williams85d36712021-01-26 22:30:06 +00003035void IDeserializer::DeserializerImpl::ParseGather(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003036{
3037 CHECK_LAYERS(graph, 0, layerIndex);
3038
Finn Williams85d36712021-01-26 22:30:06 +00003039 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003040 CHECK_VALID_SIZE(inputs.size(), 2);
3041
Finn Williams85d36712021-01-26 22:30:06 +00003042 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003043 CHECK_VALID_SIZE(outputs.size(), 1);
3044
Teresa Charlin52664732020-06-29 16:27:03 +01003045 armnn::GatherDescriptor descriptor;
3046 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_GatherLayer()->descriptor()->axis();
3047
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003048 auto layerName = GetLayerName(graph, layerIndex);
Teresa Charlin52664732020-06-29 16:27:03 +01003049 IConnectableLayer* layer = m_Network->AddGatherLayer(descriptor, layerName.c_str());
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003050
3051 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003052 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3053
3054 RegisterInputSlots(graph, layerIndex, layer);
3055 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003056}
3057
Teresa Charlin6966bfa2022-04-25 17:14:50 +01003058void IDeserializer::DeserializerImpl::ParseGatherNd(GraphPtr graph, unsigned int layerIndex)
3059{
3060 CHECK_LAYERS(graph, 0, layerIndex);
3061
3062 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
3063 CHECK_VALID_SIZE(inputs.size(), 2);
3064
3065 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
3066 CHECK_VALID_SIZE(outputs.size(), 1);
3067
3068 auto layerName = GetLayerName(graph, layerIndex);
3069 IConnectableLayer* layer = m_Network->AddGatherNdLayer(layerName.c_str());
3070
3071 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3072 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3073
3074 RegisterInputSlots(graph, layerIndex, layer);
3075 RegisterOutputSlots(graph, layerIndex, layer);
3076}
3077
Finn Williams85d36712021-01-26 22:30:06 +00003078void IDeserializer::DeserializerImpl::ParseMean(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003079{
3080 CHECK_LAYERS(graph, 0, layerIndex);
3081
Finn Williams85d36712021-01-26 22:30:06 +00003082 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003083 CHECK_VALID_SIZE(inputs.size(), 1);
3084
Finn Williams85d36712021-01-26 22:30:06 +00003085 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003086 CHECK_VALID_SIZE(outputs.size(), 1);
3087
3088 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
3089 auto flatBufferAxis = flatBufferDescriptor->axis();
3090 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
3091
3092 armnn::MeanDescriptor descriptor;
3093 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
3094 descriptor.m_KeepDims = flatBufferKeepDims;
3095
3096 auto layerName = GetLayerName(graph, layerIndex);
3097 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
3098
3099 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3100 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3101
3102 RegisterInputSlots(graph, layerIndex, layer);
3103 RegisterOutputSlots(graph, layerIndex, layer);
3104}
3105
Finn Williams85d36712021-01-26 22:30:06 +00003106void IDeserializer::DeserializerImpl::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
Jim Flynn18ce3382019-03-08 11:08:30 +00003107{
3108 CHECK_LAYERS(graph, 0, layerIndex);
3109
Finn Williams85d36712021-01-26 22:30:06 +00003110 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00003111 CHECK_VALID_SIZE(inputs.size(), 1);
3112
Finn Williams85d36712021-01-26 22:30:06 +00003113 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00003114
3115 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
3116 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
3117 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
3118 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
3119 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
3120 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
3121
3122 // Check numViews and numDimensions corresponds to the ones already serialized ...
3123 // numViews == flatBufferViewSizes.size();
3124 // foreach: numDimensions == flatBufferViewSizes[x].size();
3125
3126 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
3127 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
3128 {
3129 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
3130 {
3131 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
3132 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
3133 }
3134 }
3135
3136 auto layerName = GetLayerName(graph, layerIndex);
3137 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
3138
3139 // I could have as many outputs as views ...
3140 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
3141 {
3142 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
3143 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
3144 }
3145
3146 RegisterInputSlots(graph, layerIndex, layer);
3147 RegisterOutputSlots(graph, layerIndex, layer);
3148}
3149
Finn Williams85d36712021-01-26 22:30:06 +00003150armnn::LstmDescriptor IDeserializer::DeserializerImpl::GetLstmDescriptor(LstmDescriptorPtr lstmDescriptor)
Jim Flynn11af3752019-03-19 17:22:29 +00003151{
3152 armnn::LstmDescriptor desc;
3153
3154 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
3155 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
3156 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
3157 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
3158 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
3159 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01003160 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00003161
3162 return desc;
3163}
3164
Finn Williams85d36712021-01-26 22:30:06 +00003165void IDeserializer::DeserializerImpl::ParseLstm(GraphPtr graph, unsigned int layerIndex)
Jim Flynn11af3752019-03-19 17:22:29 +00003166{
3167 CHECK_LAYERS(graph, 0, layerIndex);
3168
3169 auto inputs = GetInputs(graph, layerIndex);
3170 CHECK_VALID_SIZE(inputs.size(), 3);
3171
3172 auto outputs = GetOutputs(graph, layerIndex);
3173 CHECK_VALID_SIZE(outputs.size(), 4);
3174
3175 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
3176 auto layerName = GetLayerName(graph, layerIndex);
3177 auto flatBufferDescriptor = flatBufferLayer->descriptor();
3178 auto flatBufferInputParams = flatBufferLayer->inputParams();
3179
3180 auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);
3181
3182 armnn::LstmInputParams lstmInputParams;
3183
3184 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
3185 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
3186 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
3187 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
3188 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
3189 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
3190 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
3191 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
3192 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
3193
3194 lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
3195 lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
3196 lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
3197 lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
3198 lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
3199 lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
3200 lstmInputParams.m_ForgetGateBias = &forgetGateBias;
3201 lstmInputParams.m_CellBias = &cellBias;
3202 lstmInputParams.m_OutputGateBias = &outputGateBias;
3203
3204 armnn::ConstTensor inputToInputWeights;
3205 armnn::ConstTensor recurrentToInputWeights;
3206 armnn::ConstTensor cellToInputWeights;
3207 armnn::ConstTensor inputGateBias;
3208 if (!lstmDescriptor.m_CifgEnabled)
3209 {
3210 inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
3211 recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
3212 cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
3213 inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
3214
3215 lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
3216 lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
3217 lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
3218 lstmInputParams.m_InputGateBias = &inputGateBias;
3219 }
3220
3221 armnn::ConstTensor projectionWeights;
3222 armnn::ConstTensor projectionBias;
3223 if (lstmDescriptor.m_ProjectionEnabled)
3224 {
3225 projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
3226 projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());
3227
3228 lstmInputParams.m_ProjectionWeights = &projectionWeights;
3229 lstmInputParams.m_ProjectionBias = &projectionBias;
3230 }
3231
3232 armnn::ConstTensor cellToForgetWeights;
3233 armnn::ConstTensor cellToOutputWeights;
3234 if (lstmDescriptor.m_PeepholeEnabled)
3235 {
3236 cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
3237 cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());
3238
3239 lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
3240 lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
3241 }
3242
Jan Eilersf8c62972019-07-17 11:07:49 +01003243 armnn::ConstTensor inputLayerNormWeights;
3244 armnn::ConstTensor forgetLayerNormWeights;
3245 armnn::ConstTensor cellLayerNormWeights;
3246 armnn::ConstTensor outputLayerNormWeights;
3247 if (lstmDescriptor.m_LayerNormEnabled)
3248 {
3249 if (!lstmDescriptor.m_CifgEnabled)
3250 {
3251 inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
3252 lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
3253 }
3254 forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
3255 cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
3256 outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
3257
3258 lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
3259 lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
3260 lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
3261 }
3262
Jim Flynn11af3752019-03-19 17:22:29 +00003263 IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());
3264
3265 armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
3266 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);
3267
3268 armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
3269 layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);
3270
3271 armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
3272 layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);
3273
3274 armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
3275 layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);
3276
3277 RegisterInputSlots(graph, layerIndex, layer);
3278 RegisterOutputSlots(graph, layerIndex, layer);
3279}
3280
Finn Williams85d36712021-01-26 22:30:06 +00003281armnn::QLstmDescriptor IDeserializer::DeserializerImpl::GetQLstmDescriptor(QLstmDescriptorPtr qLstmDescriptor)
James Conroy8d333182020-05-13 10:27:58 +01003282{
3283 armnn::QLstmDescriptor desc;
3284
3285 desc.m_CifgEnabled = qLstmDescriptor->cifgEnabled();
3286 desc.m_PeepholeEnabled = qLstmDescriptor->peepholeEnabled();
3287 desc.m_ProjectionEnabled = qLstmDescriptor->projectionEnabled();
3288 desc.m_LayerNormEnabled = qLstmDescriptor->layerNormEnabled();
3289
3290 desc.m_CellClip = qLstmDescriptor->cellClip();
3291 desc.m_ProjectionClip = qLstmDescriptor->projectionClip();
3292
3293 desc.m_InputIntermediateScale = qLstmDescriptor->inputIntermediateScale();
3294 desc.m_ForgetIntermediateScale = qLstmDescriptor->forgetIntermediateScale();
3295 desc.m_CellIntermediateScale = qLstmDescriptor->cellIntermediateScale();
3296 desc.m_OutputIntermediateScale = qLstmDescriptor->outputIntermediateScale();
3297
3298 desc.m_HiddenStateScale = qLstmDescriptor->hiddenStateScale();
3299 desc.m_HiddenStateZeroPoint = qLstmDescriptor->hiddenStateZeroPoint();
3300
3301 return desc;
3302}
3303
Finn Williams85d36712021-01-26 22:30:06 +00003304void IDeserializer::DeserializerImpl::ParseQLstm(GraphPtr graph, unsigned int layerIndex)
James Conroy8d333182020-05-13 10:27:58 +01003305{
3306 CHECK_LAYERS(graph, 0, layerIndex);
3307
3308 auto inputs = GetInputs(graph, layerIndex);
3309 CHECK_VALID_SIZE(inputs.size(), 3);
3310
3311 auto outputs = GetOutputs(graph, layerIndex);
3312 CHECK_VALID_SIZE(outputs.size(), 3);
3313
3314 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
3315 auto layerName = GetLayerName(graph, layerIndex);
3316 auto flatBufferDescriptor = flatBufferLayer->descriptor();
3317 auto flatBufferInputParams = flatBufferLayer->inputParams();
3318
3319 auto qLstmDescriptor = GetQLstmDescriptor(flatBufferDescriptor);
3320 armnn::LstmInputParams qLstmInputParams;
3321
3322 // Mandatory params
3323 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
3324 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
3325 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
3326 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
3327 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
3328 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
3329 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
3330 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
3331 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
3332
3333 qLstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
3334 qLstmInputParams.m_InputToCellWeights = &inputToCellWeights;
3335 qLstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
3336 qLstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
3337 qLstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
3338 qLstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
3339 qLstmInputParams.m_ForgetGateBias = &forgetGateBias;
3340 qLstmInputParams.m_CellBias = &cellBias;
3341 qLstmInputParams.m_OutputGateBias = &outputGateBias;
3342
3343 // Optional CIFG params
3344 armnn::ConstTensor inputToInputWeights;
3345 armnn::ConstTensor recurrentToInputWeights;
3346 armnn::ConstTensor inputGateBias;
3347
3348 if (!qLstmDescriptor.m_CifgEnabled)
3349 {
3350 inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
3351 recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
3352 inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
3353
3354 qLstmInputParams.m_InputToInputWeights = &inputToInputWeights;
3355 qLstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
3356 qLstmInputParams.m_InputGateBias = &inputGateBias;
3357 }
3358
3359 // Optional projection params
3360 armnn::ConstTensor projectionWeights;
3361 armnn::ConstTensor projectionBias;
3362
3363 if (qLstmDescriptor.m_ProjectionEnabled)
3364 {
3365 projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
3366 projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());
3367
3368 qLstmInputParams.m_ProjectionWeights = &projectionWeights;
3369 qLstmInputParams.m_ProjectionBias = &projectionBias;
3370 }
3371
3372 // Optional peephole params
3373 armnn::ConstTensor cellToInputWeights;
3374 armnn::ConstTensor cellToForgetWeights;
3375 armnn::ConstTensor cellToOutputWeights;
3376
3377 if (qLstmDescriptor.m_PeepholeEnabled)
3378 {
3379 if (!qLstmDescriptor.m_CifgEnabled)
3380 {
3381 cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
3382 qLstmInputParams.m_CellToInputWeights = &cellToInputWeights;
3383 }
3384
3385 cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
3386 cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());
3387
3388 qLstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
3389 qLstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
3390 }
3391
3392 // Optional layer norm params
3393 armnn::ConstTensor inputLayerNormWeights;
3394 armnn::ConstTensor forgetLayerNormWeights;
3395 armnn::ConstTensor cellLayerNormWeights;
3396 armnn::ConstTensor outputLayerNormWeights;
3397
3398 if (qLstmDescriptor.m_LayerNormEnabled)
3399 {
3400 if (!qLstmDescriptor.m_CifgEnabled)
3401 {
3402 inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
3403 qLstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
3404 }
3405
3406 forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
3407 cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
3408 outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
3409
3410 qLstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
3411 qLstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
3412 qLstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
3413 }
3414
3415 IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());
3416
3417 armnn::TensorInfo outputStateOutInfo = ToTensorInfo(outputs[0]);
3418 layer->GetOutputSlot(0).SetTensorInfo(outputStateOutInfo);
3419
3420 armnn::TensorInfo cellStateOutInfo = ToTensorInfo(outputs[1]);
3421 layer->GetOutputSlot(1).SetTensorInfo(cellStateOutInfo);
3422
3423 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[2]);
3424 layer->GetOutputSlot(2).SetTensorInfo(outputInfo);
3425
3426 RegisterInputSlots(graph, layerIndex, layer);
3427 RegisterOutputSlots(graph, layerIndex, layer);
3428}
3429
Finn Williams85d36712021-01-26 22:30:06 +00003430void IDeserializer::DeserializerImpl::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
Jan Eilers5b01a892019-07-23 09:47:43 +01003431{
3432 CHECK_LAYERS(graph, 0, layerIndex);
3433
3434 auto inputs = GetInputs(graph, layerIndex);
3435 CHECK_VALID_SIZE(inputs.size(), 3);
3436
3437 auto outputs = GetOutputs(graph, layerIndex);
3438 CHECK_VALID_SIZE(outputs.size(), 2);
3439
3440 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
3441 auto layerName = GetLayerName(graph, layerIndex);
3442 auto flatBufferInputParams = flatBufferLayer->inputParams();
3443
3444 armnn::QuantizedLstmInputParams lstmInputParams;
3445
3446 armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
3447 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
3448 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
3449 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
3450 armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
3451 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
3452 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
3453 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
3454 armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
3455 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
3456 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
3457 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
3458
3459 lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
3460 lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
3461 lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
3462 lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
3463 lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
3464 lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
3465 lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
3466 lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
3467 lstmInputParams.m_InputGateBias = &inputGateBias;
3468 lstmInputParams.m_ForgetGateBias = &forgetGateBias;
3469 lstmInputParams.m_CellBias = &cellBias;
3470 lstmInputParams.m_OutputGateBias = &outputGateBias;
3471
3472 IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());
3473
3474 armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
3475 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);
3476
3477 armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
3478 layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);
3479
3480 RegisterInputSlots(graph, layerIndex, layer);
3481 RegisterOutputSlots(graph, layerIndex, layer);
3482}
3483
Finn Williams85d36712021-01-26 22:30:06 +00003484void IDeserializer::DeserializerImpl::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003485{
3486 CHECK_LAYERS(graph, 0, layerIndex);
3487
Finn Williams85d36712021-01-26 22:30:06 +00003488 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003489 CHECK_VALID_SIZE(inputs.size(), 1);
3490
Finn Williams85d36712021-01-26 22:30:06 +00003491 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003492 CHECK_VALID_SIZE(outputs.size(), 1);
3493
3494 const std::string layerName = GetLayerName(graph, layerIndex);
3495 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
3496
3497 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3498 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3499
3500 RegisterInputSlots(graph, layerIndex, layer);
3501 RegisterOutputSlots(graph, layerIndex, layer);
3502}
3503
Finn Williams85d36712021-01-26 22:30:06 +00003504void IDeserializer::DeserializerImpl::ParseMerge(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003505{
3506 CHECK_LAYERS(graph, 0, layerIndex);
3507
Finn Williams85d36712021-01-26 22:30:06 +00003508 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003509 CHECK_VALID_SIZE(inputs.size(), 2);
3510
Finn Williams85d36712021-01-26 22:30:06 +00003511 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003512 CHECK_VALID_SIZE(outputs.size(), 1);
3513
3514 const std::string layerName = GetLayerName(graph, layerIndex);
3515 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
3516
3517 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3518 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3519
3520 RegisterInputSlots(graph, layerIndex, layer);
3521 RegisterOutputSlots(graph, layerIndex, layer);
3522}
3523
Finn Williams85d36712021-01-26 22:30:06 +00003524void IDeserializer::DeserializerImpl::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganeff363d2019-04-05 15:25:46 +01003525{
3526 CHECK_LAYERS(graph, 0, layerIndex);
3527 auto inputs = GetInputs(graph, layerIndex);
3528 CHECK_LOCATION();
3529 CHECK_VALID_SIZE(inputs.size(), 2);
3530
3531 auto outputs = GetOutputs(graph, layerIndex);
3532 CHECK_VALID_SIZE(outputs.size(), 2);
3533
3534 auto layerName = GetLayerName(graph, layerIndex);
3535 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
3536
3537 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
3538 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
3539
3540 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
3541 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
3542
3543 RegisterInputSlots(graph, layerIndex, layer);
3544 RegisterOutputSlots(graph, layerIndex, layer);
3545}
3546
Finn Williams85d36712021-01-26 22:30:06 +00003547void IDeserializer::DeserializerImpl::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01003548{
3549 CHECK_LAYERS(graph, 0, layerIndex);
3550 auto inputs = GetInputs(graph, layerIndex);
3551 CHECK_LOCATION();
3552 CHECK_VALID_SIZE(inputs.size(), 2);
3553
3554 auto outputs = GetOutputs(graph, layerIndex);
3555 CHECK_VALID_SIZE(outputs.size(), 1);
3556
3557 auto layerName = GetLayerName(graph, layerIndex);
3558 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
3559
3560 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3561 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3562
3563 RegisterInputSlots(graph, layerIndex, layer);
3564 RegisterOutputSlots(graph, layerIndex, layer);
3565}
3566
Finn Williams85d36712021-01-26 22:30:06 +00003567void IDeserializer::DeserializerImpl::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
Mike Kellyc9ea45a2020-02-28 18:11:58 +00003568{
3569 CHECK_LAYERS(graph, 0, layerIndex);
3570
3571 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
3572
3573 auto inputs = GetInputs(graph, layerIndex);
3574 CHECK_VALID_SIZE(inputs.size(), 1);
3575
3576 auto outputs = GetOutputs(graph, layerIndex);
3577 CHECK_VALID_SIZE(outputs.size(), 1);
3578 auto outputInfo = ToTensorInfo(outputs[0]);
3579
3580 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly51b8c312022-05-24 11:34:02 +01003581 const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->size()));
Mike Kellyc9ea45a2020-02-28 18:11:58 +00003582
3583 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
3584 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
3585
3586 RegisterInputSlots(graph, layerIndex, layer);
3587 RegisterOutputSlots(graph, layerIndex, layer);
3588}
3589
Finn Williams85d36712021-01-26 22:30:06 +00003590void IDeserializer::DeserializerImpl::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01003591{
3592 CHECK_LAYERS(graph, 0, layerIndex);
3593
3594 auto inputs = GetInputs(graph, layerIndex);
3595 CHECK_VALID_SIZE(inputs.size(), 1);
3596
3597 auto outputs = GetOutputs(graph, layerIndex);
3598 CHECK_VALID_SIZE(outputs.size(), 1);
3599
3600 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
3601 auto layerName = GetLayerName(graph, layerIndex);
3602 auto serializerDescriptor = serializerLayer->descriptor();
3603
3604 armnn::TransposeConvolution2dDescriptor descriptor;
3605 descriptor.m_PadLeft = serializerDescriptor->padLeft();
3606 descriptor.m_PadRight = serializerDescriptor->padRight();
3607 descriptor.m_PadTop = serializerDescriptor->padTop();
3608 descriptor.m_PadBottom = serializerDescriptor->padBottom();
3609 descriptor.m_StrideX = serializerDescriptor->strideX();
3610 descriptor.m_StrideY = serializerDescriptor->strideY();;
3611 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
3612 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
3613
3614 // weights & biases
3615 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
3616 armnn::Optional<armnn::ConstTensor> optionalBiases;
3617 if (descriptor.m_BiasEnabled)
3618 {
3619 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
3620 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
3621 }
3622
3623 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
3624 weights,
3625 optionalBiases,
3626 layerName.c_str());
3627
3628 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3629 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3630
3631 RegisterInputSlots(graph, layerIndex, layer);
3632 RegisterOutputSlots(graph, layerIndex, layer);
3633}
3634
Finn Williams85d36712021-01-26 22:30:06 +00003635void IDeserializer::DeserializerImpl::ParseStack(GraphPtr graph, unsigned int layerIndex)
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003636{
3637 CHECK_LAYERS(graph, 0, layerIndex);
3638 auto inputs = GetInputs(graph, layerIndex);
3639
3640 auto outputs = GetOutputs(graph, layerIndex);
3641 CHECK_VALID_SIZE(outputs.size(), 1);
3642
3643 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
3644 unsigned int axis = flatBufferDescriptor->axis();
3645 unsigned int numInputs = flatBufferDescriptor->numInputs();
3646 CHECK_VALID_SIZE(inputs.size(), numInputs);
3647
3648 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
3649 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
3650 flatBufferInputShape->begin() + flatBufferInputShape->size());
3651
3652 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
3653 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
3654
3655 for (unsigned int i=0; i<inputs.size(); ++i)
3656 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01003657 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003658 if (descriptor.m_InputShape != inputShape)
3659 {
3660 std::stringstream ss;
3661 ss << "Shape of input "
3662 << i
3663 << " "
3664 << inputShape
3665 << " does not equal defined input shape "
3666 << descriptor.m_InputShape
3667 << ": "
3668 << CHECK_LOCATION().AsString();
3669 throw ParseException(ss.str());
3670 }
3671 }
3672
3673 auto layerName = GetLayerName(graph, layerIndex);
3674 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
3675
3676 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3677 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3678
3679 RegisterInputSlots(graph, layerIndex, layer);
3680 RegisterOutputSlots(graph, layerIndex, layer);
3681}
3682
Finn Williams85d36712021-01-26 22:30:06 +00003683void IDeserializer::DeserializerImpl::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar85121a22019-10-23 10:41:35 +01003684{
3685 CHECK_LAYERS(graph, 0, layerIndex);
3686
3687 auto inputs = GetInputs(graph, layerIndex);
3688 auto outputs = GetOutputs(graph, layerIndex);
3689
3690 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
3691 auto fbDescriptor = fbLayer->descriptor();
3692
3693 armnn::StandInDescriptor descriptor;
3694 descriptor.m_NumInputs = fbDescriptor->numInputs();
3695 descriptor.m_NumOutputs = fbDescriptor->numOutputs();
3696
3697 CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
3698 CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);
3699
3700 const std::string layerName = GetLayerName(graph, layerIndex);
3701 armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());
3702
3703 for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
3704 {
3705 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
3706 layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
3707 }
3708
3709 RegisterInputSlots(graph, layerIndex, layer);
3710 RegisterOutputSlots(graph, layerIndex, layer);
3711}
3712
Narumol Prangnawarata0162e12021-07-23 14:47:49 +01003713armnn::UnidirectionalSequenceLstmDescriptor IDeserializer::DeserializerImpl::GetUnidirectionalSequenceLstmDescriptor(
3714 UnidirectionalSequenceLstmDescriptorPtr descriptor)
3715{
3716 armnn::UnidirectionalSequenceLstmDescriptor desc;
3717
3718 desc.m_ActivationFunc = descriptor->activationFunc();
3719 desc.m_ClippingThresCell = descriptor->clippingThresCell();
3720 desc.m_ClippingThresProj = descriptor->clippingThresProj();
3721 desc.m_CifgEnabled = descriptor->cifgEnabled();
3722 desc.m_PeepholeEnabled = descriptor->peepholeEnabled();
3723 desc.m_ProjectionEnabled = descriptor->projectionEnabled();
3724 desc.m_LayerNormEnabled = descriptor->layerNormEnabled();
3725 desc.m_TimeMajor = descriptor->timeMajor();
3726
3727 return desc;
3728}
3729
void IDeserializer::DeserializerImpl::ParseUnidirectionalSequenceLstm(GraphPtr graph, unsigned int layerIndex)
{
    // Deserializes a UnidirectionalSequenceLstm layer from the flatbuffer graph:
    // reads its descriptor and the feature-dependent weight/bias tensors,
    // recreates the layer on m_Network, and wires up its input/output slots.
    CHECK_LAYERS(graph, 0, layerIndex);

    // Exactly three inputs are expected (presumably the input sequence plus the
    // two state tensors -- TODO confirm ordering against the serializer).
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 3);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto descriptor = GetUnidirectionalSequenceLstmDescriptor(flatBufferDescriptor);

    // NOTE: lstmInputParams holds raw pointers into the ConstTensor locals
    // declared below; every tensor must therefore stay alive until
    // AddUnidirectionalSequenceLstmLayer has been called at the end.
    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters: always present regardless of the feature flags.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Optional parameters below are declared at function scope (not inside the
    // if-blocks) so the tensors outlive the pointers stored in lstmInputParams.

    // Input-gate parameters: only serialized when CIFG is disabled.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!descriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;

        // The input-gate peephole weight additionally requires peephole enabled.
        if (descriptor.m_PeepholeEnabled)
        {
            cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
            lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        }
    }

    // Projection parameters: only serialized when projection is enabled.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (descriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Forget/output-gate peephole weights: only serialized when peephole is enabled.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (descriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Layer-normalization weights: only serialized when layer norm is enabled;
    // the input-gate norm weight is additionally skipped when CIFG is enabled.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (descriptor.m_LayerNormEnabled)
    {
        if (!descriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    // The layer copies what it needs from lstmInputParams here; the ConstTensor
    // locals above may safely go out of scope after this call.
    IConnectableLayer* layer = m_Network->AddUnidirectionalSequenceLstmLayer(descriptor,
                                                                             lstmInputParams,
                                                                             layerName.c_str());

    // Propagate the serialized tensor infos onto the three output slots
    // (slot meaning/ordering defined by the serializer -- TODO confirm).
    armnn::TensorInfo outputTensorInfo0 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo0);

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo2);

    // Record slot bindings so connections can be established once all layers exist.
    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3848
Derek Lamberti0028d1b2019-02-20 13:57:42 +00003849} // namespace armnnDeserializer