blob: a405cb92a539734a94d84c6e456603036fec69b0 [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
Finn Williams2605b232020-06-10 15:53:46 +01002// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
Kevin May43a799c2019-02-08 16:31:42 +00003// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
Matthew Benthamff130e22020-01-17 11:47:42 +00008#include <armnn/Descriptors.hpp>
Kevin May43a799c2019-02-08 16:31:42 +00009#include <armnn/Exceptions.hpp>
Matthew Benthamff130e22020-01-17 11:47:42 +000010#include <armnn/TypesUtils.hpp>
11#include <armnn/LstmParams.hpp>
12#include <armnn/QuantizedLstmParams.hpp>
Simon Obutedb5804e2022-04-14 15:49:52 +010013#include <armnn/Logging.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000014
Matteo Martincighe011d202019-11-28 11:35:47 +000015#include <armnnUtils/Permute.hpp>
Mike Kellyc9ea45a2020-02-28 18:11:58 +000016#include <armnnUtils/Transpose.hpp>
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +010017#include <armnn/utility/Assert.hpp>
Jan Eilers8eb25602020-03-09 12:13:48 +000018#include <armnn/utility/IgnoreUnused.hpp>
Matthew Sloyan0663d662020-09-14 11:47:26 +010019#include <armnn/utility/NumericCast.hpp>
Matteo Martincighe011d202019-11-28 11:35:47 +000020
Kevin May43a799c2019-02-08 16:31:42 +000021#include <ParserHelper.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000022#include <VerificationHelpers.hpp>
23
Colm Donelan5b5c2222020-09-09 12:48:16 +010024#include <fmt/format.h>
Kevin May43a799c2019-02-08 16:31:42 +000025
Kevin May43a799c2019-02-08 16:31:42 +000026#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000027#include <algorithm>
28#include <limits>
29#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000030
31using armnn::ParseException;
32using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000033using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000034
Derek Lamberti0028d1b2019-02-20 13:57:42 +000035namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000036{
Kevin May43a799c2019-02-08 16:31:42 +000037
// IDeserializer is a thin pimpl facade: the public object holds a
// DeserializerImpl and forwards every call to it.
IDeserializer::IDeserializer() : pDeserializerImpl(new DeserializerImpl()){}

IDeserializer::~IDeserializer() = default;

// Returns a raw owning pointer; callers must release it via Destroy().
IDeserializer *IDeserializer::CreateRaw()
{
    return new IDeserializer();
}

// Preferred factory: wraps CreateRaw() in a smart pointer whose deleter is Destroy(),
// so ownership cannot leak across the ABI boundary.
IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

// Deleter counterpart of CreateRaw()/Create().
void IDeserializer::Destroy(IDeserializer *parser)
{
    delete parser;
}
56
57armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(const std::vector<uint8_t> &binaryContent)
58{
59 return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
60}
61
62armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(std::istream &binaryContent)
63{
64 return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
65}
66
67BindingPointInfo IDeserializer::GetNetworkInputBindingInfo(unsigned int layerId, const std::string &name) const
68{
69 return pDeserializerImpl->GetNetworkInputBindingInfo(layerId, name);
70}
71
72BindingPointInfo IDeserializer::GetNetworkOutputBindingInfo(unsigned int layerId, const std::string &name) const
73{
74 return pDeserializerImpl->GetNetworkOutputBindingInfo(layerId, name);
75}
76
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000077namespace
78{
Kevin May43a799c2019-02-08 16:31:42 +000079
80const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
81
Finn Williams85d36712021-01-26 22:30:06 +000082 void CheckGraph(const GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000083 unsigned int layersIndex,
84 const CheckLocation& location)
85{
86 if (graph->layers() == nullptr)
87 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010088 throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
89 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
90 "layers:{1} at {2}",
91 location.m_Function,
92 layersIndex,
93 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +000094 }
95 else if (layersIndex >= graph->layers()->size())
96 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010097 throw ParseException(fmt::format("{0} was called with an invalid layers index. layers:{1} at {2}",
98 location.m_Function,
99 layersIndex,
100 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000101 }
102}
103
Finn Williams85d36712021-01-26 22:30:06 +0000104void CheckLayers(const GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +0000105 unsigned int layersIndex,
106 unsigned int layerIndex,
107 const CheckLocation& location)
108{
109 if (graph->layers() == nullptr)
110 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100111 throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
112 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
113 "layers:{1} at {2}",
114 location.m_Function,
115 layersIndex,
116 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000117 }
118 else if (layersIndex >= graph->layers()->size())
119 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100120 throw ParseException(fmt::format("{0} was called with an invalid layers index. "
121 "layers:{1} at {2}",
122 location.m_Function,
123 layersIndex,
124 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000125 }
126 else if (layerIndex >= graph->layers()[layersIndex].size()
127 && layerIndex != VIRTUAL_LAYER_ID)
128 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100129 throw ParseException(fmt::format("{0} was called with an invalid layer index. "
130 "layers:{1} layer:{2} at {3}",
131 location.m_Function,
132 layersIndex,
133 layerIndex,
134 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000135 }
136}
137
Finn Williams85d36712021-01-26 22:30:06 +0000138void CheckTensorPtr(TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000139 const CheckLocation& location)
140{
141 if (rawPtr == nullptr)
142 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100143 throw ParseException(fmt::format("{0} was called with a null tensor pointer. at {1}",
144 location.m_Function,
145 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000146 }
147}
148
Finn Williams85d36712021-01-26 22:30:06 +0000149void CheckConstTensorPtr(ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000150 const CheckLocation& location)
151{
152 if (rawPtr == nullptr)
153 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100154 throw ParseException(fmt::format("{0} was called with a null const tensor pointer. at {1}",
155 location.m_Function,
156 location.FileLine()));
Mike Kellya0766c32019-02-19 17:22:07 +0000157 }
158}
159
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000160void CheckConstTensorSize(const unsigned int constTensorSize,
161 const unsigned int tensorSize,
162 const CheckLocation& location)
163{
164 if (constTensorSize != tensorSize)
165 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100166 throw ParseException(fmt::format("{0} wrong number of components supplied to tensor. at:{1}",
167 location.m_Function,
168 location.FileLine()));
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000169 }
170}
171
// Convenience wrappers around the Check* helpers above; each captures the call
// site via CHECK_LOCATION() so thrown ParseExceptions point at the caller.
#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
} // anonymous namespace
187
Saoirse Stewart263829c2019-02-19 15:54:14 +0000188bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
189{
190 const unsigned int actualSize = actual.GetNumDimensions();
191 if (actualSize != expected.size())
192 {
193 return false;
194 }
195
196 for (unsigned int i = 0u; i < actualSize; i++)
197 {
198 if (actual[i] != static_cast<unsigned int>(expected[i]))
199 {
200 return false;
201 }
202 }
203
204 return true;
205}
206
// Builds the layer-type -> parse-member-function dispatch table. Every slot is
// pre-filled with ParseUnsupportedLayer so an unregistered layer type fails with
// a clear error rather than a null call.
IDeserializer::DeserializerImpl::DeserializerImpl()
: m_Network(nullptr, nullptr),
//May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &IDeserializer::DeserializerImpl::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer] = &DeserializerImpl::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer] = &DeserializerImpl::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &DeserializerImpl::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer] = &DeserializerImpl::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &DeserializerImpl::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &DeserializerImpl::ParseBatchNormalization;
    m_ParserFunctions[Layer_CastLayer] = &DeserializerImpl::ParseCast;
    m_ParserFunctions[Layer_ChannelShuffleLayer] = &DeserializerImpl::ParseChannelShuffle;
    m_ParserFunctions[Layer_ComparisonLayer] = &DeserializerImpl::ParseComparison;
    m_ParserFunctions[Layer_ConcatLayer] = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer] = &DeserializerImpl::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &DeserializerImpl::ParseConvolution2d;
    m_ParserFunctions[Layer_Convolution3dLayer] = &DeserializerImpl::ParseConvolution3d;
    m_ParserFunctions[Layer_DepthToSpaceLayer] = &DeserializerImpl::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &DeserializerImpl::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &DeserializerImpl::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &DeserializerImpl::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &DeserializerImpl::ParseDivision;
    m_ParserFunctions[Layer_ElementwiseUnaryLayer] = &DeserializerImpl::ParseElementwiseUnary;
    m_ParserFunctions[Layer_EqualLayer] = &DeserializerImpl::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &DeserializerImpl::ParseFullyConnected;
    m_ParserFunctions[Layer_FillLayer] = &DeserializerImpl::ParseFill;
    m_ParserFunctions[Layer_FloorLayer] = &DeserializerImpl::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &DeserializerImpl::ParseGather;
    m_ParserFunctions[Layer_GatherNdLayer] = &DeserializerImpl::ParseGatherNd;
    m_ParserFunctions[Layer_GreaterLayer] = &DeserializerImpl::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer] = &DeserializerImpl::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &DeserializerImpl::ParseL2Normalization;
    m_ParserFunctions[Layer_LogicalBinaryLayer] = &DeserializerImpl::ParseLogicalBinary;
    m_ParserFunctions[Layer_LogSoftmaxLayer] = &DeserializerImpl::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer] = &DeserializerImpl::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &DeserializerImpl::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &DeserializerImpl::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &DeserializerImpl::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer] = &DeserializerImpl::ParseMerge;
    // Deprecated Merger layers are parsed as Concat.
    m_ParserFunctions[Layer_MergerLayer] = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer] = &DeserializerImpl::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &DeserializerImpl::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &DeserializerImpl::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &DeserializerImpl::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &DeserializerImpl::ParsePooling2d;
    m_ParserFunctions[Layer_Pooling3dLayer] = &DeserializerImpl::ParsePooling3d;
    m_ParserFunctions[Layer_PreluLayer] = &DeserializerImpl::ParsePrelu;
    m_ParserFunctions[Layer_QLstmLayer] = &DeserializerImpl::ParseQLstm;
    m_ParserFunctions[Layer_QuantizeLayer] = &DeserializerImpl::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer] = &DeserializerImpl::ParseQuantizedLstm;
    m_ParserFunctions[Layer_RankLayer] = &DeserializerImpl::ParseRank;
    m_ParserFunctions[Layer_ReduceLayer] = &DeserializerImpl::ParseReduce;
    m_ParserFunctions[Layer_ReshapeLayer] = &DeserializerImpl::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &DeserializerImpl::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer] = &DeserializerImpl::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer] = &DeserializerImpl::ParseRsqrt;
    m_ParserFunctions[Layer_ShapeLayer] = &DeserializerImpl::ParseShape;
    m_ParserFunctions[Layer_SliceLayer] = &DeserializerImpl::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer] = &DeserializerImpl::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &DeserializerImpl::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer] = &DeserializerImpl::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer] = &DeserializerImpl::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer] = &DeserializerImpl::ParseStack;
    m_ParserFunctions[Layer_StandInLayer] = &DeserializerImpl::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer] = &DeserializerImpl::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &DeserializerImpl::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer] = &DeserializerImpl::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &DeserializerImpl::ParseTransposeConvolution2d;
    m_ParserFunctions[Layer_TransposeLayer] = &DeserializerImpl::ParseTranspose;
    m_ParserFunctions[Layer_UnidirectionalSequenceLstmLayer] = &DeserializerImpl::ParseUnidirectionalSequenceLstm;
}
280
// Returns the LayerBase table of the layer at 'layerIndex' in the graph's layers
// vector. Most layer tables embed a LayerBase directly as 'base'; InputLayer and
// OutputLayer wrap a bindable base, hence the extra ->base() hop for those two.
// Throws ParseException for Layer_NONE or any unrecognized layer_type.
LayerBaseRawPtr IDeserializer::DeserializerImpl::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_CastLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_CastLayer()->base();
        case Layer::Layer_ChannelShuffleLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->base();
        case Layer::Layer_ComparisonLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_Convolution3dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution3dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_ElementwiseUnaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FillLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FillLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GatherNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherNdLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            // Bindable layer: LayerBase sits one level deeper.
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_InstanceNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LogicalBinaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer()->base();
        case Layer::Layer_LogSoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            // Bindable layer: LayerBase sits one level deeper.
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_Pooling3dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_RankLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
        case Layer::Layer_ReduceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReduceLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_ShapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ShapeLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StandInLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_TransposeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
        case Layer::Layer_UnidirectionalSequenceLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(fmt::format("Layer type {} not recognized", layerType));
    }
}
428
Finn Williams85d36712021-01-26 22:30:06 +0000429std::string IDeserializer::DeserializerImpl::GetLayerName(const GraphPtr& graph, unsigned int index)
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000430{
431 auto layer = GetBaseLayer(graph, index);
432 assert(layer);
433 return layer->layerName()->str();
434}
435
Finn Williams85d36712021-01-26 22:30:06 +0000436int32_t IDeserializer::DeserializerImpl::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000437{
438 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
439
440 if (layerType == Layer::Layer_InputLayer)
441 {
442 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
443 }
444 else if ( layerType == Layer::Layer_OutputLayer )
445 {
446 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
447 }
448 return 0;
449}
450
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000451armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000452{
453 switch (dataLayout)
454 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000455 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000456 return armnn::DataLayout::NHWC;
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100457 case armnnSerializer::DataLayout::DataLayout_NDHWC:
458 return armnn::DataLayout::NDHWC;
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +0100459 case armnnSerializer::DataLayout::DataLayout_NCDHW:
460 return armnn::DataLayout::NCDHW;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000461 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000462 default:
463 return armnn::DataLayout::NCHW;
464 }
465}
466
Mike Kellyaf484012019-02-20 16:53:11 +0000467armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
468{
469 switch (function)
470 {
471 case armnnSerializer::ActivationFunction_Sigmoid:
472 return armnn::ActivationFunction::Sigmoid;
473 case armnnSerializer::ActivationFunction_TanH:
474 return armnn::ActivationFunction::TanH;
475 case armnnSerializer::ActivationFunction_Linear:
476 return armnn::ActivationFunction::Linear;
477 case armnnSerializer::ActivationFunction_ReLu:
478 return armnn::ActivationFunction::ReLu;
479 case armnnSerializer::ActivationFunction_BoundedReLu:
480 return armnn::ActivationFunction::BoundedReLu;
481 case armnnSerializer::ActivationFunction_LeakyReLu:
482 return armnn::ActivationFunction::LeakyReLu;
483 case armnnSerializer::ActivationFunction_Abs:
484 return armnn::ActivationFunction::Abs;
485 case armnnSerializer::ActivationFunction_Sqrt:
486 return armnn::ActivationFunction::Sqrt;
487 case armnnSerializer::ActivationFunction_Square:
488 return armnn::ActivationFunction::Square;
David Monahan3b3c3812020-02-25 09:03:29 +0000489 case armnnSerializer::ActivationFunction_Elu:
490 return armnn::ActivationFunction::Elu;
Colm Donelan03fbeaf2020-02-26 15:39:23 +0000491 case armnnSerializer::ActivationFunction_HardSwish:
492 return armnn::ActivationFunction::HardSwish;
Mike Kellyaf484012019-02-20 16:53:11 +0000493 default:
494 return armnn::ActivationFunction::Sigmoid;
495 }
496}
497
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100498armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
499{
500 switch (function)
501 {
502 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
503 return armnn::ArgMinMaxFunction::Max;
504 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
505 default:
506 return armnn::ArgMinMaxFunction::Min;
507 }
508}
509
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +0100510armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
511{
512 switch (operation)
513 {
514 case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
515 return armnn::ComparisonOperation::Equal;
516 case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
517 return armnn::ComparisonOperation::Greater;
518 case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
519 return armnn::ComparisonOperation::GreaterOrEqual;
520 case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
521 return armnn::ComparisonOperation::Less;
522 case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
523 return armnn::ComparisonOperation::LessOrEqual;
524 case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
525 default:
526 return armnn::ComparisonOperation::NotEqual;
527 }
528}
529
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +0000530armnn::ReduceOperation ToReduceOperation(armnnSerializer::ReduceOperation operation)
531{
532 switch (operation)
533 {
534 case armnnSerializer::ReduceOperation::ReduceOperation_Sum:
535 return armnn::ReduceOperation::Sum;
536 case armnnSerializer::ReduceOperation::ReduceOperation_Max:
537 return armnn::ReduceOperation::Max;
538 case armnnSerializer::ReduceOperation::ReduceOperation_Mean:
539 return armnn::ReduceOperation::Mean;
540 case armnnSerializer::ReduceOperation::ReduceOperation_Min:
541 return armnn::ReduceOperation::Min;
Teresa Charlin4e3e8312021-08-05 12:34:37 +0100542 case armnnSerializer::ReduceOperation::ReduceOperation_Prod:
543 return armnn::ReduceOperation::Prod;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +0000544 default:
545 return armnn::ReduceOperation::Sum;
546 }
547}
548
James Conroyaba90cd2020-11-06 16:28:18 +0000549armnn::LogicalBinaryOperation ToLogicalBinaryOperation(armnnSerializer::LogicalBinaryOperation operation)
550{
551 switch (operation)
552 {
553 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalAnd:
554 return armnn::LogicalBinaryOperation::LogicalAnd;
555 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalOr:
556 return armnn::LogicalBinaryOperation::LogicalOr;
557 default:
558 throw armnn::InvalidArgumentException("Logical Binary operation unknown");
559 }
560}
561
josh minor4a3c6102020-01-06 16:40:46 -0600562armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
563{
564 switch (operation)
565 {
566 case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
567 return armnn::UnaryOperation::Abs;
568 case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
569 return armnn::UnaryOperation::Rsqrt;
570 case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
571 return armnn::UnaryOperation::Sqrt;
572 case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
573 return armnn::UnaryOperation::Exp;
574 case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
575 return armnn::UnaryOperation::Neg;
James Conroyaba90cd2020-11-06 16:28:18 +0000576 case armnnSerializer::UnaryOperation::UnaryOperation_LogicalNot:
577 return armnn::UnaryOperation::LogicalNot;
Teresa Charlin50de4fa2021-05-31 18:47:33 +0100578 case armnnSerializer::UnaryOperation::UnaryOperation_Log:
579 return armnn::UnaryOperation::Log;
580 case armnnSerializer::UnaryOperation::UnaryOperation_Sin:
581 return armnn::UnaryOperation::Sin;
josh minor4a3c6102020-01-06 16:40:46 -0600582 default:
583 throw armnn::InvalidArgumentException("Unary operation unknown");
584 }
585}
586
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +0100587armnn::PaddingMode ToPaddingMode(armnnSerializer::PaddingMode paddingMode)
588{
589 switch (paddingMode)
590 {
591 case armnnSerializer::PaddingMode::PaddingMode_Reflect:
592 return armnn::PaddingMode::Reflect;
593 case armnnSerializer::PaddingMode::PaddingMode_Symmetric:
594 return armnn::PaddingMode::Symmetric;
595 default:
596 return armnn::PaddingMode::Constant;
597 }
598}
599
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100600armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
601{
602 switch (method)
603 {
604 case armnnSerializer::ResizeMethod_NearestNeighbor:
605 return armnn::ResizeMethod::NearestNeighbor;
606 case armnnSerializer::ResizeMethod_Bilinear:
Aron Virginas-Tar3c9b2702019-10-31 13:45:16 +0000607 return armnn::ResizeMethod::Bilinear;
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100608 default:
609 return armnn::ResizeMethod::NearestNeighbor;
610 }
611}
612
// Builds an armnn::TensorInfo from a serialized tensor, handling data type
// mapping, scalar/unspecified dimensionalities, per-dimension specificity
// (back-compat) and both per-tensor and per-axis quantization.
armnn::TensorInfo ToTensorInfo(TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    // Map the serialized data type to the armnn data type. Deprecated enum
    // names (QuantisedAsymm8, QuantisedSymm16) alias their modern equivalents.
    switch (tensorPtr->dataType())
    {
        case DataType_QAsymmS8:
            type = armnn::DataType::QAsymmS8;
            break;
        case DataType_QSymmS8:
            type = armnn::DataType::QSymmS8;
            break;
        case DataType_QuantisedAsymm8:
        case DataType_QAsymmU8:
            type = armnn::DataType::QAsymmU8;
            break;
        case DataType_QSymmS16:
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QSymmS16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Signed64:
            type = armnn::DataType::Signed64;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
                                             tensorPtr->dataType(),
                                             EnumNameDataType(tensorPtr->dataType()),
                                             location.AsString()));
        }
    }

    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    // Scalar and NotSpecified tensors carry no dimension data, so return early
    // with a shape built purely from the dimensionality tag.
    if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::Scalar))
    {
        return armnn::TensorInfo(TensorShape{armnn::Dimensionality::Scalar},
                                 type,
                                 quantizationScale,
                                 quantizationOffset);
    }
    else if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::NotSpecified))
    {
        armnn::TensorInfo result(TensorShape{Dimensionality::NotSpecified},
                                 type,
                                 quantizationScale,
                                 quantizationOffset);
        return result;
    }

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
    bool dimensionsSpecificity[armnn::MaxNumOfTensorDimensions];
    std::fill_n(dimensionsSpecificity, armnn::MaxNumOfTensorDimensions, true);
    // For backwards compatibility check if the dimensionSpecificity vector is present first.
    // The default is to have dimensionSpecificity set to all true's anyway.
    if (tensorPtr->dimensionSpecificity() != nullptr)
    {
        auto dimensionSpecificity = tensorPtr->dimensionSpecificity();
        // NOTE(review): `size` is overwritten here; this assumes the
        // specificity vector has the same length as `dimensions` — confirm
        // against the serializer.
        size = dimensionSpecificity->size();
        for (unsigned int i = 0; i < size; ++i)
        {
            dimensionsSpecificity[i] = dimensionSpecificity->Get(i);
        }
    }
    // Construct a TensorShape
    TensorShape shape(size, outputDims.data(), dimensionsSpecificity);

    // Per-axis quantization: if a scales vector is present it takes precedence
    // over the single scale/offset pair.
    auto quantizationScales = tensorPtr->quantizationScales();
    if (quantizationScales)
    {
        unsigned int quantizationScalesSize = quantizationScales->size();
        std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
        unsigned int quantizationDim = tensorPtr->quantizationDim();
        armnn::TensorInfo result(shape,
                                 type,
                                 scales,
                                 quantizationDim);
        return result;
    }

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(shape,
                             type,
                             quantizationScale,
                             quantizationOffset);

    return result;
}
718
Finn Williams85d36712021-01-26 22:30:06 +0000719armnn::ConstTensor ToConstTensor(ConstTensorRawPtr constTensorPtr)
Mike Kellya0766c32019-02-19 17:22:07 +0000720{
721 CHECK_CONST_TENSOR_PTR(constTensorPtr);
722 armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());
Matthew Sloyan81beae32021-07-13 19:46:11 +0100723 tensorInfo.SetConstant();
Mike Kellya0766c32019-02-19 17:22:07 +0000724
725 switch (constTensorPtr->data_type())
726 {
727 case ConstTensorData_ByteData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000728 {
729 auto byteData = constTensorPtr->data_as_ByteData()->data();
730 CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
731 return armnn::ConstTensor(tensorInfo, byteData->data());
732 }
Mike Kellya0766c32019-02-19 17:22:07 +0000733 case ConstTensorData_ShortData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000734 {
735 auto shortData = constTensorPtr->data_as_ShortData()->data();
736 CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
737 return armnn::ConstTensor(tensorInfo, shortData->data());
738 }
Mike Kellya0766c32019-02-19 17:22:07 +0000739 case ConstTensorData_IntData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000740 {
741 auto intData = constTensorPtr->data_as_IntData()->data();
742 CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
743 return armnn::ConstTensor(tensorInfo, intData->data());
744 }
Mike Kellya0766c32019-02-19 17:22:07 +0000745 case ConstTensorData_LongData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000746 {
747 auto longData = constTensorPtr->data_as_LongData()->data();
748 CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
749 return armnn::ConstTensor(tensorInfo, longData->data());
750 }
Mike Kellya0766c32019-02-19 17:22:07 +0000751 default:
752 {
753 CheckLocation location = CHECK_LOCATION();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100754 throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
755 constTensorPtr->data_type(),
756 EnumNameConstTensorData(constTensorPtr->data_type()),
757 location.AsString()));
Mike Kellya0766c32019-02-19 17:22:07 +0000758 }
759 }
760}
761
Finn Williams85d36712021-01-26 22:30:06 +0000762TensorRawPtrVector IDeserializer::DeserializerImpl::GetInputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000763{
764 CHECK_LAYERS(graphPtr, 0, layerIndex);
765 auto layer = GetBaseLayer(graphPtr, layerIndex);
766 const auto& numInputs = layer->inputSlots()->size();
767
768 TensorRawPtrVector result(numInputs);
769
770 for (unsigned int i=0; i<numInputs; ++i)
771 {
772 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
773 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
774 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
775 }
776 return result;
777}
778
Finn Williams85d36712021-01-26 22:30:06 +0000779TensorRawPtrVector IDeserializer::DeserializerImpl::GetOutputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000780{
781 CHECK_LAYERS(graphPtr, 0, layerIndex);
782 auto layer = GetBaseLayer(graphPtr, layerIndex);
783 const auto& numOutputs = layer->outputSlots()->size();
784
785 TensorRawPtrVector result(numOutputs);
786
787 for (unsigned int i=0; i<numOutputs; ++i)
788 {
789 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
790 }
791 return result;
792}
793
Finn Williams85d36712021-01-26 22:30:06 +0000794void IDeserializer::DeserializerImpl::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000795{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000796 CHECK_LAYERS(graph, 0, layerIndex);
797 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100798 throw ParseException(fmt::format("Layer not supported. layerIndex: {0} "
799 "layerName: {1} / {2}",
800 layerIndex,
801 layerName,
802 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000803}
804
Finn Williams85d36712021-01-26 22:30:06 +0000805void IDeserializer::DeserializerImpl::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000806{
807 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000808 m_InputBindings.clear();
809 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000810}
811
Kevin May43a799c2019-02-08 16:31:42 +0000812
Finn Williams85d36712021-01-26 22:30:06 +0000813INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000814{
815 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000816 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
817 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000818}
819
Finn Williams85d36712021-01-26 22:30:06 +0000820armnn::INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000821{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000822 ResetParser();
Simon Obutedb5804e2022-04-14 15:49:52 +0100823 if (binaryContent.fail()) {
824 ARMNN_LOG(error) << (std::string("Cannot read input"));
825 throw ParseException("Unable to read Input stream data");
826 }
827 binaryContent.seekg(0, std::ios::end);
828 const std::streamoff size = binaryContent.tellg();
829 std::vector<char> content(static_cast<size_t>(size));
830 binaryContent.seekg(0);
831 binaryContent.read(content.data(), static_cast<std::streamsize>(size));
832 GraphPtr graph = LoadGraphFromBinary(reinterpret_cast<uint8_t*>(content.data()), static_cast<size_t>(size));
Derek Lamberti8ddae332019-02-21 16:29:43 +0000833 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000834}
835
Finn Williams85d36712021-01-26 22:30:06 +0000836GraphPtr IDeserializer::DeserializerImpl::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000837{
838 if (binaryContent == nullptr)
839 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100840 throw InvalidArgumentException(fmt::format("Invalid (null) binary content {}",
841 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000842 }
843 flatbuffers::Verifier verifier(binaryContent, len);
844 if (verifier.VerifyBuffer<SerializedGraph>() == false)
845 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100846 throw ParseException(fmt::format("Buffer doesn't conform to the expected Armnn "
847 "flatbuffers format. size:{0} {1}",
848 len,
849 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000850 }
851 return GetSerializedGraph(binaryContent);
852}
853
// Builds an armnn::INetwork from a decoded SerializedGraph in three passes:
// (1) dispatch each non-I/O layer to its registered parser, (2) create the
// bound input/output layers, (3) wire up the slot connections recorded by
// the parsers during pass 1.
INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    ARMNN_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        // Input/Output layers are created in SetupInputLayers/SetupOutputLayers
        // below, not via the parser table.
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            // Only connect slots that have at least one registered consumer.
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    // Ownership of the network transfers to the caller.
    return std::move(m_Network);
}
894
Finn Williams85d36712021-01-26 22:30:06 +0000895BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000896 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000897{
Jan Eilers8eb25602020-03-09 12:13:48 +0000898 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000899 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000900 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000901 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000902 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000903 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000904 }
905 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100906 throw ParseException(fmt::format("No input binding found for layer:{0} / {1}",
907 name,
908 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000909}
910
Finn Williams85d36712021-01-26 22:30:06 +0000911BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000912 const std::string& name) const
913{
Jan Eilers8eb25602020-03-09 12:13:48 +0000914 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000915 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000916 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000917 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000918 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000919 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000920 }
921 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100922 throw ParseException(fmt::format("No output binding found for layer:{0} / {1}",
923 name,
924 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000925}
926
Finn Williams85d36712021-01-26 22:30:06 +0000927unsigned int IDeserializer::DeserializerImpl::GetInputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000928{
929 for (unsigned int i = 0; i < graph->layers()->size(); i++)
930 {
931 auto layer = graph->layers()->Get(i);
932 if (layer->layer_type() == Layer::Layer_InputLayer)
933 {
934 auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
935 if (layerBindingId == targetId)
936 {
937 return i;
938 }
939 }
940 }
941 throw ParseException("Input layer with given layerBindingId not found");
942}
943
Finn Williams85d36712021-01-26 22:30:06 +0000944unsigned int IDeserializer::DeserializerImpl::GetOutputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000945{
946 for (unsigned int i = 0; i < graph->layers()->size(); i++)
947 {
948 auto layer = graph->layers()->Get(i);
949 if (layer->layer_type() == Layer::Layer_OutputLayer)
950 {
951 auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
952 if (layerBindingId == targetId)
953 {
954 return i;
955 }
956 }
957 }
958 throw ParseException("Output layer with given layerBindingId not found");
959}
960
Finn Williams85d36712021-01-26 22:30:06 +0000961unsigned int IDeserializer::DeserializerImpl::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100962{
963 for (unsigned int i = 0; i < graph->layers()->size(); i++)
964 {
965 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
966 if (layer->index() == targetIndex)
967 {
968 return i;
969 }
970 }
971 throw ParseException("Layer with given index not found");
972}
973
Finn Williams85d36712021-01-26 22:30:06 +0000974IDeserializer::DeserializerImpl::FeatureVersions IDeserializer::DeserializerImpl::GetFeatureVersions(GraphPtr graph)
Tee Jungaa920c52019-11-05 10:48:25 +0000975{
Finn Williams85d36712021-01-26 22:30:06 +0000976 IDeserializer::DeserializerImpl::FeatureVersions versions;
Tee Jungaa920c52019-11-05 10:48:25 +0000977
978 if (graph->featureVersions())
979 {
980 versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
Jan Eilers53ef7952021-06-02 12:01:25 +0100981 versions.m_WeightsLayoutScheme = graph->featureVersions()->weightsLayoutScheme();
Matthew Sloyan81beae32021-07-13 19:46:11 +0100982 versions.m_ConstTensorsAsInputs = graph->featureVersions()->constantTensorsAsInputs();
Tee Jungaa920c52019-11-05 10:48:25 +0000983 }
984
985 return versions;
986}
987
// Creates an armnn input layer for every serialized input id, sets its output
// tensor info, registers its output slots for later wiring, and records the
// (name -> BindingPointInfo) pair used by GetNetworkInputBindingInfo.
void IDeserializer::DeserializerImpl::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numInputs = graph->inputIds()->size();
    m_InputBindings.clear();
    m_InputBindings.reserve(numInputs);

    for (unsigned int i = 0; i < numInputs; i++)
    {
        unsigned int inputLayerIndex = 0xFFFFFFFF;
        // Binding-id scheme 0 (legacy files): the input id is the layer's own
        // serialized index. Newer schemes: the id is a LayerBindingId that must
        // be searched for among the InputLayers.
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            const unsigned int inputId = armnn::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
            inputLayerIndex = GetLayerIndexInVector(graph, inputId);
        }
        else
        {
            const int inputId = graph->inputIds()->Get(i);
            inputLayerIndex = GetInputLayerInVector(graph, inputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");

        IConnectableLayer* inputLayer =
            m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());

        // An input layer has exactly one output slot; its tensor info comes
        // straight from the serialized layer.
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
        inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
        RegisterOutputSlots(graph, inputLayerIndex, inputLayer);

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
1026
// Creates an armnn output layer for every serialized output id, registers its
// input slots for later wiring, and records the (name -> BindingPointInfo)
// pair used by GetNetworkOutputBindingInfo. The binding's tensor info is taken
// from the source layer feeding the output.
void IDeserializer::DeserializerImpl::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numOutputs = graph->outputIds()->size();
    m_OutputBindings.clear();
    m_OutputBindings.reserve(numOutputs);

    for (unsigned int i = 0; i < numOutputs; i++)
    {
        unsigned int outputLayerIndex = 0xFFFFFFFF;
        // Binding-id scheme 0 (legacy files): the output id is the layer's own
        // serialized index. Newer schemes: the id is a LayerBindingId that must
        // be searched for among the OutputLayers.
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            const unsigned int outputId = armnn::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
            outputLayerIndex = GetLayerIndexInVector(graph, outputId);
        }
        else
        {
            const int outputId = graph->outputIds()->Get(i);
            outputLayerIndex = GetOutputLayerInVector(graph, outputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);

        // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Output has no name.");

        IConnectableLayer* outputLayer =
            m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());

        RegisterInputSlots(graph, outputLayerIndex, outputLayer);
        unsigned int sourceLayerIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
        // NOTE(review): outputSlotIndex() is mapped through GetLayerIndexInVector,
        // which searches layer indices, not slot indices — looks suspicious;
        // confirm this is intentional against the serializer's slot numbering.
        unsigned int outputSlotIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->outputSlotIndex());
        LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(
            sourceBaseLayer->outputSlots()->Get(outputSlotIndex)->tensorInfo());
        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}
1069
Finn Williams85d36712021-01-26 22:30:06 +00001070void IDeserializer::DeserializerImpl::RegisterOutputSlots(GraphPtr graph,
Derek Lamberti8ddae332019-02-21 16:29:43 +00001071 uint32_t layerIndex,
1072 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +00001073{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001074 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001075 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001076 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
1077 if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
Kevin May43a799c2019-02-08 16:31:42 +00001078 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001079 throw ParseException(fmt::format("The number of outputslots ({0}) does not match the number expected ({1})"
1080 " for layer index: {2} {3}",
1081 baseLayer->outputSlots()->size(),
1082 layer->GetNumOutputSlots(),
1083 layerIndex,
1084 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +00001085 }
1086
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001087 for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +00001088 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001089 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
1090 armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
1091 // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
1092 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
Kevin May43a799c2019-02-08 16:31:42 +00001093 }
1094}
1095
// Records every (non-ignored) input slot of `layer` in m_GraphConnections,
// keyed by the producing layer/slot from the serialized connection, so the
// final wiring pass can connect them.
// `ignoreSlots` lists slot indices already satisfied elsewhere (e.g. constant
// tensors fed as inputs); they are excluded from both the count check and
// registration.
void IDeserializer::DeserializerImpl::RegisterInputSlots(GraphPtr graph,
                                                         uint32_t layerIndex,
                                                         armnn::IConnectableLayer* layer,
                                                         std::vector<unsigned int> ignoreSlots)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    ARMNN_ASSERT(layer != nullptr);
    LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);

    // The serialized layer only records the slots that are not ignored.
    if (baseLayer->inputSlots()->size() != (layer->GetNumInputSlots() - ignoreSlots.size()))
    {
        throw ParseException(fmt::format("The number of inputslots ({0}) does not match the number expected ({1})"
                                         " for layer index:{2} {3}",
                                         baseLayer->inputSlots()->size(),
                                         layer->GetNumInputSlots(),
                                         layerIndex,
                                         CHECK_LOCATION().AsString()));
    }

    for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
    {
        // Check if slot should be ignored.
        if (std::find(ignoreSlots.begin(), ignoreSlots.end(), i) == ignoreSlots.end())
        {
            auto fbInputSlot = baseLayer->inputSlots()->Get(i);
            auto fbConnection = fbInputSlot->connection();
            armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
            RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
        }
    }
}
1127
Finn Williams85d36712021-01-26 22:30:06 +00001128void IDeserializer::DeserializerImpl::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001129 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001130 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +00001131{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001132 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +00001133 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001134 m_GraphConnections[sourceLayerIndex] = Connections();
1135 }
1136
1137 Connections& connections = m_GraphConnections[sourceLayerIndex];
1138 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
1139 {
1140 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +00001141 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001142 else
1143 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001144 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001145 }
1146}
Kevin May43a799c2019-02-08 16:31:42 +00001147
Finn Williams85d36712021-01-26 22:30:06 +00001148void IDeserializer::DeserializerImpl::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001149 uint32_t outputSlotIndex,
1150 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001151{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001152 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1153 {
1154 m_GraphConnections[sourceLayerIndex] = Connections();
1155 }
1156
1157 Connections& connections = m_GraphConnections[sourceLayerIndex];
1158 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1159 {
1160 throw ParseException("Same output slot index processed twice");
1161 }
1162
1163 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +00001164}
1165
Finn Williams85d36712021-01-26 22:30:06 +00001166void IDeserializer::DeserializerImpl::ParseAbs(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001167{
1168 CHECK_LAYERS(graph, 0, layerIndex);
1169 auto inputs = GetInputs(graph, layerIndex);
1170 CHECK_LOCATION();
1171 CHECK_VALID_SIZE(inputs.size(), 1);
1172
1173 auto outputs = GetOutputs(graph, layerIndex);
1174 CHECK_VALID_SIZE(outputs.size(), 1);
1175
1176 auto layerName = GetLayerName(graph, layerIndex);
1177
josh minor4a3c6102020-01-06 16:40:46 -06001178 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
1179 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001180 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1181 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1182
1183 RegisterInputSlots(graph, layerIndex, layer);
1184 RegisterOutputSlots(graph, layerIndex, layer);
1185}
1186
Finn Williams85d36712021-01-26 22:30:06 +00001187void IDeserializer::DeserializerImpl::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +00001188{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001189 CHECK_LAYERS(graph, 0, layerIndex);
1190 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001191 CHECK_LOCATION();
1192 CHECK_VALID_SIZE(inputs.size(), 1);
1193
Derek Lamberti8ddae332019-02-21 16:29:43 +00001194 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001195 CHECK_VALID_SIZE(outputs.size(), 1);
1196
Derek Lamberti8ddae332019-02-21 16:29:43 +00001197 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001198 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001199 auto serializerDescriptor = serializerLayer->descriptor();
1200
1201 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001202 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +00001203 descriptor.m_A = serializerDescriptor->a();
1204 descriptor.m_B = serializerDescriptor->b();
1205
1206 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
1207 layerName.c_str());
1208 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1209 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1210
Derek Lamberti8ddae332019-02-21 16:29:43 +00001211 RegisterInputSlots(graph, layerIndex, layer);
1212 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +00001213}
1214
Finn Williams85d36712021-01-26 22:30:06 +00001215void IDeserializer::DeserializerImpl::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001216{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001217 CHECK_LAYERS(graph, 0, layerIndex);
1218 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001219 CHECK_LOCATION();
1220 CHECK_VALID_SIZE(inputs.size(), 2);
1221
Derek Lamberti8ddae332019-02-21 16:29:43 +00001222 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001223 CHECK_VALID_SIZE(outputs.size(), 1);
1224
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001225 auto layerName = GetLayerName(graph, layerIndex);
1226 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001227
1228 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1229 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1230
Derek Lamberti8ddae332019-02-21 16:29:43 +00001231 RegisterInputSlots(graph, layerIndex, layer);
1232 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001233}
1234
Finn Williams85d36712021-01-26 22:30:06 +00001235void IDeserializer::DeserializerImpl::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001236{
1237 CHECK_LAYERS(graph, 0, layerIndex);
1238 auto inputs = GetInputs(graph, layerIndex);
1239 CHECK_LOCATION();
1240 CHECK_VALID_SIZE(inputs.size(), 1);
1241
1242 auto outputs = GetOutputs(graph, layerIndex);
1243 CHECK_VALID_SIZE(outputs.size(), 1);
1244
1245 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1246 auto serializerDescriptor = serializerLayer->descriptor();
1247
1248 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001249 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001250 descriptor.m_Axis = serializerDescriptor->axis();
1251 auto layerName = GetLayerName(graph, layerIndex);
1252 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1253
1254 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1255 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1256
1257 RegisterInputSlots(graph, layerIndex, layer);
1258 RegisterOutputSlots(graph, layerIndex, layer);
1259}
1260
Finn Williams85d36712021-01-26 22:30:06 +00001261void IDeserializer::DeserializerImpl::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001262{
1263 CHECK_LAYERS(graph, 0, layerIndex);
1264
Finn Williams85d36712021-01-26 22:30:06 +00001265 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001266 CHECK_VALID_SIZE(inputs.size(), 1);
1267
Finn Williams85d36712021-01-26 22:30:06 +00001268 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001269 CHECK_VALID_SIZE(outputs.size(), 1);
1270
1271 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1272 auto flatBufferCrops = flatBufferDescriptor->crops();
1273 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1274
Mike Kelly51b8c312022-05-24 11:34:02 +01001275 if (flatBufferCrops->size() % 2 != 0)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001276 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001277 throw ParseException(fmt::format("The size of crops must be divisible by 2 {}", CHECK_LOCATION().AsString()));
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001278 }
1279
1280 std::vector<std::pair<unsigned int, unsigned int>> crops;
Mike Kelly51b8c312022-05-24 11:34:02 +01001281 crops.reserve(flatBufferCrops->size() / 2);
1282 for (unsigned int i = 0; i < flatBufferCrops->size() - 1; i += 2)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001283 {
1284 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1285 }
1286
1287 armnn::BatchToSpaceNdDescriptor descriptor;
1288 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1289 descriptor.m_BlockShape =
1290 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1291 descriptor.m_Crops = crops;
1292
1293 auto layerName = GetLayerName(graph, layerIndex);
1294 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1295
1296 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1297 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1298
1299 RegisterInputSlots(graph, layerIndex, layer);
1300 RegisterOutputSlots(graph, layerIndex, layer);
1301}
1302
Finn Williams85d36712021-01-26 22:30:06 +00001303void IDeserializer::DeserializerImpl::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
ruoyan018e7fa232019-02-28 15:09:07 +00001304{
1305 CHECK_LAYERS(graph, 0, layerIndex);
1306
1307 auto inputs = GetInputs(graph, layerIndex);
1308 CHECK_VALID_SIZE(inputs.size(), 1);
1309
1310 auto outputs = GetOutputs(graph, layerIndex);
1311 CHECK_VALID_SIZE(outputs.size(), 1);
1312 auto outputInfo = ToTensorInfo(outputs[0]);
1313
ruoyan015c7ab052019-03-04 14:48:02 +00001314 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001315
1316 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1317 auto serializerDescriptor = serializerLayer->descriptor();
1318
1319 armnn::BatchNormalizationDescriptor descriptor;
1320 descriptor.m_Eps = serializerDescriptor->eps();
1321 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1322
1323 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1324 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1325 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1326 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1327
1328 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1329 mean,
1330 variance,
1331 beta,
1332 gamma,
1333 layerName.c_str());
1334 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1335
1336 RegisterInputSlots(graph, layerIndex, layer);
1337 RegisterOutputSlots(graph, layerIndex, layer);
1338}
1339
mathad01b392e982021-04-07 12:07:30 +01001340void IDeserializer::DeserializerImpl::ParseCast(GraphPtr graph, unsigned int layerIndex)
1341{
1342 CHECK_LAYERS(graph, 0, layerIndex);
1343 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1344 CHECK_LOCATION();
1345 CHECK_VALID_SIZE(inputs.size(), 1);
1346
1347 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1348 CHECK_VALID_SIZE(outputs.size(), 1);
1349
1350 auto layerName = GetLayerName(graph, layerIndex);
1351
1352 IConnectableLayer* layer = m_Network->AddCastLayer(layerName.c_str());
1353
1354 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1355 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1356
1357 RegisterInputSlots(graph, layerIndex, layer);
1358 RegisterOutputSlots(graph, layerIndex, layer);
1359}
1360
void IDeserializer::DeserializerImpl::ParseConstant(GraphPtr graph, unsigned int layerIndex)
{
    // Deserializes a Constant layer (no inputs, a single output carrying the
    // embedded tensor data).
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
    auto serializerInput = serializerLayer->input();

    armnn::ConstTensor input = ToConstTensor(serializerInput);
    IConnectableLayer* layer;

    // Required for when Constant Layer is used as an inputs to DepthwiseConvolution2d Layer.
    // Running a model that was created before weights layout scheme version was added to our flatbuffers
    // file ensuring older models can still be read and executed. featureVersion weights layout scheme 1
    // indicates a change in the depthwise weights layout within ArmNN from [M,I,H,W] --> [1,H,W,I*M]
    // NOTE(review): this legacy path rewrites EVERY constant tensor in an old
    // model, presumably on the assumption that such constants feed depthwise
    // weights — confirm against the serializer's feature-version history.
    if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
    {
        // Permute weights [ M, I, H, W ] --> [ 1, H, W, I*M ]
        // Step1: [ M, I, H, W ] --> [ H, W, I, M]
        // (permutation {3,2,0,1} sends src dim i to dest dim permutation[i])
        PermutationVector permutationVector = { 3, 2, 0, 1 };
        armnn::TensorInfo weightsInfo = input.GetInfo();
        // Scratch buffer to hold the reordered raw bytes.
        std::unique_ptr<unsigned char[]> permuteBuffer(new unsigned char[weightsInfo.GetNumBytes()]);
        weightsInfo = armnnUtils::Permuted(weightsInfo, permutationVector);
        armnnUtils::Permute(weightsInfo.GetShape(), permutationVector,
                            input.GetMemoryArea(), permuteBuffer.get(),
                            GetDataTypeSize(weightsInfo.GetDataType()));

        // Step2: Reshape [ H, W, I, M] --> [ 1, H, W, I*M ]
        // (pure metadata change: the element order already matches)
        auto weightsShape = weightsInfo.GetShape();
        weightsInfo.SetShape({1,
                              weightsShape[0],
                              weightsShape[1],
                              weightsShape[2]*weightsShape[3]});
        weightsInfo.SetConstant(true);

        // ConstTensor copies from permuteBuffer, so the buffer's scope-bound
        // lifetime is sufficient here.
        armnn::ConstTensor weightsPermuted(weightsInfo, permuteBuffer.get());

        layer = m_Network->AddConstantLayer(weightsPermuted, layerName.c_str());

        layer->GetOutputSlot(0).SetTensorInfo(weightsPermuted.GetInfo());

        RegisterOutputSlots(graph, layerIndex, layer);

        // Early return: the legacy path has already registered its output.
        return;
    }
    else
    {
        layer = m_Network->AddConstantLayer(input, layerName.c_str());

        armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
        outputTensorInfo.SetConstant(true);
        layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
    }

    RegisterOutputSlots(graph, layerIndex, layer);
}
1422
Finn Williams85d36712021-01-26 22:30:06 +00001423void IDeserializer::DeserializerImpl::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001424{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001425 CHECK_LAYERS(graph, 0, layerIndex);
1426 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001427 CHECK_LOCATION();
Mike Kellya0766c32019-02-19 17:22:07 +00001428
Derek Lamberti8ddae332019-02-21 16:29:43 +00001429 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001430 CHECK_VALID_SIZE(outputs.size(), 1);
1431
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001432 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
1433
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001434 auto layerName = GetLayerName(graph, layerIndex);
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001435 auto flatbufferDescriptor = flatBufferLayer->descriptor();
Mike Kellya0766c32019-02-19 17:22:07 +00001436
1437 armnn::Convolution2dDescriptor descriptor;
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001438 descriptor.m_PadLeft = flatbufferDescriptor->padLeft();
1439 descriptor.m_PadRight = flatbufferDescriptor->padRight();
1440 descriptor.m_PadTop = flatbufferDescriptor->padTop();
1441 descriptor.m_PadBottom = flatbufferDescriptor->padBottom();
1442 descriptor.m_StrideX = flatbufferDescriptor->strideX();
1443 descriptor.m_StrideY = flatbufferDescriptor->strideY();;
1444 descriptor.m_DilationX = flatbufferDescriptor->dilationX();
1445 descriptor.m_DilationY = flatbufferDescriptor->dilationY();;
1446 descriptor.m_BiasEnabled = flatbufferDescriptor->biasEnabled();;
1447 descriptor.m_DataLayout = ToDataLayout(flatbufferDescriptor->dataLayout());
Mike Kellya0766c32019-02-19 17:22:07 +00001448
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001449 armnn::IConnectableLayer* layer;
1450 std::vector<unsigned int> ignoreSlots {};
Mike Kellya0766c32019-02-19 17:22:07 +00001451
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001452 armnn::ConstTensor biasTensor;
1453 // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
1454 // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
1455 if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
Mike Kellya0766c32019-02-19 17:22:07 +00001456 {
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001457 // If the model stores weights and biases as members of the layer we have to read them from there
1458 // but add them to their own ConstantLayer for compatibility
1459 CHECK_VALID_SIZE(inputs.size(), 1);
1460
1461 layer = m_Network->AddConvolution2dLayer(descriptor,
1462 layerName.c_str());
1463
1464 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1465 auto weightsLayer = m_Network->AddConstantLayer(weightsTensor);
1466 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
1467 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsTensor.GetInfo());
1468 ignoreSlots.emplace_back(1u);
1469
1470 if (descriptor.m_BiasEnabled)
1471 {
1472 biasTensor = ToConstTensor(flatBufferLayer->biases());
1473 auto biasLayer = m_Network->AddConstantLayer(biasTensor);
1474 biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
1475 biasLayer->GetOutputSlot(0).SetTensorInfo(biasTensor.GetInfo());
1476 ignoreSlots.emplace_back(2u);
1477 }
Mike Kellya0766c32019-02-19 17:22:07 +00001478 }
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001479 else
1480 {
1481 layer = m_Network->AddConvolution2dLayer(descriptor,
1482 layerName.c_str());
1483 uint32_t numInputs = descriptor.GetNumInputs();
1484 CHECK_VALID_SIZE(inputs.size(), numInputs);
1485 }
1486
Mike Kellya0766c32019-02-19 17:22:07 +00001487 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1488 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1489
Keith Davisb4dd5cc2022-04-07 11:32:00 +01001490 RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
Derek Lamberti8ddae332019-02-21 16:29:43 +00001491 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001492}
1493
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001494void IDeserializer::DeserializerImpl::ParseConvolution3d(GraphPtr graph, unsigned int layerIndex)
1495{
1496 CHECK_LAYERS(graph, 0, layerIndex);
1497 auto inputs = GetInputs(graph, layerIndex);
1498 CHECK_LOCATION();
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001499
1500 auto outputs = GetOutputs(graph, layerIndex);
1501 CHECK_VALID_SIZE(outputs.size(), 1);
1502
1503 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution3dLayer();
1504 auto layerName = GetLayerName(graph, layerIndex);
1505 auto serializerDescriptor = serializerLayer->descriptor();
1506
1507 armnn::Convolution3dDescriptor descriptor;
1508 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1509 descriptor.m_PadRight = serializerDescriptor->padRight();
1510 descriptor.m_PadTop = serializerDescriptor->padTop();
1511 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1512 descriptor.m_PadFront = serializerDescriptor->padFront();
1513 descriptor.m_PadBack = serializerDescriptor->padBack();
1514 descriptor.m_StrideX = serializerDescriptor->strideX();
1515 descriptor.m_StrideY = serializerDescriptor->strideY();
1516 descriptor.m_StrideZ = serializerDescriptor->strideZ();
1517 descriptor.m_DilationX = serializerDescriptor->dilationX();
1518 descriptor.m_DilationY = serializerDescriptor->dilationY();
1519 descriptor.m_DilationZ = serializerDescriptor->dilationZ();
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001520 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001521 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1522
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001523 uint32_t numInputs = descriptor.GetNumInputs();
1524 CHECK_VALID_SIZE(inputs.size(), numInputs);
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001525
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001526 IConnectableLayer* layer = m_Network->AddConvolution3dLayer(descriptor, layerName.c_str());
1527
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001528 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1529 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1530
1531 RegisterInputSlots(graph, layerIndex, layer);
1532 RegisterOutputSlots(graph, layerIndex, layer);
1533}
1534
Finn Williams85d36712021-01-26 22:30:06 +00001535void IDeserializer::DeserializerImpl::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001536{
1537 CHECK_LAYERS(graph, 0, layerIndex);
1538
1539 auto inputs = GetInputs(graph, layerIndex);
1540 CHECK_VALID_SIZE(inputs.size(), 1);
1541
1542 auto outputs = GetOutputs(graph, layerIndex);
1543 CHECK_VALID_SIZE(outputs.size(), 1);
1544
1545 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1546
1547 armnn::DepthToSpaceDescriptor descriptor;
1548 descriptor.m_BlockSize = fbDescriptor->blockSize();
1549 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1550
1551 auto layerName = GetLayerName(graph, layerIndex);
1552 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1553
1554 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1555 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1556
1557 RegisterInputSlots(graph, layerIndex, layer);
1558 RegisterOutputSlots(graph, layerIndex, layer);
1559}
1560
Finn Williams85d36712021-01-26 22:30:06 +00001561void IDeserializer::DeserializerImpl::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001562{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001563 CHECK_LAYERS(graph, 0, layerIndex);
1564 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001565 CHECK_LOCATION();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001566
Derek Lamberti8ddae332019-02-21 16:29:43 +00001567 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001568 CHECK_VALID_SIZE(outputs.size(), 1);
1569
Derek Lamberti8ddae332019-02-21 16:29:43 +00001570 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001571 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001572 auto serializerDescriptor = serializerLayer->descriptor();
1573
1574 armnn::DepthwiseConvolution2dDescriptor descriptor;
Cathal Corbett06902652022-04-14 17:55:11 +01001575 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1576 descriptor.m_PadRight = serializerDescriptor->padRight();
1577 descriptor.m_PadTop = serializerDescriptor->padTop();
1578 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1579 descriptor.m_StrideX = serializerDescriptor->strideX();
1580 descriptor.m_StrideY = serializerDescriptor->strideY();
1581 descriptor.m_DilationX = serializerDescriptor->dilationX();
1582 descriptor.m_DilationY = serializerDescriptor->dilationY();
1583 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
1584 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001585
Jan Eilers53ef7952021-06-02 12:01:25 +01001586 IConnectableLayer* layer;
Cathal Corbett06902652022-04-14 17:55:11 +01001587 std::vector<unsigned int> ignoreSlots {};
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001588
Cathal Corbett06902652022-04-14 17:55:11 +01001589 // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
1590 // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
1591 if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001592 {
Cathal Corbett06902652022-04-14 17:55:11 +01001593 CHECK_VALID_SIZE(inputs.size(), 1);
Jan Eilers53ef7952021-06-02 12:01:25 +01001594
Cathal Corbett06902652022-04-14 17:55:11 +01001595 // If the model stores weights and biases as members of the layer we have to read them from there
1596 // but add them to their own ConstantLayer for compatibility
1597 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1598 ignoreSlots.emplace_back(1u);
Jan Eilers53ef7952021-06-02 12:01:25 +01001599
1600 layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
Jan Eilers53ef7952021-06-02 12:01:25 +01001601 layerName.c_str());
Cathal Corbett06902652022-04-14 17:55:11 +01001602
1603 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
1604 if (descriptor.m_BiasEnabled)
1605 {
1606 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
1607 ignoreSlots.emplace_back(2u);
1608
1609 auto biasLayer = m_Network->AddConstantLayer(biases);
1610 biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
1611 biasLayer->GetOutputSlot(0).SetTensorInfo(biases.GetInfo());
1612 }
1613
1614 if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
1615 {
1616 // Permute weights [ H, W, M, I ] --> [ 1, H, W, I*M ]
1617 // Step1: [ M, I, H, W ] --> [ H, W, I, M]
1618 PermutationVector permutationVector = { 3, 2, 0, 1 };
1619 armnn::TensorInfo weightsInfo = weights.GetInfo();
1620 std::unique_ptr<unsigned char[]> permuteBuffer(new unsigned char[weightsInfo.GetNumBytes()]);
1621 weightsInfo = armnnUtils::Permuted(weightsInfo, permutationVector);
1622 armnnUtils::Permute(weightsInfo.GetShape(), permutationVector,
1623 weights.GetMemoryArea(), permuteBuffer.get(),
1624 GetDataTypeSize(weightsInfo.GetDataType()));
1625
1626 // Step2: Reshape [ H, W, I, M] --> [ 1, H, W, I*M ]
1627 auto weightsShape = weightsInfo.GetShape();
1628 weightsInfo.SetShape({1,
1629 weightsShape[0],
1630 weightsShape[1],
1631 weightsShape[2]*weightsShape[3]});
1632
1633 armnn::ConstTensor weightsPermuted(weightsInfo, permuteBuffer.get());
1634
1635 auto weightsLayer = m_Network->AddConstantLayer(weightsPermuted);
1636 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
1637 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsPermuted.GetInfo());
1638 }
1639 else
1640 {
1641 auto weightsLayer = m_Network->AddConstantLayer(weights);
1642 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
1643 weightsLayer->GetOutputSlot(0).SetTensorInfo(weights.GetInfo());
1644 }
Jan Eilers53ef7952021-06-02 12:01:25 +01001645 }
1646 else
1647 {
1648 layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
Jan Eilers53ef7952021-06-02 12:01:25 +01001649 layerName.c_str());
Cathal Corbett06902652022-04-14 17:55:11 +01001650 uint32_t numInputs = descriptor.GetNumInputs();
1651 CHECK_VALID_SIZE(inputs.size(), numInputs);
Jan Eilers53ef7952021-06-02 12:01:25 +01001652 }
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001653
1654 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1655 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1656
Cathal Corbett06902652022-04-14 17:55:11 +01001657 RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
Derek Lamberti8ddae332019-02-21 16:29:43 +00001658 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001659}
1660
Finn Williams85d36712021-01-26 22:30:06 +00001661void IDeserializer::DeserializerImpl::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001662{
1663 CHECK_LAYERS(graph, 0, layerIndex);
1664 auto inputs = GetInputs(graph, layerIndex);
1665 CHECK_LOCATION();
1666 CHECK_VALID_SIZE(inputs.size(), 2);
1667
1668 auto outputs = GetOutputs(graph, layerIndex);
1669 CHECK_VALID_SIZE(outputs.size(), 4);
1670
1671 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1672 auto layerName = GetLayerName(graph, layerIndex);
1673 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1674
1675 armnn::DetectionPostProcessDescriptor descriptor;
1676 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1677 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1678 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1679 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1680 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1681 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1682 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1683 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1684 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1685 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1686 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1687
1688 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1689
1690 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1691 anchors,
1692 layerName.c_str());
1693
1694 for (unsigned int i = 0; i < 4; i++)
1695 {
1696 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1697 }
1698
1699 RegisterInputSlots(graph, layerIndex, layer);
1700 RegisterOutputSlots(graph, layerIndex, layer);
1701}
1702
Finn Williams85d36712021-01-26 22:30:06 +00001703void IDeserializer::DeserializerImpl::ParseDivision(GraphPtr graph, unsigned int layerIndex)
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001704{
1705 CHECK_LAYERS(graph, 0, layerIndex);
1706 auto inputs = GetInputs(graph, layerIndex);
1707 CHECK_LOCATION();
1708 CHECK_VALID_SIZE(inputs.size(), 2);
1709
1710 auto outputs = GetOutputs(graph, layerIndex);
1711 CHECK_VALID_SIZE(outputs.size(), 1);
1712
1713 auto layerName = GetLayerName(graph, layerIndex);
1714 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1715
1716 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1717 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1718
1719 RegisterInputSlots(graph, layerIndex, layer);
1720 RegisterOutputSlots(graph, layerIndex, layer);
1721}
1722
Finn Williams85d36712021-01-26 22:30:06 +00001723void IDeserializer::DeserializerImpl::ParseEqual(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001724{
1725 CHECK_LAYERS(graph, 0, layerIndex);
1726 auto inputs = GetInputs(graph, layerIndex);
1727 CHECK_LOCATION();
1728 CHECK_VALID_SIZE(inputs.size(), 2);
1729
1730 auto outputs = GetOutputs(graph, layerIndex);
1731 CHECK_VALID_SIZE(outputs.size(), 1);
1732
1733 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001734 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1735 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001736
1737 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1738 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1739
1740 RegisterInputSlots(graph, layerIndex, layer);
1741 RegisterOutputSlots(graph, layerIndex, layer);
1742}
1743
Finn Williams85d36712021-01-26 22:30:06 +00001744void IDeserializer::DeserializerImpl::ParseFill(GraphPtr graph, unsigned int layerIndex)
Keith Davis300ad562020-06-04 16:34:23 +01001745{
1746 CHECK_LAYERS(graph, 0, layerIndex);
1747 auto inputs = GetInputs(graph, layerIndex);
1748 CHECK_LOCATION();
1749 CHECK_VALID_SIZE(inputs.size(), 1);
1750
1751 auto outputs = GetOutputs(graph, layerIndex);
1752 CHECK_VALID_SIZE(outputs.size(), 1);
1753
1754 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armaganfd0cae32021-11-08 17:18:31 +00001755 armnn::FillDescriptor descriptor;
1756 descriptor.m_Value = graph->layers()->Get(layerIndex)->layer_as_FillLayer()->descriptor()->value();
Keith Davis300ad562020-06-04 16:34:23 +01001757 IConnectableLayer* layer = m_Network->AddFillLayer(descriptor, layerName.c_str());
1758
1759 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1760 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1761
1762 RegisterInputSlots(graph, layerIndex, layer);
1763 RegisterOutputSlots(graph, layerIndex, layer);
1764}
1765
Finn Williams85d36712021-01-26 22:30:06 +00001766void IDeserializer::DeserializerImpl::ParseGreater(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001767{
1768 CHECK_LAYERS(graph, 0, layerIndex);
1769 auto inputs = GetInputs(graph, layerIndex);
1770 CHECK_LOCATION();
1771 CHECK_VALID_SIZE(inputs.size(), 2);
1772
1773 auto outputs = GetOutputs(graph, layerIndex);
1774 CHECK_VALID_SIZE(outputs.size(), 1);
1775
1776 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001777 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1778 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001779
1780 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1781 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1782
1783 RegisterInputSlots(graph, layerIndex, layer);
1784 RegisterOutputSlots(graph, layerIndex, layer);
1785}
1786
Finn Williams85d36712021-01-26 22:30:06 +00001787void IDeserializer::DeserializerImpl::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001788{
1789 CHECK_LAYERS(graph, 0, layerIndex);
1790
1791 auto inputs = GetInputs(graph, layerIndex);
1792 CHECK_VALID_SIZE(inputs.size(), 1);
1793
1794 auto outputs = GetOutputs(graph, layerIndex);
1795 CHECK_VALID_SIZE(outputs.size(), 1);
1796
1797 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1798 auto fbDescriptor = fbLayer->descriptor();
1799
1800 armnn::InstanceNormalizationDescriptor descriptor;
1801 descriptor.m_Gamma = fbDescriptor->gamma();
1802 descriptor.m_Beta = fbDescriptor->beta();
1803 descriptor.m_Eps = fbDescriptor->eps();
1804 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1805
1806 const std::string layerName = GetLayerName(graph, layerIndex);
1807 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1808
1809 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1810 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1811
1812 RegisterInputSlots(graph, layerIndex, layer);
1813 RegisterOutputSlots(graph, layerIndex, layer);
1814}
1815
Finn Williams85d36712021-01-26 22:30:06 +00001816void IDeserializer::DeserializerImpl::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001817{
1818 CHECK_LAYERS(graph, 0, layerIndex);
1819
1820 auto inputs = GetInputs(graph, layerIndex);
1821 CHECK_VALID_SIZE(inputs.size(), 1);
1822
1823 auto outputs = GetOutputs(graph, layerIndex);
1824 CHECK_VALID_SIZE(outputs.size(), 1);
1825 auto outputInfo = ToTensorInfo(outputs[0]);
1826
1827 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1828 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1829
1830 auto layerName = GetLayerName(graph, layerIndex);
1831 armnn::L2NormalizationDescriptor descriptor;
1832 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001833 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001834
1835 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1836 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1837
1838 RegisterInputSlots(graph, layerIndex, layer);
1839 RegisterOutputSlots(graph, layerIndex, layer);
1840}
1841
Finn Williams85d36712021-01-26 22:30:06 +00001842void IDeserializer::DeserializerImpl::ParseLogicalBinary(GraphPtr graph, unsigned int layerIndex)
James Conroyaba90cd2020-11-06 16:28:18 +00001843{
1844 CHECK_LAYERS(graph, 0, layerIndex);
1845 CHECK_LOCATION();
1846
1847 auto inputs = GetInputs(graph, layerIndex);
1848 CHECK_VALID_SIZE(inputs.size(), 2);
1849
1850 auto outputs = GetOutputs(graph, layerIndex);
1851 CHECK_VALID_SIZE(outputs.size(), 1);
1852
1853 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer();
1854 auto fbDescriptor = fbLayer->descriptor();
1855
1856 armnn::LogicalBinaryDescriptor descriptor;
1857 descriptor.m_Operation = ToLogicalBinaryOperation(fbDescriptor->operation());
1858
1859 const std::string& layerName = GetLayerName(graph, layerIndex);
1860 IConnectableLayer* layer = m_Network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1861
1862 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1863 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1864
1865 RegisterInputSlots(graph, layerIndex, layer);
1866 RegisterOutputSlots(graph, layerIndex, layer);
1867}
1868
Finn Williams85d36712021-01-26 22:30:06 +00001869void IDeserializer::DeserializerImpl::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan26257852019-10-14 13:00:47 +01001870{
1871 CHECK_LAYERS(graph, 0, layerIndex);
1872
Finn Williams85d36712021-01-26 22:30:06 +00001873 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001874 CHECK_VALID_SIZE(inputs.size(), 1);
1875
Finn Williams85d36712021-01-26 22:30:06 +00001876 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001877 CHECK_VALID_SIZE(outputs.size(), 1);
1878
1879 armnn::LogSoftmaxDescriptor descriptor;
1880 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1881 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1882 auto layerName = GetLayerName(graph, layerIndex);
1883
1884 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1885
1886 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1887 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1888
1889 RegisterInputSlots(graph, layerIndex, layer);
1890 RegisterOutputSlots(graph, layerIndex, layer);
1891}
1892
Finn Williams85d36712021-01-26 22:30:06 +00001893void IDeserializer::DeserializerImpl::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001894{
1895 CHECK_LAYERS(graph, 0, layerIndex);
1896 auto inputs = GetInputs(graph, layerIndex);
1897 CHECK_LOCATION();
1898 CHECK_VALID_SIZE(inputs.size(), 2);
1899
1900 auto outputs = GetOutputs(graph, layerIndex);
1901 CHECK_VALID_SIZE(outputs.size(), 1);
1902
1903 auto layerName = GetLayerName(graph, layerIndex);
1904 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1905
1906 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1907 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1908
1909 RegisterInputSlots(graph, layerIndex, layer);
1910 RegisterOutputSlots(graph, layerIndex, layer);
1911}
1912
Finn Williams85d36712021-01-26 22:30:06 +00001913void IDeserializer::DeserializerImpl::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001914{
1915 CHECK_LAYERS(graph, 0, layerIndex);
1916 auto inputs = GetInputs(graph, layerIndex);
1917 CHECK_LOCATION();
1918 CHECK_VALID_SIZE(inputs.size(), 2);
1919
1920 auto outputs = GetOutputs(graph, layerIndex);
1921 CHECK_VALID_SIZE(outputs.size(), 1);
1922
1923 auto layerName = GetLayerName(graph, layerIndex);
1924 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1925
1926 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1927 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1928
1929 RegisterInputSlots(graph, layerIndex, layer);
1930 RegisterOutputSlots(graph, layerIndex, layer);
1931}
1932
Jim Flynne242f2d2019-05-22 14:24:13 +01001933const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1934 unsigned int layerIndex)
1935{
1936 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1937
1938 switch (layerType)
1939 {
1940 case Layer::Layer_ConcatLayer:
1941 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1942 case Layer::Layer_MergerLayer:
1943 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1944 default:
1945 throw armnn::Exception("unknown layer type, should be concat or merger");
1946 }
1947}
Simon Obute51f67772021-09-03 15:50:13 +01001948void IDeserializer::DeserializerImpl::ParseChannelShuffle(GraphPtr graph, unsigned int layerIndex)
1949{
1950 CHECK_LAYERS(graph, 0, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001951
Simon Obute51f67772021-09-03 15:50:13 +01001952 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1953 CHECK_VALID_SIZE(inputs.size(), 1);
1954
1955 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1956 CHECK_VALID_SIZE(outputs.size(), 1);
1957
1958 armnn::ChannelShuffleDescriptor descriptor;
1959 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->axis();
1960 descriptor.m_NumGroups =
1961 graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->numGroups();
1962
1963 auto layerName = GetLayerName(graph, layerIndex);
1964 IConnectableLayer* layer = m_Network->AddChannelShuffleLayer(descriptor, layerName.c_str());
1965
1966 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1967 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1968
1969 RegisterInputSlots(graph, layerIndex, layer);
1970 RegisterOutputSlots(graph, layerIndex, layer);
1971}
Finn Williams85d36712021-01-26 22:30:06 +00001972void IDeserializer::DeserializerImpl::ParseComparison(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01001973{
1974 CHECK_LAYERS(graph, 0, layerIndex);
1975 CHECK_LOCATION();
1976
1977 auto inputs = GetInputs(graph, layerIndex);
1978 CHECK_VALID_SIZE(inputs.size(), 2);
1979
1980 auto outputs = GetOutputs(graph, layerIndex);
1981 CHECK_VALID_SIZE(outputs.size(), 1);
1982
1983 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
1984 auto fbDescriptor = fbLayer->descriptor();
1985
1986 armnn::ComparisonDescriptor descriptor;
1987 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
1988
1989 const std::string& layerName = GetLayerName(graph, layerIndex);
1990 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1991
1992 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1993 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1994
1995 RegisterInputSlots(graph, layerIndex, layer);
1996 RegisterOutputSlots(graph, layerIndex, layer);
1997}
1998
Finn Williams85d36712021-01-26 22:30:06 +00001999void IDeserializer::DeserializerImpl::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
josh minor4a3c6102020-01-06 16:40:46 -06002000{
2001 CHECK_LAYERS(graph, 0, layerIndex);
2002 CHECK_LOCATION();
2003
2004 auto inputs = GetInputs(graph, layerIndex);
2005 CHECK_VALID_SIZE(inputs.size(), 1);
2006
2007 auto outputs = GetOutputs(graph, layerIndex);
2008 CHECK_VALID_SIZE(outputs.size(), 1);
2009
2010 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
2011 auto fbDescriptor = fbLayer->descriptor();
2012
2013 armnn::ElementwiseUnaryDescriptor descriptor;
2014 descriptor.m_Operation = ToUnaryOperation(fbDescriptor->operation());
2015
2016 const std::string& layerName = GetLayerName(graph, layerIndex);
2017 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
2018
2019 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2020 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2021
2022 RegisterInputSlots(graph, layerIndex, layer);
2023 RegisterOutputSlots(graph, layerIndex, layer);
2024}
2025
Finn Williams85d36712021-01-26 22:30:06 +00002026void IDeserializer::DeserializerImpl::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00002027{
2028 CHECK_LAYERS(graph, 0, layerIndex);
2029 CHECK_LOCATION();
2030
2031 auto outputs = GetOutputs(graph, layerIndex);
2032 CHECK_VALID_SIZE(outputs.size(), 1);
2033
Jim Flynnac25a1b2019-02-28 10:40:49 +00002034 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01002035 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
2036 unsigned int numViews = originsDescriptor->numViews();
2037 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00002038
2039 // can now check the number of inputs == number of views
2040 auto inputs = GetInputs(graph, layerIndex);
2041 CHECK_VALID_SIZE(inputs.size(), numViews);
2042
2043 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01002044 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00002045 for (unsigned int v = 0; v < numViews; ++v)
2046 {
2047 auto originPtr = originsPtr->Get(v);
2048 for (unsigned int d = 0; d < numDimensions; ++d)
2049 {
2050 uint32_t value = originPtr->data()->Get(d);
2051 descriptor.SetViewOriginCoord(v, d, value);
2052 }
2053 }
Jim Flynne242f2d2019-05-22 14:24:13 +01002054 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00002055
Jim Flynn906f9462019-05-10 13:55:21 +01002056 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00002057 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2058 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2059
2060 RegisterInputSlots(graph, layerIndex, layer);
2061 RegisterOutputSlots(graph, layerIndex, layer);
2062}
2063
Finn Williams85d36712021-01-26 22:30:06 +00002064void IDeserializer::DeserializerImpl::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00002065{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002066 CHECK_LAYERS(graph, 0, layerIndex);
2067 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00002068 CHECK_LOCATION();
2069 CHECK_VALID_SIZE(inputs.size(), 2);
2070
Derek Lamberti8ddae332019-02-21 16:29:43 +00002071 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00002072 CHECK_VALID_SIZE(outputs.size(), 1);
2073
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002074 auto layerName = GetLayerName(graph, layerIndex);
2075 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00002076
2077 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2078 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2079
Derek Lamberti8ddae332019-02-21 16:29:43 +00002080 RegisterInputSlots(graph, layerIndex, layer);
2081 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00002082}
2083
Finn Williams85d36712021-01-26 22:30:06 +00002084void IDeserializer::DeserializerImpl::ParseFloor(GraphPtr graph, unsigned int layerIndex)
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00002085{
2086 CHECK_LAYERS(graph, 0, layerIndex);
2087 CHECK_LOCATION();
2088
2089 auto inputs = GetInputs(graph, layerIndex);
2090 CHECK_VALID_SIZE(inputs.size(), 1);
2091
2092 auto outputs = GetOutputs(graph, layerIndex);
2093 CHECK_VALID_SIZE(outputs.size(), 1);
2094
2095 auto layerName = GetLayerName(graph, layerIndex);
2096
2097 armnn::IConnectableLayer* layer;
2098
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00002099 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00002100
2101 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2102 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2103
2104 RegisterInputSlots(graph, layerIndex, layer);
2105 RegisterOutputSlots(graph, layerIndex, layer);
2106}
2107
// Deserializes a FullyConnected layer.
//
// Two serialization formats exist, distinguished by the ConstTensorsAsInputs
// feature version of the model:
//  - old models (<= 0): weights (and optional biases) are stored as members
//    of the layer itself, and exactly one graph input is expected;
//  - new models (> 0): weights/biases arrive as extra graph inputs, possibly
//    backed by ConstantLayers created elsewhere during deserialization.
void IDeserializer::DeserializerImpl::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
    fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
    fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
    fullyConnectedDescriptor.m_ConstantWeights = flatBufferDescriptor->constantWeights();

    armnn::IConnectableLayer* layer;
    // Slots wired up manually below (from the embedded const tensors) must be
    // excluded from the generic input-slot registration at the end.
    std::vector<unsigned int> ignoreSlots {};

    // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
    // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
    if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
    {
        // If the model stores weights and biases as members of the layer we have to read them from there
        // but add them to their own ConstantLayer for compatibility
        CHECK_VALID_SIZE(inputs.size(), 1);
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  layerName.c_str());

        // Slot 1 is the weights input by convention.
        armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
        auto weightsLayer = m_Network->AddConstantLayer(weightsTensor);
        weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
        weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsTensor.GetInfo());
        ignoreSlots.emplace_back(1u);

        if (fullyConnectedDescriptor.m_BiasEnabled)
        {
            // Slot 2 is the (optional) bias input by convention.
            armnn::ConstTensor biasTensor = ToConstTensor(flatBufferLayer->biases());
            auto biasLayer = m_Network->AddConstantLayer(biasTensor);
            biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
            biasLayer->GetOutputSlot(0).SetTensorInfo(biasTensor.GetInfo());
            ignoreSlots.emplace_back(2u);
        }
    }
    else
    {
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  layerName.c_str());
        // The descriptor knows how many inputs (data [+ weights [+ bias]])
        // this layer variant expects.
        uint32_t numInputs = fullyConnectedDescriptor.GetNumInputs();
        CHECK_VALID_SIZE(inputs.size(), numInputs);
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2168
Finn Williams85d36712021-01-26 22:30:06 +00002169void IDeserializer::DeserializerImpl::ParsePad(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002170{
2171 CHECK_LAYERS(graph, 0, layerIndex);
2172
Finn Williams85d36712021-01-26 22:30:06 +00002173 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002174 CHECK_VALID_SIZE(inputs.size(), 1);
2175
Finn Williams85d36712021-01-26 22:30:06 +00002176 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002177 CHECK_VALID_SIZE(outputs.size(), 1);
2178
2179 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
2180 auto flatBufferPadList = flatBufferDescriptor->padList();
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +01002181 auto paddingMode = flatBufferDescriptor->paddingMode();
David Monahan34757812019-06-19 11:47:21 +01002182 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002183
Mike Kelly51b8c312022-05-24 11:34:02 +01002184 if (flatBufferPadList->size() % 2 != 0)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002185 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002186 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2187 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002188 }
2189
2190 std::vector<std::pair<unsigned int, unsigned int>> padList;
Mike Kelly51b8c312022-05-24 11:34:02 +01002191 padList.reserve(flatBufferPadList->size() / 2);
2192 for (unsigned int i = 0; i < flatBufferPadList->size() - 1; i += 2)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002193 {
2194 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2195 }
2196
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +01002197 armnn::PadDescriptor descriptor(padList, padValue, ToPaddingMode(paddingMode));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002198
2199 auto layerName = GetLayerName(graph, layerIndex);
2200 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
2201
2202 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2203 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2204
2205 RegisterInputSlots(graph, layerIndex, layer);
2206 RegisterOutputSlots(graph, layerIndex, layer);
2207}
2208
Finn Williams85d36712021-01-26 22:30:06 +00002209void IDeserializer::DeserializerImpl::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002210{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002211 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002212
2213 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00002214 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002215
Derek Lamberti8ddae332019-02-21 16:29:43 +00002216 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002217 CHECK_VALID_SIZE(inputs.size(), 1);
2218
Derek Lamberti8ddae332019-02-21 16:29:43 +00002219 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002220 CHECK_VALID_SIZE(outputs.size(), 1);
2221 auto outputInfo = ToTensorInfo(outputs[0]);
2222
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002223 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly51b8c312022-05-24 11:34:02 +01002224 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->size()));
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002225
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002226 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002227 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2228
Derek Lamberti8ddae332019-02-21 16:29:43 +00002229 RegisterInputSlots(graph, layerIndex, layer);
2230 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002231}
2232
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002233armnn::Pooling2dDescriptor IDeserializer::DeserializerImpl::GetPooling2dDescriptor(Pooling2dDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002234 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002235{
Jan Eilers8eb25602020-03-09 12:13:48 +00002236 IgnoreUnused(layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002237 armnn::Pooling2dDescriptor desc;
2238
2239 switch (pooling2dDesc->poolType())
2240 {
2241 case PoolingAlgorithm_Average:
2242 {
2243 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002244 break;
2245 }
2246 case PoolingAlgorithm_Max:
2247 {
2248 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002249 break;
2250 }
Teresa Charlin4c600de2021-03-11 21:59:43 +00002251 case PoolingAlgorithm_L2:
2252 {
2253 desc.m_PoolType = armnn::PoolingAlgorithm::L2;
2254 break;
2255 }
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002256 default:
2257 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002258 ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002259 }
2260 }
2261
2262 switch (pooling2dDesc->outputShapeRounding())
2263 {
2264 case OutputShapeRounding_Floor:
2265 {
2266 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2267 break;
2268 }
2269 case OutputShapeRounding_Ceiling:
2270 {
2271 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
2272 break;
2273 }
2274 default:
2275 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002276 ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002277 }
2278 }
2279
2280 switch (pooling2dDesc->paddingMethod())
2281 {
2282 case PaddingMethod_Exclude:
2283 {
2284 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2285 break;
2286 }
2287 case PaddingMethod_IgnoreValue:
2288 {
2289 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
2290 break;
2291 }
2292 default:
2293 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002294 ARMNN_ASSERT_MSG(false, "Unsupported padding method");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002295 }
2296 }
2297
2298 switch (pooling2dDesc->dataLayout())
2299 {
2300 case DataLayout_NCHW:
2301 {
2302 desc.m_DataLayout = armnn::DataLayout::NCHW;
2303 break;
2304 }
2305 case DataLayout_NHWC:
2306 {
2307 desc.m_DataLayout = armnn::DataLayout::NHWC;
2308 break;
2309 }
2310 default:
2311 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002312 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002313 }
2314 }
2315
2316 desc.m_PadRight = pooling2dDesc->padRight();
2317 desc.m_PadLeft = pooling2dDesc->padLeft();
2318 desc.m_PadBottom = pooling2dDesc->padBottom();
2319 desc.m_PadTop = pooling2dDesc->padTop();
2320 desc.m_StrideX = pooling2dDesc->strideX();
2321 desc.m_StrideY = pooling2dDesc->strideY();
2322 desc.m_PoolWidth = pooling2dDesc->poolWidth();
2323 desc.m_PoolHeight = pooling2dDesc->poolHeight();
2324
2325 return desc;
2326}
2327
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002328armnn::Pooling3dDescriptor IDeserializer::DeserializerImpl::GetPooling3dDescriptor(Pooling3dDescriptor pooling3dDesc,
2329 unsigned int layerIndex)
2330{
2331 IgnoreUnused(layerIndex);
2332 armnn::Pooling3dDescriptor desc;
Finn Williams85d36712021-01-26 22:30:06 +00002333
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002334 switch (pooling3dDesc->poolType())
2335 {
2336 case PoolingAlgorithm_Average:
2337 {
2338 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
2339 break;
2340 }
2341 case PoolingAlgorithm_Max:
2342 {
2343 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
2344 break;
2345 }
2346 case PoolingAlgorithm_L2:
2347 {
2348 desc.m_PoolType = armnn::PoolingAlgorithm::L2;
2349 break;
2350 }
2351 default:
2352 {
2353 ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
2354 }
2355 }
2356
2357 switch (pooling3dDesc->outputShapeRounding())
2358 {
2359 case OutputShapeRounding_Floor:
2360 {
2361 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
2362 break;
2363 }
2364 case OutputShapeRounding_Ceiling:
2365 {
2366 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
2367 break;
2368 }
2369 default:
2370 {
2371 ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
2372 }
2373 }
2374
2375 switch (pooling3dDesc->paddingMethod())
2376 {
2377 case PaddingMethod_Exclude:
2378 {
2379 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
2380 break;
2381 }
2382 case PaddingMethod_IgnoreValue:
2383 {
2384 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
2385 break;
2386 }
2387 default:
2388 {
2389 ARMNN_ASSERT_MSG(false, "Unsupported padding method");
2390 }
2391 }
2392
2393 switch (pooling3dDesc->dataLayout())
2394 {
2395 case DataLayout_NCDHW:
2396 {
2397 desc.m_DataLayout = armnn::DataLayout::NCDHW;
2398 break;
2399 }
2400 case DataLayout_NDHWC:
2401 {
2402 desc.m_DataLayout = armnn::DataLayout::NDHWC;
2403 break;
2404 }
2405 default:
2406 {
2407 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
2408 }
2409 }
2410
2411 desc.m_PadRight = pooling3dDesc->padRight();
2412 desc.m_PadLeft = pooling3dDesc->padLeft();
2413 desc.m_PadBottom = pooling3dDesc->padBottom();
2414 desc.m_PadTop = pooling3dDesc->padTop();
2415 desc.m_PadFront = pooling3dDesc->padFront();
2416 desc.m_PadBack = pooling3dDesc->padBack();
2417 desc.m_StrideX = pooling3dDesc->strideX();
2418 desc.m_StrideY = pooling3dDesc->strideY();
2419 desc.m_StrideZ = pooling3dDesc->strideZ();
2420 desc.m_PoolWidth = pooling3dDesc->poolWidth();
2421 desc.m_PoolHeight = pooling3dDesc->poolHeight();
2422 desc.m_PoolDepth = pooling3dDesc->poolDepth();
2423
2424 return desc;
2425}
Finn Williams85d36712021-01-26 22:30:06 +00002426
2427void IDeserializer::DeserializerImpl::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002428{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002429 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002430
Derek Lamberti8ddae332019-02-21 16:29:43 +00002431 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00002432 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002433 CHECK_VALID_SIZE(inputs.size(), 1);
2434
Derek Lamberti8ddae332019-02-21 16:29:43 +00002435 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002436 CHECK_VALID_SIZE(outputs.size(), 1);
2437 auto outputInfo = ToTensorInfo(outputs[0]);
2438
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002439 auto pooling2dDescriptor = GetPooling2dDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002440 auto layerName = GetLayerName(graph, layerIndex);
2441 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002442 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2443
Derek Lamberti8ddae332019-02-21 16:29:43 +00002444 RegisterInputSlots(graph, layerIndex, layer);
2445 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002446}
2447
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002448void IDeserializer::DeserializerImpl::ParsePooling3d(GraphPtr graph, unsigned int layerIndex)
2449{
2450 CHECK_LAYERS(graph, 0, layerIndex);
2451
2452 auto pooling3dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->descriptor();
2453 auto inputs = GetInputs(graph, layerIndex);
2454 CHECK_VALID_SIZE(inputs.size(), 1);
2455
2456 auto outputs = GetOutputs(graph, layerIndex);
2457 CHECK_VALID_SIZE(outputs.size(), 1);
2458 auto outputInfo = ToTensorInfo(outputs[0]);
2459
2460 auto pooling3dDescriptor = GetPooling3dDescriptor(pooling3dDes, layerIndex);
2461 auto layerName = GetLayerName(graph, layerIndex);
2462 IConnectableLayer* layer = m_Network->AddPooling3dLayer(pooling3dDescriptor, layerName.c_str());
2463 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2464
2465 RegisterInputSlots(graph, layerIndex, layer);
2466 RegisterOutputSlots(graph, layerIndex, layer);
2467}
2468
Finn Williams85d36712021-01-26 22:30:06 +00002469void IDeserializer::DeserializerImpl::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
Derek Lamberti87acb272019-03-27 16:51:31 +00002470{
2471 CHECK_LAYERS(graph, 0, layerIndex);
2472
2473 auto inputs = GetInputs(graph, layerIndex);
2474 CHECK_VALID_SIZE(inputs.size(), 1);
2475
2476 auto outputs = GetOutputs(graph, layerIndex);
2477 CHECK_VALID_SIZE(outputs.size(), 1);
2478 auto outputInfo = ToTensorInfo(outputs[0]);
2479
2480 auto layerName = GetLayerName(graph, layerIndex);
2481 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
2482 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2483
2484 RegisterInputSlots(graph, layerIndex, layer);
2485 RegisterOutputSlots(graph, layerIndex, layer);
2486}
2487
Finn Williams85d36712021-01-26 22:30:06 +00002488armnn::TensorInfo IDeserializer::DeserializerImpl::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00002489 const std::vector<uint32_t>& targetDimsIn)
2490{
2491 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
2492 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
2493
2494 if (stretchDim != targetDimsIn.end())
2495 {
2496 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
2497 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002498 throw ParseException(fmt::format("At most one component of shape can be -1 {}",
2499 CHECK_LOCATION().AsString()));
Saoirse Stewart263829c2019-02-19 15:54:14 +00002500 }
2501
2502 auto targetNumElements =
Matthew Sloyan0663d662020-09-14 11:47:26 +01002503 armnn::numeric_cast<unsigned int>(
Saoirse Stewart263829c2019-02-19 15:54:14 +00002504 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
2505
2506 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
2507 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
2508 }
2509
2510 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
2511
2512 armnn::TensorInfo reshapeInfo = inputTensorInfo;
2513 reshapeInfo.SetShape(outputShape);
2514
2515 return reshapeInfo;
2516}
2517
Finn Williams85d36712021-01-26 22:30:06 +00002518void IDeserializer::DeserializerImpl::ParseRank(GraphPtr graph, unsigned int layerIndex)
Finn Williams2605b232020-06-10 15:53:46 +01002519{
2520 CHECK_LAYERS(graph, 0, layerIndex);
2521
Finn Williams85d36712021-01-26 22:30:06 +00002522 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002523 CHECK_VALID_SIZE(inputs.size(), 1);
2524
Finn Williams85d36712021-01-26 22:30:06 +00002525 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002526 CHECK_VALID_SIZE(outputs.size(), 1);
2527
2528 auto layerName = GetLayerName(graph, layerIndex);
2529 IConnectableLayer* layer = m_Network->AddRankLayer( layerName.c_str());
2530
2531 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2532 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2533
2534 RegisterInputSlots(graph, layerIndex, layer);
2535 RegisterOutputSlots(graph, layerIndex, layer);
2536}
2537
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002538void IDeserializer::DeserializerImpl::ParseReduce(GraphPtr graph, unsigned int layerIndex)
2539{
2540 CHECK_LAYERS(graph, 0, layerIndex);
2541 CHECK_LOCATION();
2542
2543 auto inputs = GetInputs(graph, layerIndex);
2544 CHECK_VALID_SIZE(inputs.size(), 1);
2545
2546 auto outputs = GetOutputs(graph, layerIndex);
2547 CHECK_VALID_SIZE(outputs.size(), 1);
2548
2549 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ReduceLayer();
2550 auto fbDescriptor = fbLayer->descriptor();
2551 auto flatBufferAxis = fbDescriptor->axis();
2552
2553 armnn::ReduceDescriptor descriptor;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002554 descriptor.m_KeepDims = fbDescriptor->keepDims();
2555 descriptor.m_vAxis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2556 descriptor.m_ReduceOperation = ToReduceOperation(fbDescriptor->reduceOperation());
2557
2558 const std::string& layerName = GetLayerName(graph, layerIndex);
2559 IConnectableLayer* layer = m_Network->AddReduceLayer(descriptor, layerName.c_str());
2560
2561 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2562 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2563
2564 RegisterInputSlots(graph, layerIndex, layer);
2565 RegisterOutputSlots(graph, layerIndex, layer);
2566}
2567
Finn Williams85d36712021-01-26 22:30:06 +00002568void IDeserializer::DeserializerImpl::ParseReshape(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart263829c2019-02-19 15:54:14 +00002569{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002570 CHECK_LAYERS(graph, 0, layerIndex);
2571 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002572
Derek Lamberti8ddae332019-02-21 16:29:43 +00002573 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002574 CHECK_VALID_SIZE(outputs.size(), 1);
2575
2576 armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
2577 armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);
2578
Derek Lamberti8ddae332019-02-21 16:29:43 +00002579 const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
Saoirse Stewart263829c2019-02-19 15:54:14 +00002580 std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());
2581
Finn Williams85d36712021-01-26 22:30:06 +00002582 armnn::TensorInfo reshapeOutputTensorInfo = DeserializerImpl::OutputShapeOfReshape(inputTensorInfo, outputDims);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002583 const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
2584
2585 const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
2586 outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());
2587
2588 if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
2589 {
2590 std::stringstream ss;
2591 ss << "New shape defined in reshape parameters "
2592 << reshapeOutputTensorShape
2593 << " does not equal output shape "
2594 << actualOutputTensorInfo.GetShape()
2595 << ": "
2596 << CHECK_LOCATION().AsString();
2597 throw ParseException(ss.str());
2598 }
2599
2600 armnn::ReshapeDescriptor reshapeDesc;
2601 reshapeDesc.m_TargetShape = reshapeOutputTensorShape;
2602
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002603 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002604 IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
2605 layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);
2606
Derek Lamberti8ddae332019-02-21 16:29:43 +00002607 RegisterInputSlots(graph, layerIndex, layer);
2608 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002609}
2610
Finn Williams85d36712021-01-26 22:30:06 +00002611void IDeserializer::DeserializerImpl::ParseResize(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002612{
2613 CHECK_LAYERS(graph, 0, layerIndex);
2614
Finn Williams85d36712021-01-26 22:30:06 +00002615 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002616 CHECK_VALID_SIZE(inputs.size(), 1);
2617
Finn Williams85d36712021-01-26 22:30:06 +00002618 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002619 CHECK_VALID_SIZE(outputs.size(), 1);
2620
2621 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2622
2623 armnn::ResizeDescriptor descriptor;
2624 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
2625 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
2626 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
2627 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002628 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2629 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002630
2631 auto layerName = GetLayerName(graph, layerIndex);
2632 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2633
2634 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2635 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2636
2637 RegisterInputSlots(graph, layerIndex, layer);
2638 RegisterOutputSlots(graph, layerIndex, layer);
2639}
2640
Jan Eilers1b2654f2021-09-24 15:45:46 +01002641
/// @note The ResizeBilinear operation was deprecated and removed in favor of the Resize operation.
/// This function is kept for backwards compatibility.
Finn Williams85d36712021-01-26 22:30:06 +00002644void IDeserializer::DeserializerImpl::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002645{
2646 CHECK_LAYERS(graph, 0, layerIndex);
2647
Finn Williams85d36712021-01-26 22:30:06 +00002648 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002649 CHECK_VALID_SIZE(inputs.size(), 1);
2650
Finn Williams85d36712021-01-26 22:30:06 +00002651 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002652 CHECK_VALID_SIZE(outputs.size(), 1);
2653
2654 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2655
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002656 armnn::ResizeDescriptor descriptor;
2657 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002658 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002659 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
2660 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002661 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2662 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002663
2664 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002665 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002666
2667 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2668 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2669
2670 RegisterInputSlots(graph, layerIndex, layer);
2671 RegisterOutputSlots(graph, layerIndex, layer);
2672}
2673
Keith Davis3ae3f972021-05-21 16:33:48 +01002674void IDeserializer::DeserializerImpl::ParseShape(GraphPtr graph, unsigned int layerIndex)
2675{
2676 CHECK_LAYERS(graph, 0, layerIndex);
2677
2678 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2679 CHECK_VALID_SIZE(inputs.size(), 1);
2680
2681 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2682 CHECK_VALID_SIZE(outputs.size(), 1);
2683
2684 auto layerName = GetLayerName(graph, layerIndex);
2685 IConnectableLayer* layer = m_Network->AddShapeLayer( layerName.c_str());
2686
2687 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2688 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2689
2690 RegisterInputSlots(graph, layerIndex, layer);
2691 RegisterOutputSlots(graph, layerIndex, layer);
2692}
2693
Finn Williams85d36712021-01-26 22:30:06 +00002694void IDeserializer::DeserializerImpl::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002695{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002696 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002697
Finn Williams85d36712021-01-26 22:30:06 +00002698 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002699 CHECK_VALID_SIZE(inputs.size(), 1);
2700
Finn Williams85d36712021-01-26 22:30:06 +00002701 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002702 CHECK_VALID_SIZE(outputs.size(), 1);
2703
2704 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00002705 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Sadik Armaganfd0cae32021-11-08 17:18:31 +00002706 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->axis();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002707 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002708
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002709 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2710
2711 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2712 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2713
Derek Lamberti8ddae332019-02-21 16:29:43 +00002714 RegisterInputSlots(graph, layerIndex, layer);
2715 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00002716}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002717
Finn Williams85d36712021-01-26 22:30:06 +00002718void IDeserializer::DeserializerImpl::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002719{
2720 CHECK_LAYERS(graph, 0, layerIndex);
2721
Finn Williams85d36712021-01-26 22:30:06 +00002722 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002723 CHECK_VALID_SIZE(inputs.size(), 1);
2724
Finn Williams85d36712021-01-26 22:30:06 +00002725 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002726 CHECK_VALID_SIZE(outputs.size(), 1);
2727
2728 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2729 auto flatBufferPadList = flatBufferDescriptor->padList();
2730 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
2731
Mike Kelly51b8c312022-05-24 11:34:02 +01002732 if (flatBufferPadList->size() % 2 != 0)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002733 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002734 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2735 CHECK_LOCATION().AsString()));
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002736 }
2737
2738 std::vector<std::pair<unsigned int, unsigned int>> padList;
Mike Kelly51b8c312022-05-24 11:34:02 +01002739 padList.reserve(flatBufferPadList->size() / 2);
2740 for (unsigned int i = 0; i < flatBufferPadList->size() - 1; i += 2)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002741 {
2742 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2743 }
2744
2745 armnn::SpaceToBatchNdDescriptor descriptor;
2746 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2747 descriptor.m_BlockShape =
2748 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2749 descriptor.m_PadList = padList;
2750
2751 auto layerName = GetLayerName(graph, layerIndex);
2752 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2753
2754 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2755 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2756
2757 RegisterInputSlots(graph, layerIndex, layer);
2758 RegisterOutputSlots(graph, layerIndex, layer);
2759}
2760
Finn Williams85d36712021-01-26 22:30:06 +00002761void IDeserializer::DeserializerImpl::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002762{
2763 CHECK_LAYERS(graph, 0, layerIndex);
2764
Finn Williams85d36712021-01-26 22:30:06 +00002765 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002766 CHECK_VALID_SIZE(inputs.size(), 1);
2767
Finn Williams85d36712021-01-26 22:30:06 +00002768 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002769 CHECK_VALID_SIZE(outputs.size(), 1);
2770
2771 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2772
2773 armnn::SpaceToDepthDescriptor descriptor;
2774 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2775 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2776
2777 auto layerName = GetLayerName(graph, layerIndex);
2778 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2779
2780 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2781 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2782
2783 RegisterInputSlots(graph, layerIndex, layer);
2784 RegisterOutputSlots(graph, layerIndex, layer);
2785}
2786
Finn Williams85d36712021-01-26 22:30:06 +00002787armnn::NormalizationDescriptor IDeserializer::DeserializerImpl::GetNormalizationDescriptor(
2788 NormalizationDescriptorPtr normalizationDescriptor,
Nina Drozd57728782019-02-27 10:53:27 +00002789 unsigned int layerIndex)
2790{
Jan Eilers8eb25602020-03-09 12:13:48 +00002791 IgnoreUnused(layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002792 armnn::NormalizationDescriptor desc;
2793
2794 switch (normalizationDescriptor->normChannelType())
2795 {
2796 case NormalizationAlgorithmChannel_Across:
2797 {
2798 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
2799 break;
2800 }
2801 case NormalizationAlgorithmChannel_Within:
2802 {
2803 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
2804 break;
2805 }
2806 default:
2807 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002808 ARMNN_ASSERT_MSG(false, "Unsupported normalization channel type");
Nina Drozd57728782019-02-27 10:53:27 +00002809 }
2810 }
2811
2812 switch (normalizationDescriptor->normMethodType())
2813 {
2814 case NormalizationAlgorithmMethod_LocalBrightness:
2815 {
2816 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
2817 break;
2818 }
2819 case NormalizationAlgorithmMethod_LocalContrast:
2820 {
2821 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
2822 break;
2823 }
2824 default:
2825 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002826 ARMNN_ASSERT_MSG(false, "Unsupported normalization method type");
Nina Drozd57728782019-02-27 10:53:27 +00002827 }
2828 }
2829
2830 switch (normalizationDescriptor->dataLayout())
2831 {
2832 case DataLayout_NCHW:
2833 {
2834 desc.m_DataLayout = armnn::DataLayout::NCHW;
2835 break;
2836 }
2837 case DataLayout_NHWC:
2838 {
2839 desc.m_DataLayout = armnn::DataLayout::NHWC;
2840 break;
2841 }
2842 default:
2843 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002844 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Nina Drozd57728782019-02-27 10:53:27 +00002845 }
2846 }
2847
2848 desc.m_Alpha = normalizationDescriptor->alpha();
2849 desc.m_Beta = normalizationDescriptor->beta();
2850 desc.m_K = normalizationDescriptor->k();
2851 desc.m_NormSize = normalizationDescriptor->normSize();
2852
2853 return desc;
2854}
2855
Finn Williams85d36712021-01-26 22:30:06 +00002856void IDeserializer::DeserializerImpl::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
Nina Drozd57728782019-02-27 10:53:27 +00002857{
2858 CHECK_LAYERS(graph, 0, layerIndex);
2859
2860 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2861
Finn Williams85d36712021-01-26 22:30:06 +00002862 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002863 CHECK_VALID_SIZE(inputs.size(), 1);
2864
Finn Williams85d36712021-01-26 22:30:06 +00002865 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002866 CHECK_VALID_SIZE(outputs.size(), 1);
2867
2868 auto outputInfo = ToTensorInfo(outputs[0]);
2869
2870 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2871 auto layerName = GetLayerName(graph, layerIndex);
2872
2873 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2874 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2875
2876 RegisterInputSlots(graph, layerIndex, layer);
2877 RegisterOutputSlots(graph, layerIndex, layer);
2878}
2879
Finn Williams85d36712021-01-26 22:30:06 +00002880void IDeserializer::DeserializerImpl::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan8b42a382019-03-01 14:24:49 +00002881{
2882 CHECK_LAYERS(graph, 0, layerIndex);
2883 auto inputs = GetInputs(graph, layerIndex);
2884 CHECK_LOCATION();
2885 CHECK_VALID_SIZE(inputs.size(), 1);
2886
2887 auto outputs = GetOutputs(graph, layerIndex);
2888 CHECK_VALID_SIZE(outputs.size(), 1);
2889
2890 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagan8b42a382019-03-01 14:24:49 +00002891
josh minor4a3c6102020-01-06 16:40:46 -06002892 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
2893 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
Sadik Armagan8b42a382019-03-01 14:24:49 +00002894 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2895 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2896
2897 RegisterInputSlots(graph, layerIndex, layer);
2898 RegisterOutputSlots(graph, layerIndex, layer);
2899}
2900
Finn Williams85d36712021-01-26 22:30:06 +00002901void IDeserializer::DeserializerImpl::ParseSlice(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002902{
2903 CHECK_LAYERS(graph, 0, layerIndex);
2904
2905 auto inputs = GetInputs(graph, layerIndex);
2906 CHECK_VALID_SIZE(inputs.size(), 1);
2907
2908 auto outputs = GetOutputs(graph, layerIndex);
2909 CHECK_VALID_SIZE(outputs.size(), 1);
2910
2911 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2912
2913 auto fbBegin = fbDescriptor->begin();
2914 auto fbSize = fbDescriptor->size();
2915
Mike Kelly51b8c312022-05-24 11:34:02 +01002916 if (fbBegin->size() != fbSize->size())
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002917 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002918 throw ParseException(fmt::format("Begin and size descriptors must have the same length {}",
2919 CHECK_LOCATION().AsString()));
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002920 }
2921
2922 armnn::SliceDescriptor descriptor;
2923 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
2924 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
2925
2926 auto layerName = GetLayerName(graph, layerIndex);
2927 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2928
2929 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2930 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2931
2932 RegisterInputSlots(graph, layerIndex, layer);
2933 RegisterOutputSlots(graph, layerIndex, layer);
2934}
2935
Finn Williams85d36712021-01-26 22:30:06 +00002936void IDeserializer::DeserializerImpl::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002937{
2938 CHECK_LAYERS(graph, 0, layerIndex);
2939
Finn Williams85d36712021-01-26 22:30:06 +00002940 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002941 CHECK_VALID_SIZE(inputs.size(), 1);
2942
Finn Williams85d36712021-01-26 22:30:06 +00002943 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002944 CHECK_VALID_SIZE(outputs.size(), 1);
2945
2946 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2947
2948 auto flatBufferBegin = flatBufferDescriptor->begin();
2949 auto flatBufferEnd = flatBufferDescriptor->end();
2950 auto flatBufferStride = flatBufferDescriptor->stride();
2951
Mike Kelly51b8c312022-05-24 11:34:02 +01002952 if (!(flatBufferBegin->size() == flatBufferEnd->size() &&
2953 flatBufferBegin->size() == flatBufferStride->size()))
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002954 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002955 throw ParseException(fmt::format("The size of the begin, end, and stride must be equal {}",
2956 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002957 }
2958
2959 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2960 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2961 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
2962
2963 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
2964 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2965 descriptor.m_EndMask = flatBufferDescriptor->endMask();
2966 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
2967 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
2968 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
2969 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2970
2971 auto layerName = GetLayerName(graph, layerIndex);
2972 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
2973
2974 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2975 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2976
2977 RegisterInputSlots(graph, layerIndex, layer);
2978 RegisterOutputSlots(graph, layerIndex, layer);
2979}
2980
Finn Williams85d36712021-01-26 22:30:06 +00002981void IDeserializer::DeserializerImpl::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
Conor Kennedyda1f9752019-03-01 14:37:12 +00002982{
2983 CHECK_LAYERS(graph, 0, layerIndex);
2984 auto inputs = GetInputs(graph, layerIndex);
2985 CHECK_LOCATION();
2986 CHECK_VALID_SIZE(inputs.size(), 2);
2987
2988 auto outputs = GetOutputs(graph, layerIndex);
2989 CHECK_VALID_SIZE(outputs.size(), 1);
2990
2991 auto layerName = GetLayerName(graph, layerIndex);
2992 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
2993
2994 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2995 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2996
2997 RegisterInputSlots(graph, layerIndex, layer);
2998 RegisterOutputSlots(graph, layerIndex, layer);
2999}
3000
Finn Williams85d36712021-01-26 22:30:06 +00003001void IDeserializer::DeserializerImpl::ParseGather(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003002{
3003 CHECK_LAYERS(graph, 0, layerIndex);
3004
Finn Williams85d36712021-01-26 22:30:06 +00003005 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003006 CHECK_VALID_SIZE(inputs.size(), 2);
3007
Finn Williams85d36712021-01-26 22:30:06 +00003008 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003009 CHECK_VALID_SIZE(outputs.size(), 1);
3010
Teresa Charlin52664732020-06-29 16:27:03 +01003011 armnn::GatherDescriptor descriptor;
3012 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_GatherLayer()->descriptor()->axis();
3013
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003014 auto layerName = GetLayerName(graph, layerIndex);
Teresa Charlin52664732020-06-29 16:27:03 +01003015 IConnectableLayer* layer = m_Network->AddGatherLayer(descriptor, layerName.c_str());
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003016
3017 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003018 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3019
3020 RegisterInputSlots(graph, layerIndex, layer);
3021 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003022}
3023
Teresa Charlin6966bfa2022-04-25 17:14:50 +01003024void IDeserializer::DeserializerImpl::ParseGatherNd(GraphPtr graph, unsigned int layerIndex)
3025{
3026 CHECK_LAYERS(graph, 0, layerIndex);
3027
3028 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
3029 CHECK_VALID_SIZE(inputs.size(), 2);
3030
3031 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
3032 CHECK_VALID_SIZE(outputs.size(), 1);
3033
3034 auto layerName = GetLayerName(graph, layerIndex);
3035 IConnectableLayer* layer = m_Network->AddGatherNdLayer(layerName.c_str());
3036
3037 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3038 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3039
3040 RegisterInputSlots(graph, layerIndex, layer);
3041 RegisterOutputSlots(graph, layerIndex, layer);
3042}
3043
Finn Williams85d36712021-01-26 22:30:06 +00003044void IDeserializer::DeserializerImpl::ParseMean(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003045{
3046 CHECK_LAYERS(graph, 0, layerIndex);
3047
Finn Williams85d36712021-01-26 22:30:06 +00003048 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003049 CHECK_VALID_SIZE(inputs.size(), 1);
3050
Finn Williams85d36712021-01-26 22:30:06 +00003051 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003052 CHECK_VALID_SIZE(outputs.size(), 1);
3053
3054 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
3055 auto flatBufferAxis = flatBufferDescriptor->axis();
3056 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
3057
3058 armnn::MeanDescriptor descriptor;
3059 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
3060 descriptor.m_KeepDims = flatBufferKeepDims;
3061
3062 auto layerName = GetLayerName(graph, layerIndex);
3063 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
3064
3065 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3066 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3067
3068 RegisterInputSlots(graph, layerIndex, layer);
3069 RegisterOutputSlots(graph, layerIndex, layer);
3070}
3071
Finn Williams85d36712021-01-26 22:30:06 +00003072void IDeserializer::DeserializerImpl::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
Jim Flynn18ce3382019-03-08 11:08:30 +00003073{
3074 CHECK_LAYERS(graph, 0, layerIndex);
3075
Finn Williams85d36712021-01-26 22:30:06 +00003076 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00003077 CHECK_VALID_SIZE(inputs.size(), 1);
3078
Finn Williams85d36712021-01-26 22:30:06 +00003079 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00003080
3081 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
3082 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
3083 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
3084 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
3085 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
3086 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
3087
3088 // Check numViews and numDimensions corresponds to the ones already serialized ...
3089 // numViews == flatBufferViewSizes.size();
3090 // foreach: numDimensions == flatBufferViewSizes[x].size();
3091
3092 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
3093 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
3094 {
3095 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
3096 {
3097 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
3098 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
3099 }
3100 }
3101
3102 auto layerName = GetLayerName(graph, layerIndex);
3103 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
3104
3105 // I could have as many outputs as views ...
3106 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
3107 {
3108 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
3109 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
3110 }
3111
3112 RegisterInputSlots(graph, layerIndex, layer);
3113 RegisterOutputSlots(graph, layerIndex, layer);
3114}
3115
Finn Williams85d36712021-01-26 22:30:06 +00003116armnn::LstmDescriptor IDeserializer::DeserializerImpl::GetLstmDescriptor(LstmDescriptorPtr lstmDescriptor)
Jim Flynn11af3752019-03-19 17:22:29 +00003117{
3118 armnn::LstmDescriptor desc;
3119
3120 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
3121 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
3122 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
3123 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
3124 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
3125 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01003126 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00003127
3128 return desc;
3129}
3130
/// Deserializes an LSTM layer.
/// Expects 3 inputs (input, output state in, cell state in) and 4 outputs
/// (scratch buffer, output state out, cell state out, output).
/// Weight/bias tensor groups are read conditionally according to the
/// descriptor's CIFG/projection/peephole/layer-norm flags.
void IDeserializer::DeserializerImpl::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    // lstmInputParams stores raw pointers into the local ConstTensors below, so
    // every tensor must stay alive until AddLstmLayer has been called.
    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters — always present in the serialized layer.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Optional input-gate parameters — only serialized when CIFG is disabled.
    // Declared at function scope so the pointers stored above remain valid.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Optional projection parameters.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Optional peephole parameters.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Optional layer-normalization weights; the input-gate weights additionally
    // require CIFG to be disabled.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (lstmDescriptor.m_LayerNormEnabled)
    {
        if (!lstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3246
Finn Williams85d36712021-01-26 22:30:06 +00003247armnn::QLstmDescriptor IDeserializer::DeserializerImpl::GetQLstmDescriptor(QLstmDescriptorPtr qLstmDescriptor)
James Conroy8d333182020-05-13 10:27:58 +01003248{
3249 armnn::QLstmDescriptor desc;
3250
3251 desc.m_CifgEnabled = qLstmDescriptor->cifgEnabled();
3252 desc.m_PeepholeEnabled = qLstmDescriptor->peepholeEnabled();
3253 desc.m_ProjectionEnabled = qLstmDescriptor->projectionEnabled();
3254 desc.m_LayerNormEnabled = qLstmDescriptor->layerNormEnabled();
3255
3256 desc.m_CellClip = qLstmDescriptor->cellClip();
3257 desc.m_ProjectionClip = qLstmDescriptor->projectionClip();
3258
3259 desc.m_InputIntermediateScale = qLstmDescriptor->inputIntermediateScale();
3260 desc.m_ForgetIntermediateScale = qLstmDescriptor->forgetIntermediateScale();
3261 desc.m_CellIntermediateScale = qLstmDescriptor->cellIntermediateScale();
3262 desc.m_OutputIntermediateScale = qLstmDescriptor->outputIntermediateScale();
3263
3264 desc.m_HiddenStateScale = qLstmDescriptor->hiddenStateScale();
3265 desc.m_HiddenStateZeroPoint = qLstmDescriptor->hiddenStateZeroPoint();
3266
3267 return desc;
3268}
3269
/// Deserializes a QLSTM (quantized LSTM) layer.
/// Expects 3 inputs (input, output state in, cell state in) and 3 outputs
/// (output state out, cell state out, output). Optional weight/bias groups are
/// read according to the descriptor's CIFG/projection/peephole/layer-norm flags.
void IDeserializer::DeserializerImpl::ParseQLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 3);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto qLstmDescriptor = GetQLstmDescriptor(flatBufferDescriptor);
    // qLstmInputParams stores raw pointers into the local ConstTensors below, so
    // every tensor must stay alive until AddQLstmLayer has been called.
    armnn::LstmInputParams qLstmInputParams;

    // Mandatory params
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    qLstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    qLstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    qLstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    qLstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    qLstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    qLstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    qLstmInputParams.m_ForgetGateBias = &forgetGateBias;
    qLstmInputParams.m_CellBias = &cellBias;
    qLstmInputParams.m_OutputGateBias = &outputGateBias;

    // Optional CIFG params
    // Declared at function scope so the pointers stored below remain valid.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor inputGateBias;

    if (!qLstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        qLstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        qLstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        qLstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Optional projection params
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;

    if (qLstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        qLstmInputParams.m_ProjectionWeights = &projectionWeights;
        qLstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Optional peephole params
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;

    if (qLstmDescriptor.m_PeepholeEnabled)
    {
        // Cell-to-input weights additionally require CIFG to be disabled.
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
            qLstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        }

        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        qLstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        qLstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Optional layer norm params
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;

    if (qLstmDescriptor.m_LayerNormEnabled)
    {
        // Input-gate layer-norm weights additionally require CIFG to be disabled.
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            qLstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }

        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        qLstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        qLstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        qLstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());

    armnn::TensorInfo outputStateOutInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputStateOutInfo);

    armnn::TensorInfo cellStateOutInfo = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(cellStateOutInfo);

    armnn::TensorInfo outputInfo = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3395
/// Deserializes a QuantizedLstm layer (the legacy fixed-configuration quantized
/// LSTM: no CIFG/projection/peephole/layer-norm options, all twelve weight and
/// bias tensors are mandatory).
/// Expects 3 inputs (input, cell state in, output state in) and 2 outputs
/// (cell state out, output).
void IDeserializer::DeserializerImpl::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 2);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    // lstmInputParams stores raw pointers into the local ConstTensors below, so
    // every tensor must stay alive until AddQuantizedLstmLayer has been called.
    armnn::QuantizedLstmInputParams lstmInputParams;

    armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_InputGateBias = &inputGateBias;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());

    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3449
Finn Williams85d36712021-01-26 22:30:06 +00003450void IDeserializer::DeserializerImpl::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003451{
3452 CHECK_LAYERS(graph, 0, layerIndex);
3453
Finn Williams85d36712021-01-26 22:30:06 +00003454 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003455 CHECK_VALID_SIZE(inputs.size(), 1);
3456
Finn Williams85d36712021-01-26 22:30:06 +00003457 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003458 CHECK_VALID_SIZE(outputs.size(), 1);
3459
3460 const std::string layerName = GetLayerName(graph, layerIndex);
3461 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
3462
3463 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3464 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3465
3466 RegisterInputSlots(graph, layerIndex, layer);
3467 RegisterOutputSlots(graph, layerIndex, layer);
3468}
3469
Finn Williams85d36712021-01-26 22:30:06 +00003470void IDeserializer::DeserializerImpl::ParseMerge(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003471{
3472 CHECK_LAYERS(graph, 0, layerIndex);
3473
Finn Williams85d36712021-01-26 22:30:06 +00003474 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003475 CHECK_VALID_SIZE(inputs.size(), 2);
3476
Finn Williams85d36712021-01-26 22:30:06 +00003477 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003478 CHECK_VALID_SIZE(outputs.size(), 1);
3479
3480 const std::string layerName = GetLayerName(graph, layerIndex);
3481 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
3482
3483 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3484 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3485
3486 RegisterInputSlots(graph, layerIndex, layer);
3487 RegisterOutputSlots(graph, layerIndex, layer);
3488}
3489
Finn Williams85d36712021-01-26 22:30:06 +00003490void IDeserializer::DeserializerImpl::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganeff363d2019-04-05 15:25:46 +01003491{
3492 CHECK_LAYERS(graph, 0, layerIndex);
3493 auto inputs = GetInputs(graph, layerIndex);
3494 CHECK_LOCATION();
3495 CHECK_VALID_SIZE(inputs.size(), 2);
3496
3497 auto outputs = GetOutputs(graph, layerIndex);
3498 CHECK_VALID_SIZE(outputs.size(), 2);
3499
3500 auto layerName = GetLayerName(graph, layerIndex);
3501 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
3502
3503 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
3504 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
3505
3506 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
3507 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
3508
3509 RegisterInputSlots(graph, layerIndex, layer);
3510 RegisterOutputSlots(graph, layerIndex, layer);
3511}
3512
Finn Williams85d36712021-01-26 22:30:06 +00003513void IDeserializer::DeserializerImpl::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01003514{
3515 CHECK_LAYERS(graph, 0, layerIndex);
3516 auto inputs = GetInputs(graph, layerIndex);
3517 CHECK_LOCATION();
3518 CHECK_VALID_SIZE(inputs.size(), 2);
3519
3520 auto outputs = GetOutputs(graph, layerIndex);
3521 CHECK_VALID_SIZE(outputs.size(), 1);
3522
3523 auto layerName = GetLayerName(graph, layerIndex);
3524 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
3525
3526 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3527 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3528
3529 RegisterInputSlots(graph, layerIndex, layer);
3530 RegisterOutputSlots(graph, layerIndex, layer);
3531}
3532
Finn Williams85d36712021-01-26 22:30:06 +00003533void IDeserializer::DeserializerImpl::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
Mike Kellyc9ea45a2020-02-28 18:11:58 +00003534{
3535 CHECK_LAYERS(graph, 0, layerIndex);
3536
3537 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
3538
3539 auto inputs = GetInputs(graph, layerIndex);
3540 CHECK_VALID_SIZE(inputs.size(), 1);
3541
3542 auto outputs = GetOutputs(graph, layerIndex);
3543 CHECK_VALID_SIZE(outputs.size(), 1);
3544 auto outputInfo = ToTensorInfo(outputs[0]);
3545
3546 auto layerName = GetLayerName(graph, layerIndex);
Mike Kelly51b8c312022-05-24 11:34:02 +01003547 const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->size()));
Mike Kellyc9ea45a2020-02-28 18:11:58 +00003548
3549 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
3550 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
3551
3552 RegisterInputSlots(graph, layerIndex, layer);
3553 RegisterOutputSlots(graph, layerIndex, layer);
3554}
3555
Finn Williams85d36712021-01-26 22:30:06 +00003556void IDeserializer::DeserializerImpl::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01003557{
3558 CHECK_LAYERS(graph, 0, layerIndex);
3559
3560 auto inputs = GetInputs(graph, layerIndex);
3561 CHECK_VALID_SIZE(inputs.size(), 1);
3562
3563 auto outputs = GetOutputs(graph, layerIndex);
3564 CHECK_VALID_SIZE(outputs.size(), 1);
3565
3566 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
3567 auto layerName = GetLayerName(graph, layerIndex);
3568 auto serializerDescriptor = serializerLayer->descriptor();
3569
3570 armnn::TransposeConvolution2dDescriptor descriptor;
3571 descriptor.m_PadLeft = serializerDescriptor->padLeft();
3572 descriptor.m_PadRight = serializerDescriptor->padRight();
3573 descriptor.m_PadTop = serializerDescriptor->padTop();
3574 descriptor.m_PadBottom = serializerDescriptor->padBottom();
3575 descriptor.m_StrideX = serializerDescriptor->strideX();
3576 descriptor.m_StrideY = serializerDescriptor->strideY();;
3577 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
3578 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
3579
3580 // weights & biases
3581 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
3582 armnn::Optional<armnn::ConstTensor> optionalBiases;
3583 if (descriptor.m_BiasEnabled)
3584 {
3585 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
3586 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
3587 }
3588
3589 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
3590 weights,
3591 optionalBiases,
3592 layerName.c_str());
3593
3594 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3595 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3596
3597 RegisterInputSlots(graph, layerIndex, layer);
3598 RegisterOutputSlots(graph, layerIndex, layer);
3599}
3600
Finn Williams85d36712021-01-26 22:30:06 +00003601void IDeserializer::DeserializerImpl::ParseStack(GraphPtr graph, unsigned int layerIndex)
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003602{
3603 CHECK_LAYERS(graph, 0, layerIndex);
3604 auto inputs = GetInputs(graph, layerIndex);
3605
3606 auto outputs = GetOutputs(graph, layerIndex);
3607 CHECK_VALID_SIZE(outputs.size(), 1);
3608
3609 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
3610 unsigned int axis = flatBufferDescriptor->axis();
3611 unsigned int numInputs = flatBufferDescriptor->numInputs();
3612 CHECK_VALID_SIZE(inputs.size(), numInputs);
3613
3614 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
3615 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
3616 flatBufferInputShape->begin() + flatBufferInputShape->size());
3617
3618 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
3619 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
3620
3621 for (unsigned int i=0; i<inputs.size(); ++i)
3622 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01003623 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003624 if (descriptor.m_InputShape != inputShape)
3625 {
3626 std::stringstream ss;
3627 ss << "Shape of input "
3628 << i
3629 << " "
3630 << inputShape
3631 << " does not equal defined input shape "
3632 << descriptor.m_InputShape
3633 << ": "
3634 << CHECK_LOCATION().AsString();
3635 throw ParseException(ss.str());
3636 }
3637 }
3638
3639 auto layerName = GetLayerName(graph, layerIndex);
3640 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
3641
3642 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3643 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3644
3645 RegisterInputSlots(graph, layerIndex, layer);
3646 RegisterOutputSlots(graph, layerIndex, layer);
3647}
3648
Finn Williams85d36712021-01-26 22:30:06 +00003649void IDeserializer::DeserializerImpl::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar85121a22019-10-23 10:41:35 +01003650{
3651 CHECK_LAYERS(graph, 0, layerIndex);
3652
3653 auto inputs = GetInputs(graph, layerIndex);
3654 auto outputs = GetOutputs(graph, layerIndex);
3655
3656 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
3657 auto fbDescriptor = fbLayer->descriptor();
3658
3659 armnn::StandInDescriptor descriptor;
3660 descriptor.m_NumInputs = fbDescriptor->numInputs();
3661 descriptor.m_NumOutputs = fbDescriptor->numOutputs();
3662
3663 CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
3664 CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);
3665
3666 const std::string layerName = GetLayerName(graph, layerIndex);
3667 armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());
3668
3669 for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
3670 {
3671 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
3672 layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
3673 }
3674
3675 RegisterInputSlots(graph, layerIndex, layer);
3676 RegisterOutputSlots(graph, layerIndex, layer);
3677}
3678
Narumol Prangnawarata0162e12021-07-23 14:47:49 +01003679armnn::UnidirectionalSequenceLstmDescriptor IDeserializer::DeserializerImpl::GetUnidirectionalSequenceLstmDescriptor(
3680 UnidirectionalSequenceLstmDescriptorPtr descriptor)
3681{
3682 armnn::UnidirectionalSequenceLstmDescriptor desc;
3683
3684 desc.m_ActivationFunc = descriptor->activationFunc();
3685 desc.m_ClippingThresCell = descriptor->clippingThresCell();
3686 desc.m_ClippingThresProj = descriptor->clippingThresProj();
3687 desc.m_CifgEnabled = descriptor->cifgEnabled();
3688 desc.m_PeepholeEnabled = descriptor->peepholeEnabled();
3689 desc.m_ProjectionEnabled = descriptor->projectionEnabled();
3690 desc.m_LayerNormEnabled = descriptor->layerNormEnabled();
3691 desc.m_TimeMajor = descriptor->timeMajor();
3692
3693 return desc;
3694}
3695
3696void IDeserializer::DeserializerImpl::ParseUnidirectionalSequenceLstm(GraphPtr graph, unsigned int layerIndex)
3697{
3698 CHECK_LAYERS(graph, 0, layerIndex);
3699
3700 auto inputs = GetInputs(graph, layerIndex);
3701 CHECK_VALID_SIZE(inputs.size(), 3);
3702
3703 auto outputs = GetOutputs(graph, layerIndex);
Mike Kelly12994962022-04-21 11:57:09 +01003704 CHECK_VALID_SIZE(outputs.size(), 3);
Narumol Prangnawarata0162e12021-07-23 14:47:49 +01003705
3706 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer();
3707 auto layerName = GetLayerName(graph, layerIndex);
3708 auto flatBufferDescriptor = flatBufferLayer->descriptor();
3709 auto flatBufferInputParams = flatBufferLayer->inputParams();
3710
3711 auto descriptor = GetUnidirectionalSequenceLstmDescriptor(flatBufferDescriptor);
3712
3713 armnn::LstmInputParams lstmInputParams;
3714
3715 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
3716 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
3717 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
3718 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
3719 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
3720 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
3721 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
3722 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
3723 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
3724
3725 lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
3726 lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
3727 lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
3728 lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
3729 lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
3730 lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
3731 lstmInputParams.m_ForgetGateBias = &forgetGateBias;
3732 lstmInputParams.m_CellBias = &cellBias;
3733 lstmInputParams.m_OutputGateBias = &outputGateBias;
3734
3735 armnn::ConstTensor inputToInputWeights;
3736 armnn::ConstTensor recurrentToInputWeights;
3737 armnn::ConstTensor cellToInputWeights;
3738 armnn::ConstTensor inputGateBias;
3739 if (!descriptor.m_CifgEnabled)
3740 {
3741 inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
3742 recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
3743 inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
3744
3745 lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
3746 lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
3747 lstmInputParams.m_InputGateBias = &inputGateBias;
3748
3749 if (descriptor.m_PeepholeEnabled)
3750 {
3751 cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
3752 lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
3753 }
3754 }
3755
3756 armnn::ConstTensor projectionWeights;
3757 armnn::ConstTensor projectionBias;
3758 if (descriptor.m_ProjectionEnabled)
3759 {
3760 projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
3761 projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());
3762
3763 lstmInputParams.m_ProjectionWeights = &projectionWeights;
3764 lstmInputParams.m_ProjectionBias = &projectionBias;
3765 }
3766
3767 armnn::ConstTensor cellToForgetWeights;
3768 armnn::ConstTensor cellToOutputWeights;
3769 if (descriptor.m_PeepholeEnabled)
3770 {
3771 cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
3772 cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());
3773
3774 lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
3775 lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
3776 }
3777
3778 armnn::ConstTensor inputLayerNormWeights;
3779 armnn::ConstTensor forgetLayerNormWeights;
3780 armnn::ConstTensor cellLayerNormWeights;
3781 armnn::ConstTensor outputLayerNormWeights;
3782 if (descriptor.m_LayerNormEnabled)
3783 {
3784 if (!descriptor.m_CifgEnabled)
3785 {
3786 inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
3787 lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
3788 }
3789 forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
3790 cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
3791 outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
3792
3793 lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
3794 lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
3795 lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
3796 }
3797
3798 IConnectableLayer* layer = m_Network->AddUnidirectionalSequenceLstmLayer(descriptor,
3799 lstmInputParams,
3800 layerName.c_str());
3801
Mike Kelly12994962022-04-21 11:57:09 +01003802 armnn::TensorInfo outputTensorInfo0 = ToTensorInfo(outputs[0]);
3803 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo0);
3804
3805 armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[1]);
3806 layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo1);
3807
3808 armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[2]);
3809 layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo2);
Narumol Prangnawarata0162e12021-07-23 14:47:49 +01003810
3811 RegisterInputSlots(graph, layerIndex, layer);
3812 RegisterOutputSlots(graph, layerIndex, layer);
3813}
3814
Derek Lamberti0028d1b2019-02-20 13:57:42 +00003815} // namespace armnnDeserializer