blob: 04dde73b20c6cab22201f43da7a3ed02338beb2a [file] [log] [blame]
Kevin May43a799c2019-02-08 16:31:42 +00001//
Finn Williams2605b232020-06-10 15:53:46 +01002// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
Kevin May43a799c2019-02-08 16:31:42 +00003// SPDX-License-Identifier: MIT
4//
5
Derek Lamberti0028d1b2019-02-20 13:57:42 +00006#include "Deserializer.hpp"
Kevin May43a799c2019-02-08 16:31:42 +00007
Matthew Benthamff130e22020-01-17 11:47:42 +00008#include <armnn/Descriptors.hpp>
Kevin May43a799c2019-02-08 16:31:42 +00009#include <armnn/Exceptions.hpp>
Matthew Benthamff130e22020-01-17 11:47:42 +000010#include <armnn/TypesUtils.hpp>
11#include <armnn/LstmParams.hpp>
12#include <armnn/QuantizedLstmParams.hpp>
Simon Obutedb5804e2022-04-14 15:49:52 +010013#include <armnn/Logging.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000014
Matteo Martincighe011d202019-11-28 11:35:47 +000015#include <armnnUtils/Permute.hpp>
Mike Kellyc9ea45a2020-02-28 18:11:58 +000016#include <armnnUtils/Transpose.hpp>
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +010017#include <armnn/utility/Assert.hpp>
Jan Eilers8eb25602020-03-09 12:13:48 +000018#include <armnn/utility/IgnoreUnused.hpp>
Matthew Sloyan0663d662020-09-14 11:47:26 +010019#include <armnn/utility/NumericCast.hpp>
Matteo Martincighe011d202019-11-28 11:35:47 +000020
Kevin May43a799c2019-02-08 16:31:42 +000021#include <ParserHelper.hpp>
Kevin May43a799c2019-02-08 16:31:42 +000022#include <VerificationHelpers.hpp>
23
Colm Donelan5b5c2222020-09-09 12:48:16 +010024#include <fmt/format.h>
Kevin May43a799c2019-02-08 16:31:42 +000025
Kevin May43a799c2019-02-08 16:31:42 +000026#include <fstream>
Saoirse Stewart263829c2019-02-19 15:54:14 +000027#include <algorithm>
28#include <limits>
29#include <numeric>
Kevin May43a799c2019-02-08 16:31:42 +000030
31using armnn::ParseException;
32using namespace armnn;
Derek Lamberti0028d1b2019-02-20 13:57:42 +000033using namespace armnnSerializer;
Kevin May43a799c2019-02-08 16:31:42 +000034
Derek Lamberti0028d1b2019-02-20 13:57:42 +000035namespace armnnDeserializer
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000036{
Kevin May43a799c2019-02-08 16:31:42 +000037
// Pimpl idiom: the public IDeserializer owns a DeserializerImpl and forwards all calls to it.
IDeserializer::IDeserializer() : pDeserializerImpl(new DeserializerImpl()){}

// Defaulted here (not in the header) so DeserializerImpl is a complete type when destroyed.
IDeserializer::~IDeserializer() = default;
41
42IDeserializer *IDeserializer::CreateRaw()
43{
44 return new IDeserializer();
45}
46
47IDeserializerPtr IDeserializer::Create()
48{
49 return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
50}
51
/// Releases a deserializer obtained from CreateRaw. Deleting nullptr is a safe no-op.
void IDeserializer::Destroy(IDeserializer *parser)
{
    delete parser;
}
56
/// Deserializes an in-memory flatbuffer blob into an INetwork.
/// Forwards to the pimpl implementation.
armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(const std::vector<uint8_t> &binaryContent)
{
    return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
}
61
/// Deserializes flatbuffer content read from a stream into an INetwork.
/// Forwards to the pimpl implementation.
armnn::INetworkPtr IDeserializer::CreateNetworkFromBinary(std::istream &binaryContent)
{
    return pDeserializerImpl->CreateNetworkFromBinary(binaryContent);
}
66
/// Looks up the binding information (id + tensor info) of a named network input.
/// Forwards to the pimpl implementation.
BindingPointInfo IDeserializer::GetNetworkInputBindingInfo(unsigned int layerId, const std::string &name) const
{
    return pDeserializerImpl->GetNetworkInputBindingInfo(layerId, name);
}
71
/// Looks up the binding information (id + tensor info) of a named network output.
/// Forwards to the pimpl implementation.
BindingPointInfo IDeserializer::GetNetworkOutputBindingInfo(unsigned int layerId, const std::string &name) const
{
    return pDeserializerImpl->GetNetworkOutputBindingInfo(layerId, name);
}
76
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +000077namespace
78{
Kevin May43a799c2019-02-08 16:31:42 +000079
80const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();
81
Finn Williams85d36712021-01-26 22:30:06 +000082 void CheckGraph(const GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +000083 unsigned int layersIndex,
84 const CheckLocation& location)
85{
86 if (graph->layers() == nullptr)
87 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010088 throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
89 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
90 "layers:{1} at {2}",
91 location.m_Function,
92 layersIndex,
93 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +000094 }
95 else if (layersIndex >= graph->layers()->size())
96 {
Colm Donelan5b5c2222020-09-09 12:48:16 +010097 throw ParseException(fmt::format("{0} was called with an invalid layers index. layers:{1} at {2}",
98 location.m_Function,
99 layersIndex,
100 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000101 }
102}
103
Finn Williams85d36712021-01-26 22:30:06 +0000104void CheckLayers(const GraphPtr& graph,
Kevin May43a799c2019-02-08 16:31:42 +0000105 unsigned int layersIndex,
106 unsigned int layerIndex,
107 const CheckLocation& location)
108{
109 if (graph->layers() == nullptr)
110 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100111 throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
112 "Possible reason is that the graph is not yet loaded and Unpack(ed). "
113 "layers:{1} at {2}",
114 location.m_Function,
115 layersIndex,
116 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000117 }
118 else if (layersIndex >= graph->layers()->size())
119 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100120 throw ParseException(fmt::format("{0} was called with an invalid layers index. "
121 "layers:{1} at {2}",
122 location.m_Function,
123 layersIndex,
124 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000125 }
126 else if (layerIndex >= graph->layers()[layersIndex].size()
127 && layerIndex != VIRTUAL_LAYER_ID)
128 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100129 throw ParseException(fmt::format("{0} was called with an invalid layer index. "
130 "layers:{1} layer:{2} at {3}",
131 location.m_Function,
132 layersIndex,
133 layerIndex,
134 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000135 }
136}
137
Finn Williams85d36712021-01-26 22:30:06 +0000138void CheckTensorPtr(TensorRawPtr rawPtr,
Kevin May43a799c2019-02-08 16:31:42 +0000139 const CheckLocation& location)
140{
141 if (rawPtr == nullptr)
142 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100143 throw ParseException(fmt::format("{0} was called with a null tensor pointer. at {1}",
144 location.m_Function,
145 location.FileLine()));
Kevin May43a799c2019-02-08 16:31:42 +0000146 }
147}
148
Finn Williams85d36712021-01-26 22:30:06 +0000149void CheckConstTensorPtr(ConstTensorRawPtr rawPtr,
Mike Kellya0766c32019-02-19 17:22:07 +0000150 const CheckLocation& location)
151{
152 if (rawPtr == nullptr)
153 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100154 throw ParseException(fmt::format("{0} was called with a null const tensor pointer. at {1}",
155 location.m_Function,
156 location.FileLine()));
Mike Kellya0766c32019-02-19 17:22:07 +0000157 }
158}
159
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000160void CheckConstTensorSize(const unsigned int constTensorSize,
161 const unsigned int tensorSize,
162 const CheckLocation& location)
163{
164 if (constTensorSize != tensorSize)
165 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100166 throw ParseException(fmt::format("{0} wrong number of components supplied to tensor. at:{1}",
167 location.m_Function,
168 location.FileLine()));
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000169 }
170}
171
Kevin May43a799c2019-02-08 16:31:42 +0000172#define CHECK_TENSOR_PTR(TENSOR_PTR) \
173 CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())
174
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000175#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
176 CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())
177
Mike Kellya0766c32019-02-19 17:22:07 +0000178#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
179 CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())
180
Kevin May43a799c2019-02-08 16:31:42 +0000181#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
182 CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())
183
184#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
185 CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
186}
187
Saoirse Stewart263829c2019-02-19 15:54:14 +0000188bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
189{
190 const unsigned int actualSize = actual.GetNumDimensions();
191 if (actualSize != expected.size())
192 {
193 return false;
194 }
195
196 for (unsigned int i = 0u; i < actualSize; i++)
197 {
198 if (actual[i] != static_cast<unsigned int>(expected[i]))
199 {
200 return false;
201 }
202 }
203
204 return true;
205}
206
/// Builds the dispatch table mapping each serialized layer enum value to its
/// parse member function. Unregistered enum values fall through to
/// ParseUnsupportedLayer, which the table is pre-filled with.
IDeserializer::DeserializerImpl::DeserializerImpl()
: m_Network(nullptr, nullptr),
// Sized Layer_MAX+1 so every enum value (may require LayerType_Max to be included)
// indexes safely; defaulted to ParseUnsupportedLayer.
m_ParserFunctions(Layer_MAX+1, &IDeserializer::DeserializerImpl::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer] = &DeserializerImpl::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer] = &DeserializerImpl::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer] = &DeserializerImpl::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer] = &DeserializerImpl::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer] = &DeserializerImpl::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer] = &DeserializerImpl::ParseBatchNormalization;
    m_ParserFunctions[Layer_CastLayer] = &DeserializerImpl::ParseCast;
    m_ParserFunctions[Layer_ChannelShuffleLayer] = &DeserializerImpl::ParseChannelShuffle;
    m_ParserFunctions[Layer_ComparisonLayer] = &DeserializerImpl::ParseComparison;
    m_ParserFunctions[Layer_ConcatLayer] = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer] = &DeserializerImpl::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer] = &DeserializerImpl::ParseConvolution2d;
    m_ParserFunctions[Layer_Convolution3dLayer] = &DeserializerImpl::ParseConvolution3d;
    m_ParserFunctions[Layer_DepthToSpaceLayer] = &DeserializerImpl::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &DeserializerImpl::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer] = &DeserializerImpl::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer] = &DeserializerImpl::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer] = &DeserializerImpl::ParseDivision;
    m_ParserFunctions[Layer_ElementwiseUnaryLayer] = &DeserializerImpl::ParseElementwiseUnary;
    m_ParserFunctions[Layer_EqualLayer] = &DeserializerImpl::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer] = &DeserializerImpl::ParseFullyConnected;
    m_ParserFunctions[Layer_FillLayer] = &DeserializerImpl::ParseFill;
    m_ParserFunctions[Layer_FloorLayer] = &DeserializerImpl::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer] = &DeserializerImpl::ParseGather;
    m_ParserFunctions[Layer_GatherNdLayer] = &DeserializerImpl::ParseGatherNd;
    m_ParserFunctions[Layer_GreaterLayer] = &DeserializerImpl::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer] = &DeserializerImpl::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer] = &DeserializerImpl::ParseL2Normalization;
    m_ParserFunctions[Layer_LogicalBinaryLayer] = &DeserializerImpl::ParseLogicalBinary;
    m_ParserFunctions[Layer_LogSoftmaxLayer] = &DeserializerImpl::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer] = &DeserializerImpl::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer] = &DeserializerImpl::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer] = &DeserializerImpl::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer] = &DeserializerImpl::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer] = &DeserializerImpl::ParseMerge;
    // Deprecated MergerLayer is parsed as Concat.
    m_ParserFunctions[Layer_MergerLayer] = &DeserializerImpl::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer] = &DeserializerImpl::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer] = &DeserializerImpl::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer] = &DeserializerImpl::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer] = &DeserializerImpl::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer] = &DeserializerImpl::ParsePooling2d;
    m_ParserFunctions[Layer_Pooling3dLayer] = &DeserializerImpl::ParsePooling3d;
    m_ParserFunctions[Layer_PreluLayer] = &DeserializerImpl::ParsePrelu;
    m_ParserFunctions[Layer_QLstmLayer] = &DeserializerImpl::ParseQLstm;
    m_ParserFunctions[Layer_QuantizeLayer] = &DeserializerImpl::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer] = &DeserializerImpl::ParseQuantizedLstm;
    m_ParserFunctions[Layer_RankLayer] = &DeserializerImpl::ParseRank;
    m_ParserFunctions[Layer_ReduceLayer] = &DeserializerImpl::ParseReduce;
    m_ParserFunctions[Layer_ReshapeLayer] = &DeserializerImpl::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer] = &DeserializerImpl::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer] = &DeserializerImpl::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer] = &DeserializerImpl::ParseRsqrt;
    m_ParserFunctions[Layer_ShapeLayer] = &DeserializerImpl::ParseShape;
    m_ParserFunctions[Layer_SliceLayer] = &DeserializerImpl::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer] = &DeserializerImpl::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer] = &DeserializerImpl::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer] = &DeserializerImpl::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer] = &DeserializerImpl::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer] = &DeserializerImpl::ParseStack;
    m_ParserFunctions[Layer_StandInLayer] = &DeserializerImpl::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer] = &DeserializerImpl::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer] = &DeserializerImpl::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer] = &DeserializerImpl::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &DeserializerImpl::ParseTransposeConvolution2d;
    m_ParserFunctions[Layer_TransposeLayer] = &DeserializerImpl::ParseTranspose;
    m_ParserFunctions[Layer_UnidirectionalSequenceLstmLayer] = &DeserializerImpl::ParseUnidirectionalSequenceLstm;
}
280
/// Returns the common LayerBase of the layer at layerIndex, regardless of its
/// concrete serialized type, by dispatching on the flatbuffers union tag.
/// Input/Output layers carry a BindableLayerBase, hence the extra ->base().
/// @throws ParseException for Layer_NONE or an unrecognized layer type.
LayerBaseRawPtr IDeserializer::DeserializerImpl::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_CastLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_CastLayer()->base();
        case Layer::Layer_ChannelShuffleLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->base();
        case Layer::Layer_ComparisonLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_Convolution3dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution3dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_ElementwiseUnaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FillLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FillLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GatherNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherNdLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            // Input is bindable: BindableLayerBase -> LayerBase.
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_InstanceNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LogicalBinaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer()->base();
        case Layer::Layer_LogSoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            // Output is bindable: BindableLayerBase -> LayerBase.
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_Pooling3dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_RankLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
        case Layer::Layer_ReduceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReduceLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_ShapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ShapeLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StandInLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_TransposeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
        case Layer::Layer_UnidirectionalSequenceLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(fmt::format("Layer type {} not recognized", layerType));
    }
}
428
Finn Williams85d36712021-01-26 22:30:06 +0000429std::string IDeserializer::DeserializerImpl::GetLayerName(const GraphPtr& graph, unsigned int index)
Éanna Ó Catháin633f8592019-02-25 16:26:29 +0000430{
431 auto layer = GetBaseLayer(graph, index);
432 assert(layer);
433 return layer->layerName()->str();
434}
435
Finn Williams85d36712021-01-26 22:30:06 +0000436int32_t IDeserializer::DeserializerImpl::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000437{
438 auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();
439
440 if (layerType == Layer::Layer_InputLayer)
441 {
442 return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
443 }
444 else if ( layerType == Layer::Layer_OutputLayer )
445 {
446 return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
447 }
448 return 0;
449}
450
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000451armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
Mike Kellya0766c32019-02-19 17:22:07 +0000452{
453 switch (dataLayout)
454 {
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000455 case armnnSerializer::DataLayout::DataLayout_NHWC:
Mike Kellya0766c32019-02-19 17:22:07 +0000456 return armnn::DataLayout::NHWC;
Matthew Sloyanb63a3112021-09-08 13:05:51 +0100457 case armnnSerializer::DataLayout::DataLayout_NDHWC:
458 return armnn::DataLayout::NDHWC;
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +0100459 case armnnSerializer::DataLayout::DataLayout_NCDHW:
460 return armnn::DataLayout::NCDHW;
Derek Lamberti0028d1b2019-02-20 13:57:42 +0000461 case armnnSerializer::DataLayout::DataLayout_NCHW:
Mike Kellya0766c32019-02-19 17:22:07 +0000462 default:
463 return armnn::DataLayout::NCHW;
464 }
465}
466
/// Maps the serialized ActivationFunction enum to the armnn one.
/// NOTE(review): unknown values silently map to Sigmoid (the switch default);
/// preserved as-is — confirm this is the intended schema-evolution behavior.
armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        case armnnSerializer::ActivationFunction_Elu:
            return armnn::ActivationFunction::Elu;
        case armnnSerializer::ActivationFunction_HardSwish:
            return armnn::ActivationFunction::HardSwish;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}
497
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +0100498armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
499{
500 switch (function)
501 {
502 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
503 return armnn::ArgMinMaxFunction::Max;
504 case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
505 default:
506 return armnn::ArgMinMaxFunction::Min;
507 }
508}
509
/// Maps the serialized ComparisonOperation enum to the armnn one.
/// NOTE(review): unknown values silently map to NotEqual (the switch default);
/// preserved as-is — confirm this is the intended fallback.
armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
            return armnn::ComparisonOperation::Equal;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
            return armnn::ComparisonOperation::Greater;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
            return armnn::ComparisonOperation::GreaterOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
            return armnn::ComparisonOperation::Less;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
            return armnn::ComparisonOperation::LessOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
        default:
            return armnn::ComparisonOperation::NotEqual;
    }
}
529
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +0000530armnn::ReduceOperation ToReduceOperation(armnnSerializer::ReduceOperation operation)
531{
532 switch (operation)
533 {
534 case armnnSerializer::ReduceOperation::ReduceOperation_Sum:
535 return armnn::ReduceOperation::Sum;
536 case armnnSerializer::ReduceOperation::ReduceOperation_Max:
537 return armnn::ReduceOperation::Max;
538 case armnnSerializer::ReduceOperation::ReduceOperation_Mean:
539 return armnn::ReduceOperation::Mean;
540 case armnnSerializer::ReduceOperation::ReduceOperation_Min:
541 return armnn::ReduceOperation::Min;
Teresa Charlin4e3e8312021-08-05 12:34:37 +0100542 case armnnSerializer::ReduceOperation::ReduceOperation_Prod:
543 return armnn::ReduceOperation::Prod;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +0000544 default:
545 return armnn::ReduceOperation::Sum;
546 }
547}
548
James Conroyaba90cd2020-11-06 16:28:18 +0000549armnn::LogicalBinaryOperation ToLogicalBinaryOperation(armnnSerializer::LogicalBinaryOperation operation)
550{
551 switch (operation)
552 {
553 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalAnd:
554 return armnn::LogicalBinaryOperation::LogicalAnd;
555 case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalOr:
556 return armnn::LogicalBinaryOperation::LogicalOr;
557 default:
558 throw armnn::InvalidArgumentException("Logical Binary operation unknown");
559 }
560}
561
josh minor4a3c6102020-01-06 16:40:46 -0600562armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
563{
564 switch (operation)
565 {
566 case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
567 return armnn::UnaryOperation::Abs;
568 case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
569 return armnn::UnaryOperation::Rsqrt;
570 case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
571 return armnn::UnaryOperation::Sqrt;
572 case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
573 return armnn::UnaryOperation::Exp;
574 case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
575 return armnn::UnaryOperation::Neg;
James Conroyaba90cd2020-11-06 16:28:18 +0000576 case armnnSerializer::UnaryOperation::UnaryOperation_LogicalNot:
577 return armnn::UnaryOperation::LogicalNot;
Teresa Charlin50de4fa2021-05-31 18:47:33 +0100578 case armnnSerializer::UnaryOperation::UnaryOperation_Log:
579 return armnn::UnaryOperation::Log;
580 case armnnSerializer::UnaryOperation::UnaryOperation_Sin:
581 return armnn::UnaryOperation::Sin;
josh minor4a3c6102020-01-06 16:40:46 -0600582 default:
583 throw armnn::InvalidArgumentException("Unary operation unknown");
584 }
585}
586
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +0100587armnn::PaddingMode ToPaddingMode(armnnSerializer::PaddingMode paddingMode)
588{
589 switch (paddingMode)
590 {
591 case armnnSerializer::PaddingMode::PaddingMode_Reflect:
592 return armnn::PaddingMode::Reflect;
593 case armnnSerializer::PaddingMode::PaddingMode_Symmetric:
594 return armnn::PaddingMode::Symmetric;
595 default:
596 return armnn::PaddingMode::Constant;
597 }
598}
599
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100600armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
601{
602 switch (method)
603 {
604 case armnnSerializer::ResizeMethod_NearestNeighbor:
605 return armnn::ResizeMethod::NearestNeighbor;
606 case armnnSerializer::ResizeMethod_Bilinear:
Aron Virginas-Tar3c9b2702019-10-31 13:45:16 +0000607 return armnn::ResizeMethod::Bilinear;
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +0100608 default:
609 return armnn::ResizeMethod::NearestNeighbor;
610 }
611}
612
Finn Williams85d36712021-01-26 22:30:06 +0000613armnn::TensorInfo ToTensorInfo(TensorRawPtr tensorPtr)
Kevin May43a799c2019-02-08 16:31:42 +0000614{
615 armnn::DataType type;
616 CHECK_TENSOR_PTR(tensorPtr);
617
618 switch (tensorPtr->dataType())
619 {
Francis Murtaghddb1d062020-03-10 13:51:45 +0000620 case DataType_QAsymmS8:
621 type = armnn::DataType::QAsymmS8;
622 break;
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000623 case DataType_QSymmS8:
624 type = armnn::DataType::QSymmS8;
625 break;
Kevin May43a799c2019-02-08 16:31:42 +0000626 case DataType_QuantisedAsymm8:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000627 case DataType_QAsymmU8:
628 type = armnn::DataType::QAsymmU8;
Kevin May43a799c2019-02-08 16:31:42 +0000629 break;
Derek Lambertif90c56d2020-01-10 17:14:08 +0000630 case DataType_QSymmS16:
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000631 case DataType_QuantisedSymm16:
Derek Lambertif90c56d2020-01-10 17:14:08 +0000632 type = armnn::DataType::QSymmS16;
Nattapat Chaimanowongcd5ac232019-03-19 12:26:36 +0000633 break;
Mike Kellya0766c32019-02-19 17:22:07 +0000634 case DataType_Signed32:
635 type = armnn::DataType::Signed32;
636 break;
Mike Kelly1f140f72021-04-06 12:25:55 +0100637 case DataType_Signed64:
638 type = armnn::DataType::Signed64;
639 break;
Kevin May43a799c2019-02-08 16:31:42 +0000640 case DataType_Float32:
641 type = armnn::DataType::Float32;
642 break;
643 case DataType_Float16:
644 type = armnn::DataType::Float16;
645 break;
646 case DataType_Boolean:
647 type = armnn::DataType::Boolean;
648 break;
649 default:
650 {
651 CheckLocation location = CHECK_LOCATION();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100652 throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
653 tensorPtr->dataType(),
654 EnumNameDataType(tensorPtr->dataType()),
655 location.AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000656 }
657 }
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000658
Colm Donelan800b2812021-02-12 12:43:35 +0000659 float quantizationScale = tensorPtr->quantizationScale();
660 int32_t quantizationOffset = tensorPtr->quantizationOffset();
661
Finn Williams2605b232020-06-10 15:53:46 +0100662 if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::Scalar))
663 {
Colm Donelan800b2812021-02-12 12:43:35 +0000664 return armnn::TensorInfo(TensorShape{armnn::Dimensionality::Scalar},
Finn Williams2605b232020-06-10 15:53:46 +0100665 type,
666 quantizationScale,
667 quantizationOffset);
668 }
Colm Donelan800b2812021-02-12 12:43:35 +0000669 else if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::NotSpecified))
670 {
671 armnn::TensorInfo result(TensorShape{Dimensionality::NotSpecified},
672 type,
673 quantizationScale,
674 quantizationOffset);
675 return result;
676 }
Kevin May43a799c2019-02-08 16:31:42 +0000677
678 auto dimensions = tensorPtr->dimensions();
679 unsigned int size = dimensions->size();
680 std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);
Colm Donelan800b2812021-02-12 12:43:35 +0000681 bool dimensionsSpecificity[armnn::MaxNumOfTensorDimensions];
682 std::fill_n(dimensionsSpecificity, armnn::MaxNumOfTensorDimensions, true);
683 // For backwards compatibility check if the dimensionSpecificity vector is present first.
684 // The default is to have dimensionSpecificity set to all true's anyway.
685 if (tensorPtr->dimensionSpecificity() != nullptr)
686 {
687 auto dimensionSpecificity = tensorPtr->dimensionSpecificity();
688 size = dimensionSpecificity->size();
689 for (unsigned int i = 0; i < size; ++i)
690 {
691 dimensionsSpecificity[i] = dimensionSpecificity->Get(i);
692 }
693 }
694 // Construct a TensorShape
695 TensorShape shape(size, outputDims.data(), dimensionsSpecificity);
Kevin May43a799c2019-02-08 16:31:42 +0000696
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000697 auto quantizationScales = tensorPtr->quantizationScales();
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000698 if (quantizationScales)
699 {
700 unsigned int quantizationScalesSize = quantizationScales->size();
701 std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
702 unsigned int quantizationDim = tensorPtr->quantizationDim();
Colm Donelan800b2812021-02-12 12:43:35 +0000703 armnn::TensorInfo result(shape,
Sadik Armagan1a84fe32020-03-27 15:56:57 +0000704 type,
705 scales,
706 quantizationDim);
707 return result;
708 }
709
Kevin May43a799c2019-02-08 16:31:42 +0000710 // two statements (on purpose) for easier debugging:
Colm Donelan800b2812021-02-12 12:43:35 +0000711 armnn::TensorInfo result(shape,
Kevin May43a799c2019-02-08 16:31:42 +0000712 type,
713 quantizationScale,
714 quantizationOffset);
Colm Donelan800b2812021-02-12 12:43:35 +0000715
Kevin May43a799c2019-02-08 16:31:42 +0000716 return result;
717}
718
Finn Williams85d36712021-01-26 22:30:06 +0000719armnn::ConstTensor ToConstTensor(ConstTensorRawPtr constTensorPtr)
Mike Kellya0766c32019-02-19 17:22:07 +0000720{
721 CHECK_CONST_TENSOR_PTR(constTensorPtr);
722 armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());
Matthew Sloyan81beae32021-07-13 19:46:11 +0100723 tensorInfo.SetConstant();
Mike Kellya0766c32019-02-19 17:22:07 +0000724
725 switch (constTensorPtr->data_type())
726 {
727 case ConstTensorData_ByteData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000728 {
729 auto byteData = constTensorPtr->data_as_ByteData()->data();
730 CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
731 return armnn::ConstTensor(tensorInfo, byteData->data());
732 }
Mike Kellya0766c32019-02-19 17:22:07 +0000733 case ConstTensorData_ShortData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000734 {
735 auto shortData = constTensorPtr->data_as_ShortData()->data();
736 CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
737 return armnn::ConstTensor(tensorInfo, shortData->data());
738 }
Mike Kellya0766c32019-02-19 17:22:07 +0000739 case ConstTensorData_IntData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000740 {
741 auto intData = constTensorPtr->data_as_IntData()->data();
742 CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
743 return armnn::ConstTensor(tensorInfo, intData->data());
744 }
Mike Kellya0766c32019-02-19 17:22:07 +0000745 case ConstTensorData_LongData:
Saoirse Stewartf11bab52019-02-25 09:22:58 +0000746 {
747 auto longData = constTensorPtr->data_as_LongData()->data();
748 CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
749 return armnn::ConstTensor(tensorInfo, longData->data());
750 }
Mike Kellya0766c32019-02-19 17:22:07 +0000751 default:
752 {
753 CheckLocation location = CHECK_LOCATION();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100754 throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
755 constTensorPtr->data_type(),
756 EnumNameConstTensorData(constTensorPtr->data_type()),
757 location.AsString()));
Mike Kellya0766c32019-02-19 17:22:07 +0000758 }
759 }
760}
761
Finn Williams85d36712021-01-26 22:30:06 +0000762TensorRawPtrVector IDeserializer::DeserializerImpl::GetInputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000763{
764 CHECK_LAYERS(graphPtr, 0, layerIndex);
765 auto layer = GetBaseLayer(graphPtr, layerIndex);
766 const auto& numInputs = layer->inputSlots()->size();
767
768 TensorRawPtrVector result(numInputs);
769
770 for (unsigned int i=0; i<numInputs; ++i)
771 {
772 auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
773 (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
774 result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
775 }
776 return result;
777}
778
Finn Williams85d36712021-01-26 22:30:06 +0000779TensorRawPtrVector IDeserializer::DeserializerImpl::GetOutputs(const GraphPtr& graphPtr, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000780{
781 CHECK_LAYERS(graphPtr, 0, layerIndex);
782 auto layer = GetBaseLayer(graphPtr, layerIndex);
783 const auto& numOutputs = layer->outputSlots()->size();
784
785 TensorRawPtrVector result(numOutputs);
786
787 for (unsigned int i=0; i<numOutputs; ++i)
788 {
789 result[i] = layer->outputSlots()->Get(i)->tensorInfo();
790 }
791 return result;
792}
793
Finn Williams85d36712021-01-26 22:30:06 +0000794void IDeserializer::DeserializerImpl::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +0000795{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000796 CHECK_LAYERS(graph, 0, layerIndex);
797 const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
Colm Donelan5b5c2222020-09-09 12:48:16 +0100798 throw ParseException(fmt::format("Layer not supported. layerIndex: {0} "
799 "layerName: {1} / {2}",
800 layerIndex,
801 layerName,
802 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000803}
804
Finn Williams85d36712021-01-26 22:30:06 +0000805void IDeserializer::DeserializerImpl::ResetParser()
Kevin May43a799c2019-02-08 16:31:42 +0000806{
807 m_Network = armnn::INetworkPtr(nullptr, nullptr);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000808 m_InputBindings.clear();
809 m_OutputBindings.clear();
Kevin May43a799c2019-02-08 16:31:42 +0000810}
811
Kevin May43a799c2019-02-08 16:31:42 +0000812
Finn Williams85d36712021-01-26 22:30:06 +0000813INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000814{
815 ResetParser();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000816 GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
817 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000818}
819
Finn Williams85d36712021-01-26 22:30:06 +0000820armnn::INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromBinary(std::istream& binaryContent)
Kevin May43a799c2019-02-08 16:31:42 +0000821{
Derek Lamberti2b183fb2019-02-18 16:36:57 +0000822 ResetParser();
Simon Obutedb5804e2022-04-14 15:49:52 +0100823 if (binaryContent.fail()) {
824 ARMNN_LOG(error) << (std::string("Cannot read input"));
825 throw ParseException("Unable to read Input stream data");
826 }
827 binaryContent.seekg(0, std::ios::end);
828 const std::streamoff size = binaryContent.tellg();
829 std::vector<char> content(static_cast<size_t>(size));
830 binaryContent.seekg(0);
831 binaryContent.read(content.data(), static_cast<std::streamsize>(size));
832 GraphPtr graph = LoadGraphFromBinary(reinterpret_cast<uint8_t*>(content.data()), static_cast<size_t>(size));
Derek Lamberti8ddae332019-02-21 16:29:43 +0000833 return CreateNetworkFromGraph(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000834}
835
Finn Williams85d36712021-01-26 22:30:06 +0000836GraphPtr IDeserializer::DeserializerImpl::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
Kevin May43a799c2019-02-08 16:31:42 +0000837{
838 if (binaryContent == nullptr)
839 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100840 throw InvalidArgumentException(fmt::format("Invalid (null) binary content {}",
841 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000842 }
843 flatbuffers::Verifier verifier(binaryContent, len);
844 if (verifier.VerifyBuffer<SerializedGraph>() == false)
845 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100846 throw ParseException(fmt::format("Buffer doesn't conform to the expected Armnn "
847 "flatbuffers format. size:{0} {1}",
848 len,
849 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000850 }
851 return GetSerializedGraph(binaryContent);
852}
853
Finn Williams85d36712021-01-26 22:30:06 +0000854INetworkPtr IDeserializer::DeserializerImpl::CreateNetworkFromGraph(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000855{
856 m_Network = INetwork::Create();
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100857 ARMNN_ASSERT(graph != nullptr);
Kevin May43a799c2019-02-08 16:31:42 +0000858 unsigned int layerIndex = 0;
Derek Lamberti8ddae332019-02-21 16:29:43 +0000859 for (AnyLayer const* layer : *graph->layers())
Kevin May43a799c2019-02-08 16:31:42 +0000860 {
861 if (layer->layer_type() != Layer_InputLayer &&
862 layer->layer_type() != Layer_OutputLayer)
863 {
864 // lookup and call the parser function
865 auto& parserFunction = m_ParserFunctions[layer->layer_type()];
Derek Lamberti8ddae332019-02-21 16:29:43 +0000866 (this->*parserFunction)(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +0000867 }
868 ++layerIndex;
869 }
870
Derek Lamberti8ddae332019-02-21 16:29:43 +0000871 SetupInputLayers(graph);
872 SetupOutputLayers(graph);
Kevin May43a799c2019-02-08 16:31:42 +0000873
874 // establish the connections from the layer outputs to the inputs of the subsequent layers
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100875 for (auto&& graphIt : m_GraphConnections)
Kevin May43a799c2019-02-08 16:31:42 +0000876 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100877 Connections& connections = graphIt.second;
878 for (auto&& outputIt : connections.outputSlots)
Kevin May43a799c2019-02-08 16:31:42 +0000879 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100880 const unsigned int outputSlotIndex = outputIt.first;
881 IOutputSlot* outputSlot = outputIt.second;
882 if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
Kevin May43a799c2019-02-08 16:31:42 +0000883 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100884 for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000885 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100886 outputSlot->Connect(*inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +0000887 }
Kevin May43a799c2019-02-08 16:31:42 +0000888 }
889 }
890 }
891
892 return std::move(m_Network);
893}
894
Finn Williams85d36712021-01-26 22:30:06 +0000895BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkInputBindingInfo(unsigned int layerIndex,
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +0000896 const std::string& name) const
Kevin May43a799c2019-02-08 16:31:42 +0000897{
Jan Eilers8eb25602020-03-09 12:13:48 +0000898 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000899 for (auto inputBinding : m_InputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000900 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000901 if (inputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000902 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000903 return inputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000904 }
905 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100906 throw ParseException(fmt::format("No input binding found for layer:{0} / {1}",
907 name,
908 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000909}
910
Finn Williams85d36712021-01-26 22:30:06 +0000911BindingPointInfo IDeserializer::DeserializerImpl::GetNetworkOutputBindingInfo(unsigned int layerIndex,
Kevin May43a799c2019-02-08 16:31:42 +0000912 const std::string& name) const
913{
Jan Eilers8eb25602020-03-09 12:13:48 +0000914 IgnoreUnused(layerIndex);
Derek Lamberti8ddae332019-02-21 16:29:43 +0000915 for (auto outputBinding : m_OutputBindings)
Kevin May43a799c2019-02-08 16:31:42 +0000916 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000917 if (outputBinding.first == name)
Kevin May43a799c2019-02-08 16:31:42 +0000918 {
Derek Lamberti8ddae332019-02-21 16:29:43 +0000919 return outputBinding.second;
Kevin May43a799c2019-02-08 16:31:42 +0000920 }
921 }
Colm Donelan5b5c2222020-09-09 12:48:16 +0100922 throw ParseException(fmt::format("No output binding found for layer:{0} / {1}",
923 name,
924 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000925}
926
Finn Williams85d36712021-01-26 22:30:06 +0000927unsigned int IDeserializer::DeserializerImpl::GetInputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000928{
929 for (unsigned int i = 0; i < graph->layers()->size(); i++)
930 {
931 auto layer = graph->layers()->Get(i);
932 if (layer->layer_type() == Layer::Layer_InputLayer)
933 {
934 auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
935 if (layerBindingId == targetId)
936 {
937 return i;
938 }
939 }
940 }
941 throw ParseException("Input layer with given layerBindingId not found");
942}
943
Finn Williams85d36712021-01-26 22:30:06 +0000944unsigned int IDeserializer::DeserializerImpl::GetOutputLayerInVector(GraphPtr graph, int targetId)
Tee Jungaa920c52019-11-05 10:48:25 +0000945{
946 for (unsigned int i = 0; i < graph->layers()->size(); i++)
947 {
948 auto layer = graph->layers()->Get(i);
949 if (layer->layer_type() == Layer::Layer_OutputLayer)
950 {
951 auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
952 if (layerBindingId == targetId)
953 {
954 return i;
955 }
956 }
957 }
958 throw ParseException("Output layer with given layerBindingId not found");
959}
960
Finn Williams85d36712021-01-26 22:30:06 +0000961unsigned int IDeserializer::DeserializerImpl::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100962{
963 for (unsigned int i = 0; i < graph->layers()->size(); i++)
964 {
965 LayerBaseRawPtr layer = GetBaseLayer(graph, i);
966 if (layer->index() == targetIndex)
967 {
968 return i;
969 }
970 }
971 throw ParseException("Layer with given index not found");
972}
973
Finn Williams85d36712021-01-26 22:30:06 +0000974IDeserializer::DeserializerImpl::FeatureVersions IDeserializer::DeserializerImpl::GetFeatureVersions(GraphPtr graph)
Tee Jungaa920c52019-11-05 10:48:25 +0000975{
Finn Williams85d36712021-01-26 22:30:06 +0000976 IDeserializer::DeserializerImpl::FeatureVersions versions;
Tee Jungaa920c52019-11-05 10:48:25 +0000977
978 if (graph->featureVersions())
979 {
980 versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
Jan Eilers53ef7952021-06-02 12:01:25 +0100981 versions.m_WeightsLayoutScheme = graph->featureVersions()->weightsLayoutScheme();
Matthew Sloyan81beae32021-07-13 19:46:11 +0100982 versions.m_ConstTensorsAsInputs = graph->featureVersions()->constantTensorsAsInputs();
Tee Jungaa920c52019-11-05 10:48:25 +0000983 }
984
985 return versions;
986}
987
Finn Williams85d36712021-01-26 22:30:06 +0000988void IDeserializer::DeserializerImpl::SetupInputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +0000989{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000990 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100991 const unsigned int numInputs = graph->inputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +0000992 m_InputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100993 m_InputBindings.reserve(numInputs);
994
995 for (unsigned int i = 0; i < numInputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +0000996 {
Tee Jungaa920c52019-11-05 10:48:25 +0000997 unsigned int inputLayerIndex = 0xFFFFFFFF;
998 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
999 {
Matthew Sloyan0663d662020-09-14 11:47:26 +01001000 const unsigned int inputId = armnn::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
Tee Jungaa920c52019-11-05 10:48:25 +00001001 inputLayerIndex = GetLayerIndexInVector(graph, inputId);
1002 }
1003 else
1004 {
1005 const int inputId = graph->inputIds()->Get(i);
1006 inputLayerIndex = GetInputLayerInVector(graph, inputId);
1007 }
1008
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001009 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001010
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001011 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
1012 LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001013 ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");
Kevin May43a799c2019-02-08 16:31:42 +00001014
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001015 IConnectableLayer* inputLayer =
1016 m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +00001017
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001018 const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
1019 inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
1020 RegisterOutputSlots(graph, inputLayerIndex, inputLayer);
1021
Derek Lamberti8ddae332019-02-21 16:29:43 +00001022 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001023 m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +00001024 }
1025}
1026
Finn Williams85d36712021-01-26 22:30:06 +00001027void IDeserializer::DeserializerImpl::SetupOutputLayers(GraphPtr graph)
Kevin May43a799c2019-02-08 16:31:42 +00001028{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001029 CHECK_GRAPH(graph, 0);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001030 const unsigned int numOutputs = graph->outputIds()->size();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001031 m_OutputBindings.clear();
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001032 m_OutputBindings.reserve(numOutputs);
1033
1034 for (unsigned int i = 0; i < numOutputs; i++)
Kevin May43a799c2019-02-08 16:31:42 +00001035 {
Tee Jungaa920c52019-11-05 10:48:25 +00001036 unsigned int outputLayerIndex = 0xFFFFFFFF;
1037 if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
1038 {
Matthew Sloyan0663d662020-09-14 11:47:26 +01001039 const unsigned int outputId = armnn::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
Tee Jungaa920c52019-11-05 10:48:25 +00001040 outputLayerIndex = GetLayerIndexInVector(graph, outputId);
1041 }
1042 else
1043 {
1044 const int outputId = graph->outputIds()->Get(i);
1045 outputLayerIndex = GetOutputLayerInVector(graph, outputId);
1046 }
1047
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001048 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001049
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001050 // GetBindingLayerInfo expect the index to be index in the vector not index property on each layer base
1051 LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
Colm Donelan30aa3712021-04-07 17:28:01 +01001052 ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Output has no name.");
Derek Lamberti8ddae332019-02-21 16:29:43 +00001053
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001054 IConnectableLayer* outputLayer =
1055 m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());
Derek Lamberti8ddae332019-02-21 16:29:43 +00001056
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001057 RegisterInputSlots(graph, outputLayerIndex, outputLayer);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001058 unsigned int sourceLayerIndex =
1059 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
Colm Donelan30aa3712021-04-07 17:28:01 +01001060 unsigned int outputSlotIndex =
1061 GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->outputSlotIndex());
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001062 LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
Colm Donelan30aa3712021-04-07 17:28:01 +01001063 const armnn::TensorInfo& tensorInfo = ToTensorInfo(
1064 sourceBaseLayer->outputSlots()->Get(outputSlotIndex)->tensorInfo());
Derek Lamberti8ddae332019-02-21 16:29:43 +00001065 BindingPointInfo bindingInfo = {bindingId, tensorInfo};
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001066 m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
Kevin May43a799c2019-02-08 16:31:42 +00001067 }
1068}
1069
Finn Williams85d36712021-01-26 22:30:06 +00001070void IDeserializer::DeserializerImpl::RegisterOutputSlots(GraphPtr graph,
Derek Lamberti8ddae332019-02-21 16:29:43 +00001071 uint32_t layerIndex,
1072 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +00001073{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001074 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001075 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001076 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
1077 if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
Kevin May43a799c2019-02-08 16:31:42 +00001078 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001079 throw ParseException(fmt::format("The number of outputslots ({0}) does not match the number expected ({1})"
1080 " for layer index: {2} {3}",
1081 baseLayer->outputSlots()->size(),
1082 layer->GetNumOutputSlots(),
1083 layerIndex,
1084 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +00001085 }
1086
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001087 for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +00001088 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001089 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
1090 armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
1091 // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
1092 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
Kevin May43a799c2019-02-08 16:31:42 +00001093 }
1094}
1095
Finn Williams85d36712021-01-26 22:30:06 +00001096void IDeserializer::DeserializerImpl::RegisterInputSlots(GraphPtr graph,
Matthew Sloyan81beae32021-07-13 19:46:11 +01001097 uint32_t layerIndex,
1098 armnn::IConnectableLayer* layer,
1099 std::vector<unsigned int> ignoreSlots)
Kevin May43a799c2019-02-08 16:31:42 +00001100{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001101 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001102 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001103 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
Matthew Sloyan81beae32021-07-13 19:46:11 +01001104
1105 if (baseLayer->inputSlots()->size() != (layer->GetNumInputSlots() - ignoreSlots.size()))
Kevin May43a799c2019-02-08 16:31:42 +00001106 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001107 throw ParseException(fmt::format("The number of inputslots ({0}) does not match the number expected ({1})"
1108 " for layer index:{2} {3}",
1109 baseLayer->inputSlots()->size(),
1110 layer->GetNumInputSlots(),
1111 layerIndex,
1112 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +00001113 }
1114
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001115 for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +00001116 {
Matthew Sloyan81beae32021-07-13 19:46:11 +01001117 // Check if slot should be ignored.
1118 if (std::find(ignoreSlots.begin(), ignoreSlots.end(), i) == ignoreSlots.end())
1119 {
1120 auto fbInputSlot = baseLayer->inputSlots()->Get(i);
1121 auto fbConnection = fbInputSlot->connection();
1122 armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
1123 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
1124 }
Kevin May43a799c2019-02-08 16:31:42 +00001125 }
1126}
1127
Finn Williams85d36712021-01-26 22:30:06 +00001128void IDeserializer::DeserializerImpl::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001129 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001130 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +00001131{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001132 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +00001133 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001134 m_GraphConnections[sourceLayerIndex] = Connections();
1135 }
1136
1137 Connections& connections = m_GraphConnections[sourceLayerIndex];
1138 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
1139 {
1140 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +00001141 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001142 else
1143 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001144 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001145 }
1146}
Kevin May43a799c2019-02-08 16:31:42 +00001147
Finn Williams85d36712021-01-26 22:30:06 +00001148void IDeserializer::DeserializerImpl::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001149 uint32_t outputSlotIndex,
1150 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001151{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001152 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1153 {
1154 m_GraphConnections[sourceLayerIndex] = Connections();
1155 }
1156
1157 Connections& connections = m_GraphConnections[sourceLayerIndex];
1158 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1159 {
1160 throw ParseException("Same output slot index processed twice");
1161 }
1162
1163 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +00001164}
1165
Finn Williams85d36712021-01-26 22:30:06 +00001166void IDeserializer::DeserializerImpl::ParseAbs(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001167{
1168 CHECK_LAYERS(graph, 0, layerIndex);
1169 auto inputs = GetInputs(graph, layerIndex);
1170 CHECK_LOCATION();
1171 CHECK_VALID_SIZE(inputs.size(), 1);
1172
1173 auto outputs = GetOutputs(graph, layerIndex);
1174 CHECK_VALID_SIZE(outputs.size(), 1);
1175
1176 auto layerName = GetLayerName(graph, layerIndex);
1177
josh minor4a3c6102020-01-06 16:40:46 -06001178 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
1179 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001180 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1181 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1182
1183 RegisterInputSlots(graph, layerIndex, layer);
1184 RegisterOutputSlots(graph, layerIndex, layer);
1185}
1186
Finn Williams85d36712021-01-26 22:30:06 +00001187void IDeserializer::DeserializerImpl::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +00001188{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001189 CHECK_LAYERS(graph, 0, layerIndex);
1190 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001191 CHECK_LOCATION();
1192 CHECK_VALID_SIZE(inputs.size(), 1);
1193
Derek Lamberti8ddae332019-02-21 16:29:43 +00001194 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001195 CHECK_VALID_SIZE(outputs.size(), 1);
1196
Derek Lamberti8ddae332019-02-21 16:29:43 +00001197 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001198 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001199 auto serializerDescriptor = serializerLayer->descriptor();
1200
1201 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001202 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +00001203 descriptor.m_A = serializerDescriptor->a();
1204 descriptor.m_B = serializerDescriptor->b();
1205
1206 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
1207 layerName.c_str());
1208 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1209 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1210
Derek Lamberti8ddae332019-02-21 16:29:43 +00001211 RegisterInputSlots(graph, layerIndex, layer);
1212 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +00001213}
1214
Finn Williams85d36712021-01-26 22:30:06 +00001215void IDeserializer::DeserializerImpl::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001216{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001217 CHECK_LAYERS(graph, 0, layerIndex);
1218 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001219 CHECK_LOCATION();
1220 CHECK_VALID_SIZE(inputs.size(), 2);
1221
Derek Lamberti8ddae332019-02-21 16:29:43 +00001222 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001223 CHECK_VALID_SIZE(outputs.size(), 1);
1224
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001225 auto layerName = GetLayerName(graph, layerIndex);
1226 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001227
1228 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1229 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1230
Derek Lamberti8ddae332019-02-21 16:29:43 +00001231 RegisterInputSlots(graph, layerIndex, layer);
1232 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001233}
1234
Finn Williams85d36712021-01-26 22:30:06 +00001235void IDeserializer::DeserializerImpl::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001236{
1237 CHECK_LAYERS(graph, 0, layerIndex);
1238 auto inputs = GetInputs(graph, layerIndex);
1239 CHECK_LOCATION();
1240 CHECK_VALID_SIZE(inputs.size(), 1);
1241
1242 auto outputs = GetOutputs(graph, layerIndex);
1243 CHECK_VALID_SIZE(outputs.size(), 1);
1244
1245 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1246 auto serializerDescriptor = serializerLayer->descriptor();
1247
1248 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001249 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001250 descriptor.m_Axis = serializerDescriptor->axis();
1251 auto layerName = GetLayerName(graph, layerIndex);
1252 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1253
1254 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1255 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1256
1257 RegisterInputSlots(graph, layerIndex, layer);
1258 RegisterOutputSlots(graph, layerIndex, layer);
1259}
1260
Finn Williams85d36712021-01-26 22:30:06 +00001261void IDeserializer::DeserializerImpl::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001262{
1263 CHECK_LAYERS(graph, 0, layerIndex);
1264
Finn Williams85d36712021-01-26 22:30:06 +00001265 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001266 CHECK_VALID_SIZE(inputs.size(), 1);
1267
Finn Williams85d36712021-01-26 22:30:06 +00001268 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001269 CHECK_VALID_SIZE(outputs.size(), 1);
1270
1271 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1272 auto flatBufferCrops = flatBufferDescriptor->crops();
1273 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1274
1275 if (flatBufferCrops->Length() % 2 != 0)
1276 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001277 throw ParseException(fmt::format("The size of crops must be divisible by 2 {}", CHECK_LOCATION().AsString()));
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001278 }
1279
1280 std::vector<std::pair<unsigned int, unsigned int>> crops;
1281 crops.reserve(flatBufferCrops->Length() / 2);
1282 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
1283 {
1284 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1285 }
1286
1287 armnn::BatchToSpaceNdDescriptor descriptor;
1288 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1289 descriptor.m_BlockShape =
1290 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1291 descriptor.m_Crops = crops;
1292
1293 auto layerName = GetLayerName(graph, layerIndex);
1294 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1295
1296 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1297 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1298
1299 RegisterInputSlots(graph, layerIndex, layer);
1300 RegisterOutputSlots(graph, layerIndex, layer);
1301}
1302
Finn Williams85d36712021-01-26 22:30:06 +00001303void IDeserializer::DeserializerImpl::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
ruoyan018e7fa232019-02-28 15:09:07 +00001304{
1305 CHECK_LAYERS(graph, 0, layerIndex);
1306
1307 auto inputs = GetInputs(graph, layerIndex);
1308 CHECK_VALID_SIZE(inputs.size(), 1);
1309
1310 auto outputs = GetOutputs(graph, layerIndex);
1311 CHECK_VALID_SIZE(outputs.size(), 1);
1312 auto outputInfo = ToTensorInfo(outputs[0]);
1313
ruoyan015c7ab052019-03-04 14:48:02 +00001314 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001315
1316 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1317 auto serializerDescriptor = serializerLayer->descriptor();
1318
1319 armnn::BatchNormalizationDescriptor descriptor;
1320 descriptor.m_Eps = serializerDescriptor->eps();
1321 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1322
1323 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1324 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1325 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1326 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1327
1328 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1329 mean,
1330 variance,
1331 beta,
1332 gamma,
1333 layerName.c_str());
1334 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1335
1336 RegisterInputSlots(graph, layerIndex, layer);
1337 RegisterOutputSlots(graph, layerIndex, layer);
1338}
1339
mathad01b392e982021-04-07 12:07:30 +01001340void IDeserializer::DeserializerImpl::ParseCast(GraphPtr graph, unsigned int layerIndex)
1341{
1342 CHECK_LAYERS(graph, 0, layerIndex);
1343 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1344 CHECK_LOCATION();
1345 CHECK_VALID_SIZE(inputs.size(), 1);
1346
1347 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1348 CHECK_VALID_SIZE(outputs.size(), 1);
1349
1350 auto layerName = GetLayerName(graph, layerIndex);
1351
1352 IConnectableLayer* layer = m_Network->AddCastLayer(layerName.c_str());
1353
1354 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1355 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1356
1357 RegisterInputSlots(graph, layerIndex, layer);
1358 RegisterOutputSlots(graph, layerIndex, layer);
1359}
1360
// Deserializes a Constant layer. For models serialized before the weights layout
// scheme version existed (m_WeightsLayoutScheme <= 0), the stored tensor may be
// depthwise-convolution weights in the legacy [M, I, H, W] layout, which ArmNN
// now expects as [1, H, W, I*M]; in that case the data is permuted and reshaped
// before being attached to the constant layer.
void IDeserializer::DeserializerImpl::ParseConstant(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto layerName = GetLayerName(graph, layerIndex);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
    auto serializerInput = serializerLayer->input();

    armnn::ConstTensor input = ToConstTensor(serializerInput);
    IConnectableLayer* layer;

    // Required for when Constant Layer is used as an input to DepthwiseConvolution2d Layer.
    // Running a model that was created before the weights layout scheme version was added to our
    // flatbuffers file ensures older models can still be read and executed. featureVersion weights
    // layout scheme 1 indicates a change in the depthwise weights layout within ArmNN from
    // [M,I,H,W] --> [1,H,W,I*M]
    if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
    {
        // Legacy path: convert weights [ M, I, H, W ] --> [ 1, H, W, I*M ].
        // Step1: permute [ M, I, H, W ] --> [ H, W, I, M ]
        PermutationVector permutationVector = { 3, 2, 0, 1 };
        armnn::TensorInfo weightsInfo = input.GetInfo();
        // Scratch buffer that receives the permuted raw bytes.
        std::unique_ptr<unsigned char[]> permuteBuffer(new unsigned char[weightsInfo.GetNumBytes()]);
        weightsInfo = armnnUtils::Permuted(weightsInfo, permutationVector);
        armnnUtils::Permute(weightsInfo.GetShape(), permutationVector,
                            input.GetMemoryArea(), permuteBuffer.get(),
                            GetDataTypeSize(weightsInfo.GetDataType()));

        // Step2: reshape [ H, W, I, M ] --> [ 1, H, W, I*M ] (pure metadata change,
        // the permuted buffer is already in the right element order)
        auto weightsShape = weightsInfo.GetShape();
        weightsInfo.SetShape({1,
                              weightsShape[0],
                              weightsShape[1],
                              weightsShape[2]*weightsShape[3]});

        armnn::ConstTensor weightsPermuted(weightsInfo, permuteBuffer.get());

        layer = m_Network->AddConstantLayer(weightsPermuted, layerName.c_str());

        // Use the permuted tensor's info rather than the serialized output info,
        // since the shape has changed.
        layer->GetOutputSlot(0).SetTensorInfo(weightsPermuted.GetInfo());

        RegisterOutputSlots(graph, layerIndex, layer);

        // Early return: output slots are already registered for the legacy path.
        return;
    }
    else
    {
        layer = m_Network->AddConstantLayer(input, layerName.c_str());

        armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
        layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
    }

    RegisterOutputSlots(graph, layerIndex, layer);
}
1420
Finn Williams85d36712021-01-26 22:30:06 +00001421void IDeserializer::DeserializerImpl::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001422{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001423 CHECK_LAYERS(graph, 0, layerIndex);
1424 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001425 CHECK_LOCATION();
Mike Kellya0766c32019-02-19 17:22:07 +00001426
Derek Lamberti8ddae332019-02-21 16:29:43 +00001427 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001428 CHECK_VALID_SIZE(outputs.size(), 1);
1429
Keith Davis2cddc722022-04-07 11:32:00 +01001430 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
1431
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001432 auto layerName = GetLayerName(graph, layerIndex);
Keith Davis2cddc722022-04-07 11:32:00 +01001433 auto flatbufferDescriptor = flatBufferLayer->descriptor();
Mike Kellya0766c32019-02-19 17:22:07 +00001434
1435 armnn::Convolution2dDescriptor descriptor;
Keith Davis2cddc722022-04-07 11:32:00 +01001436 descriptor.m_PadLeft = flatbufferDescriptor->padLeft();
1437 descriptor.m_PadRight = flatbufferDescriptor->padRight();
1438 descriptor.m_PadTop = flatbufferDescriptor->padTop();
1439 descriptor.m_PadBottom = flatbufferDescriptor->padBottom();
1440 descriptor.m_StrideX = flatbufferDescriptor->strideX();
1441 descriptor.m_StrideY = flatbufferDescriptor->strideY();;
1442 descriptor.m_DilationX = flatbufferDescriptor->dilationX();
1443 descriptor.m_DilationY = flatbufferDescriptor->dilationY();;
1444 descriptor.m_BiasEnabled = flatbufferDescriptor->biasEnabled();;
1445 descriptor.m_DataLayout = ToDataLayout(flatbufferDescriptor->dataLayout());
Mike Kellya0766c32019-02-19 17:22:07 +00001446
Keith Davis2cddc722022-04-07 11:32:00 +01001447 armnn::IConnectableLayer* layer;
1448 std::vector<unsigned int> ignoreSlots {};
Mike Kellya0766c32019-02-19 17:22:07 +00001449
Keith Davis2cddc722022-04-07 11:32:00 +01001450 armnn::ConstTensor biasTensor;
1451 // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
1452 // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
1453 if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
Mike Kellya0766c32019-02-19 17:22:07 +00001454 {
Keith Davis2cddc722022-04-07 11:32:00 +01001455 // If the model stores weights and biases as members of the layer we have to read them from there
1456 // but add them to their own ConstantLayer for compatibility
1457 CHECK_VALID_SIZE(inputs.size(), 1);
1458
1459 layer = m_Network->AddConvolution2dLayer(descriptor,
1460 layerName.c_str());
1461
1462 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1463 auto weightsLayer = m_Network->AddConstantLayer(weightsTensor);
1464 weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
1465 weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsTensor.GetInfo());
1466 ignoreSlots.emplace_back(1u);
1467
1468 if (descriptor.m_BiasEnabled)
1469 {
1470 biasTensor = ToConstTensor(flatBufferLayer->biases());
1471 auto biasLayer = m_Network->AddConstantLayer(biasTensor);
1472 biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
1473 biasLayer->GetOutputSlot(0).SetTensorInfo(biasTensor.GetInfo());
1474 ignoreSlots.emplace_back(2u);
1475 }
Mike Kellya0766c32019-02-19 17:22:07 +00001476 }
Keith Davis2cddc722022-04-07 11:32:00 +01001477 else
1478 {
1479 layer = m_Network->AddConvolution2dLayer(descriptor,
1480 layerName.c_str());
1481 uint32_t numInputs = descriptor.GetNumInputs();
1482 CHECK_VALID_SIZE(inputs.size(), numInputs);
1483 }
1484
Mike Kellya0766c32019-02-19 17:22:07 +00001485 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1486 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1487
Keith Davis2cddc722022-04-07 11:32:00 +01001488 RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
Derek Lamberti8ddae332019-02-21 16:29:43 +00001489 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001490}
1491
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001492void IDeserializer::DeserializerImpl::ParseConvolution3d(GraphPtr graph, unsigned int layerIndex)
1493{
1494 CHECK_LAYERS(graph, 0, layerIndex);
1495 auto inputs = GetInputs(graph, layerIndex);
1496 CHECK_LOCATION();
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001497
1498 auto outputs = GetOutputs(graph, layerIndex);
1499 CHECK_VALID_SIZE(outputs.size(), 1);
1500
1501 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution3dLayer();
1502 auto layerName = GetLayerName(graph, layerIndex);
1503 auto serializerDescriptor = serializerLayer->descriptor();
1504
1505 armnn::Convolution3dDescriptor descriptor;
1506 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1507 descriptor.m_PadRight = serializerDescriptor->padRight();
1508 descriptor.m_PadTop = serializerDescriptor->padTop();
1509 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1510 descriptor.m_PadFront = serializerDescriptor->padFront();
1511 descriptor.m_PadBack = serializerDescriptor->padBack();
1512 descriptor.m_StrideX = serializerDescriptor->strideX();
1513 descriptor.m_StrideY = serializerDescriptor->strideY();
1514 descriptor.m_StrideZ = serializerDescriptor->strideZ();
1515 descriptor.m_DilationX = serializerDescriptor->dilationX();
1516 descriptor.m_DilationY = serializerDescriptor->dilationY();
1517 descriptor.m_DilationZ = serializerDescriptor->dilationZ();
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001518 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001519 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1520
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001521 uint32_t numInputs = descriptor.GetNumInputs();
1522 CHECK_VALID_SIZE(inputs.size(), numInputs);
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001523
Matthew Sloyan5d7b0a32021-10-18 13:07:49 +01001524 IConnectableLayer* layer = m_Network->AddConvolution3dLayer(descriptor, layerName.c_str());
1525
Matthew Sloyanb63a3112021-09-08 13:05:51 +01001526 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1527 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1528
1529 RegisterInputSlots(graph, layerIndex, layer);
1530 RegisterOutputSlots(graph, layerIndex, layer);
1531}
1532
Finn Williams85d36712021-01-26 22:30:06 +00001533void IDeserializer::DeserializerImpl::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001534{
1535 CHECK_LAYERS(graph, 0, layerIndex);
1536
1537 auto inputs = GetInputs(graph, layerIndex);
1538 CHECK_VALID_SIZE(inputs.size(), 1);
1539
1540 auto outputs = GetOutputs(graph, layerIndex);
1541 CHECK_VALID_SIZE(outputs.size(), 1);
1542
1543 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1544
1545 armnn::DepthToSpaceDescriptor descriptor;
1546 descriptor.m_BlockSize = fbDescriptor->blockSize();
1547 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1548
1549 auto layerName = GetLayerName(graph, layerIndex);
1550 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1551
1552 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1553 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1554
1555 RegisterInputSlots(graph, layerIndex, layer);
1556 RegisterOutputSlots(graph, layerIndex, layer);
1557}
1558
// Deserializes a DepthwiseConvolution2d layer.
// Two feature-version axes are handled:
//  - m_ConstTensorsAsInputs: newer models pass weights/biases as layer inputs;
//    older models store them as members, so they are lifted into ConstantLayers
//    here and the corresponding input slots are excluded from connection registration.
//  - m_WeightsLayoutScheme: older models store depthwise weights as [M, I, H, W];
//    ArmNN now expects [1, H, W, I*M], so legacy weights are permuted and reshaped.
void IDeserializer::DeserializerImpl::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto serializerDescriptor = serializerLayer->descriptor();

    armnn::DepthwiseConvolution2dDescriptor descriptor;
    descriptor.m_PadLeft     = serializerDescriptor->padLeft();
    descriptor.m_PadRight    = serializerDescriptor->padRight();
    descriptor.m_PadTop      = serializerDescriptor->padTop();
    descriptor.m_PadBottom   = serializerDescriptor->padBottom();
    descriptor.m_StrideX     = serializerDescriptor->strideX();
    descriptor.m_StrideY     = serializerDescriptor->strideY();
    descriptor.m_DilationX   = serializerDescriptor->dilationX();
    descriptor.m_DilationY   = serializerDescriptor->dilationY();
    descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
    descriptor.m_DataLayout  = ToDataLayout(serializerDescriptor->dataLayout());

    IConnectableLayer* layer;
    // Input slots fed by synthetic constant layers rather than serialized connections.
    std::vector<unsigned int> ignoreSlots {};

    // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
    // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
    if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
    {
        CHECK_VALID_SIZE(inputs.size(), 1);

        // If the model stores weights and biases as members of the layer we have to read them from there
        // but add them to their own ConstantLayer for compatibility
        armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
        ignoreSlots.emplace_back(1u);

        layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                          layerName.c_str());

        // NOTE(review): optionalBiases appears unused; the bias is wired via a
        // ConstantLayer below instead — candidate for removal.
        armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
        if (descriptor.m_BiasEnabled)
        {
            armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
            ignoreSlots.emplace_back(2u);

            // Bias feeds input slot 2 of the convolution layer.
            auto biasLayer = m_Network->AddConstantLayer(biases);
            biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
            biasLayer->GetOutputSlot(0).SetTensorInfo(biases.GetInfo());
        }

        if (this->GetFeatureVersions(graph).m_WeightsLayoutScheme <= 0)
        {
            // Legacy path: convert weights [ M, I, H, W ] --> [ 1, H, W, I*M ].
            // Step1: permute [ M, I, H, W ] --> [ H, W, I, M ]
            PermutationVector permutationVector = { 3, 2, 0, 1 };
            armnn::TensorInfo weightsInfo = weights.GetInfo();
            // Scratch buffer receiving the permuted raw bytes.
            std::unique_ptr<unsigned char[]> permuteBuffer(new unsigned char[weightsInfo.GetNumBytes()]);
            weightsInfo = armnnUtils::Permuted(weightsInfo, permutationVector);
            armnnUtils::Permute(weightsInfo.GetShape(), permutationVector,
                                weights.GetMemoryArea(), permuteBuffer.get(),
                                GetDataTypeSize(weightsInfo.GetDataType()));

            // Step2: reshape [ H, W, I, M ] --> [ 1, H, W, I*M ] (metadata only)
            auto weightsShape = weightsInfo.GetShape();
            weightsInfo.SetShape({1,
                                  weightsShape[0],
                                  weightsShape[1],
                                  weightsShape[2]*weightsShape[3]});

            armnn::ConstTensor weightsPermuted(weightsInfo, permuteBuffer.get());

            // Weights feed input slot 1 of the convolution layer.
            auto weightsLayer = m_Network->AddConstantLayer(weightsPermuted);
            weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
            weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsPermuted.GetInfo());
        }
        else
        {
            // Weights already in the new layout; attach them unchanged.
            auto weightsLayer = m_Network->AddConstantLayer(weights);
            weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
            weightsLayer->GetOutputSlot(0).SetTensorInfo(weights.GetInfo());
        }
    }
    else
    {
        layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
                                                          layerName.c_str());
        // Weights (and optionally biases) arrive as regular inputs.
        uint32_t numInputs = descriptor.GetNumInputs();
        CHECK_VALID_SIZE(inputs.size(), numInputs);
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
    RegisterOutputSlots(graph, layerIndex, layer);
}
1658
Finn Williams85d36712021-01-26 22:30:06 +00001659void IDeserializer::DeserializerImpl::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001660{
1661 CHECK_LAYERS(graph, 0, layerIndex);
1662 auto inputs = GetInputs(graph, layerIndex);
1663 CHECK_LOCATION();
1664 CHECK_VALID_SIZE(inputs.size(), 2);
1665
1666 auto outputs = GetOutputs(graph, layerIndex);
1667 CHECK_VALID_SIZE(outputs.size(), 4);
1668
1669 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1670 auto layerName = GetLayerName(graph, layerIndex);
1671 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1672
1673 armnn::DetectionPostProcessDescriptor descriptor;
1674 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1675 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1676 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1677 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1678 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1679 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1680 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1681 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1682 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1683 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1684 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1685
1686 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1687
1688 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1689 anchors,
1690 layerName.c_str());
1691
1692 for (unsigned int i = 0; i < 4; i++)
1693 {
1694 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1695 }
1696
1697 RegisterInputSlots(graph, layerIndex, layer);
1698 RegisterOutputSlots(graph, layerIndex, layer);
1699}
1700
Finn Williams85d36712021-01-26 22:30:06 +00001701void IDeserializer::DeserializerImpl::ParseDivision(GraphPtr graph, unsigned int layerIndex)
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001702{
1703 CHECK_LAYERS(graph, 0, layerIndex);
1704 auto inputs = GetInputs(graph, layerIndex);
1705 CHECK_LOCATION();
1706 CHECK_VALID_SIZE(inputs.size(), 2);
1707
1708 auto outputs = GetOutputs(graph, layerIndex);
1709 CHECK_VALID_SIZE(outputs.size(), 1);
1710
1711 auto layerName = GetLayerName(graph, layerIndex);
1712 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1713
1714 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1715 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1716
1717 RegisterInputSlots(graph, layerIndex, layer);
1718 RegisterOutputSlots(graph, layerIndex, layer);
1719}
1720
Finn Williams85d36712021-01-26 22:30:06 +00001721void IDeserializer::DeserializerImpl::ParseEqual(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001722{
1723 CHECK_LAYERS(graph, 0, layerIndex);
1724 auto inputs = GetInputs(graph, layerIndex);
1725 CHECK_LOCATION();
1726 CHECK_VALID_SIZE(inputs.size(), 2);
1727
1728 auto outputs = GetOutputs(graph, layerIndex);
1729 CHECK_VALID_SIZE(outputs.size(), 1);
1730
1731 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001732 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1733 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001734
1735 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1736 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1737
1738 RegisterInputSlots(graph, layerIndex, layer);
1739 RegisterOutputSlots(graph, layerIndex, layer);
1740}
1741
Finn Williams85d36712021-01-26 22:30:06 +00001742void IDeserializer::DeserializerImpl::ParseFill(GraphPtr graph, unsigned int layerIndex)
Keith Davis300ad562020-06-04 16:34:23 +01001743{
1744 CHECK_LAYERS(graph, 0, layerIndex);
1745 auto inputs = GetInputs(graph, layerIndex);
1746 CHECK_LOCATION();
1747 CHECK_VALID_SIZE(inputs.size(), 1);
1748
1749 auto outputs = GetOutputs(graph, layerIndex);
1750 CHECK_VALID_SIZE(outputs.size(), 1);
1751
1752 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armaganfd0cae32021-11-08 17:18:31 +00001753 armnn::FillDescriptor descriptor;
1754 descriptor.m_Value = graph->layers()->Get(layerIndex)->layer_as_FillLayer()->descriptor()->value();
Keith Davis300ad562020-06-04 16:34:23 +01001755 IConnectableLayer* layer = m_Network->AddFillLayer(descriptor, layerName.c_str());
1756
1757 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1758 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1759
1760 RegisterInputSlots(graph, layerIndex, layer);
1761 RegisterOutputSlots(graph, layerIndex, layer);
1762}
1763
Finn Williams85d36712021-01-26 22:30:06 +00001764void IDeserializer::DeserializerImpl::ParseGreater(GraphPtr graph, unsigned int layerIndex)
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001765{
1766 CHECK_LAYERS(graph, 0, layerIndex);
1767 auto inputs = GetInputs(graph, layerIndex);
1768 CHECK_LOCATION();
1769 CHECK_VALID_SIZE(inputs.size(), 2);
1770
1771 auto outputs = GetOutputs(graph, layerIndex);
1772 CHECK_VALID_SIZE(outputs.size(), 1);
1773
1774 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001775 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1776 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001777
1778 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1779 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1780
1781 RegisterInputSlots(graph, layerIndex, layer);
1782 RegisterOutputSlots(graph, layerIndex, layer);
1783}
1784
Finn Williams85d36712021-01-26 22:30:06 +00001785void IDeserializer::DeserializerImpl::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001786{
1787 CHECK_LAYERS(graph, 0, layerIndex);
1788
1789 auto inputs = GetInputs(graph, layerIndex);
1790 CHECK_VALID_SIZE(inputs.size(), 1);
1791
1792 auto outputs = GetOutputs(graph, layerIndex);
1793 CHECK_VALID_SIZE(outputs.size(), 1);
1794
1795 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1796 auto fbDescriptor = fbLayer->descriptor();
1797
1798 armnn::InstanceNormalizationDescriptor descriptor;
1799 descriptor.m_Gamma = fbDescriptor->gamma();
1800 descriptor.m_Beta = fbDescriptor->beta();
1801 descriptor.m_Eps = fbDescriptor->eps();
1802 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1803
1804 const std::string layerName = GetLayerName(graph, layerIndex);
1805 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1806
1807 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1808 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1809
1810 RegisterInputSlots(graph, layerIndex, layer);
1811 RegisterOutputSlots(graph, layerIndex, layer);
1812}
1813
Finn Williams85d36712021-01-26 22:30:06 +00001814void IDeserializer::DeserializerImpl::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001815{
1816 CHECK_LAYERS(graph, 0, layerIndex);
1817
1818 auto inputs = GetInputs(graph, layerIndex);
1819 CHECK_VALID_SIZE(inputs.size(), 1);
1820
1821 auto outputs = GetOutputs(graph, layerIndex);
1822 CHECK_VALID_SIZE(outputs.size(), 1);
1823 auto outputInfo = ToTensorInfo(outputs[0]);
1824
1825 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1826 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1827
1828 auto layerName = GetLayerName(graph, layerIndex);
1829 armnn::L2NormalizationDescriptor descriptor;
1830 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001831 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001832
1833 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1834 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1835
1836 RegisterInputSlots(graph, layerIndex, layer);
1837 RegisterOutputSlots(graph, layerIndex, layer);
1838}
1839
Finn Williams85d36712021-01-26 22:30:06 +00001840void IDeserializer::DeserializerImpl::ParseLogicalBinary(GraphPtr graph, unsigned int layerIndex)
James Conroyaba90cd2020-11-06 16:28:18 +00001841{
1842 CHECK_LAYERS(graph, 0, layerIndex);
1843 CHECK_LOCATION();
1844
1845 auto inputs = GetInputs(graph, layerIndex);
1846 CHECK_VALID_SIZE(inputs.size(), 2);
1847
1848 auto outputs = GetOutputs(graph, layerIndex);
1849 CHECK_VALID_SIZE(outputs.size(), 1);
1850
1851 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer();
1852 auto fbDescriptor = fbLayer->descriptor();
1853
1854 armnn::LogicalBinaryDescriptor descriptor;
1855 descriptor.m_Operation = ToLogicalBinaryOperation(fbDescriptor->operation());
1856
1857 const std::string& layerName = GetLayerName(graph, layerIndex);
1858 IConnectableLayer* layer = m_Network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1859
1860 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1861 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1862
1863 RegisterInputSlots(graph, layerIndex, layer);
1864 RegisterOutputSlots(graph, layerIndex, layer);
1865}
1866
Finn Williams85d36712021-01-26 22:30:06 +00001867void IDeserializer::DeserializerImpl::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan26257852019-10-14 13:00:47 +01001868{
1869 CHECK_LAYERS(graph, 0, layerIndex);
1870
Finn Williams85d36712021-01-26 22:30:06 +00001871 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001872 CHECK_VALID_SIZE(inputs.size(), 1);
1873
Finn Williams85d36712021-01-26 22:30:06 +00001874 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armagan26257852019-10-14 13:00:47 +01001875 CHECK_VALID_SIZE(outputs.size(), 1);
1876
1877 armnn::LogSoftmaxDescriptor descriptor;
1878 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1879 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1880 auto layerName = GetLayerName(graph, layerIndex);
1881
1882 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1883
1884 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1885 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1886
1887 RegisterInputSlots(graph, layerIndex, layer);
1888 RegisterOutputSlots(graph, layerIndex, layer);
1889}
1890
Finn Williams85d36712021-01-26 22:30:06 +00001891void IDeserializer::DeserializerImpl::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001892{
1893 CHECK_LAYERS(graph, 0, layerIndex);
1894 auto inputs = GetInputs(graph, layerIndex);
1895 CHECK_LOCATION();
1896 CHECK_VALID_SIZE(inputs.size(), 2);
1897
1898 auto outputs = GetOutputs(graph, layerIndex);
1899 CHECK_VALID_SIZE(outputs.size(), 1);
1900
1901 auto layerName = GetLayerName(graph, layerIndex);
1902 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1903
1904 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1905 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1906
1907 RegisterInputSlots(graph, layerIndex, layer);
1908 RegisterOutputSlots(graph, layerIndex, layer);
1909}
1910
Finn Williams85d36712021-01-26 22:30:06 +00001911void IDeserializer::DeserializerImpl::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001912{
1913 CHECK_LAYERS(graph, 0, layerIndex);
1914 auto inputs = GetInputs(graph, layerIndex);
1915 CHECK_LOCATION();
1916 CHECK_VALID_SIZE(inputs.size(), 2);
1917
1918 auto outputs = GetOutputs(graph, layerIndex);
1919 CHECK_VALID_SIZE(outputs.size(), 1);
1920
1921 auto layerName = GetLayerName(graph, layerIndex);
1922 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1923
1924 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1925 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1926
1927 RegisterInputSlots(graph, layerIndex, layer);
1928 RegisterOutputSlots(graph, layerIndex, layer);
1929}
1930
Jim Flynne242f2d2019-05-22 14:24:13 +01001931const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1932 unsigned int layerIndex)
1933{
1934 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1935
1936 switch (layerType)
1937 {
1938 case Layer::Layer_ConcatLayer:
1939 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1940 case Layer::Layer_MergerLayer:
1941 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1942 default:
1943 throw armnn::Exception("unknown layer type, should be concat or merger");
1944 }
1945}
Simon Obute51f67772021-09-03 15:50:13 +01001946void IDeserializer::DeserializerImpl::ParseChannelShuffle(GraphPtr graph, unsigned int layerIndex)
1947{
1948 CHECK_LAYERS(graph, 0, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001949
Simon Obute51f67772021-09-03 15:50:13 +01001950 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1951 CHECK_VALID_SIZE(inputs.size(), 1);
1952
1953 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1954 CHECK_VALID_SIZE(outputs.size(), 1);
1955
1956 armnn::ChannelShuffleDescriptor descriptor;
1957 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->axis();
1958 descriptor.m_NumGroups =
1959 graph->layers()->Get(layerIndex)->layer_as_ChannelShuffleLayer()->descriptor()->numGroups();
1960
1961 auto layerName = GetLayerName(graph, layerIndex);
1962 IConnectableLayer* layer = m_Network->AddChannelShuffleLayer(descriptor, layerName.c_str());
1963
1964 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1965 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1966
1967 RegisterInputSlots(graph, layerIndex, layer);
1968 RegisterOutputSlots(graph, layerIndex, layer);
1969}
Finn Williams85d36712021-01-26 22:30:06 +00001970void IDeserializer::DeserializerImpl::ParseComparison(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01001971{
1972 CHECK_LAYERS(graph, 0, layerIndex);
1973 CHECK_LOCATION();
1974
1975 auto inputs = GetInputs(graph, layerIndex);
1976 CHECK_VALID_SIZE(inputs.size(), 2);
1977
1978 auto outputs = GetOutputs(graph, layerIndex);
1979 CHECK_VALID_SIZE(outputs.size(), 1);
1980
1981 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
1982 auto fbDescriptor = fbLayer->descriptor();
1983
1984 armnn::ComparisonDescriptor descriptor;
1985 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
1986
1987 const std::string& layerName = GetLayerName(graph, layerIndex);
1988 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1989
1990 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1991 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1992
1993 RegisterInputSlots(graph, layerIndex, layer);
1994 RegisterOutputSlots(graph, layerIndex, layer);
1995}
1996
Finn Williams85d36712021-01-26 22:30:06 +00001997void IDeserializer::DeserializerImpl::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
josh minor4a3c6102020-01-06 16:40:46 -06001998{
1999 CHECK_LAYERS(graph, 0, layerIndex);
2000 CHECK_LOCATION();
2001
2002 auto inputs = GetInputs(graph, layerIndex);
2003 CHECK_VALID_SIZE(inputs.size(), 1);
2004
2005 auto outputs = GetOutputs(graph, layerIndex);
2006 CHECK_VALID_SIZE(outputs.size(), 1);
2007
2008 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
2009 auto fbDescriptor = fbLayer->descriptor();
2010
2011 armnn::ElementwiseUnaryDescriptor descriptor;
2012 descriptor.m_Operation = ToUnaryOperation(fbDescriptor->operation());
2013
2014 const std::string& layerName = GetLayerName(graph, layerIndex);
2015 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
2016
2017 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2018 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2019
2020 RegisterInputSlots(graph, layerIndex, layer);
2021 RegisterOutputSlots(graph, layerIndex, layer);
2022}
2023
Finn Williams85d36712021-01-26 22:30:06 +00002024void IDeserializer::DeserializerImpl::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00002025{
2026 CHECK_LAYERS(graph, 0, layerIndex);
2027 CHECK_LOCATION();
2028
2029 auto outputs = GetOutputs(graph, layerIndex);
2030 CHECK_VALID_SIZE(outputs.size(), 1);
2031
Jim Flynnac25a1b2019-02-28 10:40:49 +00002032 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01002033 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
2034 unsigned int numViews = originsDescriptor->numViews();
2035 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00002036
2037 // can now check the number of inputs == number of views
2038 auto inputs = GetInputs(graph, layerIndex);
2039 CHECK_VALID_SIZE(inputs.size(), numViews);
2040
2041 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01002042 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00002043 for (unsigned int v = 0; v < numViews; ++v)
2044 {
2045 auto originPtr = originsPtr->Get(v);
2046 for (unsigned int d = 0; d < numDimensions; ++d)
2047 {
2048 uint32_t value = originPtr->data()->Get(d);
2049 descriptor.SetViewOriginCoord(v, d, value);
2050 }
2051 }
Jim Flynne242f2d2019-05-22 14:24:13 +01002052 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00002053
Jim Flynn906f9462019-05-10 13:55:21 +01002054 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00002055 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2056 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2057
2058 RegisterInputSlots(graph, layerIndex, layer);
2059 RegisterOutputSlots(graph, layerIndex, layer);
2060}
2061
Finn Williams85d36712021-01-26 22:30:06 +00002062void IDeserializer::DeserializerImpl::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00002063{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002064 CHECK_LAYERS(graph, 0, layerIndex);
2065 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00002066 CHECK_LOCATION();
2067 CHECK_VALID_SIZE(inputs.size(), 2);
2068
Derek Lamberti8ddae332019-02-21 16:29:43 +00002069 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00002070 CHECK_VALID_SIZE(outputs.size(), 1);
2071
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002072 auto layerName = GetLayerName(graph, layerIndex);
2073 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00002074
2075 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2076 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2077
Derek Lamberti8ddae332019-02-21 16:29:43 +00002078 RegisterInputSlots(graph, layerIndex, layer);
2079 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00002080}
2081
Finn Williams85d36712021-01-26 22:30:06 +00002082void IDeserializer::DeserializerImpl::ParseFloor(GraphPtr graph, unsigned int layerIndex)
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00002083{
2084 CHECK_LAYERS(graph, 0, layerIndex);
2085 CHECK_LOCATION();
2086
2087 auto inputs = GetInputs(graph, layerIndex);
2088 CHECK_VALID_SIZE(inputs.size(), 1);
2089
2090 auto outputs = GetOutputs(graph, layerIndex);
2091 CHECK_VALID_SIZE(outputs.size(), 1);
2092
2093 auto layerName = GetLayerName(graph, layerIndex);
2094
2095 armnn::IConnectableLayer* layer;
2096
Nattapat Chaimanowongc192f352019-03-05 17:35:28 +00002097 layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00002098
2099 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2100 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2101
2102 RegisterInputSlots(graph, layerIndex, layer);
2103 RegisterOutputSlots(graph, layerIndex, layer);
2104}
2105
// Deserializes a FullyConnected layer.
//
// Two serialization formats exist:
//  - Old format (m_ConstTensorsAsInputs <= 0): weights and biases are stored
//    as members of the serialized layer. They are re-created here as
//    ConstantLayers connected to input slots 1 (weights) and 2 (bias).
//  - New format: weights and biases arrive as regular graph inputs, so the
//    layer simply expects the descriptor's full input count.
void IDeserializer::DeserializerImpl::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    auto inputs = GetInputs(graph, layerIndex);
    CHECK_LOCATION();

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();

    armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
    fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
    fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
    fullyConnectedDescriptor.m_ConstantWeights = flatBufferDescriptor->constantWeights();

    armnn::IConnectableLayer* layer;
    // Slots connected internally (to the generated ConstantLayers) must be
    // skipped when registering the layer's input slots below.
    std::vector<unsigned int> ignoreSlots {};

    // Weights and biases used to be always constant and were stored as members of the layer. This has changed and
    // they are now passed as inputs. If they are constant then they will be stored in a ConstantLayer.
    if (this->GetFeatureVersions(graph).m_ConstTensorsAsInputs <= 0)
    {
        // If the model stores weights and biases as members of the layer we have to read them from there
        // but add them to their own ConstantLayer for compatibility
        CHECK_VALID_SIZE(inputs.size(), 1);
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  layerName.c_str());

        // Weights feed input slot 1 via a dedicated ConstantLayer.
        armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
        auto weightsLayer = m_Network->AddConstantLayer(weightsTensor);
        weightsLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(1u));
        weightsLayer->GetOutputSlot(0).SetTensorInfo(weightsTensor.GetInfo());
        ignoreSlots.emplace_back(1u);

        if (fullyConnectedDescriptor.m_BiasEnabled)
        {
            // Bias (if enabled) feeds input slot 2 via its own ConstantLayer.
            armnn::ConstTensor biasTensor = ToConstTensor(flatBufferLayer->biases());
            auto biasLayer = m_Network->AddConstantLayer(biasTensor);
            biasLayer->GetOutputSlot(0).Connect(layer->GetInputSlot(2u));
            biasLayer->GetOutputSlot(0).SetTensorInfo(biasTensor.GetInfo());
            ignoreSlots.emplace_back(2u);
        }
    }
    else
    {
        // New format: weights/bias are ordinary inputs; the descriptor knows
        // how many inputs to expect (2 or 3 depending on bias).
        layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
                                                  layerName.c_str());
        uint32_t numInputs = fullyConnectedDescriptor.GetNumInputs();
        CHECK_VALID_SIZE(inputs.size(), numInputs);
    }

    armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer, ignoreSlots);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2166
Finn Williams85d36712021-01-26 22:30:06 +00002167void IDeserializer::DeserializerImpl::ParsePad(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002168{
2169 CHECK_LAYERS(graph, 0, layerIndex);
2170
Finn Williams85d36712021-01-26 22:30:06 +00002171 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002172 CHECK_VALID_SIZE(inputs.size(), 1);
2173
Finn Williams85d36712021-01-26 22:30:06 +00002174 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002175 CHECK_VALID_SIZE(outputs.size(), 1);
2176
2177 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
2178 auto flatBufferPadList = flatBufferDescriptor->padList();
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +01002179 auto paddingMode = flatBufferDescriptor->paddingMode();
David Monahan34757812019-06-19 11:47:21 +01002180 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002181
2182 if (flatBufferPadList->Length() % 2 != 0)
2183 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002184 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2185 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002186 }
2187
2188 std::vector<std::pair<unsigned int, unsigned int>> padList;
2189 padList.reserve(flatBufferPadList->Length() / 2);
2190 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
2191 {
2192 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2193 }
2194
Matthew Sloyan2e5d0b22021-10-21 14:05:31 +01002195 armnn::PadDescriptor descriptor(padList, padValue, ToPaddingMode(paddingMode));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00002196
2197 auto layerName = GetLayerName(graph, layerIndex);
2198 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
2199
2200 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2201 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2202
2203 RegisterInputSlots(graph, layerIndex, layer);
2204 RegisterOutputSlots(graph, layerIndex, layer);
2205}
2206
Finn Williams85d36712021-01-26 22:30:06 +00002207void IDeserializer::DeserializerImpl::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002208{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002209 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002210
2211 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00002212 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002213
Derek Lamberti8ddae332019-02-21 16:29:43 +00002214 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002215 CHECK_VALID_SIZE(inputs.size(), 1);
2216
Derek Lamberti8ddae332019-02-21 16:29:43 +00002217 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002218 CHECK_VALID_SIZE(outputs.size(), 1);
2219 auto outputInfo = ToTensorInfo(outputs[0]);
2220
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002221 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002222 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
2223
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002224 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002225 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2226
Derek Lamberti8ddae332019-02-21 16:29:43 +00002227 RegisterInputSlots(graph, layerIndex, layer);
2228 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00002229}
2230
// Translates a serialized Pooling2dDescriptor (flatbuffer) into the
// corresponding armnn::Pooling2dDescriptor.
//
// Each enum field is mapped through its own switch; for an unrecognised enum
// value ARMNN_ASSERT_MSG fires. NOTE(review): in builds where the assert is
// compiled out, an unknown enum value would leave that descriptor field at
// its default — confirm this is acceptable for release builds.
armnn::Pooling2dDescriptor IDeserializer::DeserializerImpl::GetPooling2dDescriptor(Pooling2dDescriptor pooling2dDesc,
                                                                                   unsigned int layerIndex)
{
    // layerIndex is kept for interface symmetry with other helpers but unused here.
    IgnoreUnused(layerIndex);
    armnn::Pooling2dDescriptor desc;

    // Pooling algorithm: Average / Max / L2.
    switch (pooling2dDesc->poolType())
    {
        case PoolingAlgorithm_Average:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Average;
            break;
        }
        case PoolingAlgorithm_Max:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Max;
            break;
        }
        case PoolingAlgorithm_L2:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::L2;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
        }
    }

    // Rounding used to derive the output shape from input/stride/pad.
    switch (pooling2dDesc->outputShapeRounding())
    {
        case OutputShapeRounding_Floor:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
            break;
        }
        case OutputShapeRounding_Ceiling:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
        }
    }

    // Whether padded values take part in the pooling computation.
    switch (pooling2dDesc->paddingMethod())
    {
        case PaddingMethod_Exclude:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
            break;
        }
        case PaddingMethod_IgnoreValue:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported padding method");
        }
    }

    // Tensor data layout: channels-first (NCHW) or channels-last (NHWC).
    switch (pooling2dDesc->dataLayout())
    {
        case DataLayout_NCHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCHW;
            break;
        }
        case DataLayout_NHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NHWC;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    // Scalar fields copy across directly.
    desc.m_PadRight = pooling2dDesc->padRight();
    desc.m_PadLeft = pooling2dDesc->padLeft();
    desc.m_PadBottom = pooling2dDesc->padBottom();
    desc.m_PadTop = pooling2dDesc->padTop();
    desc.m_StrideX = pooling2dDesc->strideX();
    desc.m_StrideY = pooling2dDesc->strideY();
    desc.m_PoolWidth = pooling2dDesc->poolWidth();
    desc.m_PoolHeight = pooling2dDesc->poolHeight();

    return desc;
}
2325
// Translates a serialized Pooling3dDescriptor (flatbuffer) into the
// corresponding armnn::Pooling3dDescriptor. Mirrors GetPooling2dDescriptor,
// adding the depth dimension (front/back padding, strideZ, poolDepth) and the
// 3D layouts NCDHW/NDHWC.
//
// NOTE(review): as in the 2D variant, an unknown enum value only triggers
// ARMNN_ASSERT_MSG; in builds where the assert is compiled out the field
// stays default-initialised — confirm this is acceptable.
armnn::Pooling3dDescriptor IDeserializer::DeserializerImpl::GetPooling3dDescriptor(Pooling3dDescriptor pooling3dDesc,
                                                                                   unsigned int layerIndex)
{
    // layerIndex is kept for interface symmetry with other helpers but unused here.
    IgnoreUnused(layerIndex);
    armnn::Pooling3dDescriptor desc;

    // Pooling algorithm: Average / Max / L2.
    switch (pooling3dDesc->poolType())
    {
        case PoolingAlgorithm_Average:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Average;
            break;
        }
        case PoolingAlgorithm_Max:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::Max;
            break;
        }
        case PoolingAlgorithm_L2:
        {
            desc.m_PoolType = armnn::PoolingAlgorithm::L2;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
        }
    }

    // Rounding used to derive the output shape from input/stride/pad.
    switch (pooling3dDesc->outputShapeRounding())
    {
        case OutputShapeRounding_Floor:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
            break;
        }
        case OutputShapeRounding_Ceiling:
        {
            desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
        }
    }

    // Whether padded values take part in the pooling computation.
    switch (pooling3dDesc->paddingMethod())
    {
        case PaddingMethod_Exclude:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
            break;
        }
        case PaddingMethod_IgnoreValue:
        {
            desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported padding method");
        }
    }

    // Tensor data layout for 3D pooling: NCDHW or NDHWC.
    switch (pooling3dDesc->dataLayout())
    {
        case DataLayout_NCDHW:
        {
            desc.m_DataLayout = armnn::DataLayout::NCDHW;
            break;
        }
        case DataLayout_NDHWC:
        {
            desc.m_DataLayout = armnn::DataLayout::NDHWC;
            break;
        }
        default:
        {
            ARMNN_ASSERT_MSG(false, "Unsupported data layout");
        }
    }

    // Scalar fields copy across directly (including the depth dimension).
    desc.m_PadRight = pooling3dDesc->padRight();
    desc.m_PadLeft = pooling3dDesc->padLeft();
    desc.m_PadBottom = pooling3dDesc->padBottom();
    desc.m_PadTop = pooling3dDesc->padTop();
    desc.m_PadFront = pooling3dDesc->padFront();
    desc.m_PadBack = pooling3dDesc->padBack();
    desc.m_StrideX = pooling3dDesc->strideX();
    desc.m_StrideY = pooling3dDesc->strideY();
    desc.m_StrideZ = pooling3dDesc->strideZ();
    desc.m_PoolWidth = pooling3dDesc->poolWidth();
    desc.m_PoolHeight = pooling3dDesc->poolHeight();
    desc.m_PoolDepth = pooling3dDesc->poolDepth();

    return desc;
}
Finn Williams85d36712021-01-26 22:30:06 +00002424
2425void IDeserializer::DeserializerImpl::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002426{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002427 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002428
Derek Lamberti8ddae332019-02-21 16:29:43 +00002429 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00002430 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002431 CHECK_VALID_SIZE(inputs.size(), 1);
2432
Derek Lamberti8ddae332019-02-21 16:29:43 +00002433 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002434 CHECK_VALID_SIZE(outputs.size(), 1);
2435 auto outputInfo = ToTensorInfo(outputs[0]);
2436
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002437 auto pooling2dDescriptor = GetPooling2dDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002438 auto layerName = GetLayerName(graph, layerIndex);
2439 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002440 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2441
Derek Lamberti8ddae332019-02-21 16:29:43 +00002442 RegisterInputSlots(graph, layerIndex, layer);
2443 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00002444}
2445
Tamas Nyirid998a1c2021-11-05 14:55:33 +00002446void IDeserializer::DeserializerImpl::ParsePooling3d(GraphPtr graph, unsigned int layerIndex)
2447{
2448 CHECK_LAYERS(graph, 0, layerIndex);
2449
2450 auto pooling3dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling3dLayer()->descriptor();
2451 auto inputs = GetInputs(graph, layerIndex);
2452 CHECK_VALID_SIZE(inputs.size(), 1);
2453
2454 auto outputs = GetOutputs(graph, layerIndex);
2455 CHECK_VALID_SIZE(outputs.size(), 1);
2456 auto outputInfo = ToTensorInfo(outputs[0]);
2457
2458 auto pooling3dDescriptor = GetPooling3dDescriptor(pooling3dDes, layerIndex);
2459 auto layerName = GetLayerName(graph, layerIndex);
2460 IConnectableLayer* layer = m_Network->AddPooling3dLayer(pooling3dDescriptor, layerName.c_str());
2461 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2462
2463 RegisterInputSlots(graph, layerIndex, layer);
2464 RegisterOutputSlots(graph, layerIndex, layer);
2465}
2466
Finn Williams85d36712021-01-26 22:30:06 +00002467void IDeserializer::DeserializerImpl::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
Derek Lamberti87acb272019-03-27 16:51:31 +00002468{
2469 CHECK_LAYERS(graph, 0, layerIndex);
2470
2471 auto inputs = GetInputs(graph, layerIndex);
2472 CHECK_VALID_SIZE(inputs.size(), 1);
2473
2474 auto outputs = GetOutputs(graph, layerIndex);
2475 CHECK_VALID_SIZE(outputs.size(), 1);
2476 auto outputInfo = ToTensorInfo(outputs[0]);
2477
2478 auto layerName = GetLayerName(graph, layerIndex);
2479 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
2480 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2481
2482 RegisterInputSlots(graph, layerIndex, layer);
2483 RegisterOutputSlots(graph, layerIndex, layer);
2484}
2485
Finn Williams85d36712021-01-26 22:30:06 +00002486armnn::TensorInfo IDeserializer::DeserializerImpl::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00002487 const std::vector<uint32_t>& targetDimsIn)
2488{
2489 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
2490 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
2491
2492 if (stretchDim != targetDimsIn.end())
2493 {
2494 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
2495 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002496 throw ParseException(fmt::format("At most one component of shape can be -1 {}",
2497 CHECK_LOCATION().AsString()));
Saoirse Stewart263829c2019-02-19 15:54:14 +00002498 }
2499
2500 auto targetNumElements =
Matthew Sloyan0663d662020-09-14 11:47:26 +01002501 armnn::numeric_cast<unsigned int>(
Saoirse Stewart263829c2019-02-19 15:54:14 +00002502 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
2503
2504 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
2505 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
2506 }
2507
2508 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
2509
2510 armnn::TensorInfo reshapeInfo = inputTensorInfo;
2511 reshapeInfo.SetShape(outputShape);
2512
2513 return reshapeInfo;
2514}
2515
Finn Williams85d36712021-01-26 22:30:06 +00002516void IDeserializer::DeserializerImpl::ParseRank(GraphPtr graph, unsigned int layerIndex)
Finn Williams2605b232020-06-10 15:53:46 +01002517{
2518 CHECK_LAYERS(graph, 0, layerIndex);
2519
Finn Williams85d36712021-01-26 22:30:06 +00002520 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002521 CHECK_VALID_SIZE(inputs.size(), 1);
2522
Finn Williams85d36712021-01-26 22:30:06 +00002523 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Finn Williams2605b232020-06-10 15:53:46 +01002524 CHECK_VALID_SIZE(outputs.size(), 1);
2525
2526 auto layerName = GetLayerName(graph, layerIndex);
2527 IConnectableLayer* layer = m_Network->AddRankLayer( layerName.c_str());
2528
2529 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2530 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2531
2532 RegisterInputSlots(graph, layerIndex, layer);
2533 RegisterOutputSlots(graph, layerIndex, layer);
2534}
2535
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002536void IDeserializer::DeserializerImpl::ParseReduce(GraphPtr graph, unsigned int layerIndex)
2537{
2538 CHECK_LAYERS(graph, 0, layerIndex);
2539 CHECK_LOCATION();
2540
2541 auto inputs = GetInputs(graph, layerIndex);
2542 CHECK_VALID_SIZE(inputs.size(), 1);
2543
2544 auto outputs = GetOutputs(graph, layerIndex);
2545 CHECK_VALID_SIZE(outputs.size(), 1);
2546
2547 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ReduceLayer();
2548 auto fbDescriptor = fbLayer->descriptor();
2549 auto flatBufferAxis = fbDescriptor->axis();
2550
2551 armnn::ReduceDescriptor descriptor;
Sadik Armagan0c3ea5b2021-02-03 09:29:30 +00002552 descriptor.m_KeepDims = fbDescriptor->keepDims();
2553 descriptor.m_vAxis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2554 descriptor.m_ReduceOperation = ToReduceOperation(fbDescriptor->reduceOperation());
2555
2556 const std::string& layerName = GetLayerName(graph, layerIndex);
2557 IConnectableLayer* layer = m_Network->AddReduceLayer(descriptor, layerName.c_str());
2558
2559 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2560 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2561
2562 RegisterInputSlots(graph, layerIndex, layer);
2563 RegisterOutputSlots(graph, layerIndex, layer);
2564}
2565
/// Deserializes a Reshape layer: computes the concrete output shape (resolving
/// a possible -1 wildcard via OutputShapeOfReshape), validates it against the
/// serialized output dimensions, and adds the layer to m_Network.
void IDeserializer::DeserializerImpl::ParseReshape(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    // NOTE(review): inputs.size() is not validated here (no CHECK_VALID_SIZE),
    // unlike most other Parse* functions — presumably deliberate, see the
    // inputs.size() > 1 guard below; confirm against the serializer.
    auto inputs = GetInputs(graph, layerIndex);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 1);

    armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
    armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);

    // Target shape as stored in the flatbuffer descriptor; may contain a -1
    // wildcard entry.
    const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
    std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());

    // Resolve the wildcard (if any) from the input's element count.
    armnn::TensorInfo reshapeOutputTensorInfo = DeserializerImpl::OutputShapeOfReshape(inputTensorInfo, outputDims);
    const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();

    const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
                                             outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());

    // Sanity check: the computed shape must match the serialized output shape.
    // NOTE(review): the check is skipped for single-input reshapes
    // (inputs.size() > 1 guard) — verify this is the intended condition rather
    // than an unconditional check.
    if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
    {
        std::stringstream ss;
        ss << "New shape defined in reshape parameters "
           << reshapeOutputTensorShape
           << " does not equal output shape "
           << actualOutputTensorInfo.GetShape()
           << ": "
           << CHECK_LOCATION().AsString();
        throw ParseException(ss.str());
    }

    armnn::ReshapeDescriptor reshapeDesc;
    reshapeDesc.m_TargetShape = reshapeOutputTensorShape;

    auto layerName = GetLayerName(graph, layerIndex);
    IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
    // The layer advertises the computed (wildcard-resolved) shape, not the
    // serialized one.
    layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
2608
Finn Williams85d36712021-01-26 22:30:06 +00002609void IDeserializer::DeserializerImpl::ParseResize(GraphPtr graph, unsigned int layerIndex)
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002610{
2611 CHECK_LAYERS(graph, 0, layerIndex);
2612
Finn Williams85d36712021-01-26 22:30:06 +00002613 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002614 CHECK_VALID_SIZE(inputs.size(), 1);
2615
Finn Williams85d36712021-01-26 22:30:06 +00002616 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002617 CHECK_VALID_SIZE(outputs.size(), 1);
2618
2619 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2620
2621 armnn::ResizeDescriptor descriptor;
2622 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
2623 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
2624 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
2625 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002626 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2627 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002628
2629 auto layerName = GetLayerName(graph, layerIndex);
2630 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2631
2632 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2633 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2634
2635 RegisterInputSlots(graph, layerIndex, layer);
2636 RegisterOutputSlots(graph, layerIndex, layer);
2637}
2638
Jan Eilers1b2654f2021-09-24 15:45:46 +01002639
/// @note The ResizeBilinear operation was deprecated and removed in favor of the Resize operation.
/// This function is kept for backwards compatibility.
Finn Williams85d36712021-01-26 22:30:06 +00002642void IDeserializer::DeserializerImpl::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002643{
2644 CHECK_LAYERS(graph, 0, layerIndex);
2645
Finn Williams85d36712021-01-26 22:30:06 +00002646 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002647 CHECK_VALID_SIZE(inputs.size(), 1);
2648
Finn Williams85d36712021-01-26 22:30:06 +00002649 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002650 CHECK_VALID_SIZE(outputs.size(), 1);
2651
2652 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2653
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002654 armnn::ResizeDescriptor descriptor;
2655 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002656 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002657 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
2658 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002659 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2660 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002661
2662 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002663 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002664
2665 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2666 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2667
2668 RegisterInputSlots(graph, layerIndex, layer);
2669 RegisterOutputSlots(graph, layerIndex, layer);
2670}
2671
Keith Davis3ae3f972021-05-21 16:33:48 +01002672void IDeserializer::DeserializerImpl::ParseShape(GraphPtr graph, unsigned int layerIndex)
2673{
2674 CHECK_LAYERS(graph, 0, layerIndex);
2675
2676 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2677 CHECK_VALID_SIZE(inputs.size(), 1);
2678
2679 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2680 CHECK_VALID_SIZE(outputs.size(), 1);
2681
2682 auto layerName = GetLayerName(graph, layerIndex);
2683 IConnectableLayer* layer = m_Network->AddShapeLayer( layerName.c_str());
2684
2685 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2686 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2687
2688 RegisterInputSlots(graph, layerIndex, layer);
2689 RegisterOutputSlots(graph, layerIndex, layer);
2690}
2691
Finn Williams85d36712021-01-26 22:30:06 +00002692void IDeserializer::DeserializerImpl::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002693{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002694 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002695
Finn Williams85d36712021-01-26 22:30:06 +00002696 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002697 CHECK_VALID_SIZE(inputs.size(), 1);
2698
Finn Williams85d36712021-01-26 22:30:06 +00002699 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002700 CHECK_VALID_SIZE(outputs.size(), 1);
2701
2702 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00002703 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Sadik Armaganfd0cae32021-11-08 17:18:31 +00002704 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->axis();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002705 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002706
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002707 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2708
2709 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2710 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2711
Derek Lamberti8ddae332019-02-21 16:29:43 +00002712 RegisterInputSlots(graph, layerIndex, layer);
2713 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00002714}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002715
Finn Williams85d36712021-01-26 22:30:06 +00002716void IDeserializer::DeserializerImpl::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002717{
2718 CHECK_LAYERS(graph, 0, layerIndex);
2719
Finn Williams85d36712021-01-26 22:30:06 +00002720 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002721 CHECK_VALID_SIZE(inputs.size(), 1);
2722
Finn Williams85d36712021-01-26 22:30:06 +00002723 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002724 CHECK_VALID_SIZE(outputs.size(), 1);
2725
2726 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2727 auto flatBufferPadList = flatBufferDescriptor->padList();
2728 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
2729
2730 if (flatBufferPadList->Length() % 2 != 0)
2731 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002732 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2733 CHECK_LOCATION().AsString()));
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002734 }
2735
2736 std::vector<std::pair<unsigned int, unsigned int>> padList;
2737 padList.reserve(flatBufferPadList->Length() / 2);
2738 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
2739 {
2740 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2741 }
2742
2743 armnn::SpaceToBatchNdDescriptor descriptor;
2744 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2745 descriptor.m_BlockShape =
2746 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2747 descriptor.m_PadList = padList;
2748
2749 auto layerName = GetLayerName(graph, layerIndex);
2750 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2751
2752 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2753 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2754
2755 RegisterInputSlots(graph, layerIndex, layer);
2756 RegisterOutputSlots(graph, layerIndex, layer);
2757}
2758
Finn Williams85d36712021-01-26 22:30:06 +00002759void IDeserializer::DeserializerImpl::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002760{
2761 CHECK_LAYERS(graph, 0, layerIndex);
2762
Finn Williams85d36712021-01-26 22:30:06 +00002763 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002764 CHECK_VALID_SIZE(inputs.size(), 1);
2765
Finn Williams85d36712021-01-26 22:30:06 +00002766 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002767 CHECK_VALID_SIZE(outputs.size(), 1);
2768
2769 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2770
2771 armnn::SpaceToDepthDescriptor descriptor;
2772 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2773 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2774
2775 auto layerName = GetLayerName(graph, layerIndex);
2776 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2777
2778 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2779 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2780
2781 RegisterInputSlots(graph, layerIndex, layer);
2782 RegisterOutputSlots(graph, layerIndex, layer);
2783}
2784
Finn Williams85d36712021-01-26 22:30:06 +00002785armnn::NormalizationDescriptor IDeserializer::DeserializerImpl::GetNormalizationDescriptor(
2786 NormalizationDescriptorPtr normalizationDescriptor,
Nina Drozd57728782019-02-27 10:53:27 +00002787 unsigned int layerIndex)
2788{
Jan Eilers8eb25602020-03-09 12:13:48 +00002789 IgnoreUnused(layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002790 armnn::NormalizationDescriptor desc;
2791
2792 switch (normalizationDescriptor->normChannelType())
2793 {
2794 case NormalizationAlgorithmChannel_Across:
2795 {
2796 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
2797 break;
2798 }
2799 case NormalizationAlgorithmChannel_Within:
2800 {
2801 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
2802 break;
2803 }
2804 default:
2805 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002806 ARMNN_ASSERT_MSG(false, "Unsupported normalization channel type");
Nina Drozd57728782019-02-27 10:53:27 +00002807 }
2808 }
2809
2810 switch (normalizationDescriptor->normMethodType())
2811 {
2812 case NormalizationAlgorithmMethod_LocalBrightness:
2813 {
2814 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
2815 break;
2816 }
2817 case NormalizationAlgorithmMethod_LocalContrast:
2818 {
2819 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
2820 break;
2821 }
2822 default:
2823 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002824 ARMNN_ASSERT_MSG(false, "Unsupported normalization method type");
Nina Drozd57728782019-02-27 10:53:27 +00002825 }
2826 }
2827
2828 switch (normalizationDescriptor->dataLayout())
2829 {
2830 case DataLayout_NCHW:
2831 {
2832 desc.m_DataLayout = armnn::DataLayout::NCHW;
2833 break;
2834 }
2835 case DataLayout_NHWC:
2836 {
2837 desc.m_DataLayout = armnn::DataLayout::NHWC;
2838 break;
2839 }
2840 default:
2841 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002842 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Nina Drozd57728782019-02-27 10:53:27 +00002843 }
2844 }
2845
2846 desc.m_Alpha = normalizationDescriptor->alpha();
2847 desc.m_Beta = normalizationDescriptor->beta();
2848 desc.m_K = normalizationDescriptor->k();
2849 desc.m_NormSize = normalizationDescriptor->normSize();
2850
2851 return desc;
2852}
2853
Finn Williams85d36712021-01-26 22:30:06 +00002854void IDeserializer::DeserializerImpl::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
Nina Drozd57728782019-02-27 10:53:27 +00002855{
2856 CHECK_LAYERS(graph, 0, layerIndex);
2857
2858 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2859
Finn Williams85d36712021-01-26 22:30:06 +00002860 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002861 CHECK_VALID_SIZE(inputs.size(), 1);
2862
Finn Williams85d36712021-01-26 22:30:06 +00002863 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002864 CHECK_VALID_SIZE(outputs.size(), 1);
2865
2866 auto outputInfo = ToTensorInfo(outputs[0]);
2867
2868 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2869 auto layerName = GetLayerName(graph, layerIndex);
2870
2871 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2872 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2873
2874 RegisterInputSlots(graph, layerIndex, layer);
2875 RegisterOutputSlots(graph, layerIndex, layer);
2876}
2877
Finn Williams85d36712021-01-26 22:30:06 +00002878void IDeserializer::DeserializerImpl::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan8b42a382019-03-01 14:24:49 +00002879{
2880 CHECK_LAYERS(graph, 0, layerIndex);
2881 auto inputs = GetInputs(graph, layerIndex);
2882 CHECK_LOCATION();
2883 CHECK_VALID_SIZE(inputs.size(), 1);
2884
2885 auto outputs = GetOutputs(graph, layerIndex);
2886 CHECK_VALID_SIZE(outputs.size(), 1);
2887
2888 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagan8b42a382019-03-01 14:24:49 +00002889
josh minor4a3c6102020-01-06 16:40:46 -06002890 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
2891 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
Sadik Armagan8b42a382019-03-01 14:24:49 +00002892 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2893 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2894
2895 RegisterInputSlots(graph, layerIndex, layer);
2896 RegisterOutputSlots(graph, layerIndex, layer);
2897}
2898
Finn Williams85d36712021-01-26 22:30:06 +00002899void IDeserializer::DeserializerImpl::ParseSlice(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002900{
2901 CHECK_LAYERS(graph, 0, layerIndex);
2902
2903 auto inputs = GetInputs(graph, layerIndex);
2904 CHECK_VALID_SIZE(inputs.size(), 1);
2905
2906 auto outputs = GetOutputs(graph, layerIndex);
2907 CHECK_VALID_SIZE(outputs.size(), 1);
2908
2909 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2910
2911 auto fbBegin = fbDescriptor->begin();
2912 auto fbSize = fbDescriptor->size();
2913
2914 if (fbBegin->Length() != fbSize->Length())
2915 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002916 throw ParseException(fmt::format("Begin and size descriptors must have the same length {}",
2917 CHECK_LOCATION().AsString()));
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002918 }
2919
2920 armnn::SliceDescriptor descriptor;
2921 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
2922 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
2923
2924 auto layerName = GetLayerName(graph, layerIndex);
2925 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2926
2927 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2928 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2929
2930 RegisterInputSlots(graph, layerIndex, layer);
2931 RegisterOutputSlots(graph, layerIndex, layer);
2932}
2933
Finn Williams85d36712021-01-26 22:30:06 +00002934void IDeserializer::DeserializerImpl::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002935{
2936 CHECK_LAYERS(graph, 0, layerIndex);
2937
Finn Williams85d36712021-01-26 22:30:06 +00002938 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002939 CHECK_VALID_SIZE(inputs.size(), 1);
2940
Finn Williams85d36712021-01-26 22:30:06 +00002941 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002942 CHECK_VALID_SIZE(outputs.size(), 1);
2943
2944 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2945
2946 auto flatBufferBegin = flatBufferDescriptor->begin();
2947 auto flatBufferEnd = flatBufferDescriptor->end();
2948 auto flatBufferStride = flatBufferDescriptor->stride();
2949
2950 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
2951 flatBufferBegin->Length() == flatBufferStride->Length()))
2952 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002953 throw ParseException(fmt::format("The size of the begin, end, and stride must be equal {}",
2954 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002955 }
2956
2957 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2958 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2959 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
2960
2961 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
2962 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2963 descriptor.m_EndMask = flatBufferDescriptor->endMask();
2964 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
2965 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
2966 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
2967 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2968
2969 auto layerName = GetLayerName(graph, layerIndex);
2970 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
2971
2972 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2973 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2974
2975 RegisterInputSlots(graph, layerIndex, layer);
2976 RegisterOutputSlots(graph, layerIndex, layer);
2977}
2978
Finn Williams85d36712021-01-26 22:30:06 +00002979void IDeserializer::DeserializerImpl::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
Conor Kennedyda1f9752019-03-01 14:37:12 +00002980{
2981 CHECK_LAYERS(graph, 0, layerIndex);
2982 auto inputs = GetInputs(graph, layerIndex);
2983 CHECK_LOCATION();
2984 CHECK_VALID_SIZE(inputs.size(), 2);
2985
2986 auto outputs = GetOutputs(graph, layerIndex);
2987 CHECK_VALID_SIZE(outputs.size(), 1);
2988
2989 auto layerName = GetLayerName(graph, layerIndex);
2990 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
2991
2992 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2993 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2994
2995 RegisterInputSlots(graph, layerIndex, layer);
2996 RegisterOutputSlots(graph, layerIndex, layer);
2997}
2998
Finn Williams85d36712021-01-26 22:30:06 +00002999void IDeserializer::DeserializerImpl::ParseGather(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003000{
3001 CHECK_LAYERS(graph, 0, layerIndex);
3002
Finn Williams85d36712021-01-26 22:30:06 +00003003 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003004 CHECK_VALID_SIZE(inputs.size(), 2);
3005
Finn Williams85d36712021-01-26 22:30:06 +00003006 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003007 CHECK_VALID_SIZE(outputs.size(), 1);
3008
Teresa Charlin52664732020-06-29 16:27:03 +01003009 armnn::GatherDescriptor descriptor;
3010 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_GatherLayer()->descriptor()->axis();
3011
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003012 auto layerName = GetLayerName(graph, layerIndex);
Teresa Charlin52664732020-06-29 16:27:03 +01003013 IConnectableLayer* layer = m_Network->AddGatherLayer(descriptor, layerName.c_str());
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003014
3015 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003016 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3017
3018 RegisterInputSlots(graph, layerIndex, layer);
3019 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00003020}
3021
Teresa Charlin6966bfa2022-04-25 17:14:50 +01003022void IDeserializer::DeserializerImpl::ParseGatherNd(GraphPtr graph, unsigned int layerIndex)
3023{
3024 CHECK_LAYERS(graph, 0, layerIndex);
3025
3026 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
3027 CHECK_VALID_SIZE(inputs.size(), 2);
3028
3029 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
3030 CHECK_VALID_SIZE(outputs.size(), 1);
3031
3032 auto layerName = GetLayerName(graph, layerIndex);
3033 IConnectableLayer* layer = m_Network->AddGatherNdLayer(layerName.c_str());
3034
3035 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3036 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3037
3038 RegisterInputSlots(graph, layerIndex, layer);
3039 RegisterOutputSlots(graph, layerIndex, layer);
3040}
3041
Finn Williams85d36712021-01-26 22:30:06 +00003042void IDeserializer::DeserializerImpl::ParseMean(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003043{
3044 CHECK_LAYERS(graph, 0, layerIndex);
3045
Finn Williams85d36712021-01-26 22:30:06 +00003046 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003047 CHECK_VALID_SIZE(inputs.size(), 1);
3048
Finn Williams85d36712021-01-26 22:30:06 +00003049 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Sadik Armaganac97c8c2019-03-04 17:44:21 +00003050 CHECK_VALID_SIZE(outputs.size(), 1);
3051
3052 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
3053 auto flatBufferAxis = flatBufferDescriptor->axis();
3054 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
3055
3056 armnn::MeanDescriptor descriptor;
3057 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
3058 descriptor.m_KeepDims = flatBufferKeepDims;
3059
3060 auto layerName = GetLayerName(graph, layerIndex);
3061 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
3062
3063 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3064 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3065
3066 RegisterInputSlots(graph, layerIndex, layer);
3067 RegisterOutputSlots(graph, layerIndex, layer);
3068}
3069
Finn Williams85d36712021-01-26 22:30:06 +00003070void IDeserializer::DeserializerImpl::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
Jim Flynn18ce3382019-03-08 11:08:30 +00003071{
3072 CHECK_LAYERS(graph, 0, layerIndex);
3073
Finn Williams85d36712021-01-26 22:30:06 +00003074 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00003075 CHECK_VALID_SIZE(inputs.size(), 1);
3076
Finn Williams85d36712021-01-26 22:30:06 +00003077 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Jim Flynn18ce3382019-03-08 11:08:30 +00003078
3079 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
3080 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
3081 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
3082 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
3083 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
3084 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
3085
3086 // Check numViews and numDimensions corresponds to the ones already serialized ...
3087 // numViews == flatBufferViewSizes.size();
3088 // foreach: numDimensions == flatBufferViewSizes[x].size();
3089
3090 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
3091 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
3092 {
3093 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
3094 {
3095 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
3096 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
3097 }
3098 }
3099
3100 auto layerName = GetLayerName(graph, layerIndex);
3101 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
3102
3103 // I could have as many outputs as views ...
3104 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
3105 {
3106 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
3107 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
3108 }
3109
3110 RegisterInputSlots(graph, layerIndex, layer);
3111 RegisterOutputSlots(graph, layerIndex, layer);
3112}
3113
Finn Williams85d36712021-01-26 22:30:06 +00003114armnn::LstmDescriptor IDeserializer::DeserializerImpl::GetLstmDescriptor(LstmDescriptorPtr lstmDescriptor)
Jim Flynn11af3752019-03-19 17:22:29 +00003115{
3116 armnn::LstmDescriptor desc;
3117
3118 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
3119 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
3120 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
3121 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
3122 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
3123 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01003124 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00003125
3126 return desc;
3127}
3128
/// Deserializes an LstmLayer: rebuilds the LstmDescriptor and LstmInputParams
/// from the flatbuffer and adds the resulting layer to m_Network.
/// The serialized layer has 3 inputs and 4 outputs; which weight/bias tensors
/// are read depends on the CIFG/projection/peephole/layer-norm flags in the
/// descriptor.
void IDeserializer::DeserializerImpl::ParseLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 4);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);

    // LstmInputParams holds raw pointers; every ConstTensor below (including
    // the conditionally-filled ones) is a function-scope local so it stays
    // alive until AddLstmLayer() has consumed the parameters.
    armnn::LstmInputParams lstmInputParams;

    // Mandatory parameters - present regardless of the enabled features.
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    lstmInputParams.m_ForgetGateBias = &forgetGateBias;
    lstmInputParams.m_CellBias = &cellBias;
    lstmInputParams.m_OutputGateBias = &outputGateBias;

    // Input-gate parameters - only serialized when CIFG is disabled.
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor inputGateBias;
    if (!lstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        lstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Projection parameters - only when projection is enabled.
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;
    if (lstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        lstmInputParams.m_ProjectionWeights = &projectionWeights;
        lstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Peephole parameters - only when peephole connections are enabled.
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;
    if (lstmDescriptor.m_PeepholeEnabled)
    {
        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Layer-normalisation weights - only when layer norm is enabled; the
    // input-gate norm weights are additionally gated on CIFG being disabled.
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;
    if (lstmDescriptor.m_LayerNormEnabled)
    {
        if (!lstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }
        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());

    // Set tensor info on all four serialized output slots.
    armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);

    armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);

    armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);

    armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
    layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3244
Finn Williams85d36712021-01-26 22:30:06 +00003245armnn::QLstmDescriptor IDeserializer::DeserializerImpl::GetQLstmDescriptor(QLstmDescriptorPtr qLstmDescriptor)
James Conroy8d333182020-05-13 10:27:58 +01003246{
3247 armnn::QLstmDescriptor desc;
3248
3249 desc.m_CifgEnabled = qLstmDescriptor->cifgEnabled();
3250 desc.m_PeepholeEnabled = qLstmDescriptor->peepholeEnabled();
3251 desc.m_ProjectionEnabled = qLstmDescriptor->projectionEnabled();
3252 desc.m_LayerNormEnabled = qLstmDescriptor->layerNormEnabled();
3253
3254 desc.m_CellClip = qLstmDescriptor->cellClip();
3255 desc.m_ProjectionClip = qLstmDescriptor->projectionClip();
3256
3257 desc.m_InputIntermediateScale = qLstmDescriptor->inputIntermediateScale();
3258 desc.m_ForgetIntermediateScale = qLstmDescriptor->forgetIntermediateScale();
3259 desc.m_CellIntermediateScale = qLstmDescriptor->cellIntermediateScale();
3260 desc.m_OutputIntermediateScale = qLstmDescriptor->outputIntermediateScale();
3261
3262 desc.m_HiddenStateScale = qLstmDescriptor->hiddenStateScale();
3263 desc.m_HiddenStateZeroPoint = qLstmDescriptor->hiddenStateZeroPoint();
3264
3265 return desc;
3266}
3267
/// Deserializes a QLstmLayer: rebuilds the QLstmDescriptor and the (quantized)
/// LstmInputParams from the flatbuffer and adds the layer to m_Network.
/// The serialized layer has 3 inputs and 3 outputs; optional parameter groups
/// are read according to the CIFG/projection/peephole/layer-norm flags.
void IDeserializer::DeserializerImpl::ParseQLstm(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);

    auto inputs = GetInputs(graph, layerIndex);
    CHECK_VALID_SIZE(inputs.size(), 3);

    auto outputs = GetOutputs(graph, layerIndex);
    CHECK_VALID_SIZE(outputs.size(), 3);

    auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
    auto layerName = GetLayerName(graph, layerIndex);
    auto flatBufferDescriptor = flatBufferLayer->descriptor();
    auto flatBufferInputParams = flatBufferLayer->inputParams();

    auto qLstmDescriptor = GetQLstmDescriptor(flatBufferDescriptor);
    // LstmInputParams stores raw pointers; all ConstTensor locals below are
    // kept at function scope so they outlive the AddQLstmLayer() call.
    armnn::LstmInputParams qLstmInputParams;

    // Mandatory params
    armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
    armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
    armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
    armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
    armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
    armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
    armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
    armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
    armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());

    qLstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
    qLstmInputParams.m_InputToCellWeights = &inputToCellWeights;
    qLstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
    qLstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
    qLstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
    qLstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
    qLstmInputParams.m_ForgetGateBias = &forgetGateBias;
    qLstmInputParams.m_CellBias = &cellBias;
    qLstmInputParams.m_OutputGateBias = &outputGateBias;

    // Optional CIFG params (input gate - only serialized when CIFG is disabled)
    armnn::ConstTensor inputToInputWeights;
    armnn::ConstTensor recurrentToInputWeights;
    armnn::ConstTensor inputGateBias;

    if (!qLstmDescriptor.m_CifgEnabled)
    {
        inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
        recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
        inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());

        qLstmInputParams.m_InputToInputWeights = &inputToInputWeights;
        qLstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
        qLstmInputParams.m_InputGateBias = &inputGateBias;
    }

    // Optional projection params
    armnn::ConstTensor projectionWeights;
    armnn::ConstTensor projectionBias;

    if (qLstmDescriptor.m_ProjectionEnabled)
    {
        projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
        projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());

        qLstmInputParams.m_ProjectionWeights = &projectionWeights;
        qLstmInputParams.m_ProjectionBias = &projectionBias;
    }

    // Optional peephole params (cell-to-input additionally requires CIFG disabled)
    armnn::ConstTensor cellToInputWeights;
    armnn::ConstTensor cellToForgetWeights;
    armnn::ConstTensor cellToOutputWeights;

    if (qLstmDescriptor.m_PeepholeEnabled)
    {
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
            qLstmInputParams.m_CellToInputWeights = &cellToInputWeights;
        }

        cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
        cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());

        qLstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
        qLstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
    }

    // Optional layer norm params (input-gate norm additionally requires CIFG disabled)
    armnn::ConstTensor inputLayerNormWeights;
    armnn::ConstTensor forgetLayerNormWeights;
    armnn::ConstTensor cellLayerNormWeights;
    armnn::ConstTensor outputLayerNormWeights;

    if (qLstmDescriptor.m_LayerNormEnabled)
    {
        if (!qLstmDescriptor.m_CifgEnabled)
        {
            inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
            qLstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
        }

        forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
        cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
        outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());

        qLstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
        qLstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
        qLstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
    }

    IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());

    // Set tensor info on all three serialized output slots.
    armnn::TensorInfo outputStateOutInfo = ToTensorInfo(outputs[0]);
    layer->GetOutputSlot(0).SetTensorInfo(outputStateOutInfo);

    armnn::TensorInfo cellStateOutInfo = ToTensorInfo(outputs[1]);
    layer->GetOutputSlot(1).SetTensorInfo(cellStateOutInfo);

    armnn::TensorInfo outputInfo = ToTensorInfo(outputs[2]);
    layer->GetOutputSlot(2).SetTensorInfo(outputInfo);

    RegisterInputSlots(graph, layerIndex, layer);
    RegisterOutputSlots(graph, layerIndex, layer);
}
3393
Finn Williams85d36712021-01-26 22:30:06 +00003394void IDeserializer::DeserializerImpl::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
Jan Eilers5b01a892019-07-23 09:47:43 +01003395{
3396 CHECK_LAYERS(graph, 0, layerIndex);
3397
3398 auto inputs = GetInputs(graph, layerIndex);
3399 CHECK_VALID_SIZE(inputs.size(), 3);
3400
3401 auto outputs = GetOutputs(graph, layerIndex);
3402 CHECK_VALID_SIZE(outputs.size(), 2);
3403
3404 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
3405 auto layerName = GetLayerName(graph, layerIndex);
3406 auto flatBufferInputParams = flatBufferLayer->inputParams();
3407
3408 armnn::QuantizedLstmInputParams lstmInputParams;
3409
3410 armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
3411 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
3412 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
3413 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
3414 armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
3415 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
3416 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
3417 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
3418 armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
3419 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
3420 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
3421 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
3422
3423 lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
3424 lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
3425 lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
3426 lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
3427 lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
3428 lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
3429 lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
3430 lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
3431 lstmInputParams.m_InputGateBias = &inputGateBias;
3432 lstmInputParams.m_ForgetGateBias = &forgetGateBias;
3433 lstmInputParams.m_CellBias = &cellBias;
3434 lstmInputParams.m_OutputGateBias = &outputGateBias;
3435
3436 IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());
3437
3438 armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
3439 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);
3440
3441 armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
3442 layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);
3443
3444 RegisterInputSlots(graph, layerIndex, layer);
3445 RegisterOutputSlots(graph, layerIndex, layer);
3446}
3447
Finn Williams85d36712021-01-26 22:30:06 +00003448void IDeserializer::DeserializerImpl::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003449{
3450 CHECK_LAYERS(graph, 0, layerIndex);
3451
Finn Williams85d36712021-01-26 22:30:06 +00003452 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003453 CHECK_VALID_SIZE(inputs.size(), 1);
3454
Finn Williams85d36712021-01-26 22:30:06 +00003455 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00003456 CHECK_VALID_SIZE(outputs.size(), 1);
3457
3458 const std::string layerName = GetLayerName(graph, layerIndex);
3459 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
3460
3461 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3462 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3463
3464 RegisterInputSlots(graph, layerIndex, layer);
3465 RegisterOutputSlots(graph, layerIndex, layer);
3466}
3467
Finn Williams85d36712021-01-26 22:30:06 +00003468void IDeserializer::DeserializerImpl::ParseMerge(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003469{
3470 CHECK_LAYERS(graph, 0, layerIndex);
3471
Finn Williams85d36712021-01-26 22:30:06 +00003472 TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003473 CHECK_VALID_SIZE(inputs.size(), 2);
3474
Finn Williams85d36712021-01-26 22:30:06 +00003475 TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01003476 CHECK_VALID_SIZE(outputs.size(), 1);
3477
3478 const std::string layerName = GetLayerName(graph, layerIndex);
3479 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
3480
3481 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3482 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3483
3484 RegisterInputSlots(graph, layerIndex, layer);
3485 RegisterOutputSlots(graph, layerIndex, layer);
3486}
3487
Finn Williams85d36712021-01-26 22:30:06 +00003488void IDeserializer::DeserializerImpl::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
Sadik Armaganeff363d2019-04-05 15:25:46 +01003489{
3490 CHECK_LAYERS(graph, 0, layerIndex);
3491 auto inputs = GetInputs(graph, layerIndex);
3492 CHECK_LOCATION();
3493 CHECK_VALID_SIZE(inputs.size(), 2);
3494
3495 auto outputs = GetOutputs(graph, layerIndex);
3496 CHECK_VALID_SIZE(outputs.size(), 2);
3497
3498 auto layerName = GetLayerName(graph, layerIndex);
3499 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
3500
3501 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
3502 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
3503
3504 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
3505 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
3506
3507 RegisterInputSlots(graph, layerIndex, layer);
3508 RegisterOutputSlots(graph, layerIndex, layer);
3509}
3510
Finn Williams85d36712021-01-26 22:30:06 +00003511void IDeserializer::DeserializerImpl::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01003512{
3513 CHECK_LAYERS(graph, 0, layerIndex);
3514 auto inputs = GetInputs(graph, layerIndex);
3515 CHECK_LOCATION();
3516 CHECK_VALID_SIZE(inputs.size(), 2);
3517
3518 auto outputs = GetOutputs(graph, layerIndex);
3519 CHECK_VALID_SIZE(outputs.size(), 1);
3520
3521 auto layerName = GetLayerName(graph, layerIndex);
3522 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
3523
3524 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3525 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3526
3527 RegisterInputSlots(graph, layerIndex, layer);
3528 RegisterOutputSlots(graph, layerIndex, layer);
3529}
3530
Finn Williams85d36712021-01-26 22:30:06 +00003531void IDeserializer::DeserializerImpl::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
Mike Kellyc9ea45a2020-02-28 18:11:58 +00003532{
3533 CHECK_LAYERS(graph, 0, layerIndex);
3534
3535 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
3536
3537 auto inputs = GetInputs(graph, layerIndex);
3538 CHECK_VALID_SIZE(inputs.size(), 1);
3539
3540 auto outputs = GetOutputs(graph, layerIndex);
3541 CHECK_VALID_SIZE(outputs.size(), 1);
3542 auto outputInfo = ToTensorInfo(outputs[0]);
3543
3544 auto layerName = GetLayerName(graph, layerIndex);
3545 const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
3546
3547 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
3548 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
3549
3550 RegisterInputSlots(graph, layerIndex, layer);
3551 RegisterOutputSlots(graph, layerIndex, layer);
3552}
3553
Finn Williams85d36712021-01-26 22:30:06 +00003554void IDeserializer::DeserializerImpl::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01003555{
3556 CHECK_LAYERS(graph, 0, layerIndex);
3557
3558 auto inputs = GetInputs(graph, layerIndex);
3559 CHECK_VALID_SIZE(inputs.size(), 1);
3560
3561 auto outputs = GetOutputs(graph, layerIndex);
3562 CHECK_VALID_SIZE(outputs.size(), 1);
3563
3564 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
3565 auto layerName = GetLayerName(graph, layerIndex);
3566 auto serializerDescriptor = serializerLayer->descriptor();
3567
3568 armnn::TransposeConvolution2dDescriptor descriptor;
3569 descriptor.m_PadLeft = serializerDescriptor->padLeft();
3570 descriptor.m_PadRight = serializerDescriptor->padRight();
3571 descriptor.m_PadTop = serializerDescriptor->padTop();
3572 descriptor.m_PadBottom = serializerDescriptor->padBottom();
3573 descriptor.m_StrideX = serializerDescriptor->strideX();
3574 descriptor.m_StrideY = serializerDescriptor->strideY();;
3575 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();;
3576 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
3577
3578 // weights & biases
3579 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
3580 armnn::Optional<armnn::ConstTensor> optionalBiases;
3581 if (descriptor.m_BiasEnabled)
3582 {
3583 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
3584 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
3585 }
3586
3587 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
3588 weights,
3589 optionalBiases,
3590 layerName.c_str());
3591
3592 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3593 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3594
3595 RegisterInputSlots(graph, layerIndex, layer);
3596 RegisterOutputSlots(graph, layerIndex, layer);
3597}
3598
Finn Williams85d36712021-01-26 22:30:06 +00003599void IDeserializer::DeserializerImpl::ParseStack(GraphPtr graph, unsigned int layerIndex)
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003600{
3601 CHECK_LAYERS(graph, 0, layerIndex);
3602 auto inputs = GetInputs(graph, layerIndex);
3603
3604 auto outputs = GetOutputs(graph, layerIndex);
3605 CHECK_VALID_SIZE(outputs.size(), 1);
3606
3607 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
3608 unsigned int axis = flatBufferDescriptor->axis();
3609 unsigned int numInputs = flatBufferDescriptor->numInputs();
3610 CHECK_VALID_SIZE(inputs.size(), numInputs);
3611
3612 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
3613 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
3614 flatBufferInputShape->begin() + flatBufferInputShape->size());
3615
3616 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
3617 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
3618
3619 for (unsigned int i=0; i<inputs.size(); ++i)
3620 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01003621 armnn::TensorShape inputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003622 if (descriptor.m_InputShape != inputShape)
3623 {
3624 std::stringstream ss;
3625 ss << "Shape of input "
3626 << i
3627 << " "
3628 << inputShape
3629 << " does not equal defined input shape "
3630 << descriptor.m_InputShape
3631 << ": "
3632 << CHECK_LOCATION().AsString();
3633 throw ParseException(ss.str());
3634 }
3635 }
3636
3637 auto layerName = GetLayerName(graph, layerIndex);
3638 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
3639
3640 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3641 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3642
3643 RegisterInputSlots(graph, layerIndex, layer);
3644 RegisterOutputSlots(graph, layerIndex, layer);
3645}
3646
Finn Williams85d36712021-01-26 22:30:06 +00003647void IDeserializer::DeserializerImpl::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tar85121a22019-10-23 10:41:35 +01003648{
3649 CHECK_LAYERS(graph, 0, layerIndex);
3650
3651 auto inputs = GetInputs(graph, layerIndex);
3652 auto outputs = GetOutputs(graph, layerIndex);
3653
3654 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
3655 auto fbDescriptor = fbLayer->descriptor();
3656
3657 armnn::StandInDescriptor descriptor;
3658 descriptor.m_NumInputs = fbDescriptor->numInputs();
3659 descriptor.m_NumOutputs = fbDescriptor->numOutputs();
3660
3661 CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
3662 CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);
3663
3664 const std::string layerName = GetLayerName(graph, layerIndex);
3665 armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());
3666
3667 for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
3668 {
3669 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
3670 layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
3671 }
3672
3673 RegisterInputSlots(graph, layerIndex, layer);
3674 RegisterOutputSlots(graph, layerIndex, layer);
3675}
3676
Narumol Prangnawarata0162e12021-07-23 14:47:49 +01003677armnn::UnidirectionalSequenceLstmDescriptor IDeserializer::DeserializerImpl::GetUnidirectionalSequenceLstmDescriptor(
3678 UnidirectionalSequenceLstmDescriptorPtr descriptor)
3679{
3680 armnn::UnidirectionalSequenceLstmDescriptor desc;
3681
3682 desc.m_ActivationFunc = descriptor->activationFunc();
3683 desc.m_ClippingThresCell = descriptor->clippingThresCell();
3684 desc.m_ClippingThresProj = descriptor->clippingThresProj();
3685 desc.m_CifgEnabled = descriptor->cifgEnabled();
3686 desc.m_PeepholeEnabled = descriptor->peepholeEnabled();
3687 desc.m_ProjectionEnabled = descriptor->projectionEnabled();
3688 desc.m_LayerNormEnabled = descriptor->layerNormEnabled();
3689 desc.m_TimeMajor = descriptor->timeMajor();
3690
3691 return desc;
3692}
3693
3694void IDeserializer::DeserializerImpl::ParseUnidirectionalSequenceLstm(GraphPtr graph, unsigned int layerIndex)
3695{
3696 CHECK_LAYERS(graph, 0, layerIndex);
3697
3698 auto inputs = GetInputs(graph, layerIndex);
3699 CHECK_VALID_SIZE(inputs.size(), 3);
3700
3701 auto outputs = GetOutputs(graph, layerIndex);
Mike Kelly12994962022-04-21 11:57:09 +01003702 CHECK_VALID_SIZE(outputs.size(), 3);
Narumol Prangnawarata0162e12021-07-23 14:47:49 +01003703
3704 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_UnidirectionalSequenceLstmLayer();
3705 auto layerName = GetLayerName(graph, layerIndex);
3706 auto flatBufferDescriptor = flatBufferLayer->descriptor();
3707 auto flatBufferInputParams = flatBufferLayer->inputParams();
3708
3709 auto descriptor = GetUnidirectionalSequenceLstmDescriptor(flatBufferDescriptor);
3710
3711 armnn::LstmInputParams lstmInputParams;
3712
3713 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
3714 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
3715 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
3716 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
3717 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
3718 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
3719 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
3720 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
3721 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
3722
3723 lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
3724 lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
3725 lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
3726 lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
3727 lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
3728 lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
3729 lstmInputParams.m_ForgetGateBias = &forgetGateBias;
3730 lstmInputParams.m_CellBias = &cellBias;
3731 lstmInputParams.m_OutputGateBias = &outputGateBias;
3732
3733 armnn::ConstTensor inputToInputWeights;
3734 armnn::ConstTensor recurrentToInputWeights;
3735 armnn::ConstTensor cellToInputWeights;
3736 armnn::ConstTensor inputGateBias;
3737 if (!descriptor.m_CifgEnabled)
3738 {
3739 inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
3740 recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
3741 inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
3742
3743 lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
3744 lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
3745 lstmInputParams.m_InputGateBias = &inputGateBias;
3746
3747 if (descriptor.m_PeepholeEnabled)
3748 {
3749 cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
3750 lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
3751 }
3752 }
3753
3754 armnn::ConstTensor projectionWeights;
3755 armnn::ConstTensor projectionBias;
3756 if (descriptor.m_ProjectionEnabled)
3757 {
3758 projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
3759 projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());
3760
3761 lstmInputParams.m_ProjectionWeights = &projectionWeights;
3762 lstmInputParams.m_ProjectionBias = &projectionBias;
3763 }
3764
3765 armnn::ConstTensor cellToForgetWeights;
3766 armnn::ConstTensor cellToOutputWeights;
3767 if (descriptor.m_PeepholeEnabled)
3768 {
3769 cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
3770 cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());
3771
3772 lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
3773 lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
3774 }
3775
3776 armnn::ConstTensor inputLayerNormWeights;
3777 armnn::ConstTensor forgetLayerNormWeights;
3778 armnn::ConstTensor cellLayerNormWeights;
3779 armnn::ConstTensor outputLayerNormWeights;
3780 if (descriptor.m_LayerNormEnabled)
3781 {
3782 if (!descriptor.m_CifgEnabled)
3783 {
3784 inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
3785 lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
3786 }
3787 forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
3788 cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
3789 outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
3790
3791 lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
3792 lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
3793 lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
3794 }
3795
3796 IConnectableLayer* layer = m_Network->AddUnidirectionalSequenceLstmLayer(descriptor,
3797 lstmInputParams,
3798 layerName.c_str());
3799
Mike Kelly12994962022-04-21 11:57:09 +01003800 armnn::TensorInfo outputTensorInfo0 = ToTensorInfo(outputs[0]);
3801 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo0);
3802
3803 armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[1]);
3804 layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo1);
3805
3806 armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[2]);
3807 layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo2);
Narumol Prangnawarata0162e12021-07-23 14:47:49 +01003808
3809 RegisterInputSlots(graph, layerIndex, layer);
3810 RegisterOutputSlots(graph, layerIndex, layer);
3811}
3812
Derek Lamberti0028d1b2019-02-20 13:57:42 +00003813} // namespace armnnDeserializer