//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include "Deserializer.hpp"

#include <armnn/Descriptors.hpp>
#include <armnn/Exceptions.hpp>
#include <armnn/TypesUtils.hpp>
#include <armnn/LstmParams.hpp>
#include <armnn/QuantizedLstmParams.hpp>

#include <armnnUtils/Permute.hpp>
#include <armnnUtils/Transpose.hpp>
#include <armnn/utility/Assert.hpp>
#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/NumericCast.hpp>

#include <ParserHelper.hpp>
#include <VerificationHelpers.hpp>

#include <fmt/format.h>

#include <fstream>
#include <algorithm>
#include <limits>
#include <numeric>

using armnn::ParseException;
using namespace armnn;
using namespace armnnSerializer;

namespace armnnDeserializer
{

namespace
{

const uint32_t VIRTUAL_LAYER_ID = std::numeric_limits<uint32_t>::max();

void CheckGraph(const Deserializer::GraphPtr& graph,
                unsigned int layersIndex,
                const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
                                         "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(fmt::format("{0} was called with an invalid layers index. layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
}

void CheckLayers(const Deserializer::GraphPtr& graph,
                 unsigned int layersIndex,
                 unsigned int layerIndex,
                 const CheckLocation& location)
{
    if (graph->layers() == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with invalid (null) graph. "
                                         "Possible reason is that the graph is not yet loaded and Unpack(ed). "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    else if (layersIndex >= graph->layers()->size())
    {
        throw ParseException(fmt::format("{0} was called with an invalid layers index. "
                                         "layers:{1} at {2}",
                                         location.m_Function,
                                         layersIndex,
                                         location.FileLine()));
    }
    else if (layerIndex >= graph->layers()[layersIndex].size()
             && layerIndex != VIRTUAL_LAYER_ID)
    {
        throw ParseException(fmt::format("{0} was called with an invalid layer index. "
                                         "layers:{1} layer:{2} at {3}",
                                         location.m_Function,
                                         layersIndex,
                                         layerIndex,
                                         location.FileLine()));
    }
}

void CheckTensorPtr(Deserializer::TensorRawPtr rawPtr,
                    const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with a null tensor pointer. at {1}",
                                         location.m_Function,
                                         location.FileLine()));
    }
}

void CheckConstTensorPtr(Deserializer::ConstTensorRawPtr rawPtr,
                         const CheckLocation& location)
{
    if (rawPtr == nullptr)
    {
        throw ParseException(fmt::format("{0} was called with a null const tensor pointer. at {1}",
                                         location.m_Function,
                                         location.FileLine()));
    }
}

void CheckConstTensorSize(const unsigned int constTensorSize,
                          const unsigned int tensorSize,
                          const CheckLocation& location)
{
    if (constTensorSize != tensorSize)
    {
        throw ParseException(fmt::format("{0} wrong number of components supplied to tensor. at:{1}",
                                         location.m_Function,
                                         location.FileLine()));
    }
}

#define CHECK_TENSOR_PTR(TENSOR_PTR) \
    CheckTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_SIZE(CONST_TENSOR_SIZE, TENSOR_SIZE) \
    CheckConstTensorSize(CONST_TENSOR_SIZE, TENSOR_SIZE, CHECK_LOCATION())

#define CHECK_CONST_TENSOR_PTR(TENSOR_PTR) \
    CheckConstTensorPtr(TENSOR_PTR, CHECK_LOCATION())

#define CHECK_LAYERS(GRAPH, LAYERS_INDEX, LAYER_INDEX) \
    CheckLayers(GRAPH, LAYERS_INDEX, LAYER_INDEX, CHECK_LOCATION())

#define CHECK_GRAPH(GRAPH, LAYERS_INDEX) \
    CheckGraph(GRAPH, LAYERS_INDEX, CHECK_LOCATION())
}

bool CheckShape(const armnn::TensorShape& actual, const std::vector<uint32_t>& expected)
{
    const unsigned int actualSize = actual.GetNumDimensions();
    if (actualSize != expected.size())
    {
        return false;
    }

    for (unsigned int i = 0u; i < actualSize; i++)
    {
        if (actual[i] != static_cast<unsigned int>(expected[i]))
        {
            return false;
        }
    }

    return true;
}

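// The constructor builds a dispatch table indexed by the armnnSerializer Layer enum.
// Every slot starts out pointing at ParseUnsupportedLayer, so any layer type without a
// registered handler fails with a descriptive ParseException rather than a crash.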
Deserializer::Deserializer()
: m_Network(nullptr, nullptr),
// May require LayerType_Max to be included
m_ParserFunctions(Layer_MAX+1, &Deserializer::ParseUnsupportedLayer)
{
    // register supported layers
    m_ParserFunctions[Layer_AbsLayer]                    = &Deserializer::ParseAbs;
    m_ParserFunctions[Layer_ActivationLayer]             = &Deserializer::ParseActivation;
    m_ParserFunctions[Layer_AdditionLayer]               = &Deserializer::ParseAdd;
    m_ParserFunctions[Layer_ArgMinMaxLayer]              = &Deserializer::ParseArgMinMax;
    m_ParserFunctions[Layer_BatchToSpaceNdLayer]         = &Deserializer::ParseBatchToSpaceNd;
    m_ParserFunctions[Layer_BatchNormalizationLayer]     = &Deserializer::ParseBatchNormalization;
    m_ParserFunctions[Layer_ComparisonLayer]             = &Deserializer::ParseComparison;
    m_ParserFunctions[Layer_ConcatLayer]                 = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_ConstantLayer]               = &Deserializer::ParseConstant;
    m_ParserFunctions[Layer_Convolution2dLayer]          = &Deserializer::ParseConvolution2d;
    m_ParserFunctions[Layer_DepthToSpaceLayer]           = &Deserializer::ParseDepthToSpace;
    m_ParserFunctions[Layer_DepthwiseConvolution2dLayer] = &Deserializer::ParseDepthwiseConvolution2d;
    m_ParserFunctions[Layer_DequantizeLayer]             = &Deserializer::ParseDequantize;
    m_ParserFunctions[Layer_DetectionPostProcessLayer]   = &Deserializer::ParseDetectionPostProcess;
    m_ParserFunctions[Layer_DivisionLayer]               = &Deserializer::ParseDivision;
    m_ParserFunctions[Layer_ElementwiseUnaryLayer]       = &Deserializer::ParseElementwiseUnary;
    m_ParserFunctions[Layer_EqualLayer]                  = &Deserializer::ParseEqual;
    m_ParserFunctions[Layer_FullyConnectedLayer]         = &Deserializer::ParseFullyConnected;
    m_ParserFunctions[Layer_FillLayer]                   = &Deserializer::ParseFill;
    m_ParserFunctions[Layer_FloorLayer]                  = &Deserializer::ParseFloor;
    m_ParserFunctions[Layer_GatherLayer]                 = &Deserializer::ParseGather;
    m_ParserFunctions[Layer_GreaterLayer]                = &Deserializer::ParseGreater;
    m_ParserFunctions[Layer_InstanceNormalizationLayer]  = &Deserializer::ParseInstanceNormalization;
    m_ParserFunctions[Layer_L2NormalizationLayer]        = &Deserializer::ParseL2Normalization;
    m_ParserFunctions[Layer_LogicalBinaryLayer]          = &Deserializer::ParseLogicalBinary;
    m_ParserFunctions[Layer_LogSoftmaxLayer]             = &Deserializer::ParseLogSoftmax;
    m_ParserFunctions[Layer_LstmLayer]                   = &Deserializer::ParseLstm;
    m_ParserFunctions[Layer_MaximumLayer]                = &Deserializer::ParseMaximum;
    m_ParserFunctions[Layer_MeanLayer]                   = &Deserializer::ParseMean;
    m_ParserFunctions[Layer_MinimumLayer]                = &Deserializer::ParseMinimum;
    m_ParserFunctions[Layer_MergeLayer]                  = &Deserializer::ParseMerge;
    m_ParserFunctions[Layer_MergerLayer]                 = &Deserializer::ParseConcat;
    m_ParserFunctions[Layer_MultiplicationLayer]         = &Deserializer::ParseMultiplication;
    m_ParserFunctions[Layer_NormalizationLayer]          = &Deserializer::ParseNormalization;
    m_ParserFunctions[Layer_PadLayer]                    = &Deserializer::ParsePad;
    m_ParserFunctions[Layer_PermuteLayer]                = &Deserializer::ParsePermute;
    m_ParserFunctions[Layer_Pooling2dLayer]              = &Deserializer::ParsePooling2d;
    m_ParserFunctions[Layer_PreluLayer]                  = &Deserializer::ParsePrelu;
    m_ParserFunctions[Layer_QLstmLayer]                  = &Deserializer::ParseQLstm;
    m_ParserFunctions[Layer_QuantizeLayer]               = &Deserializer::ParseQuantize;
    m_ParserFunctions[Layer_QuantizedLstmLayer]          = &Deserializer::ParseQuantizedLstm;
    m_ParserFunctions[Layer_RankLayer]                   = &Deserializer::ParseRank;
    m_ParserFunctions[Layer_ReshapeLayer]                = &Deserializer::ParseReshape;
    m_ParserFunctions[Layer_ResizeBilinearLayer]         = &Deserializer::ParseResizeBilinear;
    m_ParserFunctions[Layer_ResizeLayer]                 = &Deserializer::ParseResize;
    m_ParserFunctions[Layer_RsqrtLayer]                  = &Deserializer::ParseRsqrt;
    m_ParserFunctions[Layer_SliceLayer]                  = &Deserializer::ParseSlice;
    m_ParserFunctions[Layer_SoftmaxLayer]                = &Deserializer::ParseSoftmax;
    m_ParserFunctions[Layer_SpaceToBatchNdLayer]         = &Deserializer::ParseSpaceToBatchNd;
    m_ParserFunctions[Layer_SpaceToDepthLayer]           = &Deserializer::ParseSpaceToDepth;
    m_ParserFunctions[Layer_SplitterLayer]               = &Deserializer::ParseSplitter;
    m_ParserFunctions[Layer_StackLayer]                  = &Deserializer::ParseStack;
    m_ParserFunctions[Layer_StandInLayer]                = &Deserializer::ParseStandIn;
    m_ParserFunctions[Layer_StridedSliceLayer]           = &Deserializer::ParseStridedSlice;
    m_ParserFunctions[Layer_SubtractionLayer]            = &Deserializer::ParseSubtraction;
    m_ParserFunctions[Layer_SwitchLayer]                 = &Deserializer::ParseSwitch;
    m_ParserFunctions[Layer_TransposeConvolution2dLayer] = &Deserializer::ParseTransposeConvolution2d;
    m_ParserFunctions[Layer_TransposeLayer]              = &Deserializer::ParseTranspose;
}

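// Each serialized layer is stored as a flatbuffers union, so the common LayerBase has to be
// reached through the concrete layer_as_<Type>() accessor. Input and output layers wrap a
// bindable layer base, hence the extra ->base() hop for those two cases.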
Deserializer::LayerBaseRawPtr Deserializer::GetBaseLayer(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    switch(layerType)
    {
        case Layer::Layer_AbsLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AbsLayer()->base();
        case Layer::Layer_ActivationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ActivationLayer()->base();
        case Layer::Layer_AdditionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_AdditionLayer()->base();
        case Layer::Layer_ArgMinMaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer()->base();
        case Layer::Layer_BatchToSpaceNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->base();
        case Layer::Layer_BatchNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer()->base();
        case Layer::Layer_ComparisonLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ComparisonLayer()->base();
        case Layer::Layer_ConcatLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConcatLayer()->base();
        case Layer::Layer_ConstantLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ConstantLayer()->base();
        case Layer::Layer_Convolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Convolution2dLayer()->base();
        case Layer::Layer_DepthToSpaceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->base();
        case Layer::Layer_DepthwiseConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer()->base();
        case Layer::Layer_DequantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DequantizeLayer()->base();
        case Layer::Layer_DetectionPostProcessLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer()->base();
        case Layer::Layer_DivisionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_DivisionLayer()->base();
        case Layer::Layer_EqualLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_EqualLayer()->base();
        case Layer::Layer_ElementwiseUnaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer()->base();
        case Layer::Layer_FullyConnectedLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer()->base();
        case Layer::Layer_FillLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FillLayer()->base();
        case Layer::Layer_FloorLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_FloorLayer()->base();
        case Layer::Layer_GatherLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GatherLayer()->base();
        case Layer::Layer_GreaterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_GreaterLayer()->base();
        case Layer::Layer_InputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->base();
        case Layer::Layer_InstanceNormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer()->base();
        case Layer::Layer_L2NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer()->base();
        case Layer::Layer_LogicalBinaryLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer()->base();
        case Layer::Layer_LogSoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->base();
        case Layer::Layer_LstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_LstmLayer()->base();
        case Layer::Layer_MeanLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MeanLayer()->base();
        case Layer::Layer_MinimumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MinimumLayer()->base();
        case Layer::Layer_MaximumLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MaximumLayer()->base();
        case Layer::Layer_MergeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergeLayer()->base();
        case Layer::Layer_MergerLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MergerLayer()->base();
        case Layer::Layer_MultiplicationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_MultiplicationLayer()->base();
        case Layer::Layer_NormalizationLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->base();
        case Layer::Layer_OutputLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->base();
        case Layer::Layer_PadLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PadLayer()->base();
        case Layer::Layer_PermuteLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PermuteLayer()->base();
        case Layer::Layer_Pooling2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->base();
        case Layer::Layer_PreluLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_PreluLayer()->base();
        case Layer::Layer_QLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QLstmLayer()->base();
        case Layer::Layer_QuantizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizeLayer()->base();
        case Layer::Layer_QuantizedLstmLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer()->base();
        case Layer::Layer_RankLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RankLayer()->base();
        case Layer::Layer_ReshapeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->base();
        case Layer::Layer_ResizeBilinearLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->base();
        case Layer::Layer_ResizeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_ResizeLayer()->base();
        case Layer::Layer_RsqrtLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_RsqrtLayer()->base();
        case Layer::Layer_SliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SliceLayer()->base();
        case Layer::Layer_SoftmaxLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->base();
        case Layer::Layer_SpaceToBatchNdLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->base();
        case Layer::Layer_SpaceToDepthLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->base();
        case Layer::Layer_SplitterLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SplitterLayer()->base();
        case Layer::Layer_StackLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StackLayer()->base();
        case Layer::Layer_StandInLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StandInLayer()->base();
        case Layer::Layer_StridedSliceLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->base();
        case Layer::Layer_SubtractionLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SubtractionLayer()->base();
        case Layer::Layer_SwitchLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_SwitchLayer()->base();
        case Layer::Layer_TransposeConvolution2dLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer()->base();
        case Layer::Layer_TransposeLayer:
            return graphPtr->layers()->Get(layerIndex)->layer_as_TransposeLayer()->base();
        case Layer::Layer_NONE:
        default:
            throw ParseException(fmt::format("Layer type {} not recognized", layerType));
    }
}

std::string Deserializer::GetLayerName(const GraphPtr& graph, unsigned int index)
{
    auto layer = GetBaseLayer(graph, index);
    assert(layer);
    return layer->layerName()->str();
}

int32_t Deserializer::GetBindingLayerInfo(const GraphPtr& graphPtr, unsigned int layerIndex)
{
    auto layerType = graphPtr->layers()->Get(layerIndex)->layer_type();

    if (layerType == Layer::Layer_InputLayer)
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_InputLayer()->base()->layerBindingId();
    }
    else if ( layerType == Layer::Layer_OutputLayer )
    {
        return graphPtr->layers()->Get(layerIndex)->layer_as_OutputLayer()->base()->layerBindingId();
    }
    return 0;
}

armnn::DataLayout ToDataLayout(armnnSerializer::DataLayout dataLayout)
{
    switch (dataLayout)
    {
        case armnnSerializer::DataLayout::DataLayout_NHWC:
            return armnn::DataLayout::NHWC;
        case armnnSerializer::DataLayout::DataLayout_NCHW:
        default:
            return armnn::DataLayout::NCHW;
    }
}

armnn::ActivationFunction ToActivationFunction(armnnSerializer::ActivationFunction function)
{
    switch (function)
    {
        case armnnSerializer::ActivationFunction_Sigmoid:
            return armnn::ActivationFunction::Sigmoid;
        case armnnSerializer::ActivationFunction_TanH:
            return armnn::ActivationFunction::TanH;
        case armnnSerializer::ActivationFunction_Linear:
            return armnn::ActivationFunction::Linear;
        case armnnSerializer::ActivationFunction_ReLu:
            return armnn::ActivationFunction::ReLu;
        case armnnSerializer::ActivationFunction_BoundedReLu:
            return armnn::ActivationFunction::BoundedReLu;
        case armnnSerializer::ActivationFunction_LeakyReLu:
            return armnn::ActivationFunction::LeakyReLu;
        case armnnSerializer::ActivationFunction_Abs:
            return armnn::ActivationFunction::Abs;
        case armnnSerializer::ActivationFunction_Sqrt:
            return armnn::ActivationFunction::Sqrt;
        case armnnSerializer::ActivationFunction_Square:
            return armnn::ActivationFunction::Square;
        case armnnSerializer::ActivationFunction_Elu:
            return armnn::ActivationFunction::Elu;
        case armnnSerializer::ActivationFunction_HardSwish:
            return armnn::ActivationFunction::HardSwish;
        default:
            return armnn::ActivationFunction::Sigmoid;
    }
}

armnn::ArgMinMaxFunction ToArgMinMaxFunction(armnnSerializer::ArgMinMaxFunction function)
{
    switch (function)
    {
        case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Max:
            return armnn::ArgMinMaxFunction::Max;
        case armnnSerializer::ArgMinMaxFunction::ArgMinMaxFunction_Min:
        default:
            return armnn::ArgMinMaxFunction::Min;
    }
}

armnn::ComparisonOperation ToComparisonOperation(armnnSerializer::ComparisonOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Equal:
            return armnn::ComparisonOperation::Equal;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Greater:
            return armnn::ComparisonOperation::Greater;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_GreaterOrEqual:
            return armnn::ComparisonOperation::GreaterOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_Less:
            return armnn::ComparisonOperation::Less;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_LessOrEqual:
            return armnn::ComparisonOperation::LessOrEqual;
        case armnnSerializer::ComparisonOperation::ComparisonOperation_NotEqual:
        default:
            return armnn::ComparisonOperation::NotEqual;
    }
}

armnn::LogicalBinaryOperation ToLogicalBinaryOperation(armnnSerializer::LogicalBinaryOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalAnd:
            return armnn::LogicalBinaryOperation::LogicalAnd;
        case armnnSerializer::LogicalBinaryOperation::LogicalBinaryOperation_LogicalOr:
            return armnn::LogicalBinaryOperation::LogicalOr;
        default:
            throw armnn::InvalidArgumentException("Logical Binary operation unknown");
    }
}

armnn::UnaryOperation ToUnaryOperation(armnnSerializer::UnaryOperation operation)
{
    switch (operation)
    {
        case armnnSerializer::UnaryOperation::UnaryOperation_Abs:
            return armnn::UnaryOperation::Abs;
        case armnnSerializer::UnaryOperation::UnaryOperation_Rsqrt:
            return armnn::UnaryOperation::Rsqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Sqrt:
            return armnn::UnaryOperation::Sqrt;
        case armnnSerializer::UnaryOperation::UnaryOperation_Exp:
            return armnn::UnaryOperation::Exp;
        case armnnSerializer::UnaryOperation::UnaryOperation_Neg:
            return armnn::UnaryOperation::Neg;
        case armnnSerializer::UnaryOperation::UnaryOperation_LogicalNot:
            return armnn::UnaryOperation::LogicalNot;
        default:
            throw armnn::InvalidArgumentException("Unary operation unknown");
    }
}

armnn::ResizeMethod ToResizeMethod(armnnSerializer::ResizeMethod method)
{
    switch (method)
    {
        case armnnSerializer::ResizeMethod_NearestNeighbor:
            return armnn::ResizeMethod::NearestNeighbor;
        case armnnSerializer::ResizeMethod_Bilinear:
            return armnn::ResizeMethod::Bilinear;
        default:
            return armnn::ResizeMethod::NearestNeighbor;
    }
}

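// Converts a serialized TensorInfo into an armnn::TensorInfo. Scalar tensors, per-axis
// quantization (a vector of scales plus a quantization dimension) and plain per-tensor
// scale/offset quantization each take their own path below.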
armnn::TensorInfo ToTensorInfo(Deserializer::TensorRawPtr tensorPtr)
{
    armnn::DataType type;
    CHECK_TENSOR_PTR(tensorPtr);

    switch (tensorPtr->dataType())
    {
        case DataType_QAsymmS8:
            type = armnn::DataType::QAsymmS8;
            break;
        case DataType_QSymmS8:
            type = armnn::DataType::QSymmS8;
            break;
        case DataType_QuantisedAsymm8:
        case DataType_QAsymmU8:
            type = armnn::DataType::QAsymmU8;
            break;
        case DataType_QSymmS16:
        case DataType_QuantisedSymm16:
            type = armnn::DataType::QSymmS16;
            break;
        case DataType_Signed32:
            type = armnn::DataType::Signed32;
            break;
        case DataType_Float32:
            type = armnn::DataType::Float32;
            break;
        case DataType_Float16:
            type = armnn::DataType::Float16;
            break;
        case DataType_Boolean:
            type = armnn::DataType::Boolean;
            break;
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
                                             tensorPtr->dataType(),
                                             EnumNameDataType(tensorPtr->dataType()),
                                             location.AsString()));
        }
    }

    if (tensorPtr->dimensionality() == static_cast<unsigned int>(Dimensionality::Scalar))
    {
        float quantizationScale = tensorPtr->quantizationScale();
        int32_t quantizationOffset = tensorPtr->quantizationOffset();

        return armnn::TensorInfo(armnn::TensorShape{armnn::Dimensionality::Scalar},
                                 type,
                                 quantizationScale,
                                 quantizationOffset);
    }

    auto dimensions = tensorPtr->dimensions();
    unsigned int size = dimensions->size();
    std::vector<unsigned int> outputDims(dimensions->begin(), dimensions->begin() + size);

    auto quantizationScales = tensorPtr->quantizationScales();

    if (quantizationScales)
    {
        unsigned int quantizationScalesSize = quantizationScales->size();
        std::vector<float> scales(quantizationScales->begin(), quantizationScales->begin() + quantizationScalesSize);
        unsigned int quantizationDim = tensorPtr->quantizationDim();
        armnn::TensorInfo result(size,
                                 outputDims.data(),
                                 type,
                                 scales,
                                 quantizationDim);
        return result;
    }

    float quantizationScale = tensorPtr->quantizationScale();
    int32_t quantizationOffset = tensorPtr->quantizationOffset();

    // two statements (on purpose) for easier debugging:
    armnn::TensorInfo result(size,
                             outputDims.data(),
                             type,
                             quantizationScale,
                             quantizationOffset);
    return result;
}

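// Constant tensor payloads are serialized as one of several typed flatbuffers arrays
// (byte/short/int/long). The switch below selects the matching array and checks that its
// element count agrees with the deserialized TensorInfo before wrapping it in a ConstTensor.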
armnn::ConstTensor ToConstTensor(Deserializer::ConstTensorRawPtr constTensorPtr)
{
    CHECK_CONST_TENSOR_PTR(constTensorPtr);
    armnn::TensorInfo tensorInfo = ToTensorInfo(constTensorPtr->info());

    switch (constTensorPtr->data_type())
    {
        case ConstTensorData_ByteData:
        {
            auto byteData = constTensorPtr->data_as_ByteData()->data();
            CHECK_CONST_TENSOR_SIZE(byteData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, byteData->data());
        }
        case ConstTensorData_ShortData:
        {
            auto shortData = constTensorPtr->data_as_ShortData()->data();
            CHECK_CONST_TENSOR_SIZE(shortData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, shortData->data());
        }
        case ConstTensorData_IntData:
        {
            auto intData = constTensorPtr->data_as_IntData()->data();
            CHECK_CONST_TENSOR_SIZE(intData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, intData->data());
        }
        case ConstTensorData_LongData:
        {
            auto longData = constTensorPtr->data_as_LongData()->data();
            CHECK_CONST_TENSOR_SIZE(longData->size(), tensorInfo.GetNumElements());
            return armnn::ConstTensor(tensorInfo, longData->data());
        }
        default:
        {
            CheckLocation location = CHECK_LOCATION();
            throw ParseException(fmt::format("Unsupported data type {0} = {1}. {2}",
                                             constTensorPtr->data_type(),
                                             EnumNameConstTensorData(constTensorPtr->data_type()),
                                             location.AsString()));
        }
    }
}

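// GetInputs resolves each input slot by following its connection back to the source layer and
// reading that layer's output TensorInfo; GetOutputs simply returns the layer's own output
// TensorInfos.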
Deserializer::TensorRawPtrVector Deserializer::GetInputs(const GraphPtr& graphPtr,
                                                         unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numInputs = layer->inputSlots()->size();

    TensorRawPtrVector result(numInputs);

    for (unsigned int i=0; i<numInputs; ++i)
    {
        auto inputId = CHECKED_NON_NEGATIVE(static_cast<int32_t>
                           (layer->inputSlots()->Get(i)->connection()->sourceLayerIndex()));
        result[i] = GetBaseLayer(graphPtr, inputId)->outputSlots()->Get(0)->tensorInfo();
    }
    return result;
}

Deserializer::TensorRawPtrVector Deserializer::GetOutputs(const GraphPtr& graphPtr,
                                                          unsigned int layerIndex)
{
    CHECK_LAYERS(graphPtr, 0, layerIndex);
    auto layer = GetBaseLayer(graphPtr, layerIndex);
    const auto& numOutputs = layer->outputSlots()->size();

    TensorRawPtrVector result(numOutputs);

    for (unsigned int i=0; i<numOutputs; ++i)
    {
        result[i] = layer->outputSlots()->Get(i)->tensorInfo();
    }
    return result;
}

void Deserializer::ParseUnsupportedLayer(GraphPtr graph, unsigned int layerIndex)
{
    CHECK_LAYERS(graph, 0, layerIndex);
    const auto layerName = GetBaseLayer(graph, layerIndex)->layerName()->c_str();
    throw ParseException(fmt::format("Layer not supported. layerIndex: {0} "
                                     "layerName: {1} / {2}",
                                     layerIndex,
                                     layerName,
                                     CHECK_LOCATION().AsString()));
}

void Deserializer::ResetParser()
{
    m_Network = armnn::INetworkPtr(nullptr, nullptr);
    m_InputBindings.clear();
    m_OutputBindings.clear();
}

IDeserializer* IDeserializer::CreateRaw()
{
    return new Deserializer();
}

IDeserializerPtr IDeserializer::Create()
{
    return IDeserializerPtr(CreateRaw(), &IDeserializer::Destroy);
}

void IDeserializer::Destroy(IDeserializer* parser)
{
    delete parser;
}

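// Illustrative usage sketch (not part of the original file): a caller typically drives the
// deserializer through the IDeserializer interface, roughly as follows. File handling and
// error checking are omitted, and "input" is a hypothetical binding name.
//
//     std::ifstream file("model.armnn", std::ios::binary);
//     std::vector<uint8_t> content((std::istreambuf_iterator<char>(file)),
//                                  std::istreambuf_iterator<char>());
//     auto parser = armnnDeserializer::IDeserializer::Create();
//     armnn::INetworkPtr network = parser->CreateNetworkFromBinary(content);
//     auto inputBinding = parser->GetNetworkInputBindingInfo(0, "input");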
INetworkPtr Deserializer::CreateNetworkFromBinary(const std::vector<uint8_t>& binaryContent)
{
    ResetParser();
    GraphPtr graph = LoadGraphFromBinary(binaryContent.data(), binaryContent.size());
    return CreateNetworkFromGraph(graph);
}

armnn::INetworkPtr Deserializer::CreateNetworkFromBinary(std::istream& binaryContent)
{
    ResetParser();
    std::vector<uint8_t> content((std::istreambuf_iterator<char>(binaryContent)), std::istreambuf_iterator<char>());
    GraphPtr graph = LoadGraphFromBinary(content.data(), content.size());
    return CreateNetworkFromGraph(graph);
}

Deserializer::GraphPtr Deserializer::LoadGraphFromBinary(const uint8_t* binaryContent, size_t len)
{
    if (binaryContent == nullptr)
    {
        throw InvalidArgumentException(fmt::format("Invalid (null) binary content {}",
                                                   CHECK_LOCATION().AsString()));
    }
    flatbuffers::Verifier verifier(binaryContent, len);
    if (verifier.VerifyBuffer<SerializedGraph>() == false)
    {
        throw ParseException(fmt::format("Buffer doesn't conform to the expected Armnn "
                                         "flatbuffers format. size:{0} {1}",
                                         len,
                                         CHECK_LOCATION().AsString()));
    }
    return GetSerializedGraph(binaryContent);
}

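// Network construction happens in three passes: every non input/output layer is parsed via the
// dispatch table, the bound input and output layers are then created, and finally the
// connections recorded in m_GraphConnections are established between output and input slots.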
INetworkPtr Deserializer::CreateNetworkFromGraph(GraphPtr graph)
{
    m_Network = INetwork::Create();
    ARMNN_ASSERT(graph != nullptr);
    unsigned int layerIndex = 0;
    for (AnyLayer const* layer : *graph->layers())
    {
        if (layer->layer_type() != Layer_InputLayer &&
            layer->layer_type() != Layer_OutputLayer)
        {
            // lookup and call the parser function
            auto& parserFunction = m_ParserFunctions[layer->layer_type()];
            (this->*parserFunction)(graph, layerIndex);
        }
        ++layerIndex;
    }

    SetupInputLayers(graph);
    SetupOutputLayers(graph);

    // establish the connections from the layer outputs to the inputs of the subsequent layers
    for (auto&& graphIt : m_GraphConnections)
    {
        Connections& connections = graphIt.second;
        for (auto&& outputIt : connections.outputSlots)
        {
            const unsigned int outputSlotIndex = outputIt.first;
            IOutputSlot* outputSlot = outputIt.second;
            if (connections.inputSlots.find(outputSlotIndex) != connections.inputSlots.end())
            {
                for (IInputSlot* inputSlot : connections.inputSlots[outputSlotIndex])
                {
                    outputSlot->Connect(*inputSlot);
                }
            }
        }
    }

    return std::move(m_Network);
}

BindingPointInfo Deserializer::GetNetworkInputBindingInfo(unsigned int layerIndex,
                                                          const std::string& name) const
{
    IgnoreUnused(layerIndex);
    for (auto inputBinding : m_InputBindings)
    {
        if (inputBinding.first == name)
        {
            return inputBinding.second;
        }
    }
    throw ParseException(fmt::format("No input binding found for layer:{0} / {1}",
                                     name,
                                     CHECK_LOCATION().AsString()));
}

BindingPointInfo Deserializer::GetNetworkOutputBindingInfo(unsigned int layerIndex,
                                                           const std::string& name) const
{
    IgnoreUnused(layerIndex);
    for (auto outputBinding : m_OutputBindings)
    {
        if (outputBinding.first == name)
        {
            return outputBinding.second;
        }
    }
    throw ParseException(fmt::format("No output binding found for layer:{0} / {1}",
                                     name,
                                     CHECK_LOCATION().AsString()));
}

unsigned int Deserializer::GetInputLayerInVector(GraphPtr graph, int targetId)
{
    for (unsigned int i = 0; i < graph->layers()->size(); i++)
    {
        auto layer = graph->layers()->Get(i);
        if (layer->layer_type() == Layer::Layer_InputLayer)
        {
            auto layerBindingId = layer->layer_as_InputLayer()->base()->layerBindingId();
            if (layerBindingId == targetId)
            {
                return i;
            }
        }
    }
    throw ParseException("Input layer with given layerBindingId not found");
}

unsigned int Deserializer::GetOutputLayerInVector(GraphPtr graph, int targetId)
{
    for (unsigned int i = 0; i < graph->layers()->size(); i++)
    {
        auto layer = graph->layers()->Get(i);
        if (layer->layer_type() == Layer::Layer_OutputLayer)
        {
            auto layerBindingId = layer->layer_as_OutputLayer()->base()->layerBindingId();
            if (layerBindingId == targetId)
            {
                return i;
            }
        }
    }
    throw ParseException("Output layer with given layerBindingId not found");
}

unsigned int Deserializer::GetLayerIndexInVector(GraphPtr graph, unsigned int targetIndex)
{
    for (unsigned int i = 0; i < graph->layers()->size(); i++)
    {
        LayerBaseRawPtr layer = GetBaseLayer(graph, i);
        if (layer->index() == targetIndex)
        {
            return i;
        }
    }
    throw ParseException("Layer with given index not found");
}

Deserializer::FeatureVersions Deserializer::GetFeatureVersions(GraphPtr graph)
{
    Deserializer::FeatureVersions versions;

    if (graph->featureVersions())
    {
        versions.m_BindingIdScheme = graph->featureVersions()->bindingIdsScheme();
    }

    return versions;
}

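// Two binding-id schemes are supported: in the original scheme (0) the serialized id is the
// layer's index in the graph, while in the newer scheme it is a LayerBindingId that has to be
// looked up among the input (or output) layers.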
void Deserializer::SetupInputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numInputs = graph->inputIds()->size();
    m_InputBindings.clear();
    m_InputBindings.reserve(numInputs);

    for (unsigned int i = 0; i < numInputs; i++)
    {
        unsigned int inputLayerIndex = 0xFFFFFFFF;
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            const unsigned int inputId = armnn::numeric_cast<unsigned int>(graph->inputIds()->Get(i));
            inputLayerIndex = GetLayerIndexInVector(graph, inputId);
        }
        else
        {
            const int inputId = graph->inputIds()->Get(i);
            inputLayerIndex = GetInputLayerInVector(graph, inputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, inputLayerIndex);

        // GetBindingLayerInfo expects the index in the vector, not the index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, inputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Input has no name.");

        IConnectableLayer* inputLayer =
            m_Network->AddInputLayer(bindingId, baseLayer->layerName()->c_str());

        const armnn::TensorInfo& tensorInfo = ToTensorInfo(baseLayer->outputSlots()->Get(0)->tensorInfo());
        inputLayer->GetOutputSlot(0).SetTensorInfo(tensorInfo);
        RegisterOutputSlots(graph, inputLayerIndex, inputLayer);

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_InputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}

void Deserializer::SetupOutputLayers(GraphPtr graph)
{
    CHECK_GRAPH(graph, 0);
    const unsigned int numOutputs = graph->outputIds()->size();
    m_OutputBindings.clear();
    m_OutputBindings.reserve(numOutputs);

    for (unsigned int i = 0; i < numOutputs; i++)
    {
        unsigned int outputLayerIndex = 0xFFFFFFFF;
        if (GetFeatureVersions(graph).m_BindingIdScheme == 0)
        {
            const unsigned int outputId = armnn::numeric_cast<unsigned int>(graph->outputIds()->Get(i));
            outputLayerIndex = GetLayerIndexInVector(graph, outputId);
        }
        else
        {
            const int outputId = graph->outputIds()->Get(i);
            outputLayerIndex = GetOutputLayerInVector(graph, outputId);
        }

        LayerBaseRawPtr baseLayer = GetBaseLayer(graph, outputLayerIndex);

        // GetBindingLayerInfo expects the index in the vector, not the index property on each layer base
        LayerBindingId bindingId = GetBindingLayerInfo(graph, outputLayerIndex);
        ARMNN_ASSERT_MSG(baseLayer->layerName()->c_str(), "Output has no name.");

        IConnectableLayer* outputLayer =
            m_Network->AddOutputLayer(bindingId, baseLayer->layerName()->c_str());

        RegisterInputSlots(graph, outputLayerIndex, outputLayer);

        unsigned int sourceLayerIndex =
            GetLayerIndexInVector(graph, baseLayer->inputSlots()->Get(0)->connection()->sourceLayerIndex());
        LayerBaseRawPtr sourceBaseLayer = GetBaseLayer(graph, sourceLayerIndex);
        const armnn::TensorInfo& tensorInfo = ToTensorInfo(sourceBaseLayer->outputSlots()->Get(i)->tensorInfo());

        BindingPointInfo bindingInfo = {bindingId, tensorInfo};
        m_OutputBindings.push_back(std::make_pair(baseLayer->layerName()->c_str(), bindingInfo));
    }
}

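// RegisterInputSlots/RegisterOutputSlots do not connect anything directly; they record the
// slot pointers in m_GraphConnections, keyed by source layer index, so that
// CreateNetworkFromGraph can wire the connections once every layer has been created.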
Derek Lamberti8ddae332019-02-21 16:29:43 +0000949void Deserializer::RegisterOutputSlots(GraphPtr graph,
950 uint32_t layerIndex,
951 IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000952{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000953 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100954 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100955 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
956 if (baseLayer->outputSlots()->size() != layer->GetNumOutputSlots())
Kevin May43a799c2019-02-08 16:31:42 +0000957 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100958 throw ParseException(fmt::format("The number of outputslots ({0}) does not match the number expected ({1})"
959 " for layer index: {2} {3}",
960 baseLayer->outputSlots()->size(),
961 layer->GetNumOutputSlots(),
962 layerIndex,
963 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000964 }
965
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100966 for (unsigned int i = 0; i < layer->GetNumOutputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +0000967 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100968 const unsigned int slotIndex = baseLayer->outputSlots()->Get(i)->index();
969 armnn::IOutputSlot* outputSlot = &(layer->GetOutputSlot(slotIndex));
970 // layerIndex is not necessarily the same as baseLayer->index(). The latter is needed here
971 RegisterOutputSlotOfConnection(baseLayer->index(), slotIndex, outputSlot);
Kevin May43a799c2019-02-08 16:31:42 +0000972 }
973}
974
Derek Lamberti8ddae332019-02-21 16:29:43 +0000975void Deserializer::RegisterInputSlots(GraphPtr graph,
976 uint32_t layerIndex,
977 armnn::IConnectableLayer* layer)
Kevin May43a799c2019-02-08 16:31:42 +0000978{
Derek Lamberti8ddae332019-02-21 16:29:43 +0000979 CHECK_LAYERS(graph, 0, layerIndex);
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +0100980 ARMNN_ASSERT(layer != nullptr);
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100981 LayerBaseRawPtr baseLayer = GetBaseLayer(graph, layerIndex);
982 if (baseLayer->inputSlots()->size() != layer->GetNumInputSlots())
Kevin May43a799c2019-02-08 16:31:42 +0000983 {
Colm Donelan5b5c2222020-09-09 12:48:16 +0100984 throw ParseException(fmt::format("The number of inputslots ({0}) does not match the number expected ({1})"
985 " for layer index:{2} {3}",
986 baseLayer->inputSlots()->size(),
987 layer->GetNumInputSlots(),
988 layerIndex,
989 CHECK_LOCATION().AsString()));
Kevin May43a799c2019-02-08 16:31:42 +0000990 }
991
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100992 for (unsigned int i = 0; i < layer->GetNumInputSlots(); ++i)
Kevin May43a799c2019-02-08 16:31:42 +0000993 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +0100994 auto fbInputSlot = baseLayer->inputSlots()->Get(i);
995 auto fbConnection = fbInputSlot->connection();
996 armnn::IInputSlot* inputSlot = &(layer->GetInputSlot(fbInputSlot->index()));
997 RegisterInputSlotOfConnection(fbConnection->sourceLayerIndex(), fbConnection->outputSlotIndex(), inputSlot);
Kevin May43a799c2019-02-08 16:31:42 +0000998 }
999}
1000
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001001void Deserializer::RegisterInputSlotOfConnection(uint32_t sourceLayerIndex,
1002 uint32_t outputSlotIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001003 armnn::IInputSlot* inputSlot)
Kevin May43a799c2019-02-08 16:31:42 +00001004{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001005 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
Kevin May43a799c2019-02-08 16:31:42 +00001006 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001007 m_GraphConnections[sourceLayerIndex] = Connections();
1008 }
1009
1010 Connections& connections = m_GraphConnections[sourceLayerIndex];
1011 if (connections.inputSlots.find(outputSlotIndex) == connections.inputSlots.end())
1012 {
1013 connections.inputSlots[outputSlotIndex] = {inputSlot};
Kevin May43a799c2019-02-08 16:31:42 +00001014 }
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001015 else
1016 {
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001017 connections.inputSlots[outputSlotIndex].push_back(inputSlot);
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001018 }
1019}
Kevin May43a799c2019-02-08 16:31:42 +00001020
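// Stores the output slot for a (source layer, output slot index) pair. Unlike input slots,
// each output slot index may only be registered once; a second registration is a parse error.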
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001021void Deserializer::RegisterOutputSlotOfConnection(uint32_t sourceLayerIndex,
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001022 uint32_t outputSlotIndex,
1023 armnn::IOutputSlot* outputSlot)
Nattapat Chaimanowongd469faf2019-03-04 17:10:40 +00001024{
Nattapat Chaimanowongaf000a92019-05-16 16:32:35 +01001025 if (m_GraphConnections.find(sourceLayerIndex) == m_GraphConnections.end())
1026 {
1027 m_GraphConnections[sourceLayerIndex] = Connections();
1028 }
1029
1030 Connections& connections = m_GraphConnections[sourceLayerIndex];
1031 if (connections.outputSlots.find(outputSlotIndex) != connections.outputSlots.end())
1032 {
1033 throw ParseException("Same output slot index processed twice");
1034 }
1035
1036 connections.outputSlots[outputSlotIndex] = outputSlot;
Kevin May43a799c2019-02-08 16:31:42 +00001037}
1038
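// Abs is deserialized as an ElementwiseUnary layer with UnaryOperation::Abs rather than a
// dedicated Abs layer.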
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001039void Deserializer::ParseAbs(armnnDeserializer::Deserializer::GraphPtr graph, unsigned int layerIndex)
1040{
1041 CHECK_LAYERS(graph, 0, layerIndex);
1042 auto inputs = GetInputs(graph, layerIndex);
1043 CHECK_LOCATION();
1044 CHECK_VALID_SIZE(inputs.size(), 1);
1045
1046 auto outputs = GetOutputs(graph, layerIndex);
1047 CHECK_VALID_SIZE(outputs.size(), 1);
1048
1049 auto layerName = GetLayerName(graph, layerIndex);
1050
josh minor4a3c6102020-01-06 16:40:46 -06001051 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Abs);
1052 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
FinnWilliamsArm4ffcc8f2019-09-05 14:34:20 +01001053 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1054 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1055
1056 RegisterInputSlots(graph, layerIndex, layer);
1057 RegisterOutputSlots(graph, layerIndex, layer);
1058}
1059
Derek Lamberti8ddae332019-02-21 16:29:43 +00001060void Deserializer::ParseActivation(GraphPtr graph, unsigned int layerIndex)
Mike Kellyaf484012019-02-20 16:53:11 +00001061{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001062 CHECK_LAYERS(graph, 0, layerIndex);
1063 auto inputs = GetInputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001064 CHECK_LOCATION();
1065 CHECK_VALID_SIZE(inputs.size(), 1);
1066
Derek Lamberti8ddae332019-02-21 16:29:43 +00001067 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001068 CHECK_VALID_SIZE(outputs.size(), 1);
1069
Derek Lamberti8ddae332019-02-21 16:29:43 +00001070 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ActivationLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001071 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellyaf484012019-02-20 16:53:11 +00001072 auto serializerDescriptor = serializerLayer->descriptor();
1073
1074 armnn::ActivationDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001075 descriptor.m_Function = ToActivationFunction(serializerDescriptor->activationFunction());
Mike Kellyaf484012019-02-20 16:53:11 +00001076 descriptor.m_A = serializerDescriptor->a();
1077 descriptor.m_B = serializerDescriptor->b();
1078
1079 IConnectableLayer* layer = m_Network->AddActivationLayer(descriptor,
1080 layerName.c_str());
1081 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1082 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1083
Derek Lamberti8ddae332019-02-21 16:29:43 +00001084 RegisterInputSlots(graph, layerIndex, layer);
1085 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellyaf484012019-02-20 16:53:11 +00001086}
1087
Derek Lamberti8ddae332019-02-21 16:29:43 +00001088void Deserializer::ParseAdd(GraphPtr graph, unsigned int layerIndex)
Kevin May43a799c2019-02-08 16:31:42 +00001089{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001090 CHECK_LAYERS(graph, 0, layerIndex);
1091 auto inputs = GetInputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001092 CHECK_LOCATION();
1093 CHECK_VALID_SIZE(inputs.size(), 2);
1094
Derek Lamberti8ddae332019-02-21 16:29:43 +00001095 auto outputs = GetOutputs(graph, layerIndex);
Kevin May43a799c2019-02-08 16:31:42 +00001096 CHECK_VALID_SIZE(outputs.size(), 1);
1097
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001098 auto layerName = GetLayerName(graph, layerIndex);
1099 IConnectableLayer* layer = m_Network->AddAdditionLayer(layerName.c_str());
Kevin May43a799c2019-02-08 16:31:42 +00001100
1101 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1102 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1103
Derek Lamberti8ddae332019-02-21 16:29:43 +00001104 RegisterInputSlots(graph, layerIndex, layer);
1105 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00001106}
1107
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001108void Deserializer::ParseArgMinMax(GraphPtr graph, unsigned int layerIndex)
1109{
1110 CHECK_LAYERS(graph, 0, layerIndex);
1111 auto inputs = GetInputs(graph, layerIndex);
1112 CHECK_LOCATION();
1113 CHECK_VALID_SIZE(inputs.size(), 1);
1114
1115 auto outputs = GetOutputs(graph, layerIndex);
1116 CHECK_VALID_SIZE(outputs.size(), 1);
1117
1118 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ArgMinMaxLayer();
1119 auto serializerDescriptor = serializerLayer->descriptor();
1120
1121 armnn::ArgMinMaxDescriptor descriptor;
Tee Jung86bc3d82019-10-01 11:25:56 +09001122 descriptor.m_Function = ToArgMinMaxFunction(serializerDescriptor->argMinMaxFunction());
Narumol Prangnawarat0cfcf232019-09-09 17:16:24 +01001123 descriptor.m_Axis = serializerDescriptor->axis();
1124 auto layerName = GetLayerName(graph, layerIndex);
1125 IConnectableLayer* layer = m_Network->AddArgMinMaxLayer(descriptor, layerName.c_str());
1126
1127 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1128 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1129
1130 RegisterInputSlots(graph, layerIndex, layer);
1131 RegisterOutputSlots(graph, layerIndex, layer);
1132}
1133
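// The serialized crops come as a flat list; consecutive value pairs are rebuilt into the
// (begin, end) crops expected by BatchToSpaceNdDescriptor, which is why the length must be even.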
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001134void Deserializer::ParseBatchToSpaceNd(GraphPtr graph, unsigned int layerIndex)
1135{
1136 CHECK_LAYERS(graph, 0, layerIndex);
1137
1138 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1139 CHECK_VALID_SIZE(inputs.size(), 1);
1140
1141 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1142 CHECK_VALID_SIZE(outputs.size(), 1);
1143
1144 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_BatchToSpaceNdLayer()->descriptor();
1145 auto flatBufferCrops = flatBufferDescriptor->crops();
1146 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
1147
1148 if (flatBufferCrops->Length() % 2 != 0)
1149 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001150 throw ParseException(fmt::format("The size of crops must be divisible by 2 {}", CHECK_LOCATION().AsString()));
Nattapat Chaimanowong6b4ed982019-02-26 17:24:13 +00001151 }
1152
1153 std::vector<std::pair<unsigned int, unsigned int>> crops;
1154 crops.reserve(flatBufferCrops->Length() / 2);
1155 for (unsigned int i = 0; i < flatBufferCrops->Length() - 1; i += 2)
1156 {
1157 crops.emplace_back(flatBufferCrops->Get(i), flatBufferCrops->Get(i+1));
1158 }
1159
1160 armnn::BatchToSpaceNdDescriptor descriptor;
1161 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
1162 descriptor.m_BlockShape =
1163 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
1164 descriptor.m_Crops = crops;
1165
1166 auto layerName = GetLayerName(graph, layerIndex);
1167 IConnectableLayer* layer = m_Network->AddBatchToSpaceNdLayer(descriptor, layerName.c_str());
1168
1169 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1170 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1171
1172 RegisterInputSlots(graph, layerIndex, layer);
1173 RegisterOutputSlots(graph, layerIndex, layer);
1174}
1175
ruoyan018e7fa232019-02-28 15:09:07 +00001176void Deserializer::ParseBatchNormalization(GraphPtr graph, unsigned int layerIndex)
1177{
1178 CHECK_LAYERS(graph, 0, layerIndex);
1179
1180 auto inputs = GetInputs(graph, layerIndex);
1181 CHECK_VALID_SIZE(inputs.size(), 1);
1182
1183 auto outputs = GetOutputs(graph, layerIndex);
1184 CHECK_VALID_SIZE(outputs.size(), 1);
1185 auto outputInfo = ToTensorInfo(outputs[0]);
1186
ruoyan015c7ab052019-03-04 14:48:02 +00001187 auto layerName = GetLayerName(graph, layerIndex);
ruoyan018e7fa232019-02-28 15:09:07 +00001188
1189 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_BatchNormalizationLayer();
1190 auto serializerDescriptor = serializerLayer->descriptor();
1191
1192 armnn::BatchNormalizationDescriptor descriptor;
1193 descriptor.m_Eps = serializerDescriptor->eps();
1194 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1195
1196 armnn::ConstTensor mean = ToConstTensor(serializerLayer->mean());
1197 armnn::ConstTensor variance = ToConstTensor(serializerLayer->variance());
1198 armnn::ConstTensor beta = ToConstTensor(serializerLayer->beta());
1199 armnn::ConstTensor gamma = ToConstTensor(serializerLayer->gamma());
1200
1201 IConnectableLayer* layer = m_Network->AddBatchNormalizationLayer(descriptor,
1202 mean,
1203 variance,
1204 beta,
1205 gamma,
1206 layerName.c_str());
1207 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1208
1209 RegisterInputSlots(graph, layerIndex, layer);
1210 RegisterOutputSlots(graph, layerIndex, layer);
1211}
1212
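// Constant layers carry their data as a serialized tensor and have no inputs,
// so only the output slots are registered.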
Conor Kennedy76277882019-02-26 08:29:54 +00001213void Deserializer::ParseConstant(GraphPtr graph, unsigned int layerIndex)
1214{
1215 CHECK_LAYERS(graph, 0, layerIndex);
1216 CHECK_LOCATION();
1217
1218 auto outputs = GetOutputs(graph, layerIndex);
1219 CHECK_VALID_SIZE(outputs.size(), 1);
1220
1221 auto layerName = GetLayerName(graph, layerIndex);
1222
1223 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_ConstantLayer();
1224 auto serializerInput = serializerLayer->input();
1225
1226 armnn::ConstTensor input = ToConstTensor(serializerInput);
1227
1228 IConnectableLayer* layer = m_Network->AddConstantLayer(input, layerName.c_str());
1229
1230 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1231 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1232
1233 RegisterOutputSlots(graph, layerIndex, layer);
1234}
1235
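// Weights are always read as a constant tensor; biases are read only when the descriptor says
// they are enabled, and are passed through as an armnn::Optional.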
Derek Lamberti8ddae332019-02-21 16:29:43 +00001236void Deserializer::ParseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Mike Kellya0766c32019-02-19 17:22:07 +00001237{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001238 CHECK_LAYERS(graph, 0, layerIndex);
1239 auto inputs = GetInputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001240 CHECK_LOCATION();
1241 CHECK_VALID_SIZE(inputs.size(), 1);
1242
Derek Lamberti8ddae332019-02-21 16:29:43 +00001243 auto outputs = GetOutputs(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001244 CHECK_VALID_SIZE(outputs.size(), 1);
1245
Derek Lamberti8ddae332019-02-21 16:29:43 +00001246 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_Convolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001247 auto layerName = GetLayerName(graph, layerIndex);
Mike Kellya0766c32019-02-19 17:22:07 +00001248 auto serializerDescriptor = serializerLayer->descriptor();
1249
1250 armnn::Convolution2dDescriptor descriptor;
1251 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1252 descriptor.m_PadRight = serializerDescriptor->padRight();
1253 descriptor.m_PadTop = serializerDescriptor->padTop();
1254 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1255 descriptor.m_StrideX = serializerDescriptor->strideX();
1256 descriptor.m_StrideY = serializerDescriptor->strideY();
Matthew Benthamacad04e2019-05-13 10:02:45 +01001257 descriptor.m_DilationX = serializerDescriptor->dilationX();
1258 descriptor.m_DilationY = serializerDescriptor->dilationY();
Mike Kellya0766c32019-02-19 17:22:07 +00001259 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
1260 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1261
1262 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1263 armnn::ConstTensor biases;
1264
Matteo Martincighfc598e12019-05-14 10:36:13 +01001265 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Mike Kellya0766c32019-02-19 17:22:07 +00001266 if (descriptor.m_BiasEnabled)
1267 {
1268 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001269 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Mike Kellya0766c32019-02-19 17:22:07 +00001270 }
1271 IConnectableLayer* layer = m_Network->AddConvolution2dLayer(descriptor,
1272 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001273 optionalBiases,
Mike Kellya0766c32019-02-19 17:22:07 +00001274 layerName.c_str());
1275 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1276 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1277
Derek Lamberti8ddae332019-02-21 16:29:43 +00001278 RegisterInputSlots(graph, layerIndex, layer);
1279 RegisterOutputSlots(graph, layerIndex, layer);
Mike Kellya0766c32019-02-19 17:22:07 +00001280}
1281
Aron Virginas-Tarda9d2d32019-09-20 10:42:02 +01001282void Deserializer::ParseDepthToSpace(GraphPtr graph, unsigned int layerIndex)
1283{
1284 CHECK_LAYERS(graph, 0, layerIndex);
1285
1286 auto inputs = GetInputs(graph, layerIndex);
1287 CHECK_VALID_SIZE(inputs.size(), 1);
1288
1289 auto outputs = GetOutputs(graph, layerIndex);
1290 CHECK_VALID_SIZE(outputs.size(), 1);
1291
1292 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_DepthToSpaceLayer()->descriptor();
1293
1294 armnn::DepthToSpaceDescriptor descriptor;
1295 descriptor.m_BlockSize = fbDescriptor->blockSize();
1296 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1297
1298 auto layerName = GetLayerName(graph, layerIndex);
1299 IConnectableLayer* layer = m_Network->AddDepthToSpaceLayer(descriptor, layerName.c_str());
1300
1301 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1302 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1303
1304 RegisterInputSlots(graph, layerIndex, layer);
1305 RegisterOutputSlots(graph, layerIndex, layer);
1306}
1307
Derek Lamberti8ddae332019-02-21 16:29:43 +00001308void Deserializer::ParseDepthwiseConvolution2d(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001309{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001310 CHECK_LAYERS(graph, 0, layerIndex);
1311 auto inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001312 CHECK_LOCATION();
1313 CHECK_VALID_SIZE(inputs.size(), 1);
1314
Derek Lamberti8ddae332019-02-21 16:29:43 +00001315 auto outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001316 CHECK_VALID_SIZE(outputs.size(), 1);
1317
Derek Lamberti8ddae332019-02-21 16:29:43 +00001318 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_DepthwiseConvolution2dLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001319 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001320 auto serializerDescriptor = serializerLayer->descriptor();
1321
1322 armnn::DepthwiseConvolution2dDescriptor descriptor;
1323 descriptor.m_PadLeft = serializerDescriptor->padLeft();
1324 descriptor.m_PadRight = serializerDescriptor->padRight();
1325 descriptor.m_PadTop = serializerDescriptor->padTop();
1326 descriptor.m_PadBottom = serializerDescriptor->padBottom();
1327 descriptor.m_StrideX = serializerDescriptor->strideX();
Aron Virginas-Tar5e1b0cf2019-06-21 14:20:11 +01001328 descriptor.m_StrideY = serializerDescriptor->strideY();
1329 descriptor.m_DilationX = serializerDescriptor->dilationX();
1330 descriptor.m_DilationY = serializerDescriptor->dilationY();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001331 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
1332 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
1333
1334 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
1335 armnn::ConstTensor biases;
1336
Matteo Martincighfc598e12019-05-14 10:36:13 +01001337 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001338 if (descriptor.m_BiasEnabled)
1339 {
1340 biases = ToConstTensor(serializerLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001341 optionalBiases = armnn::Optional<armnn::ConstTensor>(biases);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001342 }
1343 IConnectableLayer* layer = m_Network->AddDepthwiseConvolution2dLayer(descriptor,
1344 weights,
Matteo Martincighfc598e12019-05-14 10:36:13 +01001345 optionalBiases,
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001346 layerName.c_str());
1347
1348 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1349 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1350
Derek Lamberti8ddae332019-02-21 16:29:43 +00001351 RegisterInputSlots(graph, layerIndex, layer);
1352 RegisterOutputSlots(graph, layerIndex, layer);
Aron Virginas-Tarc04125f2019-02-19 16:31:08 +00001353}
1354
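// DetectionPostProcess takes two inputs (typically box encodings and class scores) plus a
// constant anchors tensor, and produces four outputs (typically boxes, classes, scores and the
// number of detections).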
Nattapat Chaimanowong3e14a9d2019-03-18 12:37:06 +00001355void Deserializer::ParseDetectionPostProcess(GraphPtr graph, unsigned int layerIndex)
1356{
1357 CHECK_LAYERS(graph, 0, layerIndex);
1358 auto inputs = GetInputs(graph, layerIndex);
1359 CHECK_LOCATION();
1360 CHECK_VALID_SIZE(inputs.size(), 2);
1361
1362 auto outputs = GetOutputs(graph, layerIndex);
1363 CHECK_VALID_SIZE(outputs.size(), 4);
1364
1365 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_DetectionPostProcessLayer();
1366 auto layerName = GetLayerName(graph, layerIndex);
1367 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1368
1369 armnn::DetectionPostProcessDescriptor descriptor;
1370 descriptor.m_MaxDetections = flatBufferDescriptor->maxDetections();
1371 descriptor.m_MaxClassesPerDetection = flatBufferDescriptor->maxClassesPerDetection();
1372 descriptor.m_DetectionsPerClass = flatBufferDescriptor->detectionsPerClass();
1373 descriptor.m_NmsScoreThreshold = flatBufferDescriptor->nmsScoreThreshold();
1374 descriptor.m_NmsIouThreshold = flatBufferDescriptor->nmsIouThreshold();
1375 descriptor.m_NumClasses = flatBufferDescriptor->numClasses();
1376 descriptor.m_UseRegularNms = flatBufferDescriptor->useRegularNms();
1377 descriptor.m_ScaleX = flatBufferDescriptor->scaleX();
1378 descriptor.m_ScaleY = flatBufferDescriptor->scaleY();
1379 descriptor.m_ScaleW = flatBufferDescriptor->scaleW();
1380 descriptor.m_ScaleH = flatBufferDescriptor->scaleH();
1381
1382 armnn::ConstTensor anchors = ToConstTensor(flatBufferLayer->anchors());
1383
1384 IConnectableLayer* layer = m_Network->AddDetectionPostProcessLayer(descriptor,
1385 anchors,
1386 layerName.c_str());
1387
1388 for (unsigned int i = 0; i < 4; i++)
1389 {
1390 layer->GetOutputSlot(i).SetTensorInfo(ToTensorInfo(outputs[i]));
1391 }
1392
1393 RegisterInputSlots(graph, layerIndex, layer);
1394 RegisterOutputSlots(graph, layerIndex, layer);
1395}
1396
Éanna Ó Catháin58885892019-02-27 16:16:39 +00001397void Deserializer::ParseDivision(GraphPtr graph, unsigned int layerIndex)
1398{
1399 CHECK_LAYERS(graph, 0, layerIndex);
1400 auto inputs = GetInputs(graph, layerIndex);
1401 CHECK_LOCATION();
1402 CHECK_VALID_SIZE(inputs.size(), 2);
1403
1404 auto outputs = GetOutputs(graph, layerIndex);
1405 CHECK_VALID_SIZE(outputs.size(), 1);
1406
1407 auto layerName = GetLayerName(graph, layerIndex);
1408 IConnectableLayer* layer = m_Network->AddDivisionLayer(layerName.c_str());
1409
1410 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1411 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1412
1413 RegisterInputSlots(graph, layerIndex, layer);
1414 RegisterOutputSlots(graph, layerIndex, layer);
1415}
1416
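// Equal layers in the serialized graph are deserialized as Comparison layers with
// ComparisonOperation::Equal; there is no standalone Equal layer in the runtime graph.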
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001417void Deserializer::ParseEqual(GraphPtr graph, unsigned int layerIndex)
1418{
1419 CHECK_LAYERS(graph, 0, layerIndex);
1420 auto inputs = GetInputs(graph, layerIndex);
1421 CHECK_LOCATION();
1422 CHECK_VALID_SIZE(inputs.size(), 2);
1423
1424 auto outputs = GetOutputs(graph, layerIndex);
1425 CHECK_VALID_SIZE(outputs.size(), 1);
1426
1427 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001428 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Equal);
1429 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong235cea52019-02-28 16:27:30 +00001430
1431 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1432 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1433
1434 RegisterInputSlots(graph, layerIndex, layer);
1435 RegisterOutputSlots(graph, layerIndex, layer);
1436}
1437
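// Note: the fill value passed to FillDescriptor is fixed at 1.0f here and is not read from the
// serialized layer's descriptor.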
Keith Davis300ad562020-06-04 16:34:23 +01001438void Deserializer::ParseFill(GraphPtr graph, unsigned int layerIndex)
1439{
1440 CHECK_LAYERS(graph, 0, layerIndex);
1441 auto inputs = GetInputs(graph, layerIndex);
1442 CHECK_LOCATION();
1443 CHECK_VALID_SIZE(inputs.size(), 1);
1444
1445 auto outputs = GetOutputs(graph, layerIndex);
1446 CHECK_VALID_SIZE(outputs.size(), 1);
1447
1448 auto layerName = GetLayerName(graph, layerIndex);
1449 armnn::FillDescriptor descriptor(1.0f);
1450 IConnectableLayer* layer = m_Network->AddFillLayer(descriptor, layerName.c_str());
1451
1452 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1453 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1454
1455 RegisterInputSlots(graph, layerIndex, layer);
1456 RegisterOutputSlots(graph, layerIndex, layer);
1457}
1458
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001459void Deserializer::ParseGreater(GraphPtr graph, unsigned int layerIndex)
1460{
1461 CHECK_LAYERS(graph, 0, layerIndex);
1462 auto inputs = GetInputs(graph, layerIndex);
1463 CHECK_LOCATION();
1464 CHECK_VALID_SIZE(inputs.size(), 2);
1465
1466 auto outputs = GetOutputs(graph, layerIndex);
1467 CHECK_VALID_SIZE(outputs.size(), 1);
1468
1469 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar77bfb5e2019-10-16 17:45:38 +01001470 armnn::ComparisonDescriptor descriptor(armnn::ComparisonOperation::Greater);
1471 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
Conor Kennedy79ffdf52019-03-01 14:24:54 +00001472
1473 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1474 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1475
1476 RegisterInputSlots(graph, layerIndex, layer);
1477 RegisterOutputSlots(graph, layerIndex, layer);
1478}
1479
Aron Virginas-Tar781ced92019-10-03 11:15:39 +01001480void Deserializer::ParseInstanceNormalization(GraphPtr graph, unsigned int layerIndex)
1481{
1482 CHECK_LAYERS(graph, 0, layerIndex);
1483
1484 auto inputs = GetInputs(graph, layerIndex);
1485 CHECK_VALID_SIZE(inputs.size(), 1);
1486
1487 auto outputs = GetOutputs(graph, layerIndex);
1488 CHECK_VALID_SIZE(outputs.size(), 1);
1489
1490 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_InstanceNormalizationLayer();
1491 auto fbDescriptor = fbLayer->descriptor();
1492
1493 armnn::InstanceNormalizationDescriptor descriptor;
1494 descriptor.m_Gamma = fbDescriptor->gamma();
1495 descriptor.m_Beta = fbDescriptor->beta();
1496 descriptor.m_Eps = fbDescriptor->eps();
1497 descriptor.m_DataLayout = ToDataLayout(fbDescriptor->dataLayout());
1498
1499 const std::string layerName = GetLayerName(graph, layerIndex);
1500 const armnn::TensorInfo outputInfo = ToTensorInfo(outputs[0]);
1501
1502 IConnectableLayer* layer = m_Network->AddInstanceNormalizationLayer(descriptor, layerName.c_str());
1503 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1504
1505 RegisterInputSlots(graph, layerIndex, layer);
1506 RegisterOutputSlots(graph, layerIndex, layer);
1507}
1508
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001509void Deserializer::ParseL2Normalization(GraphPtr graph, unsigned int layerIndex)
1510{
1511 CHECK_LAYERS(graph, 0, layerIndex);
1512
1513 auto inputs = GetInputs(graph, layerIndex);
1514 CHECK_VALID_SIZE(inputs.size(), 1);
1515
1516 auto outputs = GetOutputs(graph, layerIndex);
1517 CHECK_VALID_SIZE(outputs.size(), 1);
1518 auto outputInfo = ToTensorInfo(outputs[0]);
1519
1520 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_L2NormalizationLayer();
1521 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1522
1523 auto layerName = GetLayerName(graph, layerIndex);
1524 armnn::L2NormalizationDescriptor descriptor;
1525 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
Ferran Balaguer0dcffec2019-06-18 16:25:06 +01001526 descriptor.m_Eps = flatBufferDescriptor->eps();
Narumol Prangnawarat495701f2019-03-07 17:31:34 +00001527
1528 IConnectableLayer* layer = m_Network->AddL2NormalizationLayer(descriptor, layerName.c_str());
1529 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1530
1531 RegisterInputSlots(graph, layerIndex, layer);
1532 RegisterOutputSlots(graph, layerIndex, layer);
1533}
1534
James Conroyaba90cd2020-11-06 16:28:18 +00001535void Deserializer::ParseLogicalBinary(GraphPtr graph, unsigned int layerIndex)
1536{
1537 CHECK_LAYERS(graph, 0, layerIndex);
1538 CHECK_LOCATION();
1539
1540 auto inputs = GetInputs(graph, layerIndex);
1541 CHECK_VALID_SIZE(inputs.size(), 2);
1542
1543 auto outputs = GetOutputs(graph, layerIndex);
1544 CHECK_VALID_SIZE(outputs.size(), 1);
1545
1546 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_LogicalBinaryLayer();
1547 auto fbDescriptor = fbLayer->descriptor();
1548
1549 armnn::LogicalBinaryDescriptor descriptor;
1550 descriptor.m_Operation = ToLogicalBinaryOperation(fbDescriptor->operation());
1551
1552 const std::string& layerName = GetLayerName(graph, layerIndex);
1553 IConnectableLayer* layer = m_Network->AddLogicalBinaryLayer(descriptor, layerName.c_str());
1554
1555 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1556 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1557
1558 RegisterInputSlots(graph, layerIndex, layer);
1559 RegisterOutputSlots(graph, layerIndex, layer);
1560}
1561
Sadik Armagan26257852019-10-14 13:00:47 +01001562void Deserializer::ParseLogSoftmax(GraphPtr graph, unsigned int layerIndex)
1563{
1564 CHECK_LAYERS(graph, 0, layerIndex);
1565
1566 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1567 CHECK_VALID_SIZE(inputs.size(), 1);
1568
1569 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1570 CHECK_VALID_SIZE(outputs.size(), 1);
1571
1572 armnn::LogSoftmaxDescriptor descriptor;
1573 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->beta();
1574 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_LogSoftmaxLayer()->descriptor()->axis();
1575 auto layerName = GetLayerName(graph, layerIndex);
1576
1577 IConnectableLayer* layer = m_Network->AddLogSoftmaxLayer(descriptor, layerName.c_str());
1578
1579 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1580 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1581
1582 RegisterInputSlots(graph, layerIndex, layer);
1583 RegisterOutputSlots(graph, layerIndex, layer);
1584}
1585
Aron Virginas-Tar0fe32452019-02-28 13:12:47 +00001586void Deserializer::ParseMinimum(GraphPtr graph, unsigned int layerIndex)
1587{
1588 CHECK_LAYERS(graph, 0, layerIndex);
1589 auto inputs = GetInputs(graph, layerIndex);
1590 CHECK_LOCATION();
1591 CHECK_VALID_SIZE(inputs.size(), 2);
1592
1593 auto outputs = GetOutputs(graph, layerIndex);
1594 CHECK_VALID_SIZE(outputs.size(), 1);
1595
1596 auto layerName = GetLayerName(graph, layerIndex);
1597 IConnectableLayer* layer = m_Network->AddMinimumLayer(layerName.c_str());
1598
1599 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1600 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1601
1602 RegisterInputSlots(graph, layerIndex, layer);
1603 RegisterOutputSlots(graph, layerIndex, layer);
1604}
1605
Aron Virginas-Tar377351e2019-02-27 14:42:31 +00001606void Deserializer::ParseMaximum(GraphPtr graph, unsigned int layerIndex)
1607{
1608 CHECK_LAYERS(graph, 0, layerIndex);
1609 auto inputs = GetInputs(graph, layerIndex);
1610 CHECK_LOCATION();
1611 CHECK_VALID_SIZE(inputs.size(), 2);
1612
1613 auto outputs = GetOutputs(graph, layerIndex);
1614 CHECK_VALID_SIZE(outputs.size(), 1);
1615
1616 auto layerName = GetLayerName(graph, layerIndex);
1617 IConnectableLayer* layer = m_Network->AddMaximumLayer(layerName.c_str());
1618
1619 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1620 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1621
1622 RegisterInputSlots(graph, layerIndex, layer);
1623 RegisterOutputSlots(graph, layerIndex, layer);
1624}
1625
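// Older graphs serialize Concat as a Merger layer; this helper returns the origins descriptor
// from whichever of the two layer types is present.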
Jim Flynne242f2d2019-05-22 14:24:13 +01001626const armnnSerializer::OriginsDescriptor* GetOriginsDescriptor(const armnnSerializer::SerializedGraph* graph,
1627 unsigned int layerIndex)
1628{
1629 auto layerType = graph->layers()->Get(layerIndex)->layer_type();
1630
1631 switch (layerType)
1632 {
1633 case Layer::Layer_ConcatLayer:
1634 return graph->layers()->Get(layerIndex)->layer_as_ConcatLayer()->descriptor();
1635 case Layer::Layer_MergerLayer:
1636 return graph->layers()->Get(layerIndex)->layer_as_MergerLayer()->descriptor();
1637 default:
1638 throw armnn::Exception("Unknown layer type: expected Concat or Merger");
1639 }
1640}
1641
Aron Virginas-Tare80ebd12019-10-17 16:11:54 +01001642void Deserializer::ParseComparison(GraphPtr graph, unsigned int layerIndex)
1643{
1644 CHECK_LAYERS(graph, 0, layerIndex);
1645 CHECK_LOCATION();
1646
1647 auto inputs = GetInputs(graph, layerIndex);
1648 CHECK_VALID_SIZE(inputs.size(), 2);
1649
1650 auto outputs = GetOutputs(graph, layerIndex);
1651 CHECK_VALID_SIZE(outputs.size(), 1);
1652
1653 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ComparisonLayer();
1654 auto fbDescriptor = fbLayer->descriptor();
1655
1656 armnn::ComparisonDescriptor descriptor;
1657 descriptor.m_Operation = ToComparisonOperation(fbDescriptor->operation());
1658
1659 const std::string& layerName = GetLayerName(graph, layerIndex);
1660 IConnectableLayer* layer = m_Network->AddComparisonLayer(descriptor, layerName.c_str());
1661
1662 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1663 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1664
1665 RegisterInputSlots(graph, layerIndex, layer);
1666 RegisterOutputSlots(graph, layerIndex, layer);
1667}
1668
josh minor4a3c6102020-01-06 16:40:46 -06001669void Deserializer::ParseElementwiseUnary(GraphPtr graph, unsigned int layerIndex)
1670{
1671 CHECK_LAYERS(graph, 0, layerIndex);
1672 CHECK_LOCATION();
1673
1674 auto inputs = GetInputs(graph, layerIndex);
1675 CHECK_VALID_SIZE(inputs.size(), 1);
1676
1677 auto outputs = GetOutputs(graph, layerIndex);
1678 CHECK_VALID_SIZE(outputs.size(), 1);
1679
1680 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_ElementwiseUnaryLayer();
1681 auto fbDescriptor = fbLayer->descriptor();
1682
1683 armnn::ElementwiseUnaryDescriptor descriptor;
1684 descriptor.m_Operation = ToUnaryOperation(fbDescriptor->operation());
1685
1686 const std::string& layerName = GetLayerName(graph, layerIndex);
1687 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
1688
1689 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1690 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1691
1692 RegisterInputSlots(graph, layerIndex, layer);
1693 RegisterOutputSlots(graph, layerIndex, layer);
1694}
1695
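// The number of serialized inputs must equal the descriptor's view count; each view's origin
// coordinates give the offset of that input within the concatenated output.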
Jim Flynn906f9462019-05-10 13:55:21 +01001696void Deserializer::ParseConcat(GraphPtr graph, unsigned int layerIndex)
Jim Flynnac25a1b2019-02-28 10:40:49 +00001697{
1698 CHECK_LAYERS(graph, 0, layerIndex);
1699 CHECK_LOCATION();
1700
1701 auto outputs = GetOutputs(graph, layerIndex);
1702 CHECK_VALID_SIZE(outputs.size(), 1);
1703
Jim Flynnac25a1b2019-02-28 10:40:49 +00001704 auto layerName = GetLayerName(graph, layerIndex);
Jim Flynne242f2d2019-05-22 14:24:13 +01001705 auto originsDescriptor = GetOriginsDescriptor(graph, layerIndex);
1706 unsigned int numViews = originsDescriptor->numViews();
1707 unsigned int numDimensions = originsDescriptor->numDimensions();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001708
1709 // can now check the number of inputs == number of views
1710 auto inputs = GetInputs(graph, layerIndex);
1711 CHECK_VALID_SIZE(inputs.size(), numViews);
1712
1713 armnn::OriginsDescriptor descriptor(numViews, numDimensions);
Jim Flynne242f2d2019-05-22 14:24:13 +01001714 auto originsPtr = originsDescriptor->viewOrigins();
Jim Flynnac25a1b2019-02-28 10:40:49 +00001715 for (unsigned int v = 0; v < numViews; ++v)
1716 {
1717 auto originPtr = originsPtr->Get(v);
1718 for (unsigned int d = 0; d < numDimensions; ++d)
1719 {
1720 uint32_t value = originPtr->data()->Get(d);
1721 descriptor.SetViewOriginCoord(v, d, value);
1722 }
1723 }
Jim Flynne242f2d2019-05-22 14:24:13 +01001724 descriptor.SetConcatAxis(originsDescriptor->concatAxis());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001725
Jim Flynn906f9462019-05-10 13:55:21 +01001726 IConnectableLayer* layer = m_Network->AddConcatLayer(descriptor, layerName.c_str());
Jim Flynnac25a1b2019-02-28 10:40:49 +00001727 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1728 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1729
1730 RegisterInputSlots(graph, layerIndex, layer);
1731 RegisterOutputSlots(graph, layerIndex, layer);
1732}
1733
Derek Lamberti8ddae332019-02-21 16:29:43 +00001734void Deserializer::ParseMultiplication(GraphPtr graph, unsigned int layerIndex)
Sadik Armagan5f450272019-02-12 14:31:45 +00001735{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001736 CHECK_LAYERS(graph, 0, layerIndex);
1737 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001738 CHECK_LOCATION();
1739 CHECK_VALID_SIZE(inputs.size(), 2);
1740
Derek Lamberti8ddae332019-02-21 16:29:43 +00001741 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagan5f450272019-02-12 14:31:45 +00001742 CHECK_VALID_SIZE(outputs.size(), 1);
1743
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001744 auto layerName = GetLayerName(graph, layerIndex);
1745 IConnectableLayer* layer = m_Network->AddMultiplicationLayer(layerName.c_str());
Sadik Armagan5f450272019-02-12 14:31:45 +00001746
1747 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1748 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1749
Derek Lamberti8ddae332019-02-21 16:29:43 +00001750 RegisterInputSlots(graph, layerIndex, layer);
1751 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagan5f450272019-02-12 14:31:45 +00001752}
1753
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001754void Deserializer::ParseFloor(GraphPtr graph, unsigned int layerIndex)
1755{
1756 CHECK_LAYERS(graph, 0, layerIndex);
1757 CHECK_LOCATION();
1758
1759 auto inputs = GetInputs(graph, layerIndex);
1760 CHECK_VALID_SIZE(inputs.size(), 1);
1761
1762 auto outputs = GetOutputs(graph, layerIndex);
1763 CHECK_VALID_SIZE(outputs.size(), 1);
1764
1765 auto layerName = GetLayerName(graph, layerIndex);
1766
1767 armnn::IConnectableLayer* layer = m_Network->AddFloorLayer(layerName.c_str());
Finn Williamsdd2ba7e2019-03-01 11:51:52 +00001770
1771 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1772 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1773
1774 RegisterInputSlots(graph, layerIndex, layer);
1775 RegisterOutputSlots(graph, layerIndex, layer);
1776}
1777
Derek Lamberti8ddae332019-02-21 16:29:43 +00001778void Deserializer::ParseFullyConnected(GraphPtr graph, unsigned int layerIndex)
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001779{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001780 CHECK_LAYERS(graph, 0, layerIndex);
1781 auto inputs = GetInputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001782 CHECK_LOCATION();
1783 CHECK_VALID_SIZE(inputs.size(), 1);
1784
Derek Lamberti8ddae332019-02-21 16:29:43 +00001785 auto outputs = GetOutputs(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001786 CHECK_VALID_SIZE(outputs.size(), 1);
1787
Derek Lamberti8ddae332019-02-21 16:29:43 +00001788 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_FullyConnectedLayer();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001789 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001790 auto flatBufferDescriptor = flatBufferLayer->descriptor();
1791
1792 armnn::FullyConnectedDescriptor fullyConnectedDescriptor;
1793 fullyConnectedDescriptor.m_BiasEnabled = flatBufferDescriptor->biasEnabled();
1794 fullyConnectedDescriptor.m_TransposeWeightMatrix = flatBufferDescriptor->transposeWeightsMatrix();
1795
1796 armnn::ConstTensor weightsTensor = ToConstTensor(flatBufferLayer->weights());
1797
1798 armnn::IConnectableLayer* layer;
Matteo Martincighfc598e12019-05-14 10:36:13 +01001799 armnn::Optional<armnn::ConstTensor> optionalBiases = armnn::EmptyOptional();
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001800 if (flatBufferDescriptor->biasEnabled())
1801 {
1802 armnn::ConstTensor biasTensorData = ToConstTensor(flatBufferLayer->biases());
Matteo Martincighfc598e12019-05-14 10:36:13 +01001803 optionalBiases = armnn::Optional<armnn::ConstTensor>(biasTensorData);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001804 }
Matteo Martincighfc598e12019-05-14 10:36:13 +01001805 layer = m_Network->AddFullyConnectedLayer(fullyConnectedDescriptor,
1806 weightsTensor,
1807 optionalBiases,
1808 layerName.c_str());
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001809
1810 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1811 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1812
Derek Lamberti8ddae332019-02-21 16:29:43 +00001813 RegisterInputSlots(graph, layerIndex, layer);
1814 RegisterOutputSlots(graph, layerIndex, layer);
Sadik Armagandbb0c0c2019-02-21 09:01:41 +00001815}
1816
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001817void Deserializer::ParsePad(GraphPtr graph, unsigned int layerIndex)
1818{
1819 CHECK_LAYERS(graph, 0, layerIndex);
1820
1821 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
1822 CHECK_VALID_SIZE(inputs.size(), 1);
1823
1824 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
1825 CHECK_VALID_SIZE(outputs.size(), 1);
1826
1827 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_PadLayer()->descriptor();
1828 auto flatBufferPadList = flatBufferDescriptor->padList();
David Monahan34757812019-06-19 11:47:21 +01001829 float padValue = flatBufferDescriptor->padValue();
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001830
1831 if (flatBufferPadList->Length() % 2 != 0)
1832 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01001833 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
1834 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001835 }
1836
1837 std::vector<std::pair<unsigned int, unsigned int>> padList;
1838 padList.reserve(flatBufferPadList->Length() / 2);
1839 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
1840 {
1841 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
1842 }
1843
David Monahan34757812019-06-19 11:47:21 +01001844 armnn::PadDescriptor descriptor(padList, padValue);
Nattapat Chaimanowongebb0f9c2019-03-01 12:14:06 +00001845
1846 auto layerName = GetLayerName(graph, layerIndex);
1847 IConnectableLayer* layer = m_Network->AddPadLayer(descriptor, layerName.c_str());
1848
1849 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
1850 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
1851
1852 RegisterInputSlots(graph, layerIndex, layer);
1853 RegisterOutputSlots(graph, layerIndex, layer);
1854}
1855
Derek Lamberti8ddae332019-02-21 16:29:43 +00001856void Deserializer::ParsePermute(GraphPtr graph, unsigned int layerIndex)
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001857{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001858 CHECK_LAYERS(graph, 0, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001859
1860 auto dimsMapping =
Derek Lamberti8ddae332019-02-21 16:29:43 +00001861 graph->layers()->Get(layerIndex)->layer_as_PermuteLayer()->descriptor()->dimMappings();
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001862
Derek Lamberti8ddae332019-02-21 16:29:43 +00001863 auto inputs = GetInputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001864 CHECK_VALID_SIZE(inputs.size(), 1);
1865
Derek Lamberti8ddae332019-02-21 16:29:43 +00001866 auto outputs = GetOutputs(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001867 CHECK_VALID_SIZE(outputs.size(), 1);
1868 auto outputInfo = ToTensorInfo(outputs[0]);
1869
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001870 auto layerName = GetLayerName(graph, layerIndex);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001871 const armnn::PermuteDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
1872
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001873 IConnectableLayer* layer = m_Network->AddPermuteLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001874 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1875
Derek Lamberti8ddae332019-02-21 16:29:43 +00001876 RegisterInputSlots(graph, layerIndex, layer);
1877 RegisterOutputSlots(graph, layerIndex, layer);
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001878}
1879
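// Translates a serialized Pooling2d descriptor into an armnn::Pooling2dDescriptor.
// Unsupported enum values are caught by asserts rather than exceptions, so in release builds
// they fall through with the field's default value.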
Derek Lamberti0028d1b2019-02-20 13:57:42 +00001880armnn::Pooling2dDescriptor Deserializer::GetPoolingDescriptor(Deserializer::PoolingDescriptor pooling2dDesc,
Nattapat Chaimanowong30b00202019-02-20 17:31:34 +00001881 unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001882{
Jan Eilers8eb25602020-03-09 12:13:48 +00001883 IgnoreUnused(layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001884 armnn::Pooling2dDescriptor desc;
1885
1886 switch (pooling2dDesc->poolType())
1887 {
1888 case PoolingAlgorithm_Average:
1889 {
1890 desc.m_PoolType = armnn::PoolingAlgorithm::Average;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001891 break;
1892 }
1893 case PoolingAlgorithm_Max:
1894 {
1895 desc.m_PoolType = armnn::PoolingAlgorithm::Max;
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001896 break;
1897 }
1898 default:
1899 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001900 ARMNN_ASSERT_MSG(false, "Unsupported pooling algorithm");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001901 }
1902 }
1903
1904 switch (pooling2dDesc->outputShapeRounding())
1905 {
1906 case OutputShapeRounding_Floor:
1907 {
1908 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Floor;
1909 break;
1910 }
1911 case OutputShapeRounding_Ceiling:
1912 {
1913 desc.m_OutputShapeRounding = armnn::OutputShapeRounding::Ceiling;
1914 break;
1915 }
1916 default:
1917 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001918 ARMNN_ASSERT_MSG(false, "Unsupported output shape rounding");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001919 }
1920 }
1921
1922 switch (pooling2dDesc->paddingMethod())
1923 {
1924 case PaddingMethod_Exclude:
1925 {
1926 desc.m_PaddingMethod = armnn::PaddingMethod::Exclude;
1927 break;
1928 }
1929 case PaddingMethod_IgnoreValue:
1930 {
1931 desc.m_PaddingMethod = armnn::PaddingMethod::IgnoreValue;
1932 break;
1933 }
1934 default:
1935 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001936 ARMNN_ASSERT_MSG(false, "Unsupported padding method");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001937 }
1938 }
1939
1940 switch (pooling2dDesc->dataLayout())
1941 {
1942 case DataLayout_NCHW:
1943 {
1944 desc.m_DataLayout = armnn::DataLayout::NCHW;
1945 break;
1946 }
1947 case DataLayout_NHWC:
1948 {
1949 desc.m_DataLayout = armnn::DataLayout::NHWC;
1950 break;
1951 }
1952 default:
1953 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01001954 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001955 }
1956 }
1957
1958 desc.m_PadRight = pooling2dDesc->padRight();
1959 desc.m_PadLeft = pooling2dDesc->padLeft();
1960 desc.m_PadBottom = pooling2dDesc->padBottom();
1961 desc.m_PadTop = pooling2dDesc->padTop();
1962 desc.m_StrideX = pooling2dDesc->strideX();
1963 desc.m_StrideY = pooling2dDesc->strideY();
1964 desc.m_PoolWidth = pooling2dDesc->poolWidth();
1965 desc.m_PoolHeight = pooling2dDesc->poolHeight();
1966
1967 return desc;
1968}
1969
Derek Lamberti8ddae332019-02-21 16:29:43 +00001970void Deserializer::ParsePooling2d(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001971{
Derek Lamberti8ddae332019-02-21 16:29:43 +00001972 CHECK_LAYERS(graph, 0, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001973
Derek Lamberti8ddae332019-02-21 16:29:43 +00001974 auto pooling2dDes = graph->layers()->Get(layerIndex)->layer_as_Pooling2dLayer()->descriptor();
Derek Lamberti8ddae332019-02-21 16:29:43 +00001975 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001976 CHECK_VALID_SIZE(inputs.size(), 1);
1977
Derek Lamberti8ddae332019-02-21 16:29:43 +00001978 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001979 CHECK_VALID_SIZE(outputs.size(), 1);
1980 auto outputInfo = ToTensorInfo(outputs[0]);
1981
1982 auto pooling2dDescriptor = GetPoolingDescriptor(pooling2dDes, layerIndex);
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00001983 auto layerName = GetLayerName(graph, layerIndex);
1984 IConnectableLayer* layer = m_Network->AddPooling2dLayer(pooling2dDescriptor, layerName.c_str());
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001985 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
1986
Derek Lamberti8ddae332019-02-21 16:29:43 +00001987 RegisterInputSlots(graph, layerIndex, layer);
1988 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart3166c3e2019-02-18 15:24:53 +00001989}
1990
Derek Lamberti87acb272019-03-27 16:51:31 +00001991void Deserializer::ParseQuantize(GraphPtr graph, unsigned int layerIndex)
1992{
1993 CHECK_LAYERS(graph, 0, layerIndex);
1994
1995 auto inputs = GetInputs(graph, layerIndex);
1996 CHECK_VALID_SIZE(inputs.size(), 1);
1997
1998 auto outputs = GetOutputs(graph, layerIndex);
1999 CHECK_VALID_SIZE(outputs.size(), 1);
2000 auto outputInfo = ToTensorInfo(outputs[0]);
2001
2002 auto layerName = GetLayerName(graph, layerIndex);
2003 IConnectableLayer* layer = m_Network->AddQuantizeLayer(layerName.c_str());
2004 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2005
2006 RegisterInputSlots(graph, layerIndex, layer);
2007 RegisterOutputSlots(graph, layerIndex, layer);
2008}
2009
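// Computes the output TensorInfo for a reshape. At most one target dimension may be -1; it is
// treated as a stretch dimension and inferred from the remaining element count. For example, a
// 24-element input reshaped with target dimensions {-1, 12} yields the shape {2, 12}.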
Derek Lamberti0028d1b2019-02-20 13:57:42 +00002010armnn::TensorInfo Deserializer::OutputShapeOfReshape(const armnn::TensorInfo& inputTensorInfo,
Saoirse Stewart263829c2019-02-19 15:54:14 +00002011 const std::vector<uint32_t>& targetDimsIn)
2012{
2013 std::vector<unsigned int> outputDims(targetDimsIn.begin(), targetDimsIn.end());
2014 const auto stretchDim = std::find(targetDimsIn.begin(), targetDimsIn.end(), -1);
2015
2016 if (stretchDim != targetDimsIn.end())
2017 {
2018 if (std::find(std::next(stretchDim), targetDimsIn.end(), -1) != targetDimsIn.end())
2019 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002020 throw ParseException(fmt::format("At most one component of shape can be -1 {}",
2021 CHECK_LOCATION().AsString()));
Saoirse Stewart263829c2019-02-19 15:54:14 +00002022 }
2023
2024 auto targetNumElements =
Matthew Sloyan0663d662020-09-14 11:47:26 +01002025 armnn::numeric_cast<unsigned int>(
Saoirse Stewart263829c2019-02-19 15:54:14 +00002026 std::accumulate(targetDimsIn.begin(), targetDimsIn.end(), -1, std::multiplies<int32_t>()));
2027
2028 auto stretchIndex = static_cast<size_t>(std::distance(targetDimsIn.begin(), stretchDim));
2029 outputDims[stretchIndex] = inputTensorInfo.GetNumElements() / targetNumElements;
2030 }
2031
2032 TensorShape outputShape = TensorShape(static_cast<unsigned int>(outputDims.size()), outputDims.data());
2033
2034 armnn::TensorInfo reshapeInfo = inputTensorInfo;
2035 reshapeInfo.SetShape(outputShape);
2036
2037 return reshapeInfo;
2038}
2039
Finn Williams2605b232020-06-10 15:53:46 +01002040void Deserializer::ParseRank(GraphPtr graph, unsigned int layerIndex)
2041{
2042 CHECK_LAYERS(graph, 0, layerIndex);
2043
2044 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2045 CHECK_VALID_SIZE(inputs.size(), 1);
2046
2047 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2048 CHECK_VALID_SIZE(outputs.size(), 1);
2049
2050 auto layerName = GetLayerName(graph, layerIndex);
2051 IConnectableLayer* layer = m_Network->AddRankLayer(layerName.c_str());
2052
2053 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2054 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2055
2056 RegisterInputSlots(graph, layerIndex, layer);
2057 RegisterOutputSlots(graph, layerIndex, layer);
2058}
2059
Derek Lamberti8ddae332019-02-21 16:29:43 +00002060void Deserializer::ParseReshape(GraphPtr graph, unsigned int layerIndex)
Saoirse Stewart263829c2019-02-19 15:54:14 +00002061{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002062 CHECK_LAYERS(graph, 0, layerIndex);
2063 auto inputs = GetInputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002064
Derek Lamberti8ddae332019-02-21 16:29:43 +00002065 auto outputs = GetOutputs(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002066 CHECK_VALID_SIZE(outputs.size(), 1);
2067
2068 armnn::TensorInfo inputTensorInfo = ToTensorInfo(inputs[0]);
2069 armnn::TensorInfo actualOutputTensorInfo = ToTensorInfo(outputs[0]);
2070
Derek Lamberti8ddae332019-02-21 16:29:43 +00002071 const auto targetDims = graph->layers()->Get(layerIndex)->layer_as_ReshapeLayer()->descriptor()->targetShape();
Saoirse Stewart263829c2019-02-19 15:54:14 +00002072 std::vector<uint32_t> outputDims(targetDims->begin(), targetDims->begin() + targetDims->size());
2073
Derek Lamberti0028d1b2019-02-20 13:57:42 +00002074 armnn::TensorInfo reshapeOutputTensorInfo = Deserializer::OutputShapeOfReshape(inputTensorInfo, outputDims);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002075 const armnn::TensorShape& reshapeOutputTensorShape = reshapeOutputTensorInfo.GetShape();
2076
2077 const std::vector<uint32_t> expectedDims(outputs[0]->dimensions()->begin(),
2078 outputs[0]->dimensions()->begin() + outputs[0]->dimensions()->size());
2079
2080 if (inputs.size() > 1 && !CheckShape(reshapeOutputTensorShape, expectedDims))
2081 {
2082 std::stringstream ss;
2083 ss << "New shape defined in reshape parameters "
2084 << reshapeOutputTensorShape
2085 << " does not equal output shape "
2086 << actualOutputTensorInfo.GetShape()
2087 << ": "
2088 << CHECK_LOCATION().AsString();
2089 throw ParseException(ss.str());
2090 }
2091
2092 armnn::ReshapeDescriptor reshapeDesc;
2093 reshapeDesc.m_TargetShape = reshapeOutputTensorShape;
2094
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002095 auto layerName = GetLayerName(graph, layerIndex);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002096 IConnectableLayer* layer = m_Network->AddReshapeLayer(reshapeDesc, layerName.c_str());
2097 layer->GetOutputSlot(0).SetTensorInfo(reshapeOutputTensorInfo);
2098
Derek Lamberti8ddae332019-02-21 16:29:43 +00002099 RegisterInputSlots(graph, layerIndex, layer);
2100 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewart263829c2019-02-19 15:54:14 +00002101}
2102
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002103void Deserializer::ParseResize(GraphPtr graph, unsigned int layerIndex)
2104{
2105 CHECK_LAYERS(graph, 0, layerIndex);
2106
2107 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2108 CHECK_VALID_SIZE(inputs.size(), 1);
2109
2110 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2111 CHECK_VALID_SIZE(outputs.size(), 1);
2112
2113 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeLayer()->descriptor();
2114
2115 armnn::ResizeDescriptor descriptor;
2116 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
2117 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
2118 descriptor.m_Method = ToResizeMethod(flatBufferDescriptor->method());
2119 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002120 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2121 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
FinnWilliamsArm6fb339a2019-06-28 15:07:10 +01002122
2123 auto layerName = GetLayerName(graph, layerIndex);
2124 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
2125
2126 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2127 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2128
2129 RegisterInputSlots(graph, layerIndex, layer);
2130 RegisterOutputSlots(graph, layerIndex, layer);
2131}
2132
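// ResizeBilinear layers are deserialized as Resize layers with ResizeMethod::Bilinear.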
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002133void Deserializer::ParseResizeBilinear(GraphPtr graph, unsigned int layerIndex)
2134{
2135 CHECK_LAYERS(graph, 0, layerIndex);
2136
2137 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2138 CHECK_VALID_SIZE(inputs.size(), 1);
2139
2140 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2141 CHECK_VALID_SIZE(outputs.size(), 1);
2142
2143 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_ResizeBilinearLayer()->descriptor();
2144
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002145 armnn::ResizeDescriptor descriptor;
2146 descriptor.m_TargetWidth = flatBufferDescriptor->targetWidth();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002147 descriptor.m_TargetHeight = flatBufferDescriptor->targetHeight();
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002148 descriptor.m_Method = armnn::ResizeMethod::Bilinear;
2149 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
David Monahan4a0c9b92020-05-30 09:48:39 +01002150 descriptor.m_AlignCorners = flatBufferDescriptor->alignCorners();
2151 descriptor.m_HalfPixelCenters = flatBufferDescriptor->halfPixelCenters();
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002152
2153 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tar169d2f12019-07-01 19:01:44 +01002154 IConnectableLayer* layer = m_Network->AddResizeLayer(descriptor, layerName.c_str());
Nattapat Chaimanowong6522cdc2019-03-01 16:14:13 +00002155
2156 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2157 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2158
2159 RegisterInputSlots(graph, layerIndex, layer);
2160 RegisterOutputSlots(graph, layerIndex, layer);
2161}
2162
Derek Lamberti8ddae332019-02-21 16:29:43 +00002163void Deserializer::ParseSoftmax(GraphPtr graph, unsigned int layerIndex)
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002164{
Derek Lamberti8ddae332019-02-21 16:29:43 +00002165 CHECK_LAYERS(graph, 0, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002166
Derek Lamberti8ddae332019-02-21 16:29:43 +00002167 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002168 CHECK_VALID_SIZE(inputs.size(), 1);
2169
Derek Lamberti8ddae332019-02-21 16:29:43 +00002170 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002171 CHECK_VALID_SIZE(outputs.size(), 1);
2172
2173 armnn::SoftmaxDescriptor descriptor;
Derek Lamberti8ddae332019-02-21 16:29:43 +00002174 descriptor.m_Beta = graph->layers()->Get(layerIndex)->layer_as_SoftmaxLayer()->descriptor()->beta();
Éanna Ó Catháin633f8592019-02-25 16:26:29 +00002175 auto layerName = GetLayerName(graph, layerIndex);
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002176
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002177 IConnectableLayer* layer = m_Network->AddSoftmaxLayer(descriptor, layerName.c_str());
2178
2179 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2180 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2181
Derek Lamberti8ddae332019-02-21 16:29:43 +00002182 RegisterInputSlots(graph, layerIndex, layer);
2183 RegisterOutputSlots(graph, layerIndex, layer);
Kevin May43a799c2019-02-08 16:31:42 +00002184}
Aron Virginas-Tarfc413c02019-02-13 15:41:52 +00002185
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002186void Deserializer::ParseSpaceToBatchNd(GraphPtr graph, unsigned int layerIndex)
2187{
2188 CHECK_LAYERS(graph, 0, layerIndex);
2189
2190 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2191 CHECK_VALID_SIZE(inputs.size(), 1);
2192
2193 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2194 CHECK_VALID_SIZE(outputs.size(), 1);
2195
2196 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToBatchNdLayer()->descriptor();
2197 auto flatBufferPadList = flatBufferDescriptor->padList();
2198 auto flatBufferBlockShape = flatBufferDescriptor->blockShape();
2199
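// The serialized pad list is flattened as [before_0, after_0, before_1, after_1, ...],
// e.g. [1, 2, 3, 4] becomes {{1, 2}, {3, 4}}, so it must contain an even number of values.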
2200 if (flatBufferPadList->Length() % 2 != 0)
2201 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002202 throw ParseException(fmt::format("The size of the pad list must be divisible by 2 {}",
2203 CHECK_LOCATION().AsString()));
Nattapat Chaimanowong45286992019-02-26 15:53:02 +00002204 }
2205
2206 std::vector<std::pair<unsigned int, unsigned int>> padList;
2207 padList.reserve(flatBufferPadList->Length() / 2);
2208 for (unsigned int i = 0; i < flatBufferPadList->Length() - 1; i += 2)
2209 {
2210 padList.emplace_back(flatBufferPadList->Get(i), flatBufferPadList->Get(i+1));
2211 }
2212
2213 armnn::SpaceToBatchNdDescriptor descriptor;
2214 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2215 descriptor.m_BlockShape =
2216 std::vector<unsigned int>(flatBufferBlockShape->begin(), flatBufferBlockShape->end());
2217 descriptor.m_PadList = padList;
2218
2219 auto layerName = GetLayerName(graph, layerIndex);
2220 IConnectableLayer* layer = m_Network->AddSpaceToBatchNdLayer(descriptor, layerName.c_str());
2221
2222 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2223 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2224
2225 RegisterInputSlots(graph, layerIndex, layer);
2226 RegisterOutputSlots(graph, layerIndex, layer);
2227}
2228
Aron Virginas-Taraa067142019-06-11 16:01:44 +01002229void Deserializer::ParseSpaceToDepth(GraphPtr graph, unsigned int layerIndex)
2230{
2231 CHECK_LAYERS(graph, 0, layerIndex);
2232
2233 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2234 CHECK_VALID_SIZE(inputs.size(), 1);
2235
2236 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2237 CHECK_VALID_SIZE(outputs.size(), 1);
2238
2239 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_SpaceToDepthLayer()->descriptor();
2240
2241 armnn::SpaceToDepthDescriptor descriptor;
2242 descriptor.m_BlockSize = flatBufferDescriptor->blockSize();
2243 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2244
2245 auto layerName = GetLayerName(graph, layerIndex);
2246 IConnectableLayer* layer = m_Network->AddSpaceToDepthLayer(descriptor, layerName.c_str());
2247
2248 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2249 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2250
2251 RegisterInputSlots(graph, layerIndex, layer);
2252 RegisterOutputSlots(graph, layerIndex, layer);
2253}
2254
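// Translates the flatbuffer normalization descriptor (channel type, method, data layout,
// alpha, beta, k and norm size) into an armnn::NormalizationDescriptor.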
Nina Drozd57728782019-02-27 10:53:27 +00002255armnn::NormalizationDescriptor Deserializer::GetNormalizationDescriptor(
2256 Deserializer::NormalizationDescriptorPtr normalizationDescriptor,
2257 unsigned int layerIndex)
2258{
Jan Eilers8eb25602020-03-09 12:13:48 +00002259 IgnoreUnused(layerIndex);
Nina Drozd57728782019-02-27 10:53:27 +00002260 armnn::NormalizationDescriptor desc;
2261
2262 switch (normalizationDescriptor->normChannelType())
2263 {
2264 case NormalizationAlgorithmChannel_Across:
2265 {
2266 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Across;
2267 break;
2268 }
2269 case NormalizationAlgorithmChannel_Within:
2270 {
2271 desc.m_NormChannelType = armnn::NormalizationAlgorithmChannel::Within;
2272 break;
2273 }
2274 default:
2275 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002276 ARMNN_ASSERT_MSG(false, "Unsupported normalization channel type");
Nina Drozd57728782019-02-27 10:53:27 +00002277 }
2278 }
2279
2280 switch (normalizationDescriptor->normMethodType())
2281 {
2282 case NormalizationAlgorithmMethod_LocalBrightness:
2283 {
2284 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalBrightness;
2285 break;
2286 }
2287 case NormalizationAlgorithmMethod_LocalContrast:
2288 {
2289 desc.m_NormMethodType = armnn::NormalizationAlgorithmMethod::LocalContrast;
2290 break;
2291 }
2292 default:
2293 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002294 ARMNN_ASSERT_MSG(false, "Unsupported normalization method type");
Nina Drozd57728782019-02-27 10:53:27 +00002295 }
2296 }
2297
2298 switch (normalizationDescriptor->dataLayout())
2299 {
2300 case DataLayout_NCHW:
2301 {
2302 desc.m_DataLayout = armnn::DataLayout::NCHW;
2303 break;
2304 }
2305 case DataLayout_NHWC:
2306 {
2307 desc.m_DataLayout = armnn::DataLayout::NHWC;
2308 break;
2309 }
2310 default:
2311 {
Narumol Prangnawaratac2770a2020-04-01 16:51:23 +01002312 ARMNN_ASSERT_MSG(false, "Unsupported data layout");
Nina Drozd57728782019-02-27 10:53:27 +00002313 }
2314 }
2315
2316 desc.m_Alpha = normalizationDescriptor->alpha();
2317 desc.m_Beta = normalizationDescriptor->beta();
2318 desc.m_K = normalizationDescriptor->k();
2319 desc.m_NormSize = normalizationDescriptor->normSize();
2320
2321 return desc;
2322}
2323
2324void Deserializer::ParseNormalization(GraphPtr graph, unsigned int layerIndex)
2325{
2326 CHECK_LAYERS(graph, 0, layerIndex);
2327
2328 auto normalizationDes = graph->layers()->Get(layerIndex)->layer_as_NormalizationLayer()->descriptor();
2329
2330 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2331 CHECK_VALID_SIZE(inputs.size(), 1);
2332
2333 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2334 CHECK_VALID_SIZE(outputs.size(), 1);
2335
2336 auto outputInfo = ToTensorInfo(outputs[0]);
2337
2338 auto normalizationDescriptor = GetNormalizationDescriptor(normalizationDes, layerIndex);
2339 auto layerName = GetLayerName(graph, layerIndex);
2340
2341 IConnectableLayer* layer = m_Network->AddNormalizationLayer(normalizationDescriptor, layerName.c_str());
2342 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2343
2344 RegisterInputSlots(graph, layerIndex, layer);
2345 RegisterOutputSlots(graph, layerIndex, layer);
2346}
2347
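// Deserializes an Rsqrt layer as an ElementwiseUnary layer with UnaryOperation::Rsqrt.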
Sadik Armagan8b42a382019-03-01 14:24:49 +00002348void Deserializer::ParseRsqrt(GraphPtr graph, unsigned int layerIndex)
2349{
2350 CHECK_LAYERS(graph, 0, layerIndex);
2351 auto inputs = GetInputs(graph, layerIndex);
2352 CHECK_LOCATION();
2353 CHECK_VALID_SIZE(inputs.size(), 1);
2354
2355 auto outputs = GetOutputs(graph, layerIndex);
2356 CHECK_VALID_SIZE(outputs.size(), 1);
2357
2358 auto layerName = GetLayerName(graph, layerIndex);
Sadik Armagan8b42a382019-03-01 14:24:49 +00002359
josh minor4a3c6102020-01-06 16:40:46 -06002360 armnn::ElementwiseUnaryDescriptor descriptor(armnn::UnaryOperation::Rsqrt);
2361 IConnectableLayer* layer = m_Network->AddElementwiseUnaryLayer(descriptor, layerName.c_str());
Sadik Armagan8b42a382019-03-01 14:24:49 +00002362 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2363 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2364
2365 RegisterInputSlots(graph, layerIndex, layer);
2366 RegisterOutputSlots(graph, layerIndex, layer);
2367}
2368
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002369void Deserializer::ParseSlice(GraphPtr graph, unsigned int layerIndex)
2370{
2371 CHECK_LAYERS(graph, 0, layerIndex);
2372
2373 auto inputs = GetInputs(graph, layerIndex);
2374 CHECK_VALID_SIZE(inputs.size(), 1);
2375
2376 auto outputs = GetOutputs(graph, layerIndex);
2377 CHECK_VALID_SIZE(outputs.size(), 1);
2378
2379 auto fbDescriptor = graph->layers()->Get(layerIndex)->layer_as_SliceLayer()->descriptor();
2380
2381 auto fbBegin = fbDescriptor->begin();
2382 auto fbSize = fbDescriptor->size();
2383
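// begin and size are specified per dimension, so both vectors must be the same length.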
2384 if (fbBegin->Length() != fbSize->Length())
2385 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002386 throw ParseException(fmt::format("Begin and size descriptors must have the same length {}",
2387 CHECK_LOCATION().AsString()));
Aron Virginas-Tar2fda80b2019-09-18 13:36:52 +01002388 }
2389
2390 armnn::SliceDescriptor descriptor;
2391 descriptor.m_Begin.insert(descriptor.m_Begin.end(), fbBegin->begin(), fbBegin->end());
2392 descriptor.m_Size.insert(descriptor.m_Size.end(), fbSize->begin(), fbSize->end());
2393
2394 auto layerName = GetLayerName(graph, layerIndex);
2395 IConnectableLayer* layer = m_Network->AddSliceLayer(descriptor, layerName.c_str());
2396
2397 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2398 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2399
2400 RegisterInputSlots(graph, layerIndex, layer);
2401 RegisterOutputSlots(graph, layerIndex, layer);
2402}
2403
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002404void Deserializer::ParseStridedSlice(GraphPtr graph, unsigned int layerIndex)
2405{
2406 CHECK_LAYERS(graph, 0, layerIndex);
2407
2408 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2409 CHECK_VALID_SIZE(inputs.size(), 1);
2410
2411 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2412 CHECK_VALID_SIZE(outputs.size(), 1);
2413
2414 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StridedSliceLayer()->descriptor();
2415
2416 auto flatBufferBegin = flatBufferDescriptor->begin();
2417 auto flatBufferEnd = flatBufferDescriptor->end();
2418 auto flatBufferStride = flatBufferDescriptor->stride();
2419
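// begin, end and stride are specified per dimension and must therefore all be the same length.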
2420 if (!(flatBufferBegin->Length() == flatBufferEnd->Length() &&
2421 flatBufferBegin->Length() == flatBufferStride->Length()))
2422 {
Colm Donelan5b5c2222020-09-09 12:48:16 +01002423 throw ParseException(fmt::format("The begin, end and stride lists must all have the same length {}",
2424 CHECK_LOCATION().AsString()));
Nattapat Chaimanowongb3485212019-03-04 12:35:39 +00002425 }
2426
2427 std::vector<int> begin(flatBufferBegin->begin(), flatBufferBegin->end());
2428 std::vector<int> end(flatBufferEnd->begin(), flatBufferEnd->end());
2429 std::vector<int> stride(flatBufferStride->begin(), flatBufferStride->end());
2430
2431 armnn::StridedSliceDescriptor descriptor(begin, end, stride);
2432 descriptor.m_BeginMask = flatBufferDescriptor->beginMask();
2433 descriptor.m_EndMask = flatBufferDescriptor->endMask();
2434 descriptor.m_ShrinkAxisMask = flatBufferDescriptor->shrinkAxisMask();
2435 descriptor.m_EllipsisMask = flatBufferDescriptor->ellipsisMask();
2436 descriptor.m_NewAxisMask = flatBufferDescriptor->newAxisMask();
2437 descriptor.m_DataLayout = ToDataLayout(flatBufferDescriptor->dataLayout());
2438
2439 auto layerName = GetLayerName(graph, layerIndex);
2440 IConnectableLayer* layer = m_Network->AddStridedSliceLayer(descriptor, layerName.c_str());
2441
2442 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2443 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2444
2445 RegisterInputSlots(graph, layerIndex, layer);
2446 RegisterOutputSlots(graph, layerIndex, layer);
2447}
2448
Conor Kennedyda1f9752019-03-01 14:37:12 +00002449void Deserializer::ParseSubtraction(GraphPtr graph, unsigned int layerIndex)
2450{
2451 CHECK_LAYERS(graph, 0, layerIndex);
2452 auto inputs = GetInputs(graph, layerIndex);
2453 CHECK_LOCATION();
2454 CHECK_VALID_SIZE(inputs.size(), 2);
2455
2456 auto outputs = GetOutputs(graph, layerIndex);
2457 CHECK_VALID_SIZE(outputs.size(), 1);
2458
2459 auto layerName = GetLayerName(graph, layerIndex);
2460 IConnectableLayer* layer = m_Network->AddSubtractionLayer(layerName.c_str());
2461
2462 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2463 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2464
2465 RegisterInputSlots(graph, layerIndex, layer);
2466 RegisterOutputSlots(graph, layerIndex, layer);
2467}
2468
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002469void Deserializer::ParseGather(GraphPtr graph, unsigned int layerIndex)
2470{
2471 CHECK_LAYERS(graph, 0, layerIndex);
2472
2473 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2474 CHECK_VALID_SIZE(inputs.size(), 2);
2475
2476 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2477 CHECK_VALID_SIZE(outputs.size(), 1);
2478
Teresa Charlin52664732020-06-29 16:27:03 +01002479 armnn::GatherDescriptor descriptor;
2480 descriptor.m_Axis = graph->layers()->Get(layerIndex)->layer_as_GatherLayer()->descriptor()->axis();
2481
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002482 auto layerName = GetLayerName(graph, layerIndex);
Teresa Charlin52664732020-06-29 16:27:03 +01002483 IConnectableLayer* layer = m_Network->AddGatherLayer(descriptor, layerName.c_str());
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002484
2485 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002486 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2487
2488 RegisterInputSlots(graph, layerIndex, layer);
2489 RegisterOutputSlots(graph, layerIndex, layer);
Saoirse Stewarta1ed73a2019-03-04 13:40:12 +00002490}
2491
Sadik Armaganac97c8c2019-03-04 17:44:21 +00002492void Deserializer::ParseMean(GraphPtr graph, unsigned int layerIndex)
2493{
2494 CHECK_LAYERS(graph, 0, layerIndex);
2495
2496 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2497 CHECK_VALID_SIZE(inputs.size(), 1);
2498
2499 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2500 CHECK_VALID_SIZE(outputs.size(), 1);
2501
2502 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_MeanLayer()->descriptor();
2503 auto flatBufferAxis = flatBufferDescriptor->axis();
2504 auto flatBufferKeepDims = flatBufferDescriptor->keepDims();
2505
2506 armnn::MeanDescriptor descriptor;
2507 descriptor.m_Axis = std::vector<unsigned int>(flatBufferAxis->begin(), flatBufferAxis->end());
2508 descriptor.m_KeepDims = flatBufferKeepDims;
2509
2510 auto layerName = GetLayerName(graph, layerIndex);
2511 IConnectableLayer* layer = m_Network->AddMeanLayer(descriptor, layerName.c_str());
2512
2513 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2514 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2515
2516 RegisterInputSlots(graph, layerIndex, layer);
2517 RegisterOutputSlots(graph, layerIndex, layer);
2518}
2519
Jim Flynn18ce3382019-03-08 11:08:30 +00002520void Deserializer::ParseSplitter(GraphPtr graph, unsigned int layerIndex)
2521{
2522 CHECK_LAYERS(graph, 0, layerIndex);
2523
2524 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2525 CHECK_VALID_SIZE(inputs.size(), 1);
2526
2527 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2528
2529 auto flatBufferViewsDescriptor = graph->layers()->Get(layerIndex)->layer_as_SplitterLayer()->descriptor();
2530 auto flatBufferViewSizes = flatBufferViewsDescriptor->viewSizes();
2531 auto flatBufferOriginsDescriptor = flatBufferViewsDescriptor->origins();
2532 auto flatBufferViewOrigins = flatBufferOriginsDescriptor->viewOrigins();
2533 uint32_t numViews = flatBufferOriginsDescriptor->numViews();
2534 uint32_t numDimensions = flatBufferOriginsDescriptor->numDimensions();
2535
2536 // Check that numViews and numDimensions correspond to the ones already serialized ...
2537 // numViews == flatBufferViewSizes.size();
2538 // foreach: numDimensions == flatBufferViewSizes[x].size();
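// e.g. splitting a [4, 2] tensor into two equal views along axis 0 gives view sizes {2, 2}
// and view origins {0, 0} and {2, 0}.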
2539
2540 armnn::ViewsDescriptor viewsDescriptor(numViews, numDimensions);
2541 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2542 {
2543 for (unsigned int dIdx = 0; dIdx < numDimensions; ++dIdx)
2544 {
2545 viewsDescriptor.SetViewSize(vIdx, dIdx, flatBufferViewSizes->Get(vIdx)->data()->Get(dIdx));
2546 viewsDescriptor.SetViewOriginCoord(vIdx, dIdx, flatBufferViewOrigins->Get(vIdx)->data()->Get(dIdx));
2547 }
2548 }
2549
2550 auto layerName = GetLayerName(graph, layerIndex);
2551 IConnectableLayer* layer = m_Network->AddSplitterLayer(viewsDescriptor, layerName.c_str());
2552
2553 // There can be as many outputs as there are views ...
2554 for(unsigned int vIdx = 0; vIdx < numViews; ++vIdx)
2555 {
2556 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[vIdx]);
2557 layer->GetOutputSlot(vIdx).SetTensorInfo(outputTensorInfo);
2558 }
2559
2560 RegisterInputSlots(graph, layerIndex, layer);
2561 RegisterOutputSlots(graph, layerIndex, layer);
2562}
2563
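// Converts the serialized LSTM descriptor (activation function, clipping thresholds and the
// CIFG/peephole/projection/layer-norm flags) into an armnn::LstmDescriptor.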
Jim Flynn11af3752019-03-19 17:22:29 +00002564armnn::LstmDescriptor Deserializer::GetLstmDescriptor(Deserializer::LstmDescriptorPtr lstmDescriptor)
2565{
2566 armnn::LstmDescriptor desc;
2567
2568 desc.m_ActivationFunc = lstmDescriptor->activationFunc();
2569 desc.m_ClippingThresCell = lstmDescriptor->clippingThresCell();
2570 desc.m_ClippingThresProj = lstmDescriptor->clippingThresProj();
2571 desc.m_CifgEnabled = lstmDescriptor->cifgEnabled();
2572 desc.m_PeepholeEnabled = lstmDescriptor->peepholeEnabled();
2573 desc.m_ProjectionEnabled = lstmDescriptor->projectionEnabled();
Jan Eilersf8c62972019-07-17 11:07:49 +01002574 desc.m_LayerNormEnabled = lstmDescriptor->layerNormEnabled();
Jim Flynn11af3752019-03-19 17:22:29 +00002575
2576 return desc;
2577}
2578
2579void Deserializer::ParseLstm(GraphPtr graph, unsigned int layerIndex)
2580{
2581 CHECK_LAYERS(graph, 0, layerIndex);
2582
2583 auto inputs = GetInputs(graph, layerIndex);
2584 CHECK_VALID_SIZE(inputs.size(), 3);
2585
2586 auto outputs = GetOutputs(graph, layerIndex);
2587 CHECK_VALID_SIZE(outputs.size(), 4);
2588
2589 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_LstmLayer();
2590 auto layerName = GetLayerName(graph, layerIndex);
2591 auto flatBufferDescriptor = flatBufferLayer->descriptor();
2592 auto flatBufferInputParams = flatBufferLayer->inputParams();
2593
2594 auto lstmDescriptor = GetLstmDescriptor(flatBufferDescriptor);
2595
2596 armnn::LstmInputParams lstmInputParams;
2597
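// Mandatory parameters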
2598 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
2599 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
2600 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
2601 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
2602 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
2603 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
2604 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
2605 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
2606 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
2607
2608 lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
2609 lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
2610 lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
2611 lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
2612 lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
2613 lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
2614 lstmInputParams.m_ForgetGateBias = &forgetGateBias;
2615 lstmInputParams.m_CellBias = &cellBias;
2616 lstmInputParams.m_OutputGateBias = &outputGateBias;
2617
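// Optional CIFG parameters (only present when CIFG is disabled)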
2618 armnn::ConstTensor inputToInputWeights;
2619 armnn::ConstTensor recurrentToInputWeights;
2620 armnn::ConstTensor cellToInputWeights;
2621 armnn::ConstTensor inputGateBias;
2622 if (!lstmDescriptor.m_CifgEnabled)
2623 {
2624 inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
2625 recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
2626 cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
2627 inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
2628
2629 lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
2630 lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
2631 lstmInputParams.m_CellToInputWeights = &cellToInputWeights;
2632 lstmInputParams.m_InputGateBias = &inputGateBias;
2633 }
2634
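// Optional projection parameters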
2635 armnn::ConstTensor projectionWeights;
2636 armnn::ConstTensor projectionBias;
2637 if (lstmDescriptor.m_ProjectionEnabled)
2638 {
2639 projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
2640 projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());
2641
2642 lstmInputParams.m_ProjectionWeights = &projectionWeights;
2643 lstmInputParams.m_ProjectionBias = &projectionBias;
2644 }
2645
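// Optional peephole parameters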
2646 armnn::ConstTensor cellToForgetWeights;
2647 armnn::ConstTensor cellToOutputWeights;
2648 if (lstmDescriptor.m_PeepholeEnabled)
2649 {
2650 cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
2651 cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());
2652
2653 lstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
2654 lstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
2655 }
2656
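// Optional layer norm parameters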
Jan Eilersf8c62972019-07-17 11:07:49 +01002657 armnn::ConstTensor inputLayerNormWeights;
2658 armnn::ConstTensor forgetLayerNormWeights;
2659 armnn::ConstTensor cellLayerNormWeights;
2660 armnn::ConstTensor outputLayerNormWeights;
2661 if (lstmDescriptor.m_LayerNormEnabled)
2662 {
2663 if (!lstmDescriptor.m_CifgEnabled)
2664 {
2665 inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
2666 lstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
2667 }
2668 forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
2669 cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
2670 outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
2671
2672 lstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
2673 lstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
2674 lstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
2675 }
2676
Jim Flynn11af3752019-03-19 17:22:29 +00002677 IConnectableLayer* layer = m_Network->AddLstmLayer(lstmDescriptor, lstmInputParams, layerName.c_str());
2678
2679 armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
2680 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);
2681
2682 armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
2683 layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);
2684
2685 armnn::TensorInfo outputTensorInfo3 = ToTensorInfo(outputs[2]);
2686 layer->GetOutputSlot(2).SetTensorInfo(outputTensorInfo3);
2687
2688 armnn::TensorInfo outputTensorInfo4 = ToTensorInfo(outputs[3]);
2689 layer->GetOutputSlot(3).SetTensorInfo(outputTensorInfo4);
2690
2691 RegisterInputSlots(graph, layerIndex, layer);
2692 RegisterOutputSlots(graph, layerIndex, layer);
2693}
2694
James Conroy8d333182020-05-13 10:27:58 +01002695armnn::QLstmDescriptor Deserializer::GetQLstmDescriptor(Deserializer::QLstmDescriptorPtr qLstmDescriptor)
2696{
2697 armnn::QLstmDescriptor desc;
2698
2699 desc.m_CifgEnabled = qLstmDescriptor->cifgEnabled();
2700 desc.m_PeepholeEnabled = qLstmDescriptor->peepholeEnabled();
2701 desc.m_ProjectionEnabled = qLstmDescriptor->projectionEnabled();
2702 desc.m_LayerNormEnabled = qLstmDescriptor->layerNormEnabled();
2703
2704 desc.m_CellClip = qLstmDescriptor->cellClip();
2705 desc.m_ProjectionClip = qLstmDescriptor->projectionClip();
2706
2707 desc.m_InputIntermediateScale = qLstmDescriptor->inputIntermediateScale();
2708 desc.m_ForgetIntermediateScale = qLstmDescriptor->forgetIntermediateScale();
2709 desc.m_CellIntermediateScale = qLstmDescriptor->cellIntermediateScale();
2710 desc.m_OutputIntermediateScale = qLstmDescriptor->outputIntermediateScale();
2711
2712 desc.m_HiddenStateScale = qLstmDescriptor->hiddenStateScale();
2713 desc.m_HiddenStateZeroPoint = qLstmDescriptor->hiddenStateZeroPoint();
2714
2715 return desc;
2716}
2717
2718void Deserializer::ParseQLstm(GraphPtr graph, unsigned int layerIndex)
2719{
2720 CHECK_LAYERS(graph, 0, layerIndex);
2721
2722 auto inputs = GetInputs(graph, layerIndex);
2723 CHECK_VALID_SIZE(inputs.size(), 3);
2724
2725 auto outputs = GetOutputs(graph, layerIndex);
2726 CHECK_VALID_SIZE(outputs.size(), 3);
2727
2728 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QLstmLayer();
2729 auto layerName = GetLayerName(graph, layerIndex);
2730 auto flatBufferDescriptor = flatBufferLayer->descriptor();
2731 auto flatBufferInputParams = flatBufferLayer->inputParams();
2732
2733 auto qLstmDescriptor = GetQLstmDescriptor(flatBufferDescriptor);
2734 armnn::LstmInputParams qLstmInputParams;
2735
2736 // Mandatory params
2737 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
2738 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
2739 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
2740 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
2741 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
2742 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
2743 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
2744 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
2745 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
2746
2747 qLstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
2748 qLstmInputParams.m_InputToCellWeights = &inputToCellWeights;
2749 qLstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
2750 qLstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
2751 qLstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
2752 qLstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
2753 qLstmInputParams.m_ForgetGateBias = &forgetGateBias;
2754 qLstmInputParams.m_CellBias = &cellBias;
2755 qLstmInputParams.m_OutputGateBias = &outputGateBias;
2756
2757 // Optional CIFG params
2758 armnn::ConstTensor inputToInputWeights;
2759 armnn::ConstTensor recurrentToInputWeights;
2760 armnn::ConstTensor inputGateBias;
2761
2762 if (!qLstmDescriptor.m_CifgEnabled)
2763 {
2764 inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
2765 recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
2766 inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
2767
2768 qLstmInputParams.m_InputToInputWeights = &inputToInputWeights;
2769 qLstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
2770 qLstmInputParams.m_InputGateBias = &inputGateBias;
2771 }
2772
2773 // Optional projection params
2774 armnn::ConstTensor projectionWeights;
2775 armnn::ConstTensor projectionBias;
2776
2777 if (qLstmDescriptor.m_ProjectionEnabled)
2778 {
2779 projectionWeights = ToConstTensor(flatBufferInputParams->projectionWeights());
2780 projectionBias = ToConstTensor(flatBufferInputParams->projectionBias());
2781
2782 qLstmInputParams.m_ProjectionWeights = &projectionWeights;
2783 qLstmInputParams.m_ProjectionBias = &projectionBias;
2784 }
2785
2786 // Optional peephole params
2787 armnn::ConstTensor cellToInputWeights;
2788 armnn::ConstTensor cellToForgetWeights;
2789 armnn::ConstTensor cellToOutputWeights;
2790
2791 if (qLstmDescriptor.m_PeepholeEnabled)
2792 {
2793 if (!qLstmDescriptor.m_CifgEnabled)
2794 {
2795 cellToInputWeights = ToConstTensor(flatBufferInputParams->cellToInputWeights());
2796 qLstmInputParams.m_CellToInputWeights = &cellToInputWeights;
2797 }
2798
2799 cellToForgetWeights = ToConstTensor(flatBufferInputParams->cellToForgetWeights());
2800 cellToOutputWeights = ToConstTensor(flatBufferInputParams->cellToOutputWeights());
2801
2802 qLstmInputParams.m_CellToForgetWeights = &cellToForgetWeights;
2803 qLstmInputParams.m_CellToOutputWeights = &cellToOutputWeights;
2804 }
2805
2806 // Optional layer norm params
2807 armnn::ConstTensor inputLayerNormWeights;
2808 armnn::ConstTensor forgetLayerNormWeights;
2809 armnn::ConstTensor cellLayerNormWeights;
2810 armnn::ConstTensor outputLayerNormWeights;
2811
2812 if (qLstmDescriptor.m_LayerNormEnabled)
2813 {
2814 if (!qLstmDescriptor.m_CifgEnabled)
2815 {
2816 inputLayerNormWeights = ToConstTensor(flatBufferInputParams->inputLayerNormWeights());
2817 qLstmInputParams.m_InputLayerNormWeights = &inputLayerNormWeights;
2818 }
2819
2820 forgetLayerNormWeights = ToConstTensor(flatBufferInputParams->forgetLayerNormWeights());
2821 cellLayerNormWeights = ToConstTensor(flatBufferInputParams->cellLayerNormWeights());
2822 outputLayerNormWeights = ToConstTensor(flatBufferInputParams->outputLayerNormWeights());
2823
2824 qLstmInputParams.m_ForgetLayerNormWeights = &forgetLayerNormWeights;
2825 qLstmInputParams.m_CellLayerNormWeights = &cellLayerNormWeights;
2826 qLstmInputParams.m_OutputLayerNormWeights = &outputLayerNormWeights;
2827 }
2828
2829 IConnectableLayer* layer = m_Network->AddQLstmLayer(qLstmDescriptor, qLstmInputParams, layerName.c_str());
2830
2831 armnn::TensorInfo outputStateOutInfo = ToTensorInfo(outputs[0]);
2832 layer->GetOutputSlot(0).SetTensorInfo(outputStateOutInfo);
2833
2834 armnn::TensorInfo cellStateOutInfo = ToTensorInfo(outputs[1]);
2835 layer->GetOutputSlot(1).SetTensorInfo(cellStateOutInfo);
2836
2837 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[2]);
2838 layer->GetOutputSlot(2).SetTensorInfo(outputInfo);
2839
2840 RegisterInputSlots(graph, layerIndex, layer);
2841 RegisterOutputSlots(graph, layerIndex, layer);
2842}
2843
Jan Eilers5b01a892019-07-23 09:47:43 +01002844void Deserializer::ParseQuantizedLstm(GraphPtr graph, unsigned int layerIndex)
2845{
2846 CHECK_LAYERS(graph, 0, layerIndex);
2847
2848 auto inputs = GetInputs(graph, layerIndex);
2849 CHECK_VALID_SIZE(inputs.size(), 3);
2850
2851 auto outputs = GetOutputs(graph, layerIndex);
2852 CHECK_VALID_SIZE(outputs.size(), 2);
2853
2854 auto flatBufferLayer = graph->layers()->Get(layerIndex)->layer_as_QuantizedLstmLayer();
2855 auto layerName = GetLayerName(graph, layerIndex);
2856 auto flatBufferInputParams = flatBufferLayer->inputParams();
2857
2858 armnn::QuantizedLstmInputParams lstmInputParams;
2859
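// All QuantizedLstm parameters are mandatory, so every weight and bias tensor is read unconditionally.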
2860 armnn::ConstTensor inputToInputWeights = ToConstTensor(flatBufferInputParams->inputToInputWeights());
2861 armnn::ConstTensor inputToForgetWeights = ToConstTensor(flatBufferInputParams->inputToForgetWeights());
2862 armnn::ConstTensor inputToCellWeights = ToConstTensor(flatBufferInputParams->inputToCellWeights());
2863 armnn::ConstTensor inputToOutputWeights = ToConstTensor(flatBufferInputParams->inputToOutputWeights());
2864 armnn::ConstTensor recurrentToInputWeights = ToConstTensor(flatBufferInputParams->recurrentToInputWeights());
2865 armnn::ConstTensor recurrentToForgetWeights = ToConstTensor(flatBufferInputParams->recurrentToForgetWeights());
2866 armnn::ConstTensor recurrentToCellWeights = ToConstTensor(flatBufferInputParams->recurrentToCellWeights());
2867 armnn::ConstTensor recurrentToOutputWeights = ToConstTensor(flatBufferInputParams->recurrentToOutputWeights());
2868 armnn::ConstTensor inputGateBias = ToConstTensor(flatBufferInputParams->inputGateBias());
2869 armnn::ConstTensor forgetGateBias = ToConstTensor(flatBufferInputParams->forgetGateBias());
2870 armnn::ConstTensor cellBias = ToConstTensor(flatBufferInputParams->cellBias());
2871 armnn::ConstTensor outputGateBias = ToConstTensor(flatBufferInputParams->outputGateBias());
2872
2873 lstmInputParams.m_InputToInputWeights = &inputToInputWeights;
2874 lstmInputParams.m_InputToForgetWeights = &inputToForgetWeights;
2875 lstmInputParams.m_InputToCellWeights = &inputToCellWeights;
2876 lstmInputParams.m_InputToOutputWeights = &inputToOutputWeights;
2877 lstmInputParams.m_RecurrentToInputWeights = &recurrentToInputWeights;
2878 lstmInputParams.m_RecurrentToForgetWeights = &recurrentToForgetWeights;
2879 lstmInputParams.m_RecurrentToCellWeights = &recurrentToCellWeights;
2880 lstmInputParams.m_RecurrentToOutputWeights = &recurrentToOutputWeights;
2881 lstmInputParams.m_InputGateBias = &inputGateBias;
2882 lstmInputParams.m_ForgetGateBias = &forgetGateBias;
2883 lstmInputParams.m_CellBias = &cellBias;
2884 lstmInputParams.m_OutputGateBias = &outputGateBias;
2885
2886 IConnectableLayer* layer = m_Network->AddQuantizedLstmLayer(lstmInputParams, layerName.c_str());
2887
2888 armnn::TensorInfo outputTensorInfo1 = ToTensorInfo(outputs[0]);
2889 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo1);
2890
2891 armnn::TensorInfo outputTensorInfo2 = ToTensorInfo(outputs[1]);
2892 layer->GetOutputSlot(1).SetTensorInfo(outputTensorInfo2);
2893
2894 RegisterInputSlots(graph, layerIndex, layer);
2895 RegisterOutputSlots(graph, layerIndex, layer);
2896}
2897
Nattapat Chaimanowonge4294fd2019-03-28 09:56:53 +00002898void Deserializer::ParseDequantize(GraphPtr graph, unsigned int layerIndex)
2899{
2900 CHECK_LAYERS(graph, 0, layerIndex);
2901
2902 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2903 CHECK_VALID_SIZE(inputs.size(), 1);
2904
2905 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2906 CHECK_VALID_SIZE(outputs.size(), 1);
2907
2908 const std::string layerName = GetLayerName(graph, layerIndex);
2909 IConnectableLayer* layer = m_Network->AddDequantizeLayer(layerName.c_str());
2910
2911 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2912 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2913
2914 RegisterInputSlots(graph, layerIndex, layer);
2915 RegisterOutputSlots(graph, layerIndex, layer);
2916}
2917
Nattapat Chaimanowong1f886302019-04-05 13:37:19 +01002918void Deserializer::ParseMerge(GraphPtr graph, unsigned int layerIndex)
2919{
2920 CHECK_LAYERS(graph, 0, layerIndex);
2921
2922 Deserializer::TensorRawPtrVector inputs = GetInputs(graph, layerIndex);
2923 CHECK_VALID_SIZE(inputs.size(), 2);
2924
2925 Deserializer::TensorRawPtrVector outputs = GetOutputs(graph, layerIndex);
2926 CHECK_VALID_SIZE(outputs.size(), 1);
2927
2928 const std::string layerName = GetLayerName(graph, layerIndex);
2929 IConnectableLayer* layer = m_Network->AddMergeLayer(layerName.c_str());
2930
2931 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2932 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2933
2934 RegisterInputSlots(graph, layerIndex, layer);
2935 RegisterOutputSlots(graph, layerIndex, layer);
2936}
2937
Sadik Armaganeff363d2019-04-05 15:25:46 +01002938void Deserializer::ParseSwitch(GraphPtr graph, unsigned int layerIndex)
2939{
2940 CHECK_LAYERS(graph, 0, layerIndex);
2941 auto inputs = GetInputs(graph, layerIndex);
2942 CHECK_LOCATION();
2943 CHECK_VALID_SIZE(inputs.size(), 2);
2944
2945 auto outputs = GetOutputs(graph, layerIndex);
2946 CHECK_VALID_SIZE(outputs.size(), 2);
2947
2948 auto layerName = GetLayerName(graph, layerIndex);
2949 IConnectableLayer* layer = m_Network->AddSwitchLayer(layerName.c_str());
2950
2951 armnn::TensorInfo output0TensorInfo = ToTensorInfo(outputs[0]);
2952 layer->GetOutputSlot(0).SetTensorInfo(output0TensorInfo);
2953
2954 armnn::TensorInfo output1TensorInfo = ToTensorInfo(outputs[1]);
2955 layer->GetOutputSlot(1).SetTensorInfo(output1TensorInfo);
2956
2957 RegisterInputSlots(graph, layerIndex, layer);
2958 RegisterOutputSlots(graph, layerIndex, layer);
2959}
2960
Ellen Norris-Thompson51982472019-06-19 11:46:21 +01002961void Deserializer::ParsePrelu(GraphPtr graph, unsigned int layerIndex)
2962{
2963 CHECK_LAYERS(graph, 0, layerIndex);
2964 auto inputs = GetInputs(graph, layerIndex);
2965 CHECK_LOCATION();
2966 CHECK_VALID_SIZE(inputs.size(), 2);
2967
2968 auto outputs = GetOutputs(graph, layerIndex);
2969 CHECK_VALID_SIZE(outputs.size(), 1);
2970
2971 auto layerName = GetLayerName(graph, layerIndex);
2972 IConnectableLayer* layer = m_Network->AddPreluLayer(layerName.c_str());
2973
2974 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
2975 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
2976
2977 RegisterInputSlots(graph, layerIndex, layer);
2978 RegisterOutputSlots(graph, layerIndex, layer);
2979}
2980
Mike Kellyc9ea45a2020-02-28 18:11:58 +00002981void Deserializer::ParseTranspose(GraphPtr graph, unsigned int layerIndex)
2982{
2983 CHECK_LAYERS(graph, 0, layerIndex);
2984
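// The serialized dimension mappings become the PermutationVector of the TransposeDescriptor.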
2985 auto dimsMapping = graph->layers()->Get(layerIndex)->layer_as_TransposeLayer()->descriptor()->dimMappings();
2986
2987 auto inputs = GetInputs(graph, layerIndex);
2988 CHECK_VALID_SIZE(inputs.size(), 1);
2989
2990 auto outputs = GetOutputs(graph, layerIndex);
2991 CHECK_VALID_SIZE(outputs.size(), 1);
2992 auto outputInfo = ToTensorInfo(outputs[0]);
2993
2994 auto layerName = GetLayerName(graph, layerIndex);
2995 const armnn::TransposeDescriptor descriptor(armnn::PermutationVector(dimsMapping->data(), dimsMapping->Length()));
2996
2997 IConnectableLayer* layer = m_Network->AddTransposeLayer(descriptor, layerName.c_str());
2998 layer->GetOutputSlot(0).SetTensorInfo(outputInfo);
2999
3000 RegisterInputSlots(graph, layerIndex, layer);
3001 RegisterOutputSlots(graph, layerIndex, layer);
3002}
3003
Aron Virginas-Tarcb549302019-06-21 13:53:38 +01003004void Deserializer::ParseTransposeConvolution2d(GraphPtr graph, unsigned int layerIndex)
3005{
3006 CHECK_LAYERS(graph, 0, layerIndex);
3007
3008 auto inputs = GetInputs(graph, layerIndex);
3009 CHECK_VALID_SIZE(inputs.size(), 1);
3010
3011 auto outputs = GetOutputs(graph, layerIndex);
3012 CHECK_VALID_SIZE(outputs.size(), 1);
3013
3014 auto serializerLayer = graph->layers()->Get(layerIndex)->layer_as_TransposeConvolution2dLayer();
3015 auto layerName = GetLayerName(graph, layerIndex);
3016 auto serializerDescriptor = serializerLayer->descriptor();
3017
3018 armnn::TransposeConvolution2dDescriptor descriptor;
3019 descriptor.m_PadLeft = serializerDescriptor->padLeft();
3020 descriptor.m_PadRight = serializerDescriptor->padRight();
3021 descriptor.m_PadTop = serializerDescriptor->padTop();
3022 descriptor.m_PadBottom = serializerDescriptor->padBottom();
3023 descriptor.m_StrideX = serializerDescriptor->strideX();
3024 descriptor.m_StrideY = serializerDescriptor->strideY();
3025 descriptor.m_BiasEnabled = serializerDescriptor->biasEnabled();
3026 descriptor.m_DataLayout = ToDataLayout(serializerDescriptor->dataLayout());
3027
3028 // weights & biases
3029 armnn::ConstTensor weights = ToConstTensor(serializerLayer->weights());
3030 armnn::Optional<armnn::ConstTensor> optionalBiases;
3031 if (descriptor.m_BiasEnabled)
3032 {
3033 armnn::ConstTensor biases = ToConstTensor(serializerLayer->biases());
3034 optionalBiases = armnn::MakeOptional<armnn::ConstTensor>(biases);
3035 }
3036
3037 IConnectableLayer* layer = m_Network->AddTransposeConvolution2dLayer(descriptor,
3038 weights,
3039 optionalBiases,
3040 layerName.c_str());
3041
3042 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3043 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3044
3045 RegisterInputSlots(graph, layerIndex, layer);
3046 RegisterOutputSlots(graph, layerIndex, layer);
3047}
3048
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003049void Deserializer::ParseStack(GraphPtr graph, unsigned int layerIndex)
3050{
3051 CHECK_LAYERS(graph, 0, layerIndex);
3052 auto inputs = GetInputs(graph, layerIndex);
3053
3054 auto outputs = GetOutputs(graph, layerIndex);
3055 CHECK_VALID_SIZE(outputs.size(), 1);
3056
3057 auto flatBufferDescriptor = graph->layers()->Get(layerIndex)->layer_as_StackLayer()->descriptor();
3058 unsigned int axis = flatBufferDescriptor->axis();
3059 unsigned int numInputs = flatBufferDescriptor->numInputs();
3060 CHECK_VALID_SIZE(inputs.size(), numInputs);
3061
3062 auto flatBufferInputShape = flatBufferDescriptor->inputShape();
3063 std::vector<uint32_t> vectorInputShape(flatBufferInputShape->begin(),
3064 flatBufferInputShape->begin() + flatBufferInputShape->size());
3065
3066 TensorShape inputShape(static_cast<unsigned int>(vectorInputShape.size()), vectorInputShape.data());
3067 armnn::StackDescriptor descriptor(axis, numInputs, inputShape);
3068
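// Every input tensor shape must match the input shape given in the descriptor.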
3069 for (unsigned int i=0; i<inputs.size(); ++i)
3070 {
Matthew Bentham75ae2b02019-09-19 12:04:13 +01003071 armnn::TensorShape currentInputShape = ToTensorInfo(inputs[i]).GetShape();
Matthew Jacksonb5433ee2019-07-11 15:54:20 +01003072 if (descriptor.m_InputShape != currentInputShape)
3073 {
3074 std::stringstream ss;
3075 ss << "Shape of input "
3076 << i
3077 << " "
3078 << currentInputShape
3079 << " does not equal defined input shape "
3080 << descriptor.m_InputShape
3081 << ": "
3082 << CHECK_LOCATION().AsString();
3083 throw ParseException(ss.str());
3084 }
3085 }
3086
3087 auto layerName = GetLayerName(graph, layerIndex);
3088 IConnectableLayer* layer = m_Network->AddStackLayer(descriptor, layerName.c_str());
3089
3090 armnn::TensorInfo outputTensorInfo = ToTensorInfo(outputs[0]);
3091 layer->GetOutputSlot(0).SetTensorInfo(outputTensorInfo);
3092
3093 RegisterInputSlots(graph, layerIndex, layer);
3094 RegisterOutputSlots(graph, layerIndex, layer);
3095}
3096
Aron Virginas-Tar85121a22019-10-23 10:41:35 +01003097void Deserializer::ParseStandIn(GraphPtr graph, unsigned int layerIndex)
3098{
3099 CHECK_LAYERS(graph, 0, layerIndex);
3100
3101 auto inputs = GetInputs(graph, layerIndex);
3102 auto outputs = GetOutputs(graph, layerIndex);
3103
3104 auto fbLayer = graph->layers()->Get(layerIndex)->layer_as_StandInLayer();
3105 auto fbDescriptor = fbLayer->descriptor();
3106
3107 armnn::StandInDescriptor descriptor;
3108 descriptor.m_NumInputs = fbDescriptor->numInputs();
3109 descriptor.m_NumOutputs = fbDescriptor->numOutputs();
3110
3111 CHECK_VALID_SIZE(inputs.size(), descriptor.m_NumInputs);
3112 CHECK_VALID_SIZE(outputs.size(), descriptor.m_NumOutputs);
3113
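// StandIn layers are placeholders, so only the declared numbers of inputs and outputs are
// validated and the output tensor infos set.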
3114 const std::string layerName = GetLayerName(graph, layerIndex);
3115 armnn::IConnectableLayer* layer = m_Network->AddStandInLayer(descriptor, layerName.c_str());
3116
3117 for (unsigned int i = 0u; i < descriptor.m_NumOutputs; ++i)
3118 {
3119 armnn::TensorInfo outputInfo = ToTensorInfo(outputs[i]);
3120 layer->GetOutputSlot(i).SetTensorInfo(outputInfo);
3121 }
3122
3123 RegisterInputSlots(graph, layerIndex, layer);
3124 RegisterOutputSlots(graph, layerIndex, layer);
3125}
3126
Derek Lamberti0028d1b2019-02-20 13:57:42 +00003127} // namespace armnnDeserializer